signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class RedisInner { /** * Gets any upgrade notifications for a Redis cache . * @ param resourceGroupName The name of the resource group . * @ param name The name of the Redis cache . * @ param history how many minutes in past to look for upgrade notifications * @ throws IllegalArgumentException thrown if parameters fail the validation * @ throws CloudException thrown if the request is rejected by server * @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent * @ return the NotificationListResponseInner object if successful . */ public NotificationListResponseInner listUpgradeNotifications ( String resourceGroupName , String name , double history ) { } }
return listUpgradeNotificationsWithServiceResponseAsync ( resourceGroupName , name , history ) . toBlocking ( ) . single ( ) . body ( ) ;
public class Processor { /** * Transforms an input file into HTML . * @ param file * The File to process . * @ param safeMode * Set to < code > true < / code > to escape unsafe HTML tags . * @ return The processed String . * @ throws IOException * if an IO error occurs * @ see Configuration # DEFAULT */ public final static String process ( final File file , final boolean safeMode ) throws IOException { } }
return process ( file , Configuration . builder ( ) . setSafeMode ( safeMode ) . build ( ) ) ;
public class QrCodeCodeWordLocations { /** * Blocks out the location of features in the image . Needed for codeworld location extraction * @ param numModules * @ param alignment * @ param hasVersion */ private void computeFeatureMask ( int numModules , int [ ] alignment , boolean hasVersion ) { } }
// mark alignment patterns + format info markSquare ( 0 , 0 , 9 ) ; markRectangle ( numModules - 8 , 0 , 9 , 8 ) ; markRectangle ( 0 , numModules - 8 , 8 , 9 ) ; // timing pattern markRectangle ( 8 , 6 , 1 , numModules - 8 - 8 ) ; markRectangle ( 6 , 8 , numModules - 8 - 8 , 1 ) ; // version info if ( hasVersion ) { markRectangle ( numModules - 11 , 0 , 6 , 3 ) ; markRectangle ( 0 , numModules - 11 , 3 , 6 ) ; } // alignment patterns for ( int i = 0 ; i < alignment . length ; i ++ ) { int row = alignment [ i ] ; for ( int j = 0 ; j < alignment . length ; j ++ ) { if ( i == 0 & j == 0 ) continue ; if ( i == alignment . length - 1 & j == 0 ) continue ; if ( i == 0 & j == alignment . length - 1 ) continue ; int col = alignment [ j ] ; markSquare ( row - 2 , col - 2 , 5 ) ; } }
public class UriBuilder { /** * Sets the path part of the URI . * @ param str the path part of the URI * @ return a reference to the builder */ public UriBuilder setPath ( final String str ) { } }
final String [ ] parts ; if ( str . startsWith ( "/" ) ) { parts = new String [ ] { str } ; } else { final String base = getPath ( ) . toString ( ) ; parts = new String [ ] { base , base . endsWith ( "/" ) ? "" : "/" , str } ; } return setPath ( new GStringImpl ( EMPTY , parts ) ) ;
public class Anima { /** * Set the query to fix columns with lambda * @ param functions column lambdas * @ return Select */ @ SafeVarargs public static < T extends Model , R > Select select ( TypeFunction < T , R > ... functions ) { } }
return select ( Arrays . stream ( functions ) . map ( AnimaUtils :: getLambdaColumnName ) . collect ( joining ( ", " ) ) ) ;
public class MariaDbConnection { /** * Returns the value of the client info property specified by name . This method may return null * if the specified client info property has not been set and does not have a default value . This * method will also return null if the specified client info property name is not supported by the * driver . Applications may use the < code > DatabaseMetaData . getClientInfoProperties < / code > method * to determine the client info properties supported by the driver . * @ param name The name of the client info property to retrieve * @ return The value of the client info property specified * @ throws SQLException if the database server returns an error when fetching the client info * value from the database or this method is called on a closed connection * @ see DatabaseMetaData # getClientInfoProperties * @ since 1.6 */ public String getClientInfo ( final String name ) throws SQLException { } }
checkConnection ( ) ; if ( ! "ApplicationName" . equals ( name ) && ! "ClientUser" . equals ( name ) && ! "ClientHostname" . equals ( name ) ) { throw new SQLException ( "name must be \"ApplicationName\", \"ClientUser\" or \"ClientHostname\", but was \"" + name + "\"" ) ; } try ( Statement statement = createStatement ( ) ) { try ( ResultSet rs = statement . executeQuery ( "SELECT @" + name ) ) { if ( rs . next ( ) ) { return rs . getString ( 1 ) ; } } } return null ;
public class JobTargetExecutionsInner { /** * Lists the target executions of a job step execution . * @ param resourceGroupName The name of the resource group that contains the resource . You can obtain this value from the Azure Resource Manager API or the portal . * @ param serverName The name of the server . * @ param jobAgentName The name of the job agent . * @ param jobName The name of the job to get . * @ param jobExecutionId The id of the job execution * @ param stepName The name of the step . * @ param createTimeMin If specified , only job executions created at or after the specified time are included . * @ param createTimeMax If specified , only job executions created before the specified time are included . * @ param endTimeMin If specified , only job executions completed at or after the specified time are included . * @ param endTimeMax If specified , only job executions completed before the specified time are included . * @ param isActive If specified , only active or only completed job executions are included . * @ param skip The number of elements in the collection to skip . * @ param top The number of elements to return from the collection . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ throws CloudException thrown if the request is rejected by server * @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent * @ return the PagedList & lt ; JobExecutionInner & gt ; object if successful . */ public PagedList < JobExecutionInner > listByStep ( final String resourceGroupName , final String serverName , final String jobAgentName , final String jobName , final UUID jobExecutionId , final String stepName , final DateTime createTimeMin , final DateTime createTimeMax , final DateTime endTimeMin , final DateTime endTimeMax , final Boolean isActive , final Integer skip , final Integer top ) { } }
ServiceResponse < Page < JobExecutionInner > > response = listByStepSinglePageAsync ( resourceGroupName , serverName , jobAgentName , jobName , jobExecutionId , stepName , createTimeMin , createTimeMax , endTimeMin , endTimeMax , isActive , skip , top ) . toBlocking ( ) . single ( ) ; return new PagedList < JobExecutionInner > ( response . body ( ) ) { @ Override public Page < JobExecutionInner > nextPage ( String nextPageLink ) { return listByStepNextSinglePageAsync ( nextPageLink ) . toBlocking ( ) . single ( ) . body ( ) ; } } ;
public class Caster { /** * casts a Object to a Node List * @ param o Object to Cast * @ param defaultValue * @ return NodeList from Object */ public static NodeList toNodeList ( Object o , NodeList defaultValue ) { } }
// print . ln ( " nodeList : " + o ) ; if ( o instanceof NodeList ) { return ( NodeList ) o ; } else if ( o instanceof ObjectWrap ) { return toNodeList ( ( ( ObjectWrap ) o ) . getEmbededObject ( defaultValue ) , defaultValue ) ; } return defaultValue ;
public class ReflectionUtils { /** * Obtain the wrapper type for the given primitive . * @ param primitiveType The primitive type * @ return The wrapper type */ public static Class getWrapperType ( Class primitiveType ) { } }
if ( primitiveType . isPrimitive ( ) ) { return PRIMITIVES_TO_WRAPPERS . get ( primitiveType ) ; } return primitiveType ;
public class CQLTranslator { /** * Builds the element collection value . * @ param field * the field * @ param record * the record * @ param metaModel * the meta model * @ param attribute * the attribute * @ return the string builder */ private StringBuilder buildElementCollectionValue ( Field field , Object record , MetamodelImpl metaModel , Attribute attribute ) { } }
StringBuilder elementCollectionValueBuilder = new StringBuilder ( ) ; EmbeddableType embeddableKey = metaModel . embeddable ( ( ( AbstractAttribute ) attribute ) . getBindableJavaType ( ) ) ; ( ( AbstractAttribute ) attribute ) . getJavaMember ( ) ; Object value = PropertyAccessorHelper . getObject ( record , field ) ; boolean isPresent = false ; if ( Collection . class . isAssignableFrom ( field . getType ( ) ) ) { if ( value instanceof Collection ) { Collection collection = ( ( Collection ) value ) ; isPresent = true ; if ( List . class . isAssignableFrom ( field . getType ( ) ) ) { elementCollectionValueBuilder . append ( Constants . OPEN_SQUARE_BRACKET ) ; } if ( Set . class . isAssignableFrom ( field . getType ( ) ) ) { elementCollectionValueBuilder . append ( Constants . OPEN_CURLY_BRACKET ) ; } for ( Object o : collection ) { // Allowing null values . // build embedded value if ( o != null ) { StringBuilder embeddedValueBuilder = new StringBuilder ( Constants . OPEN_CURLY_BRACKET ) ; for ( Field embeddableColumn : ( ( AbstractAttribute ) attribute ) . getBindableJavaType ( ) . getDeclaredFields ( ) ) { if ( ! ReflectUtils . isTransientOrStatic ( embeddableColumn ) ) { AbstractAttribute subAttribute = ( AbstractAttribute ) embeddableKey . getAttribute ( embeddableColumn . getName ( ) ) ; if ( metaModel . isEmbeddable ( subAttribute . getBindableJavaType ( ) ) ) { // construct map ; recursive // send attribute if ( embeddableColumn . getType ( ) . isAnnotationPresent ( ElementCollection . class ) ) { // build element collection value StringBuilder elementCollectionValue = buildElementCollectionValue ( embeddableColumn , o , metaModel , ( Attribute ) subAttribute ) ; appendColumnName ( embeddedValueBuilder , ( ( AbstractAttribute ) ( embeddableKey . getAttribute ( embeddableColumn . getName ( ) ) ) ) . getJPAColumnName ( ) ) ; embeddedValueBuilder . append ( Constants . COLON ) ; embeddedValueBuilder . 
append ( elementCollectionValue ) ; } else { buildEmbeddedValue ( o , metaModel , embeddedValueBuilder , ( SingularAttribute ) subAttribute ) ; } } else { // append key value appendColumnName ( embeddedValueBuilder , ( ( AbstractAttribute ) ( embeddableKey . getAttribute ( embeddableColumn . getName ( ) ) ) ) . getJPAColumnName ( ) ) ; embeddedValueBuilder . append ( Constants . COLON ) ; appendColumnValue ( embeddedValueBuilder , o , embeddableColumn ) ; } embeddedValueBuilder . append ( Constants . COMMA ) ; } } // strip last char and append ' } ' embeddedValueBuilder . deleteCharAt ( embeddedValueBuilder . length ( ) - 1 ) ; embeddedValueBuilder . append ( Constants . CLOSE_CURLY_BRACKET ) ; // add to columnbuilder and builder elementCollectionValueBuilder . append ( embeddedValueBuilder ) ; // end if } elementCollectionValueBuilder . append ( Constants . COMMA ) ; } if ( ! collection . isEmpty ( ) ) { elementCollectionValueBuilder . deleteCharAt ( elementCollectionValueBuilder . length ( ) - 1 ) ; } if ( List . class . isAssignableFrom ( field . getType ( ) ) ) { elementCollectionValueBuilder . append ( Constants . CLOSE_SQUARE_BRACKET ) ; } if ( Set . class . isAssignableFrom ( field . getType ( ) ) ) { elementCollectionValueBuilder . append ( Constants . CLOSE_CURLY_BRACKET ) ; } return elementCollectionValueBuilder ; } return null ; } else if ( Map . class . isAssignableFrom ( field . getType ( ) ) ) { if ( value instanceof Map ) { Map map = ( ( Map ) value ) ; isPresent = true ; elementCollectionValueBuilder . append ( Constants . OPEN_CURLY_BRACKET ) ; for ( Object mapKey : map . keySet ( ) ) { Object mapValue = map . get ( mapKey ) ; // Allowing null keys . // key is basic type . . no support for embeddable keys appendValue ( elementCollectionValueBuilder , mapKey != null ? mapKey . getClass ( ) : null , mapKey , false ) ; elementCollectionValueBuilder . append ( Constants . COLON ) ; // Allowing null values . 
if ( mapValue != null ) { StringBuilder embeddedValueBuilder = new StringBuilder ( Constants . OPEN_CURLY_BRACKET ) ; for ( Field embeddableColumn : ( ( AbstractAttribute ) attribute ) . getBindableJavaType ( ) . getDeclaredFields ( ) ) { if ( ! ReflectUtils . isTransientOrStatic ( embeddableColumn ) ) { AbstractAttribute subAttribute = ( AbstractAttribute ) embeddableKey . getAttribute ( embeddableColumn . getName ( ) ) ; if ( metaModel . isEmbeddable ( subAttribute . getBindableJavaType ( ) ) ) { // construct map ; recursive // send attribute if ( embeddableColumn . getType ( ) . isAnnotationPresent ( ElementCollection . class ) ) { // build element collection value StringBuilder elementCollectionValue = buildElementCollectionValue ( embeddableColumn , mapValue , metaModel , ( Attribute ) subAttribute ) ; appendColumnName ( embeddedValueBuilder , ( ( AbstractAttribute ) ( embeddableKey . getAttribute ( embeddableColumn . getName ( ) ) ) ) . getJPAColumnName ( ) ) ; embeddedValueBuilder . append ( Constants . COLON ) ; embeddedValueBuilder . append ( elementCollectionValue ) ; } else { buildEmbeddedValue ( mapValue , metaModel , embeddedValueBuilder , ( SingularAttribute ) subAttribute ) ; } } else { // append key value appendColumnName ( embeddedValueBuilder , ( ( AbstractAttribute ) ( embeddableKey . getAttribute ( embeddableColumn . getName ( ) ) ) ) . getJPAColumnName ( ) ) ; embeddedValueBuilder . append ( Constants . COLON ) ; appendColumnValue ( embeddedValueBuilder , mapValue , embeddableColumn ) ; } embeddedValueBuilder . append ( Constants . COMMA ) ; } } // strip last char and append ' } ' embeddedValueBuilder . deleteCharAt ( embeddedValueBuilder . length ( ) - 1 ) ; embeddedValueBuilder . append ( Constants . CLOSE_CURLY_BRACKET ) ; // add to columnbuilder and builder elementCollectionValueBuilder . append ( embeddedValueBuilder ) ; // end if } elementCollectionValueBuilder . append ( Constants . COMMA ) ; } if ( ! map . 
isEmpty ( ) ) { elementCollectionValueBuilder . deleteCharAt ( elementCollectionValueBuilder . length ( ) - 1 ) ; } elementCollectionValueBuilder . append ( Constants . CLOSE_CURLY_BRACKET ) ; return elementCollectionValueBuilder ; } return null ; } return null ;
public class MultiPolygon { /** * Return the closest bounding box around the geometry . */ public Bbox getBounds ( ) { } }
Bbox bounds = null ; if ( ! isEmpty ( ) ) { for ( Polygon polygon : polygons ) { if ( bounds == null ) { bounds = polygon . getBounds ( ) ; } else { bounds = bounds . union ( polygon . getBounds ( ) ) ; } } } return bounds ;
public class DateTimeFormatterBuilder {
    /**
     * Causes the next added printer/parser to pad to a fixed width.
     * <p>
     * This padding is intended for padding other than zero-padding; zero-padding
     * should be achieved using the appendValue methods. During formatting, the
     * decorated element is output and then padded to the specified width; an
     * exception is thrown during printing if the pad width is exceeded. During
     * parsing, the padding and decorated element are parsed greedily — if parsing
     * is lenient the pad width is treated as a maximum, and if parsing is case
     * insensitive the pad character is matched ignoring case.
     *
     * @param padWidth the pad width, 1 or greater
     * @param padChar the pad character
     * @return this, for chaining, not null
     * @throws IllegalArgumentException if pad width is too small
     */
    public DateTimeFormatterBuilder padNext(int padWidth, char padChar) {
        if (padWidth < 1) {
            throw new IllegalArgumentException("The pad width must be at least one but was " + padWidth);
        }
        // Record the pending pad; it is consumed when the next printer/parser is appended.
        active.padNextWidth = padWidth;
        active.padNextChar = padChar;
        // Reset the value-parser index — presumably disables the adjacent-value
        // parsing optimisation across the padded field; TODO confirm against appendValue.
        active.valueParserIndex = -1;
        return this;
    }
}
public class TxUtils { /** * Is the transaction active * @ param tx The transaction * @ return True if active ; otherwise false */ public static boolean isActive ( Transaction tx ) { } }
if ( tx == null ) return false ; try { int status = tx . getStatus ( ) ; return status == Status . STATUS_ACTIVE ; } catch ( SystemException error ) { throw new RuntimeException ( "Error during isActive()" , error ) ; }
public class Response {
    /**
     * Attempts to set the Content-Type of the Response based on Request headers.
     * <p>
     * The Accept header is preferred for negotiation, but the Content-Type header
     * may also be used if an agreeable engine can not be determined. If no
     * Content-Type can be negotiated the response is left unmodified, which allows
     * a default set via methods such as {@code xml()} or {@code json()} to stand.
     * For example, {@code response.xml().contentType(request).send(myObject);} sets
     * the default Content-Type to {@code application/xml} and then attempts to
     * negotiate the client's preferred type; on failure the default is used.
     *
     * @param request the incoming request whose headers drive negotiation
     * @return the response
     */
    public Response contentType(Request request) {
        String acceptType = request.getAcceptType();
        // A wildcard Accept means the client takes anything; keep the current default.
        if ("*/*".equals(acceptType)) {
            return this;
        }
        // Prefer the Accept header for negotiation.
        ContentTypeEngine engine = contentTypeEngines.getContentTypeEngine(acceptType);
        if (engine != null) {
            log.debug("Negotiated '{}' from request Accept header", engine.getContentType());
        } else if (!StringUtils.isNullOrEmpty(request.getContentType())) {
            // Fall back to matching the request's own Content-Type header.
            engine = contentTypeEngines.getContentTypeEngine(request.getContentType());
            if (engine != null) {
                log.debug("Negotiated '{}' from request Content-Type header", engine.getContentType());
            }
        }
        if (engine == null) {
            // Negotiation failed: leave the response's Content-Type as-is.
            log.debug("Failed to negotiate a content type for Accept='{}' and Content-Type='{}'",
                    request.getAcceptType(), request.getContentType());
            return this;
        }
        return contentType(engine.getContentType());
    }
}
public class RealVoltDB { /** * See comment on { @ link VoltDBInterface # scheduleWork ( Runnable , long , long , TimeUnit ) } vs * { @ link VoltDBInterface # schedulePriorityWork ( Runnable , long , long , TimeUnit ) } */ @ Override public ScheduledFuture < ? > scheduleWork ( Runnable work , long initialDelay , long delay , TimeUnit unit ) { } }
if ( delay > 0 ) { return m_periodicWorkThread . scheduleWithFixedDelay ( work , initialDelay , delay , unit ) ; } else { return m_periodicWorkThread . schedule ( work , initialDelay , unit ) ; }
public class SamlRegisteredServiceCachedMetadataEndpoint { /** * Gets cached metadata object . * @ param serviceId the service id * @ param entityId the entity id * @ return the cached metadata object */ @ ReadOperation public Map < String , Object > getCachedMetadataObject ( final String serviceId , @ Nullable final String entityId ) { } }
try { val registeredService = findRegisteredService ( serviceId ) ; val issuer = StringUtils . defaultIfBlank ( entityId , registeredService . getServiceId ( ) ) ; val criteriaSet = new CriteriaSet ( ) ; criteriaSet . add ( new EntityIdCriterion ( issuer ) ) ; criteriaSet . add ( new EntityRoleCriterion ( SPSSODescriptor . DEFAULT_ELEMENT_NAME ) ) ; val metadataResolver = cachingMetadataResolver . resolve ( registeredService , criteriaSet ) ; val iteration = metadataResolver . resolve ( criteriaSet ) . spliterator ( ) ; return StreamSupport . stream ( iteration , false ) . map ( entity -> Pair . of ( entity . getEntityID ( ) , SamlUtils . transformSamlObject ( openSamlConfigBean , entity ) . toString ( ) ) ) . collect ( Collectors . toMap ( Pair :: getLeft , Pair :: getRight ) ) ; } catch ( final Exception e ) { LOGGER . error ( e . getMessage ( ) , e ) ; return CollectionUtils . wrap ( "error" , e . getMessage ( ) ) ; }
public class TeamServiceImpl {
    /**
     * Retrieves all teams, populating each team's collector reference.
     *
     * @return all teams with their collectors attached
     */
    @Override
    public Iterable<Team> getAllTeams() {
        Iterable<Team> teams = teamRepository.findAll();
        // Attach the owning collector to every team.
        // NOTE(review): this performs one collector lookup per team (N+1 queries);
        // consider a batched fetch if the team count grows.
        for (Team team : teams) {
            team.setCollector(collectorRepository.findOne(team.getCollectorId()));
        }
        return teams;
    }
}
public class JavacHandlerUtil { /** * When generating a setter , the setter either returns void ( beanspec ) or Self ( fluent ) . * This method scans for the { @ code Accessors } annotation to figure that out . */ public static boolean shouldReturnThis ( JavacNode field ) { } }
if ( ( ( ( JCVariableDecl ) field . get ( ) ) . mods . flags & Flags . STATIC ) != 0 ) return false ; AnnotationValues < Accessors > accessors = JavacHandlerUtil . getAccessorsForField ( field ) ; return HandlerUtil . shouldReturnThis0 ( accessors , field . getAst ( ) ) ;
public class SocketRWChannelSelector {
    /**
     * Processes every key in the selector's selected-key set: for each pending
     * read/write request, asks the virtual connection for permission to finish
     * the operation (when input-state tracking applies) and, if granted,
     * dispatches the request to the work queue manager. Keys whose dispatch
     * succeeds, or whose connection is closing, have their interest ops cleared.
     *
     * @return always false — no rebuild of the selector is requested
     * @see com.ibm.ws.tcpchannel.internal.ChannelSelector#performRequest()
     */
    @Override
    protected boolean performRequest() {
        VirtualConnection vci = null;
        boolean completeOperation = true;
        TCPBaseRequestContext req = null;
        SelectionKey selectedKey = null;
        // We were woken up because we have work to do: walk the selected keys.
        Set<SelectionKey> keySet = selector.selectedKeys();
        Iterator<SelectionKey> selectedIterator = keySet.iterator();
        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
            Tr.debug(this, tc, "performRequest - processing " + keySet.size() + " items");
        }
        while (selectedIterator.hasNext()) {
            selectedKey = selectedIterator.next();
            // Safely remove from the set while looping.
            selectedIterator.remove();
            req = (TCPBaseRequestContext) selectedKey.attachment();
            vci = req.getTCPConnLink().getVirtualConnection();
            completeOperation = true;
            // Looking at the vci is messy here, but it saves synchronization logic:
            // vci can be null if the connection is closing before we can ask for
            // permission to finish the read.
            if (vci == null) {
                completeOperation = false;
            } else {
                // Only check permission when the operation was an async read/write request.
                if (vci.isInputStateTrackingOperational() && !req.blockedThread) {
                    completeOperation = false;
                    if (req.isRequestTypeRead()) {
                        if (vci.requestPermissionToFinishRead()) {
                            completeOperation = true;
                        }
                    } else {
                        if (vci.requestPermissionToFinishWrite()) {
                            completeOperation = true;
                        }
                    }
                }
            }
            if (completeOperation) {
                // Try to dispatch the request. If it fails, leave the key alone so it
                // is selected and retried on the next pass through the selector.
                if (wqm.dispatch(req, null)) {
                    // Dispatch worked: empty the key's interest set so it is not
                    // re-selected until the user asks for more work.
                    try {
                        selectedKey.interestOps(0);
                    } catch (CancelledKeyException cke) {
                        // Ignore: we already have the key and the data.
                    }
                }
            } else {
                // Permission denied: assume close has been processed on this VC,
                // therefore disregard this I/O.
                if (TraceComponent.isAnyTracingEnabled() && tc.isEventEnabled()) {
                    Tr.event(this, tc, "IO cancelled on closed key " + selectedKey);
                }
                try {
                    selectedKey.interestOps(0);
                } catch (CancelledKeyException cke) {
                    // Ignore: the VC is closed/closing.
                }
            }
        }
        return false;
    }
}
public class N { /** * Returns an immutable empty < code > ListIterator < / code > if the specified ListIterator is < code > null < / code > , otherwise itself is returned . * @ param iter * @ return */ public static < T > ListIterator < T > nullToEmpty ( final ListIterator < T > iter ) { } }
return iter == null ? N . < T > emptyListIterator ( ) : iter ;
public class BatchDescribeSimulationJobResult { /** * A list of simulation jobs . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setJobs ( java . util . Collection ) } or { @ link # withJobs ( java . util . Collection ) } if you want to override the * existing values . * @ param jobs * A list of simulation jobs . * @ return Returns a reference to this object so that method calls can be chained together . */ public BatchDescribeSimulationJobResult withJobs ( SimulationJob ... jobs ) { } }
if ( this . jobs == null ) { setJobs ( new java . util . ArrayList < SimulationJob > ( jobs . length ) ) ; } for ( SimulationJob ele : jobs ) { this . jobs . add ( ele ) ; } return this ;
public class DecimalFormat {
    /**
     * Returns true if rounding-up must be done on {@code scaledFractionalPartAsInt},
     * false otherwise.
     * <p>
     * Utility taking the correct half-even rounding decision on the fractional value
     * at the scaled decimal point (2 digits for the currency case, 3 for the decimal
     * case) when the approximated fractional part after the scaled decimal point is
     * exactly 0.5d. The decision is made from exact floating-point calculations on
     * {@code fractionalPart}, using Dekker's FastTwoSum and a modified Sum2S cascaded
     * summation (Ogita et al.) — terms are ordered by magnitude before summing, so
     * FastTwoSum is error-free and the more expensive TwoProduct/TwoSum are avoided.
     * Supposed to be called only by the private {@code fastDoubleFormat} method.
     *
     * @param fractionalPart the fractional value on which we take the rounding decision
     * @param scaledFractionalPartAsInt the integral part of the scaled fractional value
     * @return the decision that must be taken regarding half-even rounding
     */
    private boolean exactRoundUp(double fractionalPart, int scaledFractionalPartAsInt) {
        /* Precondition (ensured by fastDoubleFormat):
         *   scaledFractionalPartAsInt ==
         *       (int) (fractionalPart * fastPathData.fractionalScaleFactor)
         *
         * We compute the exact roundoff made by fastDoubleFormat on the scaled
         * fractional part, and take the rounding decision from that roundoff.
         *
         * ---- TwoProduct(fractionalPart, scale factor (1000.0d or 100.0d)) ----
         * Optimized exact product via Ogita's Sum2S cascaded summation with
         * FastTwoSum: the scale factor is decomposed into powers of two
         * (1000 = 1024 - 16 - 8; 100 = 128 - 32 + 4), each multiplication by a
         * power of two being an exact shift. Summing from smallest to greatest
         * magnitude lets FastTwoSum be used without additional error, saving
         * 4 multiplications and 1 addition versus a traditional TwoProduct
         * (17 flops).
         */
        double approxMax;    // Will always be positive.
        double approxMedium; // Will always be negative.
        double approxMin;

        double fastTwoSumApproximation = 0.0d;
        double fastTwoSumRoundOff = 0.0d;
        double bVirtual = 0.0d;

        if (isCurrencyFormat) {
            // Scale is 100 = 128 - 32 + 4. Multiply by 2**n is a shift: no roundoff.
            approxMax = fractionalPart * 128.00d;
            approxMedium = -(fractionalPart * 32.00d);
            approxMin = fractionalPart * 4.00d;
        } else {
            // Scale is 1000 = 1024 - 16 - 8. Multiply by 2**n is a shift: no roundoff.
            approxMax = fractionalPart * 1024.00d;
            approxMedium = -(fractionalPart * 16.00d);
            approxMin = -(fractionalPart * 8.00d);
        }

        // Shewchuk/Dekker's FastTwoSum(approxMedium, approxMin).
        assert (-approxMedium >= Math.abs(approxMin));
        fastTwoSumApproximation = approxMedium + approxMin;
        bVirtual = fastTwoSumApproximation - approxMedium;
        fastTwoSumRoundOff = approxMin - bVirtual;
        double approxS1 = fastTwoSumApproximation;
        double roundoffS1 = fastTwoSumRoundOff;

        // Shewchuk/Dekker's FastTwoSum(approxMax, approxS1).
        assert (approxMax >= Math.abs(approxS1));
        fastTwoSumApproximation = approxMax + approxS1;
        bVirtual = fastTwoSumApproximation - approxMax;
        fastTwoSumRoundOff = approxS1 - bVirtual;
        double roundoff1000 = fastTwoSumRoundOff;
        double approx1000 = fastTwoSumApproximation;
        double roundoffTotal = roundoffS1 + roundoff1000;

        // Shewchuk/Dekker's FastTwoSum(approx1000, roundoffTotal).
        assert (approx1000 >= Math.abs(roundoffTotal));
        fastTwoSumApproximation = approx1000 + roundoffTotal;
        bVirtual = fastTwoSumApproximation - approx1000;

        // The exact roundoff on the scaled fractional part.
        double scaledFractionalRoundoff = roundoffTotal - bVirtual;
        // ---- TwoProduct(fractionalPart, scale (1000.0d or 100.0d)) end ----

        /* ---- Taking the rounding decision ----
         * The caller (fastDoubleFormat) has already established that the
         * approximated scaled fractional is exactly 0.5d. Being an exact
         * roundoff, scaledFractionalRoundoff cannot be positive with a true
         * scaled fractional below 0.5d, nor negative with one above 0.5d:
         *  - positive  => scaled fractional == 0.500...0fff  => round up;
         *  - negative  => scaled fractional == 0.499...9fff  => don't round up;
         *  - zero      => scaled fractional == 0.5 exactly   => half-even:
         *                 round up only if the integral part is odd.
         */
        if (scaledFractionalRoundoff > 0.0) {
            return true;
        } else if (scaledFractionalRoundoff < 0.0) {
            return false;
        } else if ((scaledFractionalPartAsInt & 1) != 0) {
            return true;
        }
        return false; // ---- Taking the rounding decision end ----
    }
}
public class HttpInboundLink { /** * Send an error message when a generic throwable occurs . * @ param t */ private void sendErrorMessage ( Throwable t ) { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { Tr . debug ( tc , "Sending a 400 for throwable [" + t + "]" ) ; } sendErrorMessage ( StatusCodes . BAD_REQUEST ) ;
public class DescribeServicesResult { /** * A JSON - formatted list of AWS services . * @ return A JSON - formatted list of AWS services . */ public java . util . List < Service > getServices ( ) { } }
if ( services == null ) { services = new com . amazonaws . internal . SdkInternalList < Service > ( ) ; } return services ;
public class BaseDestinationHandler { private void reallocateTransmissionStreams ( PtoPXmitMsgsItemStream ignoredStream ) { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "reallocateTransmissionStreams" , ignoredStream ) ; getLocalisationManager ( ) . reallocateTransmissionStreams ( ignoredStream ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "reallocateTransmissionStreams" ) ;
public class Configuration { /** * Returns an error message for the supplied exception and based on this * configuration . * @ see # setDefaultExceptionMessage ( String ) * @ see # setSendExceptionMessage ( boolean ) * @ see # setExceptionToMessage ( Map ) * @ param exception the thrown exception * @ return exception message */ public String getMessage ( Throwable exception ) { } }
String message ; if ( getExceptionToMessage ( ) != null ) { message = getExceptionToMessage ( ) . get ( exception . getClass ( ) ) ; if ( StringUtils . hasText ( message ) ) { return message ; } // map entry with a null value if ( getExceptionToMessage ( ) . containsKey ( exception . getClass ( ) ) ) { return exception . getMessage ( ) ; } } if ( isSendExceptionMessage ( ) ) { return exception . getMessage ( ) ; } return getDefaultExceptionMessage ( ) ;
public class WSKeyStore {
    /**
     * Checks whether the entry stored under the given alias is a key entry
     * (as opposed to, e.g., a trusted certificate entry).
     *
     * @param alias the keystore alias to check
     * @return {@code true} if the aliased entry is a key entry, {@code false} otherwise
     * @throws KeyStoreException if the underlying keystore is missing or reports a keystore problem
     * @throws KeyException if any other error occurs while accessing the keystore
     */
    public boolean isKeyEntry(String alias) throws KeyStoreException, KeyException {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            Tr.entry(tc, "isKeyEntry: " + alias);
        boolean isKey = false;
        try {
            // getKeyStore(false, false) -- presumably "don't create / don't reload";
            // NOTE(review): confirm the flag semantics against getKeyStore's declaration.
            KeyStore jKeyStore = getKeyStore(false, false);
            if (jKeyStore == null) {
                throw new KeyStoreException("The keystore [" + name + "] is not present in the configuration");
            }
            isKey = jKeyStore.isKeyEntry(alias);
        } catch (KeyStoreException e) {
            // Rethrow unchanged so keystore problems are not wrapped as KeyException below.
            throw e;
        } catch (Exception ex) {
            // Any other failure is surfaced to callers as a KeyException, preserving the cause.
            throw new KeyException(ex.getMessage(), ex);
        }
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            Tr.exit(tc, "isKeyEntry: " + isKey);
        return isKey;
    }
}
public class FileUtils {
    /**
     * Converts a {@link PosixFilePermission} set into an integer (octal-style)
     * permissions mode, e.g. owner rwx = 0700.
     *
     * @param aPermSet a PosixFilePermission set
     * @return the equivalent permissions mode integer
     */
    public static int convertToInt(final Set<PosixFilePermission> aPermSet) {
        int mode = 0;
        // Each permission present in the set contributes its dedicated mode bit.
        for (final PosixFilePermission permission : aPermSet) {
            switch (permission) {
                case OWNER_READ:
                    mode |= 0400;
                    break;
                case OWNER_WRITE:
                    mode |= 0200;
                    break;
                case OWNER_EXECUTE:
                    mode |= 0100;
                    break;
                case GROUP_READ:
                    mode |= 040;
                    break;
                case GROUP_WRITE:
                    mode |= 020;
                    break;
                case GROUP_EXECUTE:
                    mode |= 010;
                    break;
                case OTHERS_READ:
                    mode |= 04;
                    break;
                case OTHERS_WRITE:
                    mode |= 02;
                    break;
                case OTHERS_EXECUTE:
                    mode |= 01;
                    break;
            }
        }
        return mode;
    }
}
public class MuServerBuilder {
    /**
     * Creates and starts this server. An exception is thrown if it fails to start.
     *
     * @return The running server.
     * @throws IllegalArgumentException if neither an HTTP nor an HTTPS port was configured
     * @throws MuException if startup fails (partially started resources are shut down first)
     */
    public MuServer start() {
        if (httpPort < 0 && httpsPort < 0) {
            throw new IllegalArgumentException("No ports were configured. Please call MuServerBuilder.withHttpPort(int) or MuServerBuilder.withHttpsPort(int)");
        }
        ServerSettings settings = new ServerSettings(minimumGzipSize, maxHeadersSize, maxUrlSize, gzipEnabled, mimeTypesToGzip);
        NettyHandlerAdapter nettyHandlerAdapter = new NettyHandlerAdapter(handlers);
        // One boss thread accepts connections; the default-sized worker group serves them.
        NioEventLoopGroup bossGroup = new NioEventLoopGroup(1);
        NioEventLoopGroup workerGroup = new NioEventLoopGroup();
        List<Channel> channels = new ArrayList<>();
        // Shared shutdown routine: close each bound channel, then stop both event loop
        // groups. Failures are logged and ignored so shutdown always completes.
        Runnable shutdown = () -> {
            try {
                for (Channel channel : channels) {
                    channel.close().sync();
                }
                bossGroup.shutdownGracefully(0, 0, TimeUnit.MILLISECONDS).sync();
                workerGroup.shutdownGracefully(0, 0, TimeUnit.MILLISECONDS).sync();
            } catch (Exception e) {
                log.info("Error while shutting down. Will ignore. Error was: " + e.getMessage());
            }
        };
        try {
            GlobalTrafficShapingHandler trafficShapingHandler = new GlobalTrafficShapingHandler(workerGroup, 0, 0, 1000);
            MuStatsImpl stats = new MuStatsImpl(trafficShapingHandler.trafficCounter());
            AtomicReference<MuServer> serverRef = new AtomicReference<>();
            SslContextProvider sslContextProvider = null;
            Channel httpChannel = httpPort < 0 ? null : createChannel(bossGroup, workerGroup, nettyHandlerAdapter, host, httpPort, null, trafficShapingHandler, stats, serverRef, settings, false);
            Channel httpsChannel;
            boolean http2Enabled = Toggles.http2;
            if (httpsPort < 0) {
                httpsChannel = null;
            } else {
                // Fall back to a self-signed localhost certificate when no SSL context was configured.
                SSLContextBuilder toUse = this.sslContextBuilder != null ? this.sslContextBuilder : SSLContextBuilder.unsignedLocalhostCertBuilder();
                SslContext nettySslContext = toUse.toNettySslContext(http2Enabled);
                log.debug("SSL Context is " + nettySslContext);
                sslContextProvider = new SslContextProvider(nettySslContext);
                httpsChannel = createChannel(bossGroup, workerGroup, nettyHandlerAdapter, host, httpsPort, sslContextProvider, trafficShapingHandler, stats, serverRef, settings, http2Enabled);
            }
            URI uri = null;
            if (httpChannel != null) {
                channels.add(httpChannel);
                uri = getUriFromChannel(httpChannel, "http", host);
            }
            URI httpsUri = null;
            if (httpsChannel != null) {
                channels.add(httpsChannel);
                httpsUri = getUriFromChannel(httpsChannel, "https", host);
            }
            InetSocketAddress serverAddress = (InetSocketAddress) channels.get(0).localAddress();
            MuServer server = new MuServerImpl(uri, httpsUri, shutdown, stats, serverAddress, sslContextProvider, http2Enabled);
            // Publish the server instance so handlers created above can observe it via serverRef.
            serverRef.set(server);
            if (addShutdownHook) {
                Runtime.getRuntime().addShutdownHook(new Thread(server::stop));
            }
            return server;
        } catch (Exception ex) {
            // Best-effort cleanup of whatever was started before failing.
            shutdown.run();
            throw new MuException("Error while starting server", ex);
        }
    }
}
public class WASCDIAnnotationInjectionProvider { /** * { @ inheritDoc } */ @ Override public void postConstruct ( Object instance , Object creationMetaData ) throws InjectionProviderException { } }
if ( com . ibm . ejs . ras . TraceComponent . isAnyTracingEnabled ( ) && logger . isLoggable ( Level . FINE ) ) { logger . logp ( Level . FINE , CLASS_NAME , "postConstruct(instance)" , "Instance of = " + instance . getClass ( ) . getName ( ) ) ; } if ( isAvailable ) { try { runtimeAnnotationHelper . doDelayedPostConstruct ( instance ) ; } catch ( RuntimeException exc ) { throw new InjectionProviderException ( exc ) ; } }
public class DebugUtil { /** * Invokes a given method of every element in a { @ code java . util . Collection } and prints the results to a { @ code java . io . PrintStream } . * The method to be invoked must have no formal parameters . * If an exception is throwed during the method invocation , the element ' s { @ code toString ( ) } method is called . * For bulk data types , recursive invocations and invocations of other methods in this class , are used . * Be aware that the { @ code Collection } interface embraces a large portion of the bulk data types in the { @ code java . util } package , * e . g . { @ code List } , { @ code Set } , { @ code Vector } and { @ code HashSet } . * For debugging of arrays , use the method < a href = " http : / / java . sun . com / products / jdk / 1.3 / docs / api / java / util / Arrays . html # asList ( java . lang . Object [ ] ) " > { @ code java . util . Arrays . asList ( Object [ ] ) } < / a > method for converting the object array to a list before calling this method . * @ param pCollection the { @ code java . util . Collection } to be printed . * @ param pMethodName a { @ code java . lang . String } holding the name of the method to be invoked on each collection element . * @ param pPrintStream the { @ code java . io . PrintStream } for flushing the results . * @ see < a href = " http : / / java . sun . com / products / jdk / 1.3 / docs / api / java / util / Collection . html " > { @ code java . util . Collection } < / a > */ public static void printDebug ( final Collection pCollection , final String pMethodName , final PrintStream pPrintStream ) { } }
if ( pPrintStream == null ) { System . err . println ( PRINTSTREAM_IS_NULL_ERROR_MESSAGE ) ; return ; } if ( pCollection == null ) { pPrintStream . println ( COLLECTION_IS_NULL_ERROR_MESSAGE ) ; return ; } else if ( pCollection . isEmpty ( ) ) { pPrintStream . println ( COLLECTION_IS_EMPTY_ERROR_MESSAGE ) ; return ; } for ( Iterator i = pCollection . iterator ( ) ; i . hasNext ( ) ; ) { printDebug ( i . next ( ) , pMethodName , pPrintStream ) ; }
public class Ifc4PackageImpl {
    /**
     * Returns the {@link EClass} for IfcBoundaryNodeCondition, lazily resolving it
     * from the globally registered Ifc4 package on first access.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public EClass getIfcBoundaryNodeCondition() {
        if (ifcBoundaryNodeConditionEClass == null) {
            // Index 51 is the generated classifier position of IfcBoundaryNodeCondition
            // in the Ifc4 package -- generated code, do not hand-edit the index.
            ifcBoundaryNodeConditionEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc4Package.eNS_URI).getEClassifiers().get(51);
        }
        return ifcBoundaryNodeConditionEClass;
    }
}
public class Balanced { /** * Returns the index within this string of the first occurrence of the specified character , similar to String . indexOf ( ) . * However , any occurrence of the specified character enclosed between balanced parentheses / brackets / braces is ignored . * @ param text a String * @ param target the character to search for * @ return the index of the character in the string , or - 1 if the specified character is not found */ public static int indexOf ( String text , char target ) { } }
return indexOf ( text , 0 , text . length ( ) , target , null ) ;
public class AliasDestinationHandler {
    /**
     * Chooses a ConsumerManager for this alias (or foreign) destination by validating
     * and translating the caller's arguments and then delegating to the target
     * destination handler.
     *
     * @param gatheringTargetUuid if supplied, it is replaced by this alias's own UUID before delegation
     * @param fixedMEUuid optional ME fixed by the caller; must be within this alias's scoped set
     * @param scopedMEs must be {@code null} -- an alias is never called with a scoped ME set
     * @return the chosen ConsumerManager, or {@code null} if the fixed ME is outside the alias's scope
     * @throws SIResourceException propagated from the target destination handler
     * @see com.ibm.ws.sib.processor.impl.interfaces.DestinationHandler#chooseConsumerDispatcher()
     */
    @Override
    public ConsumerManager chooseConsumerManager(SIBUuid12 gatheringTargetUuid, SIBUuid8 fixedMEUuid, HashSet<SIBUuid8> scopedMEs) throws SIResourceException {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(tc, "chooseConsumerManager", new Object[] { gatheringTargetUuid, fixedMEUuid, scopedMEs });
        // We're an alias (or foreign destination) so we should never be called with a scoped ME set
        if (scopedMEs != null) {
            SIMPErrorException e = new SIMPErrorException("Alias called with scoped ME set");
            if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) {
                SibTr.exception(tc, e);
            }
            e.setExceptionReason(SIRCConstants.SIRC0901_INTERNAL_MESSAGING_ERROR);
            e.setExceptionInserts(new String[] { "com.ibm.ws.sib.processor.impl.ProducerSessionImpl.handleMessage", "1:837:1.71.2.6", SIMPUtils.getStackTrace(e) });
            if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
                SibTr.exit(tc, "chooseConsumerManager", e);
            throw e;
        }
        boolean error = false;
        ConsumerManager consumerManager = null;
        // If the caller has fixed an ME, first check it's in our set
        if (fixedMEUuid != null) {
            if ((_singleScopedQueuePointME != null) && !fixedMEUuid.equals(_singleScopedQueuePointME))
                error = true;
            else if ((_scopedQueuePointMEs != null) && !_scopedQueuePointMEs.contains(fixedMEUuid))
                error = true;
        }
        if (!error) {
            // If we have a single ME scoped by this alias we may as well pass it to the target as
            // a fixed ME (which it must match as we've already checked that) to save
            // having to parse a HashSet everywhere for no need.
            if (_singleScopedQueuePointME != null)
                fixedMEUuid = _singleScopedQueuePointME;
            // If a gatheringUuid was supplied, replace it with the alias uuid
            if (gatheringTargetUuid != null)
                gatheringTargetUuid = _definition.getUUID();
            // Pass on any scoped queue points (this is a consumer on an alias)
            consumerManager = _targetDestinationHandler.chooseConsumerManager(gatheringTargetUuid, fixedMEUuid, _scopedQueuePointMEs);
        }
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(tc, "chooseConsumerManager", consumerManager);
        return consumerManager;
    }
}
public class QuattroTableModel { /** * { @ inheritDoc } */ @ Override public Object getValueAt ( final int rowIndex , final int columnIndex ) { } }
final Quattro < TL , TR , BL , BR > row = getData ( ) . get ( rowIndex ) ; switch ( columnIndex ) { case 0 : return row . getTopLeft ( ) ; case 1 : return row . getTopRight ( ) ; case 2 : return row . getBottomLeft ( ) ; case 3 : return row . getBottomRight ( ) ; default : return null ; }
public class AnnoConstruct {
    /**
     * Helper to getAnnotationsByType: resolves the containing annotation type declared
     * by the {@code @Repeatable} meta-annotation on {@code annoType}, or {@code null}
     * if the annotation is not repeatable (or the reflective lookup fails).
     */
    private static Class<? extends Annotation> getContainer(Class<? extends Annotation> annoType) {
        // Since we can not refer to java.lang.annotation.Repeatable until we are
        // bootstrapping with java 8 we need to get the Repeatable annotation using
        // reflective invocations instead of just using its type and element method.
        if (REPEATABLE_CLASS != null && VALUE_ELEMENT_METHOD != null) {
            // Get the Repeatable instance on the annotations declaration
            Annotation repeatable = (Annotation) annoType.getAnnotation(REPEATABLE_CLASS);
            if (repeatable != null) {
                try {
                    // Get the value element, it should be a class
                    // indicating the containing annotation type
                    @SuppressWarnings("unchecked")
                    Class<? extends Annotation> containerType = (Class) VALUE_ELEMENT_METHOD.invoke(repeatable);
                    if (containerType == null)
                        return null;
                    return containerType;
                } catch (ClassCastException | IllegalAccessException | InvocationTargetException e) {
                    // Any reflective failure is treated as "not repeatable".
                    return null;
                }
            }
        }
        return null;
    }
}
public class OWLValueObject {
    /**
     * Builds an {@link OWLValueObject} for the given object: simple values (boxed
     * primitives and Strings) become data values, jenabean objects become individuals.
     *
     * @param model the OWL model used to create values/individuals
     * @param uriClass the OWL class the value belongs to
     * @param object the value to translate
     * @return the built instance
     * @throws NotYetImplementedException if the object is neither a simple value nor a jenabean
     * @throws OWLTranslationException if the jenabean resource URI is malformed
     */
    private static OWLValueObject buildFromClasAndObject(OWLModel model, OWLURIClass uriClass, Object object) throws NotYetImplementedException, OWLTranslationException {
        // BUG FIX: a primitive reaching an Object parameter is always boxed, so
        // object.getClass().isPrimitive() could never return true here and numeric /
        // boolean values fell through to the error path. Test the wrapper types
        // (and String) explicitly instead.
        if (object instanceof Number || object instanceof Boolean || object instanceof Character || object instanceof String) {
            return new OWLValueObject(model, uriClass, model.createDataValue(object, uriClass.getURI()));
        }
        // if object uses jenabeans:
        if (ObjectOWLSTranslator.isJenaBean(object)) {
            try {
                return new OWLValueObject(model, uriClass, model.createIndividual(uriClass.getURI(), new URI(ObjectOWLSTranslator.beanToJenaResource(model, object).getURI())));
            } catch (URISyntaxException ex) {
                throw new OWLTranslationException("translating to Jena: ", ex);
            }
        }
        throw new NotYetImplementedException("new " + OWLValueObject.class.toString() + " from a non-primitive object");
    }
}
public class CmsWorkplaceAppManager { /** * Initializes the additional workplace CSS URIs . < p > * They will be taken from the module parameter ' workplace - css ' if present in any module . < p > * @ param moduleManager the module manager instance */ public void initWorkplaceCssUris ( CmsModuleManager moduleManager ) { } }
Set < String > cssUris = new HashSet < String > ( ) ; for ( CmsModule module : moduleManager . getAllInstalledModules ( ) ) { String param = module . getParameter ( WORKPLACE_CSS_PARAM ) ; if ( CmsStringUtil . isNotEmptyOrWhitespaceOnly ( param ) ) { cssUris . add ( param ) ; } } File cssFile = new File ( OpenCms . getSystemInfo ( ) . getAbsoluteRfsPathRelativeToWebApplication ( CmsStringUtil . joinPaths ( "resources" , TOOLBAR_CSS ) ) ) ; if ( cssFile . exists ( ) ) { cssUris . add ( TOOLBAR_CSS ) ; } m_workplaceCssUris = Collections . unmodifiableSet ( cssUris ) ;
public class BeanRepository {
    /**
     * With a {@link Provider} it is possible to get an Accessor to a Bean, without
     * initialising the Bean at the time of getting the Accessor (lazy lookup).
     *
     * @param cls the Class of the Bean, as used in the configuration of the BeanRepository
     * @param <R> the type or a super type of the Bean
     * @param <T> the type of the Bean
     * @return a Provider for the Bean of the given Class
     */
    public <R, T extends R> Provider<T> getProvider(final Class<R> cls) {
        // Resolve the configured bean provider for the class and wrap it in a lazy Provider.
        return providerFor(beanProviderFor(cls));
    }
}
public class IterableExtensions { /** * Applies { @ code procedure } for each element of the given iterable . * @ param iterable * the iterable . May not be < code > null < / code > . * @ param procedure * the procedure . May not be < code > null < / code > . */ public static < T > void forEach ( Iterable < T > iterable , Procedure1 < ? super T > procedure ) { } }
IteratorExtensions . forEach ( iterable . iterator ( ) , procedure ) ;
public class MathUtils {
    /**
     * Clamps {@code x} to the inclusive range [{@code min}, {@code max}].
     *
     * @param x the value to clamp
     * @param min lower bound of the range
     * @param max upper bound of the range
     * @return {@code x} limited to the given range
     */
    public static int clamp(int x, int min, int max) {
        // Upper bound is checked first, matching the original branch order.
        return x > max ? max : (x < min ? min : x);
    }
}
public class CommerceCountryUtil {
    /**
     * Returns the first commerce country in the ordered set where groupId = &#63;.
     *
     * @param groupId the group ID
     * @param orderByComparator the comparator to order the set by (optionally <code>null</code>)
     * @return the first matching commerce country
     * @throws NoSuchCountryException if a matching commerce country could not be found
     */
    public static CommerceCountry findByGroupId_First(long groupId, OrderByComparator<CommerceCountry> orderByComparator) throws com.liferay.commerce.exception.NoSuchCountryException {
        // Thin facade: all lookup logic lives in the persistence implementation.
        return getPersistence().findByGroupId_First(groupId, orderByComparator);
    }
}
public class TrmMessageFactoryImpl { /** * Create an instance of the appropriate sub - class , e . g . TrmRouteData if * the inbound message is actually a TRM RouteData Message , for the * given JMO . A TRM Message of unknown type will be returned as a TrmMessage . * @ return TrmMessage A TrmMessage of the appropriate subtype */ final TrmMessage createInboundTrmMessage ( JsMsgObject jmo , int messageType ) { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "createInboundTrmMessage " + messageType ) ; TrmMessage trmMessage = null ; /* Create an instance of the appropriate message subclass */ switch ( messageType ) { case TrmMessageType . ROUTE_DATA_INT : trmMessage = new TrmRouteDataImpl ( jmo ) ; break ; default : trmMessage = new TrmMessageImpl ( jmo ) ; } if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "createInboundTrmMessage" ) ; return trmMessage ;
public class GuiceBootstrapModule {
    /**
     * Binds bootstrap, configuration and environment objects so they can be
     * injected by application code.
     */
    @SuppressWarnings("deprecation")
    private void bindEnvironment() {
        bind(Bootstrap.class).toInstance(bootstrap());
        bind(Environment.class).toInstance(environment());
        // Configuration bindings are handled by a dedicated child module, which also
        // honours the (deprecated) BindConfigurationInterfaces option.
        install(new ConfigBindingModule(configuration(), configurationTree(), context.option(BindConfigurationInterfaces)));
    }
}
public class NettyChannelBuilder { /** * Equivalent to using { @ link # negotiationType ( NegotiationType ) } with { @ code PLAINTEXT } or * { @ code PLAINTEXT _ UPGRADE } . * @ deprecated use { @ link # usePlaintext ( ) } instead . */ @ Override @ Deprecated public NettyChannelBuilder usePlaintext ( boolean skipNegotiation ) { } }
if ( skipNegotiation ) { negotiationType ( NegotiationType . PLAINTEXT ) ; } else { negotiationType ( NegotiationType . PLAINTEXT_UPGRADE ) ; } return this ;
public class Region {
    /**
     * Orders this region relative to another {@code Region} or to a {@code Long}
     * position.
     *
     * Region vs Region: -1 when this region starts earlier, 1 when it ends later,
     * 0 otherwise -- i.e. a region fully nested inside another compares as 0
     * ("equal"). NOTE(review): this is not a total order (not antisymmetric for
     * overlapping regions), so it should not be relied on for sorted collections
     * that assume the full compareTo contract -- confirm intended usage.
     *
     * Region vs Long: 0 when the position falls inside [start, end]; -1 when the
     * position lies after this region; 1 when it lies before it.
     *
     * @throws AssertionError if {@code other} is neither a Region nor a Long
     */
    @Override
    public int compareTo(Comparable<?> other) {
        if (other instanceof Region) {
            Region r = (Region) other;
            if (this.start < r.start) {
                return -1;
            } else if (this.end > r.end) {
                return 1;
            } else {
                return 0;
            }
        } else if (other instanceof Long) {
            Long l = (Long) other;
            if (l > end) {
                return -1;
            } else if (l < start) {
                return 1;
            } else {
                return 0;
            }
        } else {
            throw new AssertionError();
        }
    }
}
public class NFRuleSet { /** * Formats a long . Selects an appropriate rule and dispatches * control to it . * @ param number The number being formatted * @ param toInsertInto The string where the result is to be placed * @ param pos The position in toInsertInto where the result of * this operation is to be inserted */ public void format ( long number , StringBuilder toInsertInto , int pos , int recursionCount ) { } }
if ( recursionCount >= RECURSION_LIMIT ) { throw new IllegalStateException ( "Recursion limit exceeded when applying ruleSet " + name ) ; } NFRule applicableRule = findNormalRule ( number ) ; applicableRule . doFormat ( number , toInsertInto , pos , ++ recursionCount ) ;
public class Configuration { /** * Get the value of the < code > name < / code > property as a < code > Pattern < / code > . * If no such property is specified , or if the specified value is not a valid * < code > Pattern < / code > , then < code > DefaultValue < / code > is returned . * Note that the returned value is NOT trimmed by this method . * @ param name property name * @ param defaultValue default value * @ return property value as a compiled Pattern , or defaultValue */ public Pattern getPattern ( String name , Pattern defaultValue ) { } }
String valString = get ( name ) ; if ( null == valString || valString . isEmpty ( ) ) { return defaultValue ; } try { return Pattern . compile ( valString ) ; } catch ( PatternSyntaxException pse ) { LOG . warn ( "Regular expression '" + valString + "' for property '" + name + "' not valid. Using default" , pse ) ; return defaultValue ; }
public class RestrictionsContainer { /** * Methode d ' ajout de la restriction NotEq * @ param propertyNom de la Propriete * @ param valueValeur de la propriete * @ param < Y > Type de valeur * @ returnConteneur */ public < Y extends Comparable < ? super Y > > RestrictionsContainer addNotEq ( String property , Y value ) { } }
// Ajout de la restriction restrictions . add ( new NotEq < Y > ( property , value ) ) ; // On retourne le conteneur return this ;
public class UnrolledUnsafeCopierBuilder { /** * Constructs a new Copier using the passed in Unsafe instance * @ param unsafe The sun . misc . Unsafe instance this copier uses * @ return The new UnsageCopier built with the specific parameters * @ throws IllegalAccessException * @ throws InstantiationException * @ throws NoSuchMethodException * @ throws InvocationTargetException * @ throws IllegalArgumentException if any argument is invalid */ public UnsafeCopier build ( Unsafe unsafe ) throws IllegalAccessException , InstantiationException , NoSuchMethodException , InvocationTargetException { } }
checkArgument ( offset >= 0 , "Offset must be set" ) ; checkArgument ( length >= 0 , "Length must be set" ) ; checkNotNull ( unsafe ) ; Class < ? > dynamicType = new ByteBuddy ( ) . subclass ( UnsafeCopier . class ) . method ( named ( "copy" ) ) . intercept ( new CopierImplementation ( offset , length ) ) . make ( ) . load ( getClass ( ) . getClassLoader ( ) , ClassLoadingStrategy . Default . WRAPPER ) . getLoaded ( ) ; return ( UnsafeCopier ) dynamicType . getDeclaredConstructor ( Unsafe . class ) . newInstance ( unsafe ) ;
public class RowSchema { /** * Creates a new { @ link RowSchema } from a list of { @ link AccumuloColumnHandle } objects . Does not validate the schema . * @ param columns Column handles * @ return Row schema */ public static RowSchema fromColumns ( List < AccumuloColumnHandle > columns ) { } }
RowSchema schema = new RowSchema ( ) ; for ( AccumuloColumnHandle columnHandle : columns ) { schema . addColumn ( columnHandle . getName ( ) , columnHandle . getFamily ( ) , columnHandle . getQualifier ( ) , columnHandle . getType ( ) , columnHandle . isIndexed ( ) ) ; } return schema ;
public class Files {
    /**
     * Creates a buffered, UTF-8 encoded writer for the given file path. Use this
     * instead of {@code FileWriter}, which offers no way to specify the character
     * encoding and silently uses the platform default.
     *
     * @param file path of the file to write (created or truncated)
     * @return a buffered UTF-8 writer for the file
     * @throws UnsupportedEncodingException never thrown any more; retained in the
     *         signature for source compatibility with existing callers
     * @throws FileNotFoundException if the file cannot be opened for writing
     */
    public static Writer newBufferedUTF8FileWriter(final String file) throws UnsupportedEncodingException, FileNotFoundException {
        // StandardCharsets.UTF_8 is guaranteed present on every JVM, so the
        // checked-exception charset lookup by name ("UTF-8") is unnecessary.
        return new BufferedWriter(new OutputStreamWriter(new FileOutputStream(file), StandardCharsets.UTF_8));
    }
}
public class S3Util { /** * Use this helper method to generate pre - signed S3 urls . You ' ll need to generate urls for both the put and delete * http methods . Example : Your AWS Access Key is " abcd " . Your AWS Secret Access Key is " efgh " . You want this node to * write its information to " / S3 / master / jboss - domain - master - data " . So , your bucket is " S3 " and your key is * " master / jboss - domain - master - data " . You want this to expire one year from now , or ( System . currentTimeMillis / * 1000 ) + ( 60 * 60 * 24 * 365 ) Let ' s assume that this equals 1316286684 * Here ' s how to generate the value for the pre _ signed _ put _ url property : String putUrl = * S3Util . generatePreSignedUrl ( " abcd " , " efgh " , " put " , " S3 " , " master / jboss - domain - master - data " , 1316286684 ) ; * Here ' s how to generate the value for the pre _ signed _ delete _ url property : String deleteUrl = * S3Util . generatePreSignedUrl ( " abcd " , " efgh " , " delete " , " S3 " , " master / jboss - domain - master - data " , 1316286684 ) ; * @ param awsAccessKey Your AWS Access Key * @ param awsSecretAccessKey Your AWS Secret Access Key * @ param method The HTTP method - use " put " or " delete " for use with S3 _ PING * @ param bucket The S3 bucket you want to write to * @ param key The key within the bucket to write to * @ param expirationDate The date this pre - signed url should expire , in seconds since epoch * @ return The pre - signed url to be used in pre _ signed _ put _ url or pre _ signed _ delete _ url properties */ public static String generatePreSignedUrl ( String awsAccessKey , String awsSecretAccessKey , String method , String bucket , String key , long expirationDate ) { } }
Map headers = new HashMap ( ) ; if ( method . equalsIgnoreCase ( "PUT" ) ) { headers . put ( "x-amz-acl" , Arrays . asList ( "public-read" ) ) ; } return Utils . generateQueryStringAuthentication ( awsAccessKey , awsSecretAccessKey , method , bucket , key , new HashMap ( ) , headers , expirationDate ) ;
public class FlinkKafkaProducerBase {
    /**
     * Initializes the connection to Kafka: creates the producer, opens the
     * partitioner, registers Kafka metrics with Flink, reconciles the
     * flush-on-checkpoint setting with the runtime, and installs the send callback.
     */
    @Override
    public void open(Configuration configuration) {
        producer = getKafkaProducer(this.producerConfig);
        RuntimeContext ctx = getRuntimeContext();
        if (null != flinkKafkaPartitioner) {
            // Legacy delegate partitioners need the partition list up front.
            if (flinkKafkaPartitioner instanceof FlinkKafkaDelegatePartitioner) {
                ((FlinkKafkaDelegatePartitioner) flinkKafkaPartitioner).setPartitions(getPartitionsByTopic(this.defaultTopicId, this.producer));
            }
            flinkKafkaPartitioner.open(ctx.getIndexOfThisSubtask(), ctx.getNumberOfParallelSubtasks());
        }
        LOG.info("Starting FlinkKafkaProducer ({}/{}) to produce into default topic {}", ctx.getIndexOfThisSubtask() + 1, ctx.getNumberOfParallelSubtasks(), defaultTopicId);
        // register Kafka metrics to Flink accumulators (unless disabled via config)
        if (!Boolean.parseBoolean(producerConfig.getProperty(KEY_DISABLE_METRICS, "false"))) {
            Map<MetricName, ? extends Metric> metrics = this.producer.metrics();
            if (metrics == null) {
                // MapR's Kafka implementation returns null here.
                LOG.info("Producer implementation does not support metrics");
            } else {
                final MetricGroup kafkaMetricGroup = getRuntimeContext().getMetricGroup().addGroup("KafkaProducer");
                for (Map.Entry<MetricName, ? extends Metric> metric : metrics.entrySet()) {
                    kafkaMetricGroup.gauge(metric.getKey().name(), new KafkaMetricWrapper(metric.getValue()));
                }
            }
        }
        // Flushing on checkpoint is pointless without checkpointing; disable it.
        if (flushOnCheckpoint && !((StreamingRuntimeContext) this.getRuntimeContext()).isCheckpointingEnabled()) {
            LOG.warn("Flushing on checkpoint is enabled, but checkpointing is not enabled. Disabling flushing.");
            flushOnCheckpoint = false;
        }
        if (logFailuresOnly) {
            // Log-only mode: failures are logged and the record is acknowledged anyway.
            callback = new Callback() {
                @Override
                public void onCompletion(RecordMetadata metadata, Exception e) {
                    if (e != null) {
                        LOG.error("Error while sending record to Kafka: " + e.getMessage(), e);
                    }
                    acknowledgeMessage();
                }
            };
        } else {
            // Fail mode: remember the first async exception so it can be rethrown
            // on the next invoke/flush; later exceptions are intentionally dropped.
            callback = new Callback() {
                @Override
                public void onCompletion(RecordMetadata metadata, Exception exception) {
                    if (exception != null && asyncException == null) {
                        asyncException = exception;
                    }
                    acknowledgeMessage();
                }
            };
        }
    }
}
public class Category {
    /**
     * Creates a new logging event and logs it without further level checks.
     *
     * @param fqcn fully-qualified class name of the category or logger instance
     * @param level priority to log
     * @param message message to log
     * @param t exception to log
     */
    protected void forcedLog(final String fqcn, final Priority level, final Object message, final Throwable t) {
        // When the caller passed the throwable itself as the message, suppress the
        // message argument so the event does not render the exception twice.
        provider.log(fqcn, null, translatePriority(level), t, message == t ? null : message, (Object[]) null);
    }
}
public class Thing { /** * Removes a property from the map . * @ param key the key * @ return this */ public Thing removeStateProperty ( String key ) { } }
if ( ! StringUtils . isBlank ( key ) ) { getDeviceState ( ) . remove ( key ) ; } return this ;
public class MultiUserChat {
    /**
     * Revokes ownership privileges from another user. The occupant that loses
     * ownership privileges will become an administrator. Room owners may revoke
     * ownership privileges. Some room implementations will not allow granting
     * ownership privileges to other users.
     *
     * @param jid the bare XMPP user ID of the user to revoke ownership (e.g. "user@host.org")
     * @throws XMPPErrorException if an error occurs revoking ownership privileges from a user
     * @throws NoResponseException if there was no response from the server
     * @throws NotConnectedException
     * @throws InterruptedException
     */
    public void revokeOwnership(Jid jid) throws XMPPErrorException, NoResponseException, NotConnectedException, InterruptedException {
        // Revoking ownership is expressed as demoting the user to the admin
        // affiliation (matching the documented "will become an administrator").
        changeAffiliationByAdmin(jid, MUCAffiliation.admin, null);
    }
}
public class CFEndPointImpl { /** * Assign the value of address based on the String parameter . * @ param addressString * @ throws ChannelFrameworkException * @ throws UnknownHostException */ private void assignAddress ( String addressString ) throws ChannelFrameworkException , UnknownHostException { } }
if ( addressString == null ) { // No address found in properties . No CFEndPoint can be created . throw new ChannelFrameworkException ( "No address available in properties." ) ; } if ( "*" . equals ( addressString ) ) { // TODO WAS used the node name this . address = InetAddress . getLocalHost ( ) ; } else { this . address = InetAddress . getByName ( addressString ) ; }
public class Timecode {
    /**
     * Sets this object from a string in the form HH:MM:SS:FF (or HH:MM:SS:FF:SS
     * for AES31 sample-based codes). Out-of-range fields are normalized, e.g.
     * 00:00:60:00 becomes 00:01:00:00.
     *
     * @param timecode the timecode string to parse
     * @throws Timecode.TimecodeException if a mandatory token is missing or invalid
     */
    public void setCode(String timecode) throws Timecode.TimecodeException {
        clear();
        setHours(getToken(timecode, 0));
        setMinutes(getToken(timecode, 1));
        setSeconds(getToken(timecode, 2));
        setFrames(getToken(timecode, 3));
        if (useSamples()) {
            // Optional 5th token: its presence marks an AES31-formatted code.
            try {
                setSamples(getToken(timecode, 4));
                // If we got here, we're parsing a AES31-formatted string... Set our
                // frames per sec to 30 instead of 75
                setFramesPerSecond(30);
            } catch (Timecode.TimecodeException ignored) {
            } // If this fails, it just means the code didn't have samples appended
        }
        // fix badly formed values: 00:00:60:00 will become 00:01:00:00, etc.
        normalize();
    }
}
public class Put { /** * A map of attribute name to attribute values , representing the primary key of the item to be written by * < code > PutItem < / code > . All of the table ' s primary key attributes must be specified , and their data types must * match those of the table ' s key schema . If any attributes are present in the item that are part of an index key * schema for the table , their types must match the index key schema . * @ param item * A map of attribute name to attribute values , representing the primary key of the item to be written by * < code > PutItem < / code > . All of the table ' s primary key attributes must be specified , and their data types * must match those of the table ' s key schema . If any attributes are present in the item that are part of an * index key schema for the table , their types must match the index key schema . * @ return Returns a reference to this object so that method calls can be chained together . */ public Put withItem ( java . util . Map < String , AttributeValue > item ) { } }
setItem ( item ) ; return this ;
public class FileUtil { /** * Rename the file with oldname to newname . If a file with newname already * exists , it is deleted before the renaming operation proceeds . * If a file with oldname does not exist , no file will exist after the * operation . */ private boolean renameOverwrite ( String oldname , String newname ) { } }
boolean deleted = delete ( newname ) ; if ( exists ( oldname ) ) { File file = new File ( oldname ) ; return file . renameTo ( new File ( newname ) ) ; } return deleted ;
public class StubObject { /** * Create a { @ link StubObject } using the current user ID and the provided * object ID * @ param sID * Object ID * @ param aCustomAttrs * Custom attributes . May be < code > null < / code > . * @ return Never < code > null < / code > . */ @ Nonnull public static StubObject createForCurrentUserAndID ( @ Nonnull @ Nonempty final String sID , @ Nullable final Map < String , String > aCustomAttrs ) { } }
return new StubObject ( sID , LoggedInUserManager . getInstance ( ) . getCurrentUserID ( ) , aCustomAttrs ) ;
public class FogbugzManager { /** * Helper method to create API url from Map , with proper encoding . * @ param params Map with parameters to encode . * @ return String which represents API URL . */ private String mapToFogbugzUrl ( Map < String , String > params ) throws UnsupportedEncodingException { } }
String output = this . getFogbugzUrl ( ) ; for ( String key : params . keySet ( ) ) { String value = params . get ( key ) ; if ( ! value . isEmpty ( ) ) { output += "&" + URLEncoder . encode ( key , "UTF-8" ) + "=" + URLEncoder . encode ( value , "UTF-8" ) ; } } FogbugzManager . log . info ( "Generated URL to send to Fogbugz: " + output ) ; return output ;
public class JavaParser {
    /**
     * ANTLR-generated rule method; do not hand-edit the logic.
     * src/main/resources/org/drools/compiler/semantics/java/parser/Java.g:409:1:
     * interfaceMethodOrFieldRest : ( constantDeclaratorsRest ';' | interfaceMethodDeclaratorRest ) ;
     */
    public final void interfaceMethodOrFieldRest() throws RecognitionException {
        int interfaceMethodOrFieldRest_StartIndex = input.index();
        try {
            // Memoization: skip if this rule already parsed at this position while backtracking.
            if (state.backtracking > 0 && alreadyParsedRule(input, 30)) {
                return;
            }
            // src/main/resources/org/drools/compiler/semantics/java/parser/Java.g:410:5:
            // ( constantDeclaratorsRest ';' | interfaceMethodDeclaratorRest )
            int alt42 = 2;
            int LA42_0 = input.LA(1); // one-token lookahead decides the alternative
            if ((LA42_0 == 54 || LA42_0 == 59)) {
                alt42 = 1;
            } else if ((LA42_0 == 36)) {
                alt42 = 2;
            } else {
                if (state.backtracking > 0) {
                    state.failed = true;
                    return;
                }
                NoViableAltException nvae = new NoViableAltException("", 42, 0, input);
                throw nvae;
            }
            switch (alt42) {
                case 1:
                    // Java.g:410:7: constantDeclaratorsRest ';'
                    {
                        pushFollow(FOLLOW_constantDeclaratorsRest_in_interfaceMethodOrFieldRest996);
                        constantDeclaratorsRest();
                        state._fsp--;
                        if (state.failed) return;
                        match(input, 52, FOLLOW_52_in_interfaceMethodOrFieldRest998);
                        if (state.failed) return;
                    }
                    break;
                case 2:
                    // Java.g:411:7: interfaceMethodDeclaratorRest
                    {
                        pushFollow(FOLLOW_interfaceMethodDeclaratorRest_in_interfaceMethodOrFieldRest1006);
                        interfaceMethodDeclaratorRest();
                        state._fsp--;
                        if (state.failed) return;
                    }
                    break;
            }
        } catch (RecognitionException re) {
            reportError(re);
            recover(input, re);
        } finally {
            // do for sure before leaving: record the parse result for memoized backtracking
            if (state.backtracking > 0) {
                memoize(input, 30, interfaceMethodOrFieldRest_StartIndex);
            }
        }
    }
}
public class BaseFactoryImpl {
    /**
     * Creates a ModcaString8 value from its string representation by delegating to the
     * generic EMF conversion; the data type is backed by a plain String.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public String createModcaString8FromString(EDataType eDataType, String initialValue) {
        return (String) super.createFromString(eDataType, initialValue);
    }
}
public class FilterDriver { /** * Easily supports the Join . To use the setSimpleJoin , * you must be a size master data appear in the memory of the task . * @ param masterLabels label of master data * @ param masterColumn master column * @ param dataColumn data column * @ param masterSeparator separator * @ param regex master join is regex * @ param masterData master data */ protected void setSimpleJoin ( String [ ] masterLabels , String masterColumn , String dataColumn , String masterSeparator , boolean regex , List < String > masterData ) { } }
this . conf . setInt ( SimpleJob . READER_TYPE , SimpleJob . SINGLE_COLUMN_JOIN_READER ) ; this . conf . setStrings ( SimpleJob . MASTER_LABELS , masterLabels ) ; this . conf . set ( SimpleJob . JOIN_MASTER_COLUMN , masterColumn ) ; this . conf . set ( SimpleJob . JOIN_DATA_COLUMN , dataColumn ) ; this . masterSeparator = masterSeparator ; this . conf . setBoolean ( SimpleJob . JOIN_REGEX , regex ) ; this . masterData = masterData ;
public class CPOptionCategoryLocalServiceBaseImpl {
    /**
     * Returns a range of all the cp option categories. Useful when paginating results;
     * returns a maximum of <code>end - start</code> instances. <code>start</code> and
     * <code>end</code> are zero-based result-set indexes, not primary keys; pass
     * {@link com.liferay.portal.kernel.dao.orm.QueryUtil#ALL_POS} for both to get the
     * full result set.
     *
     * @param start the lower bound of the range of cp option categories
     * @param end the upper bound of the range of cp option categories (not inclusive)
     * @return the range of cp option categories
     */
    @Override
    public List<CPOptionCategory> getCPOptionCategories(int start, int end) {
        // Pure delegation to the persistence layer, which applies the default ordering.
        return cpOptionCategoryPersistence.findAll(start, end);
    }
}
public class UsersApi {
    /**
     * Get User Devices (asynchronously). Retrieves the user's devices.
     *
     * @param userId User ID (required)
     * @param offset Offset for pagination. (optional)
     * @param count Desired count of items in the result set (optional)
     * @param includeProperties If false, only return the user's device types; if true,
     *        also return device types shared by other users. (optional)
     * @param owner Return owned and/or shared devices. Defaults to ALL. (optional)
     * @param includeShareInfo Include share info (optional)
     * @param dtid Return only devices of this device type; empty means all allowed types. (optional)
     * @param callback The callback to be executed when the API call finishes
     * @return The request call
     * @throws ApiException If the API call cannot be prepared, e.g. when serializing the request body
     */
    public com.squareup.okhttp.Call getUserDevicesAsync(String userId, Integer offset, Integer count, Boolean includeProperties, String owner, Boolean includeShareInfo, String dtid, final ApiCallback<DevicesEnvelope> callback) throws ApiException {
        // Bridge the generic progress listeners to the caller's callback, if one was given.
        ProgressResponseBody.ProgressListener progressListener = null;
        ProgressRequestBody.ProgressRequestListener progressRequestListener = null;
        if (callback != null) {
            progressListener = new ProgressResponseBody.ProgressListener() {
                @Override
                public void update(long bytesRead, long contentLength, boolean done) {
                    callback.onDownloadProgress(bytesRead, contentLength, done);
                }
            };
            progressRequestListener = new ProgressRequestBody.ProgressRequestListener() {
                @Override
                public void onRequestProgress(long bytesWritten, long contentLength, boolean done) {
                    callback.onUploadProgress(bytesWritten, contentLength, done);
                }
            };
        }
        // Validate arguments and build the HTTP call, then hand it to the client
        // for asynchronous execution; the callback receives the typed envelope.
        com.squareup.okhttp.Call call = getUserDevicesValidateBeforeCall(userId, offset, count, includeProperties, owner, includeShareInfo, dtid, progressListener, progressRequestListener);
        Type localVarReturnType = new TypeToken<DevicesEnvelope>() {
        }.getType();
        apiClient.executeAsync(call, localVarReturnType, callback);
        return call;
    }
}
public class ZoomerCompat { /** * Starts a zoom from 1.0 to ( 1.0 + endZoom ) . That is , to zoom from 100 % to 125 % , endZoom should by 0.25f . * @ see android . widget . Scroller # startScroll ( int , int , int , int ) */ public void startZoom ( float endZoom ) { } }
mStartRTC = SystemClock . elapsedRealtime ( ) ; mEndZoom = endZoom ; mFinished = false ; mCurrentZoom = 1f ;
public class AESHelper {
    /**
     * Reads a file fully into memory.
     *
     * @param filePath The file path.
     * @return a byte[] containing the complete file data.
     * @throws java.io.IOException if an error occurs reading the file or if the file does not exist.
     */
    public static byte[] readFile(String filePath) throws IOException {
        byte[] buffer = new byte[(int) new File(filePath).length()];
        BufferedInputStream f = null;
        try {
            f = new BufferedInputStream(new FileInputStream(filePath));
            // A single read() call is not guaranteed to fill the buffer; loop until
            // the whole file has been consumed (or EOF arrives early).
            int total = 0;
            while (total < buffer.length) {
                int read = f.read(buffer, total, buffer.length - total);
                if (read < 0) {
                    break; // file shrank after length() was sampled; return what was read
                }
                total += read;
            }
        } finally {
            if (f != null) {
                try {
                    f.close();
                } catch (final IOException ignored) {
                    // best-effort close; the data has already been read
                }
            }
        }
        return buffer;
    }
}
public class ObjectStreamClass {
    /**
     * Walks the class hierarchy of {@code cl} looking for a no-argument method named
     * {@code methodName}. A match is returned only when it is declared by {@code cl}
     * itself or is a non-private inherited declaration; private declarations higher
     * in the hierarchy are skipped and the search continues upward.
     *
     * @param cl the class whose hierarchy is searched
     * @param methodName the name of the no-arg method to locate (e.g. writeReplace)
     * @return the matching {@code Method}, made accessible, or {@code null} if none qualifies
     */
    static Method findMethod(Class<?> cl, String methodName) {
        for (Class<?> current = cl; current != null; current = current.getSuperclass()) {
            try {
                Method candidate = current.getDeclaredMethod(methodName, (Class[]) null);
                boolean visible = (current == cl)
                        || (candidate.getModifiers() & Modifier.PRIVATE) == 0;
                if (visible) {
                    candidate.setAccessible(true);
                    return candidate;
                }
            } catch (NoSuchMethodException nsm) {
                // not declared at this level; keep climbing the hierarchy
            }
        }
        return null;
    }
}
public class TupleUtils {
    /**
     * Returns a {@link Predicate} of {@link Tuple8} that wraps a predicate of the
     * component values of the tuple.
     *
     * @param predicate the component value predicate
     * @param <T1> the type of the first value
     * @param <T2> the type of the second value
     * @param <T3> the type of the third value
     * @param <T4> the type of the fourth value
     * @param <T5> the type of the fifth value
     * @param <T6> the type of the sixth value
     * @param <T7> the type of the seventh value
     * @param <T8> the type of the eighth value
     * @return the wrapper predicate
     */
    public static <T1, T2, T3, T4, T5, T6, T7, T8> Predicate<Tuple8<T1, T2, T3, T4, T5, T6, T7, T8>> predicate(Predicate8<T1, T2, T3, T4, T5, T6, T7, T8> predicate) {
        // Unpack the tuple's eight components and forward them to the 8-ary predicate.
        return tuple -> predicate.test(tuple.getT1(), tuple.getT2(), tuple.getT3(), tuple.getT4(), tuple.getT5(), tuple.getT6(), tuple.getT7(), tuple.getT8());
    }
}
public class MercatorProjection {
    /**
     * Calculates the pixel position of a geographic point relative to a tile's origin,
     * delegating to {@code getPixelRelative} with the tile's map size and origin.
     *
     * @param latLong the geographic position.
     * @param tile tile supplying the map size and the origin the result is relative to
     * @return the pixel position relative to the tile's origin
     */
    public static Point getPixelRelativeToTile(LatLong latLong, Tile tile) {
        return getPixelRelative(latLong, tile.mapSize, tile.getOrigin());
    }
}
public class MPP8Reader {
    /**
     * Extracts the task hyperlink attributes (display text, address, sub-address)
     * from a raw data block and stores them on the given task. A null block is a no-op.
     *
     * @param task task instance to populate
     * @param data hyperlink data block, may be null
     */
    private void processHyperlinkData(Task task, byte[] data) {
        if (data != null) {
            int offset = 12;
            String hyperlink;
            String address;
            String subaddress;
            // Each string is preceded by a further 12-byte header; the advance of
            // (length + 1) * 2 suggests null-terminated 2-byte (Unicode) characters —
            // NOTE(review): layout inferred from the offsets, confirm against the MPP8 format.
            offset += 12;
            hyperlink = MPPUtility.getUnicodeString(data, offset);
            offset += ((hyperlink.length() + 1) * 2);
            offset += 12;
            address = MPPUtility.getUnicodeString(data, offset);
            offset += ((address.length() + 1) * 2);
            offset += 12;
            subaddress = MPPUtility.getUnicodeString(data, offset);
            task.setHyperlink(hyperlink);
            task.setHyperlinkAddress(address);
            task.setHyperlinkSubAddress(subaddress);
        }
    }
}
public class JobTracker {
    /**
     * Start the JobTracker process. This is used only for debugging. As a rule,
     * JobTracker should be run as part of the DFS Namenode process.
     * Accepted invocations: no arguments (default configuration),
     * {@code -instance <0|1>} (start a specific configured instance), or
     * {@code -dumpConfiguration} (print the configuration and exit).
     */
    public static void main(String argv[]) throws IOException, InterruptedException {
        StringUtils.startupShutdownMessage(JobTracker.class, argv, LOG);
        try {
            if (argv.length == 0) {
                // No arguments: start with the default configuration and serve forever.
                JobTracker tracker = startTracker(new JobConf());
                tracker.offerService();
                return;
            }
            if ("-instance".equals(argv[0]) && argv.length == 2) {
                int instance = Integer.parseInt(argv[1]);
                // Only instances 0 and 1 are supported; anything else falls through to usage.
                if (instance == 0 || instance == 1) {
                    JobConf conf = new JobConf();
                    JobConf.overrideConfiguration(conf, instance);
                    JobTracker tracker = startTracker(conf);
                    tracker.offerService();
                    return;
                }
            }
            if ("-dumpConfiguration".equals(argv[0]) && argv.length == 1) {
                dumpConfiguration(new PrintWriter(System.out));
                return;
            }
            // Unrecognized arguments: print usage and exit with failure.
            System.out.println("usage: JobTracker [-dumpConfiguration]");
            System.out.println(" JobTracker [-instance <0|1>]");
            System.exit(-1);
        } catch (Throwable e) {
            LOG.fatal(StringUtils.stringifyException(e));
            System.exit(-1);
        }
    }
}
public class EvalCacheImpl { public void saveExprValue ( int id , Object value ) { } }
if ( tc . isEntryEnabled ( ) ) tc . entry ( this , cclass , "saveExprValue" , "id: " + new Integer ( id ) + ",value: " + value ) ; cacheTag [ id ] = generation ; cacheValue [ id ] = value ; if ( tc . isEntryEnabled ( ) ) tc . exit ( this , cclass , "saveExprValue" ) ;
public class Vector4d {
    /**
     * Add the component-wise multiplication of <code>a * b</code> to this vector.
     *
     * @param a the first multiplicand
     * @param b the second multiplicand
     * @return a vector holding the result
     */
    public Vector4d fma(Vector4dc a, Vector4dc b) {
        // Delegates to the destination-taking overload; thisOrNew() supplies the
        // destination vector (presumably this instance, or a fresh one for read-only
        // views — confirm against the thisOrNew() implementation).
        return fma(a, b, thisOrNew());
    }
}
public class Report {
    /**
     * Add some application info on the report.
     *
     * @param key the key of app info to add
     * @param value the value of app info to add
     * @return the modified report
     * @deprecated use {@link #addToTab(String, String, Object)} instead
     */
    @Deprecated
    public Report setAppInfo(String key, Object value) {
        // Writes directly into the diagnostics "app" section; retained only for
        // backward compatibility with pre-addToTab callers.
        diagnostics.app.put(key, value);
        return this;
    }
}
public class Condition {
    /**
     * Returns a new condition based on the disjunction of the current condition
     * and the given condition.
     * NOTE(review): the identifier {@code _} is reserved since Java 9, so this
     * source compiles only on Java 8 or earlier.
     *
     * @param that given condition.
     * @return a new {@code OrCondition} combining this condition with {@code that}
     */
    public OrCondition or(Condition that) {
        // Non-atomic operands are wrapped via the _() helper before combining;
        // atomic conditions are used as-is.
        return new OrCondition(this, that.atomic() ? that : _(that));
    }
}
public class GCHSTImpl {
    /**
     * Returns whether the given feature currently differs from its default value.
     * EMF-generated: each case compares the field against its *_EDEFAULT constant,
     * with an explicit null-default branch.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public boolean eIsSet(int featureID) {
        switch (featureID) {
            case AfplibPackage.GCHST__XPOS:
                return XPOS_EDEFAULT == null ? xpos != null : !XPOS_EDEFAULT.equals(xpos);
            case AfplibPackage.GCHST__YPOS:
                return YPOS_EDEFAULT == null ? ypos != null : !YPOS_EDEFAULT.equals(ypos);
            case AfplibPackage.GCHST__CP:
                return CP_EDEFAULT == null ? cp != null : !CP_EDEFAULT.equals(cp);
        }
        // Unknown features are handled by the superclass.
        return super.eIsSet(featureID);
    }
}
public class SentryClient { /** * Set the tags to extract from the MDC system and set on { @ link io . sentry . event . Event } s , where applicable . * @ param mdcTags Set of tags to extract from the MDC system */ public void setMdcTags ( Set < String > mdcTags ) { } }
if ( mdcTags == null ) { this . mdcTags = new HashSet < > ( ) ; } else { this . mdcTags = mdcTags ; }
public class DocumentVersionInfoMarshaller {
    /**
     * Marshalls each field of the given {@code DocumentVersionInfo} to the protocol
     * marshaller using the pre-declared field bindings.
     *
     * @param documentVersionInfo the model object to marshall; must not be null
     * @param protocolMarshaller the target marshaller
     * @throws SdkClientException if the argument is null or any field fails to marshall
     */
    public void marshall(DocumentVersionInfo documentVersionInfo, ProtocolMarshaller protocolMarshaller) {
        if (documentVersionInfo == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(documentVersionInfo.getName(), NAME_BINDING);
            protocolMarshaller.marshall(documentVersionInfo.getDocumentVersion(), DOCUMENTVERSION_BINDING);
            protocolMarshaller.marshall(documentVersionInfo.getVersionName(), VERSIONNAME_BINDING);
            protocolMarshaller.marshall(documentVersionInfo.getCreatedDate(), CREATEDDATE_BINDING);
            protocolMarshaller.marshall(documentVersionInfo.getIsDefaultVersion(), ISDEFAULTVERSION_BINDING);
            protocolMarshaller.marshall(documentVersionInfo.getDocumentFormat(), DOCUMENTFORMAT_BINDING);
            protocolMarshaller.marshall(documentVersionInfo.getStatus(), STATUS_BINDING);
            protocolMarshaller.marshall(documentVersionInfo.getStatusInformation(), STATUSINFORMATION_BINDING);
        } catch (Exception e) {
            // Wrap any marshalling failure, preserving the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class RespokeDirectConnection {
    /**
     * Establish a new direct connection instance with the peer connection for the call.
     * This is used internally to the SDK and should not be called directly by your
     * client application. Silently does nothing when the call reference is absent or
     * its target has been collected.
     */
    public void createDataChannel() {
        // callReference exposes get(), so it is presumably a weak/soft reference to
        // the owning call — confirm; either it or its target may be gone once the
        // call has ended, making this a no-op.
        if (null != callReference) {
            RespokeCall call = callReference.get();
            if (null != call) {
                PeerConnection peerConnection = call.getPeerConnection();
                dataChannel = peerConnection.createDataChannel("respokeDataChannel", new DataChannel.Init());
                dataChannel.registerObserver(this);
            }
        }
    }
}
public class Compiler {
    /**
     * Compile a list of compilation units. This method can be called multiple
     * times, but it will not compile compilation units that have already been
     * compiled.
     *
     * @param names an array of fully qualified template names
     * @return The names of all the sources compiled by this compiler
     * @exception IOException if reading or writing a compilation unit fails
     */
    public String[] compile(String[] names) throws IOException {
        if (!TemplateRepository.isInitialized()) {
            // No repository to keep consistent; just compile the requested units.
            return compile0(names);
        }
        String[] compNames = compile0(names);
        ArrayList<String> compList = new ArrayList<String>(Arrays.asList(compNames));
        // Also recompile any templates that call the ones just compiled, so their
        // generated code stays consistent.
        TemplateRepository rep = TemplateRepository.getInstance();
        String[] callers = rep.getCallersNeedingRecompile(compNames, this);
        if (callers.length > 0) compList.addAll(Arrays.asList(compile0(callers)));
        String[] compiled = compList.toArray(new String[compList.size()]);
        // JoshY - There's a VM bug in JVM 1.4.2 that can cause the repository
        // update to throw a NullPointerException when it shouldn't. There's a
        // workaround in place and we also put a catch here, to allow the
        // TeaServlet init to finish just in case
        try {
            rep.update(compiled);
        } catch (Exception e) {
            System.err.println("Unable to update repository");
            e.printStackTrace(System.err);
        }
        return compiled;
    }
}
public class UnusedImports { /** * Collects the details of imports . * @ param aAST node containing the import details */ private void processImport ( DetailAST aAST ) { } }
final FullIdent name = FullIdent . createFullIdentBelow ( aAST ) ; if ( ( name != null ) && ! name . getText ( ) . endsWith ( ".*" ) ) { imports . add ( name ) ; }
public class AbstractJpaStorage { /** * Get object of type T * @ param id identity key * @ param type class of type T * @ return Instance of type T * @ throws StorageException if a storage problem occurs while storing a bean */ public < T > T get ( Long id , Class < T > type ) throws StorageException { } }
T rval ; EntityManager entityManager = getActiveEntityManager ( ) ; try { rval = entityManager . find ( type , id ) ; } catch ( Throwable t ) { logger . error ( t . getMessage ( ) , t ) ; throw new StorageException ( t ) ; } return rval ;
public class BulkSubmissionPublisher { /** * Flush . */ public void flush ( ) { } }
JMLog . debug ( log , "flush" , this . dataList . size ( ) ) ; synchronized ( this . dataList ) { if ( this . dataList . size ( ) > 0 ) { this . listSubmissionPublisher . submit ( this . dataList ) ; this . dataList = new ArrayList < > ( ) ; } }
public class SecurityActions {
    /**
     * Get the input stream for a resource in the context class loader.
     * When a SecurityManager is installed, the lookup runs inside a privileged
     * action so it succeeds regardless of the immediate caller's permissions.
     *
     * @param name The name of the resource
     * @return The input stream (null if the resource cannot be found)
     */
    static InputStream getResourceAsStream(final String name) {
        if (System.getSecurityManager() == null)
            return Thread.currentThread().getContextClassLoader().getResourceAsStream(name);
        return AccessController.doPrivileged(new PrivilegedAction<InputStream>() {
            public InputStream run() {
                return Thread.currentThread().getContextClassLoader().getResourceAsStream(name);
            }
        });
    }
}
public class Canon {
    /**
     * Generate the initial invariants for each atom in the {@code container}.
     * The labels use the invariants described in {@cdk.cite WEI89}.
     * The bits in the low 32-bits are: {@code 00000xxxxXXXXeeeeescchhhh} where:
     * <ul>
     * <li>0: padding</li>
     * <li>x: number of connections</li>
     * <li>X: number of non-hydrogens bonds</li>
     * <li>e: atomic number</li>
     * <li>s: sign of charge</li>
     * <li>c: absolute charge</li>
     * <li>h: number of attached hydrogens</li>
     * </ul>
     * <b>Important: These invariants are <i>basic</i> and there are known examples
     * they don't distinguish (e.g. {@code [O]C=O}). Including a better initial
     * partition is more expensive.</b>
     *
     * @param container an atom container to generate labels for
     * @param graph graph representation (adjacency list)
     * @return initial invariants
     * @throws NullPointerException an atom had unset atomic number, hydrogen count or formal charge
     */
    public static long[] basicInvariants(IAtomContainer container, int[][] graph) {
        long[] labels = new long[graph.length];
        for (int v = 0; v < graph.length; v++) {
            IAtom atom = container.getAtom(v);
            int deg = graph[v].length;
            int impH = implH(atom);
            int expH = 0;
            int elem = atomicNumber(atom);
            int chg = charge(atom);
            // count non-suppressed (explicit) hydrogens among neighbours
            for (int w : graph[v])
                if (atomicNumber(container.getAtom(w)) == 1) expH++;
            // Pack the fields most-significant first; note '+'/'-' bind tighter
            // than '&', so e.g. "deg + impH & 0xf" is "(deg + impH) & 0xf".
            long label = 0;
            // connectivity (first in)
            label |= deg + impH & 0xf;
            label <<= 4;
            // connectivity (heavy) <= 15 (4 bits)
            label |= deg - expH & 0xf;
            label <<= 7;
            // atomic number <= 127 (7 bits)
            label |= elem & 0x7f;
            label <<= 1;
            // charge sign == 1 (1 bit): arithmetic shift brings down the sign bit
            label |= chg >> 31 & 0x1;
            label <<= 2;
            // charge <= 3 (2 bits)
            label |= Math.abs(chg) & 0x3;
            label <<= 4;
            // hydrogen count <= 15 (4 bits)
            label |= impH + expH & 0xf;
            labels[v] = label;
        }
        return labels;
    }
}
public class Expressions { /** * Creates an IsLessThan expression from the given expressions . * @ param left The left expression . * @ param right The right expression . * @ return A new is less than binary expression . */ public static IsLessThan isLessThan ( ComparableExpression < Number > left , ComparableExpression < Number > right ) { } }
return new IsLessThan ( left , right ) ;
public class ModelConstraints {
    /**
     * Ensures that the primary/foreign keys referenced by references/collections are
     * present in the target type even if generate-table-info="false", by evaluating
     * the subtypes of the target type.
     *
     * @param modelDef The model
     * @param checkLevel The current check level (unused here — this constraint is always checked)
     * @throws ConstraintException If there is an error with the keys of the subtypes
     *         or there aren't any subtypes
     */
    private void ensureReferencedKeys(ModelDef modelDef, String checkLevel) throws ConstraintException {
        ClassDescriptorDef classDef;
        CollectionDescriptorDef collDef;
        ReferenceDescriptorDef refDef;
        for (Iterator it = modelDef.getClasses(); it.hasNext();) {
            classDef = (ClassDescriptorDef) it.next();
            // References need the referenced class' primary keys to be present.
            for (Iterator refIt = classDef.getReferences(); refIt.hasNext();) {
                refDef = (ReferenceDescriptorDef) refIt.next();
                if (!refDef.getBooleanProperty(PropertyHelper.OJB_PROPERTY_IGNORE, false)) {
                    ensureReferencedPKs(modelDef, refDef);
                }
            }
            for (Iterator collIt = classDef.getCollections(); collIt.hasNext();) {
                collDef = (CollectionDescriptorDef) collIt.next();
                if (!collDef.getBooleanProperty(PropertyHelper.OJB_PROPERTY_IGNORE, false)) {
                    // Collections with an indirection table reference PKs;
                    // plain collections reference FKs in the element class.
                    if (collDef.hasProperty(PropertyHelper.OJB_PROPERTY_INDIRECTION_TABLE)) {
                        ensureReferencedPKs(modelDef, collDef);
                    } else {
                        ensureReferencedFKs(modelDef, collDef);
                    }
                }
            }
        }
    }
}
public class StaleSecurityGroup {
    /**
     * Information about the stale inbound rules in the security group.
     * A null argument clears the stored collection; otherwise the elements are
     * copied into an internal list, so later mutation of the caller's collection
     * does not affect this object.
     *
     * @param staleIpPermissions Information about the stale inbound rules in the security group.
     */
    public void setStaleIpPermissions(java.util.Collection<StaleIpPermission> staleIpPermissions) {
        if (staleIpPermissions == null) {
            this.staleIpPermissions = null;
            return;
        }
        this.staleIpPermissions = new com.amazonaws.internal.SdkInternalList<StaleIpPermission>(staleIpPermissions);
    }
}
public class AsciiEncoding { /** * Parse an ASCII encoded int from a { @ link CharSequence } . * @ param cs to parse . * @ param index at which the number begins . * @ param length of the encoded number in characters . * @ return the parsed value . */ public static int parseIntAscii ( final CharSequence cs , final int index , final int length ) { } }
final int endExclusive = index + length ; final int first = cs . charAt ( index ) ; int i = index ; if ( first == MINUS_SIGN ) { i ++ ; } int tally = 0 ; for ( ; i < endExclusive ; i ++ ) { tally = ( tally * 10 ) + AsciiEncoding . getDigit ( i , cs . charAt ( i ) ) ; } if ( first == MINUS_SIGN ) { tally = - tally ; } return tally ;
public class MultiProcessCluster {
    /**
     * Formats the cluster journal. In embedded deploy mode each master's journal
     * directory is wiped and recreated directly; otherwise the standard format tool
     * is run with this cluster's properties temporarily applied to the global
     * server configuration.
     */
    public synchronized void formatJournal() throws IOException {
        if (mDeployMode == DeployMode.EMBEDDED) {
            // Embedded journals live on local disk, one directory per master.
            for (Master master : mMasters) {
                File journalDir = new File(master.getConf().get(PropertyKey.MASTER_JOURNAL_FOLDER));
                FileUtils.deleteDirectory(journalDir);
                journalDir.mkdirs();
            }
            return;
        }
        // The ConfigurationRule resource applies mProperties for the duration of the
        // try block and restores the previous configuration on close.
        try (Closeable c = new ConfigurationRule(mProperties, ServerConfiguration.global()).toResource()) {
            Format.format(Format.Mode.MASTER, ServerConfiguration.global());
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }
}
public class DifferenceEngine {
    /**
     * First point of call: if nodes are comparable it compares node values then
     * recurses to compare node children. Document nodes recurse directly into
     * their document elements; all other node types recurse via the child-node
     * comparison with the location trackers indented around the descent.
     *
     * @param control the control (expected) node
     * @param test the test (actual) node
     * @param listener receives detected differences
     * @param elementQualifier decides which elements are comparable
     * @throws DifferenceFoundException when a halting difference is found
     */
    protected void compareNode(Node control, Node test, DifferenceListener listener, ElementQualifier elementQualifier) throws DifferenceFoundException {
        boolean comparable = compareNodeBasics(control, test, listener);
        boolean isDocumentNode = false;
        if (comparable) {
            // Dispatch the value comparison on the control node's type.
            switch (control.getNodeType()) {
                case Node.ELEMENT_NODE:
                    compareElement((Element) control, (Element) test, listener);
                    break;
                case Node.CDATA_SECTION_NODE:
                case Node.TEXT_NODE:
                    compareText((CharacterData) control, (CharacterData) test, listener);
                    break;
                case Node.COMMENT_NODE:
                    compareComment((Comment) control, (Comment) test, listener);
                    break;
                case Node.DOCUMENT_TYPE_NODE:
                    compareDocumentType((DocumentType) control, (DocumentType) test, listener);
                    break;
                case Node.PROCESSING_INSTRUCTION_NODE:
                    compareProcessingInstruction((ProcessingInstruction) control, (ProcessingInstruction) test, listener);
                    break;
                case Node.DOCUMENT_NODE:
                    isDocumentNode = true;
                    compareDocument((Document) control, (Document) test, listener, elementQualifier);
                    break;
                default:
                    listener.skippedComparison(control, test);
            }
        }
        compareHasChildNodes(control, test, listener);
        if (isDocumentNode) {
            // Documents recurse straight into their root elements (when both exist).
            Element controlElement = ((Document) control).getDocumentElement();
            Element testElement = ((Document) test).getDocumentElement();
            if (controlElement != null && testElement != null) {
                compareNode(controlElement, testElement, listener, elementQualifier);
            }
        } else {
            // Indent/outdent the location trackers around the child comparison so
            // reported XPaths reflect the nesting level.
            controlTracker.indent();
            testTracker.indent();
            compareNodeChildren(control, test, listener, elementQualifier);
            controlTracker.outdent();
            testTracker.outdent();
        }
    }
}
public class TagLoop {
    /**
     * Emits the bytecode for an index loop of the form from/to/step
     * (e.g. {@code <cfloop from=... to=... step=...>}).
     *
     * All three bounds are evaluated as doubles. The generated code:
     * evaluates {@code from}, {@code to} and {@code step} (defaulting
     * {@code step} to 1 when the attribute is absent), records the loop
     * direction as {@code step > 0}, skips the loop entirely when
     * {@code step == 0}, resolves the index/item variable to a
     * VariableReference, seeds it with {@code from}, and then emits the
     * init/update/condition/body sections of the for-loop via
     * {@link ForDoubleVisitor}. The continue-condition depends on the
     * direction flag: {@code i <= to} for a positive step, {@code i >= to}
     * for a negative one.
     *
     * @param bc bytecode context whose adapter receives the instructions
     * @throws TransformerException if writing an attribute expression fails
     */
    private void writeOutTypeFromTo(BytecodeContext bc) throws TransformerException {
        ForDoubleVisitor forDoubleVisitor = new ForDoubleVisitor();
        loopVisitor = forDoubleVisitor;
        GeneratorAdapter adapter = bc.getAdapter();

        // double from = @from;
        int from = adapter.newLocal(Types.DOUBLE_VALUE);
        ExpressionUtil.writeOutSilent(getAttribute("from").getValue(), bc, Expression.MODE_VALUE);
        adapter.storeLocal(from);

        // double to = @to;
        int to = adapter.newLocal(Types.DOUBLE_VALUE);
        ExpressionUtil.writeOutSilent(getAttribute("to").getValue(), bc, Expression.MODE_VALUE);
        adapter.storeLocal(to);

        // double step = @step, defaulting to 1 when the attribute is missing.
        int step = adapter.newLocal(Types.DOUBLE_VALUE);
        Attribute attrStep = getAttribute("step");
        if (attrStep != null) {
            ExpressionUtil.writeOutSilent(attrStep.getValue(), bc, Expression.MODE_VALUE);
        }
        else {
            adapter.push(1D);
        }
        adapter.storeLocal(step);

        // boolean dirPlus = (step > 0); — remembers the loop direction.
        int dirPlus = adapter.newLocal(Types.BOOLEAN_VALUE);
        DecisionDoubleVisitor div = new DecisionDoubleVisitor();
        div.visitBegin();
        adapter.loadLocal(step);
        div.visitGT();
        adapter.push(0D);
        div.visitEnd(bc);
        adapter.storeLocal(dirPlus);

        // if (step != 0) { ... } — a zero step would never terminate, so the
        // whole loop is skipped in that case (jump to ifEnd).
        div = new DecisionDoubleVisitor();
        div.visitBegin();
        adapter.loadLocal(step);
        div.visitNEQ();
        adapter.push(0D);
        div.visitEnd(bc);
        Label ifEnd = new Label();
        adapter.ifZCmp(Opcodes.IFEQ, ifEnd);

        // VariableReference index = VariableInterpreter.getVariableReference(pc, @index);
        // Falls back to the "item" attribute when "index" is absent.
        int index = adapter.newLocal(Types.VARIABLE_REFERENCE);
        adapter.loadArg(0);
        Attribute attr = getAttribute("index");
        if (attr == null) attr = getAttribute("item");
        ExpressionUtil.writeOutSilent(attr.getValue(), bc, Expression.MODE_REF);
        adapter.invokeStatic(Types.VARIABLE_INTERPRETER, GET_VARIABLE_REFERENCE);
        adapter.storeLocal(index);

        // index.set(from); — publish the starting value before the first iteration.
        adapter.loadLocal(index);
        adapter.loadLocal(from);
        adapter.invokeVirtual(Types.VARIABLE_REFERENCE, SET_DOUBLE);

        // ---- for-loop skeleton ----
        // init: i = from; then jump straight to the condition check.
        adapter.visitLabel(forDoubleVisitor.beforeInit);
        forDoubleVisitor.forInit(adapter, from, true);
        adapter.goTo(forDoubleVisitor.beforeExpr);

        // update: i += step, and mirror the new value into the index variable.
        // DUP2 duplicates the double so one copy goes to the local slot (DSTORE)
        // and the other feeds VariableReference.set(double).
        adapter.visitLabel(forDoubleVisitor.beforeUpdate);
        adapter.loadLocal(index);
        adapter.visitVarInsn(Opcodes.DLOAD, forDoubleVisitor.i);
        adapter.loadLocal(step);
        adapter.visitInsn(Opcodes.DADD);
        adapter.visitInsn(Opcodes.DUP2);
        adapter.visitVarInsn(Opcodes.DSTORE, forDoubleVisitor.i);
        adapter.invokeVirtual(Types.VARIABLE_REFERENCE, SET_DOUBLE);

        // condition: dirPlus ? (i <= to) : (i >= to)
        adapter.visitLabel(forDoubleVisitor.beforeExpr);
        int i = forDoubleVisitor.i;
        adapter.loadLocal(dirPlus);
        Label l1 = new Label();
        adapter.visitJumpInsn(Opcodes.IFEQ, l1);
        // positive direction: continue while i <= to
        div = new DecisionDoubleVisitor();
        div.visitBegin();
        adapter.visitVarInsn(Opcodes.DLOAD, i);
        div.visitLTE();
        adapter.loadLocal(to);
        div.visitEnd(bc);
        Label l2 = new Label();
        adapter.visitJumpInsn(Opcodes.GOTO, l2);
        adapter.visitLabel(l1);
        // negative direction: continue while i >= to
        div = new DecisionDoubleVisitor();
        div.visitBegin();
        adapter.visitVarInsn(Opcodes.DLOAD, i);
        div.visitGTE();
        adapter.loadLocal(to);
        div.visitEnd(bc);
        adapter.visitLabel(l2);
        forDoubleVisitor.visitAfterExpressionBeginBody(adapter);

        // body
        getBody().writeOut(bc);
        forDoubleVisitor.visitEndBody(bc, getEnd());

        // end of the step != 0 guard
        adapter.visitLabel(ifEnd);
    }
}
public class SharedBufferAccessor { /** * Removes the { @ code SharedBufferNode } , when the ref is decreased to zero , and also * decrease the ref of the edge on this node . * @ param node id of the entry * @ param sharedBufferNode the node body to be removed * @ throws Exception Thrown if the system cannot access the state . */ private void removeNode ( NodeId node , SharedBufferNode sharedBufferNode ) throws Exception { } }
sharedBuffer . removeEntry ( node ) ; EventId eventId = node . getEventId ( ) ; releaseEvent ( eventId ) ; for ( SharedBufferEdge sharedBufferEdge : sharedBufferNode . getEdges ( ) ) { releaseNode ( sharedBufferEdge . getTarget ( ) ) ; }
public class CodecSearchTree {
    /**
     * Searches the mtas tree for hits at a single position.
     *
     * Convenience overload: delegates to the range variant with the start and
     * end position both set to {@code position}.
     *
     * @param position the (single) position to search for
     * @param in the index input to read the tree from
     * @param ref reference to the tree root
     * @param objectRefApproxOffset the object ref approx offset
     * @return the list of hits found at {@code position}
     * @throws IOException if reading from the index input fails
     */
    public static ArrayList<MtasTreeHit<?>> searchMtasTree(int position, IndexInput in, long ref,
            long objectRefApproxOffset) throws IOException {
        // Single-position lookup == range lookup with start == end.
        return searchMtasTree(position, position, in, ref, objectRefApproxOffset);
    }
}
public class FileLogOutput {
    /**
     * Calculate the padding space currently required and do reservation if required.
     * The amount of padding space will either grow (eg. when we first startup) or shrink
     * (eg. when the logbuffer increases in size); also, when the log file size changes!
     * The only time it is expected that reserve() could throw an Exception (because there
     * is not enough space) is when we are starting (the log is so small there is not
     * enough space for padding minimum). Otherwise all other calls to this method should
     * be ok:
     * 1) increase log buffer - this decreases the amount of padding needed so no reserve
     * 2) shrinking the file  - this decreases the amount of padding needed so no reserve
     * 3) growing the file    - this increases the amount of padding needed, but we will
     *    have just made a load more space available so the reserve should succeed.
     *
     * @throws ObjectManagerException if reserve() throws an Exception (LogFileFull)
     */
    private void calculatePaddingSpaceTarget() throws ObjectManagerException {
        if (trace.isEntryEnabled())
            trace.entry(this, cclass, "calculatePaddingSpaceTarget",
                    new Object[] { Long.valueOf(PADDING_SPACE_TARGET) });

        long oldTarget;
        synchronized (paddingSpaceLock) {
            oldTarget = PADDING_SPACE_TARGET;
            // We could take out sector bytes as they are already reserved, but it's
            // hardly worth it.
            // numberOfBuffersPerLog is the maximum number of times we might flush in
            // the middle of adding a large log record, i.e. the maximum number of
            // pages we might have to pad (probably a gross over-estimation, but at
            // least it is safe).
            long numberOfBuffersPerLog = (fileLogHeader.fileSize - FileLogHeader.headerLength)
                    / (logBuffer.numberOfPages * pageSize);
            // Never drop below the configured minimum number of padding pages.
            PADDING_SPACE_TARGET = Math.max(numberOfBuffersPerLog * pageSize,
                    PADDING_SPACE_MINIMUM * pageSize);
        }

        // NOTE(review): PADDING_SPACE_TARGET is re-read here outside
        // paddingSpaceLock; presumably no concurrent caller can change it between
        // the block above and this read — confirm the caller's locking contract.
        long paddingSpaceDelta = PADDING_SPACE_TARGET - oldTarget;
        if (paddingSpaceDelta > 0) {
            // Padding space required has grown: reserve the extra amount and take
            // the same amount back out of the padding reserve accounting.
            reserve(paddingSpaceDelta);
            paddingReserveLogSpace(-paddingSpaceDelta);
        }
        else {
            // Padding space required has shrunk.
            // No need to do anything, it will be given back naturally.
        }

        if (trace.isEntryEnabled())
            trace.exit(this, cclass, "calculatePaddingSpaceTarget",
                    new Object[] { Long.valueOf(PADDING_SPACE_TARGET) });
    }
}
public class LoadBalancingRxClient { /** * Look up the client associated with this Server . * @ param host * @ param port * @ return */ protected T getOrCreateRxClient ( Server server ) { } }
T client = rxClientCache . get ( server ) ; if ( client != null ) { return client ; } else { client = createRxClient ( server ) ; client . subscribe ( listener ) ; client . subscribe ( eventSubject ) ; T old = rxClientCache . putIfAbsent ( server , client ) ; if ( old != null ) { return old ; } else { return client ; } }