signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class CommerceNotificationQueueEntryPersistenceImpl { /** * Returns the first commerce notification queue entry in the ordered set where sentDate & lt ; & # 63 ; . * @ param sentDate the sent date * @ param orderByComparator the comparator to order the set by ( optionally < code > null < / code > ) * @ return the first matching commerce notification queue entry * @ throws NoSuchNotificationQueueEntryException if a matching commerce notification queue entry could not be found */ @ Override public CommerceNotificationQueueEntry findByLtS_First ( Date sentDate , OrderByComparator < CommerceNotificationQueueEntry > orderByComparator ) throws NoSuchNotificationQueueEntryException { } }
CommerceNotificationQueueEntry commerceNotificationQueueEntry = fetchByLtS_First ( sentDate , orderByComparator ) ; if ( commerceNotificationQueueEntry != null ) { return commerceNotificationQueueEntry ; } StringBundler msg = new StringBundler ( 4 ) ; msg . append ( _NO_SUCH_ENTITY_WITH_KEY ) ; msg . append ( "sentDate=" ) ; msg . append ( sentDate ) ; msg . append ( "}" ) ; throw new NoSuchNotificationQueueEntryException ( msg . toString ( ) ) ;
public class AlignmentTools {

    /**
     * Fundamentally, an alignment is just a list of aligned residues in each
     * protein. This method converts two lists of ResidueNumbers into an
     * AFPChain.
     * <p>
     * Parameters are filled with defaults (often null) or sometimes calculated.
     * <p>
     * For a way to modify the alignment of an existing AFPChain, see
     * {@link AlignmentTools#replaceOptAln(AFPChain, Atom[], Atom[], Map)}
     *
     * @param ca1 CA atoms of the first protein
     * @param ca2 CA atoms of the second protein
     * @param aligned1 A list of aligned residues from the first protein
     * @param aligned2 A list of aligned residues from the second protein.
     *        Must be the same length as aligned1.
     * @return An AFPChain representing the alignment. Many properties may be
     *         null or another default.
     * @throws StructureException if an error occured during superposition
     * @throws IllegalArgumentException if aligned1 and aligned2 have different lengths
     * @see AlignmentTools#replaceOptAln(AFPChain, Atom[], Atom[], Map)
     */
    public static AFPChain createAFPChain(Atom[] ca1, Atom[] ca2,
            ResidueNumber[] aligned1, ResidueNumber[] aligned2) throws StructureException {
        // input validation
        int alnLen = aligned1.length;
        if (alnLen != aligned2.length) {
            throw new IllegalArgumentException("Alignment lengths are not equal");
        }

        AFPChain a = new AFPChain(AFPChain.UNKNOWN_ALGORITHM);
        try {
            // Structure names are optional metadata; derive them from the atoms when possible.
            a.setName1(ca1[0].getGroup().getChain().getStructure().getName());
            if (ca2[0].getGroup().getChain().getStructure() != null) {
                // common case for cloned ca2
                a.setName2(ca2[0].getGroup().getChain().getStructure().getName());
            }
        } catch (Exception e) {
            // One of the structures wasn't fully created. Ignore
        }
        a.setBlockNum(1);
        a.setCa1Length(ca1.length);
        a.setCa2Length(ca2.length);

        a.setOptLength(alnLen);
        a.setOptLen(new int[] { alnLen });

        // One rotation matrix and shift vector per block (a single block here).
        Matrix[] ms = new Matrix[a.getBlockNum()];
        a.setBlockRotationMatrix(ms);
        Atom[] blockShiftVector = new Atom[a.getBlockNum()];
        a.setBlockShiftVector(blockShiftVector);

        // pdbAln is indexed [block][protein (0/1)][position]; entries are "chain:residue".
        String[][][] pdbAln = new String[1][2][alnLen];
        for (int i = 0; i < alnLen; i++) {
            pdbAln[0][0][i] = aligned1[i].getChainName() + ":" + aligned1[i];
            pdbAln[0][1][i] = aligned2[i].getChainName() + ":" + aligned2[i];
        }

        a.setPdbAln(pdbAln);

        // convert pdbAln to optAln, and fill in some other basic parameters
        AFPChainXMLParser.rebuildAFPChain(a, ca1, ca2);

        return a;

        // Currently a single block. Split into several blocks by sequence if needed
        // return AlignmentTools.splitBlocksByTopology(a, ca1, ca2);
    }
}
public class InventoryAggregatorMarshaller { /** * Marshall the given parameter object . */ public void marshall ( InventoryAggregator inventoryAggregator , ProtocolMarshaller protocolMarshaller ) { } }
if ( inventoryAggregator == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( inventoryAggregator . getExpression ( ) , EXPRESSION_BINDING ) ; protocolMarshaller . marshall ( inventoryAggregator . getAggregators ( ) , AGGREGATORS_BINDING ) ; protocolMarshaller . marshall ( inventoryAggregator . getGroups ( ) , GROUPS_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class JvmMetricPoller { /** * { @ inheritDoc } */ @ Override public final List < Metric > poll ( MetricFilter filter , boolean reset ) { } }
long now = System . currentTimeMillis ( ) ; MetricList metrics = new MetricList ( filter ) ; addClassLoadingMetrics ( now , metrics ) ; addCompilationMetrics ( now , metrics ) ; addGarbageCollectorMetrics ( now , metrics ) ; addMemoryPoolMetrics ( now , metrics ) ; addOperatingSystemMetrics ( now , metrics ) ; addThreadMetrics ( now , metrics ) ; return metrics . getList ( ) ;
public class Matrix4x3d { /** * / * ( non - Javadoc ) * @ see org . joml . Matrix4x3dc # shadow ( double , double , double , double , org . joml . Matrix4x3dc , org . joml . Matrix4x3d ) */ public Matrix4x3d shadow ( double lightX , double lightY , double lightZ , double lightW , Matrix4x3dc planeTransform , Matrix4x3d dest ) { } }
// compute plane equation by transforming ( y = 0) double a = planeTransform . m10 ( ) ; double b = planeTransform . m11 ( ) ; double c = planeTransform . m12 ( ) ; double d = - a * planeTransform . m30 ( ) - b * planeTransform . m31 ( ) - c * planeTransform . m32 ( ) ; return shadow ( lightX , lightY , lightZ , lightW , a , b , c , d , dest ) ;
public class LeaderRole { /** * Ensures the local server is not the leader . */ private void stepDown ( ) { } }
if ( raft . getLeader ( ) != null && raft . getLeader ( ) . equals ( raft . getCluster ( ) . getMember ( ) ) ) { raft . setLeader ( null ) ; }
public class RecommendationsInner { /** * Disable all recommendations for an app . * Disable all recommendations for an app . * @ param resourceGroupName Name of the resource group to which the resource belongs . * @ param siteName Name of the app . * @ param serviceCallback the async ServiceCallback to handle successful and failed responses . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the { @ link ServiceFuture } object */ public ServiceFuture < Void > disableAllForWebAppAsync ( String resourceGroupName , String siteName , final ServiceCallback < Void > serviceCallback ) { } }
return ServiceFuture . fromResponse ( disableAllForWebAppWithServiceResponseAsync ( resourceGroupName , siteName ) , serviceCallback ) ;
public class ArgumentAttr { /** * Main entry point for attributing an argument with given tree and attribution environment . */ Type attribArg ( JCTree tree , Env < AttrContext > env ) { } }
Env < AttrContext > prevEnv = this . env ; try { this . env = env ; tree . accept ( this ) ; return result ; } finally { this . env = prevEnv ; }
public class JoynrRuntimeImpl { /** * Registers a provider in the joynr framework * @ param domain * The domain the provider should be registered for . Has to be identical at the client to be able to find * the provider . * @ param provider * Instance of the provider implementation ( has to extend a generated . . . AbstractProvider ) . * @ param providerQos * the provider ' s quality of service settings * @ param awaitGlobalRegistration * If true , wait for global registration to complete or timeout , if required . * @ return Returns a Future which can be used to check the registration status . */ @ Override public Future < Void > registerProvider ( String domain , Object provider , ProviderQos providerQos , boolean awaitGlobalRegistration ) { } }
JoynrInterface joynrInterfaceAnnotatation = AnnotationUtil . getAnnotation ( provider . getClass ( ) , JoynrInterface . class ) ; if ( joynrInterfaceAnnotatation == null ) { throw new IllegalArgumentException ( "The provider object must have a JoynrInterface annotation" ) ; } Class interfaceClass = joynrInterfaceAnnotatation . provides ( ) ; return registerProvider ( domain , provider , providerQos , awaitGlobalRegistration , interfaceClass ) ;
public class RequestUtils { /** * Checks if the requests content - type contains application / json * @ param exchange The Undertow HttpServerExchange * @ return True if the request content - type contains application / json , false otherwise */ public static boolean isJsonRequest ( HttpServerExchange exchange ) { } }
Objects . requireNonNull ( exchange , Required . HTTP_SERVER_EXCHANGE . toString ( ) ) ; final HeaderMap headerMap = exchange . getRequestHeaders ( ) ; return headerMap != null && headerMap . get ( Header . CONTENT_TYPE . toHttpString ( ) ) != null && headerMap . get ( Header . CONTENT_TYPE . toHttpString ( ) ) . element ( ) . toLowerCase ( Locale . ENGLISH ) . contains ( MediaType . JSON_UTF_8 . withoutParameters ( ) . toString ( ) ) ;
public class CmsDriverManager { /** * Returns a list of all template resources which must be processed during a static export . < p > * @ param dbc the current database context * @ param parameterResources flag for reading resources with parameters ( 1 ) or without ( 0) * @ param timestamp for reading the data from the db * @ return a list of template resources as < code > { @ link String } < / code > objects * @ throws CmsException if something goes wrong */ public List < String > readStaticExportResources ( CmsDbContext dbc , int parameterResources , long timestamp ) throws CmsException { } }
return getProjectDriver ( dbc ) . readStaticExportResources ( dbc , parameterResources , timestamp ) ;
public class AsyncTableEntryReader { /** * Creates a new { @ link AsyncTableEntryReader } that can be used to read a { @ link TableEntry } with a an optional * matching key . * @ param soughtKey ( Optional ) An { @ link ArrayView } representing the Key to match . If provided , a { @ link TableEntry } * will only be returned if its { @ link TableEntry # getKey ( ) } matches this value . * @ param keyVersion The version of the { @ link TableEntry } that is located at this position . This will be used for * constructing the result and has no bearing on the reading / matching logic . * @ param serializer The { @ link EntrySerializer } to use for deserializing the { @ link TableEntry } instance . * @ param timer Timer for the whole operation . * @ return A new instance of the { @ link AsyncTableEntryReader } class . The { @ link # getResult ( ) } will be completed with * a { @ link TableEntry } instance once the Key is matched . */ static AsyncTableEntryReader < TableEntry > readEntry ( ArrayView soughtKey , long keyVersion , EntrySerializer serializer , TimeoutTimer timer ) { } }
return new EntryReader ( soughtKey , keyVersion , serializer , timer ) ;
public class SnapshotJobBuilder { /** * / * ( non - Javadoc ) * @ see org . duracloud . snapshot . manager . spring . batch . BatchJobBuilder # buildIdentifyingJobParameters ( java . lang . Object ) */ @ Override public JobParameters buildIdentifyingJobParameters ( Snapshot snapshot ) { } }
Map < String , JobParameter > map = createIdentifyingJobParameters ( snapshot ) ; JobParameters params = new JobParameters ( map ) ; return params ;
public class PushSelectCriteria { /** * Move the criteria that applies to the join to be included in the actual join criteria . * @ param criteriaNode the SELECT node ; may not be null * @ param joinNode the JOIN node ; may not be null */ private void moveCriteriaIntoOnClause ( PlanNode criteriaNode , PlanNode joinNode ) { } }
List < Constraint > constraints = joinNode . getPropertyAsList ( Property . JOIN_CONSTRAINTS , Constraint . class ) ; Constraint criteria = criteriaNode . getProperty ( Property . SELECT_CRITERIA , Constraint . class ) ; // since the parser uses EMPTY _ LIST , check for size 0 also if ( constraints == null || constraints . isEmpty ( ) ) { constraints = new LinkedList < Constraint > ( ) ; joinNode . setProperty ( Property . JOIN_CONSTRAINTS , constraints ) ; } if ( ! constraints . contains ( criteria ) ) { constraints . add ( criteria ) ; if ( criteriaNode . hasBooleanProperty ( Property . IS_DEPENDENT ) ) { joinNode . setProperty ( Property . IS_DEPENDENT , Boolean . TRUE ) ; } } criteriaNode . extractFromParent ( ) ;
public class ReflTools { /** * Sets the field < tt > fieldName < / tt > of an object < tt > obj < / tt > to a the * value < tt > value < / tt > . * @ param obj The object * @ param fieldName the field name * @ param value The value */ public static final void setValue ( Object obj , String fieldName , Object value ) { } }
try { getField ( obj . getClass ( ) , fieldName ) . set ( obj , value ) ; } catch ( IllegalArgumentException e ) { throw new RuntimeException ( e ) ; } catch ( IllegalAccessException e ) { throw new RuntimeException ( e ) ; }
public class ConnectionMarshaller {

    /**
     * Marshalls the given {@link Connection} into the protocol format,
     * emitting each field with its protocol binding in a fixed order.
     *
     * @param connection the object to marshall; must not be null
     * @param protocolMarshaller the marshaller that receives each field/binding pair
     * @throws SdkClientException if the argument is null or marshalling fails
     */
    public void marshall(Connection connection, ProtocolMarshaller protocolMarshaller) {
        if (connection == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }

        try {
            // Emit every Connection field; null values are handled by the marshaller.
            protocolMarshaller.marshall(connection.getOwnerAccount(), OWNERACCOUNT_BINDING);
            protocolMarshaller.marshall(connection.getConnectionId(), CONNECTIONID_BINDING);
            protocolMarshaller.marshall(connection.getConnectionName(), CONNECTIONNAME_BINDING);
            protocolMarshaller.marshall(connection.getConnectionState(), CONNECTIONSTATE_BINDING);
            protocolMarshaller.marshall(connection.getRegion(), REGION_BINDING);
            protocolMarshaller.marshall(connection.getLocation(), LOCATION_BINDING);
            protocolMarshaller.marshall(connection.getBandwidth(), BANDWIDTH_BINDING);
            protocolMarshaller.marshall(connection.getVlan(), VLAN_BINDING);
            protocolMarshaller.marshall(connection.getPartnerName(), PARTNERNAME_BINDING);
            protocolMarshaller.marshall(connection.getLoaIssueTime(), LOAISSUETIME_BINDING);
            protocolMarshaller.marshall(connection.getLagId(), LAGID_BINDING);
            protocolMarshaller.marshall(connection.getAwsDevice(), AWSDEVICE_BINDING);
            protocolMarshaller.marshall(connection.getJumboFrameCapable(), JUMBOFRAMECAPABLE_BINDING);
            protocolMarshaller.marshall(connection.getAwsDeviceV2(), AWSDEVICEV2_BINDING);
            protocolMarshaller.marshall(connection.getHasLogicalRedundancy(), HASLOGICALREDUNDANCY_BINDING);
        } catch (Exception e) {
            // Wrap any failure as a client-side SDK exception.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class AgentLoader {

    /**
     * Finds the attach method in VirtualMachine.
     *
     * @param vmClass
     *            VirtualMachine class
     * @return 'attach' Method
     * @throws SecurityException
     *             If access is not legal
     * @throws NoSuchMethodException
     *             If no such method is found
     */
    private static Method getAttachMethod(Class<?> vmClass) throws SecurityException, NoSuchMethodException {
        // Fix: Class.getMethod is a varargs method, so the explicit
        // Class<?>[] array allocation is unnecessary.
        return vmClass.getMethod("attach", String.class);
    }
}
public class ProviderInfo { /** * 得到属性值 , 先去动态属性 , 再取静态属性 * @ param key 属性Key * @ return 属性值 */ public String getAttr ( String key ) { } }
String val = ( String ) dynamicAttrs . get ( key ) ; return val == null ? staticAttrs . get ( key ) : val ;
public class TransactionSynchronizationRegistryImpl { /** * { @ inheritDoc } */ public Object getResource ( Object key ) { } }
TransactionImpl tx = registry . getTransaction ( ) ; return tx . getResource ( key ) ;
public class Diff {

    /**
     * DifferenceListener implementation.
     * If the {@link Diff#overrideDifferenceListener overrideDifferenceListener}
     * method has been called then the interpretation of the difference
     * will be delegated.
     *
     * @param difference the difference just encountered
     * @return a DifferenceListener.RETURN_... constant indicating how the
     *         difference was interpreted.
     *         Always RETURN_ACCEPT_DIFFERENCE if the call is not delegated.
     */
    public int differenceFound(Difference difference) {
        int returnValue = evaluate(difference);
        switch (returnValue) {
        case RETURN_IGNORE_DIFFERENCE_NODES_IDENTICAL:
            // Fully ignored: no state change and no message appended.
            return returnValue;
        case RETURN_IGNORE_DIFFERENCE_NODES_SIMILAR:
            // Downgraded to "similar": no longer identical, comparison continues.
            identical = false;
            haltComparison = false;
            break;
        case RETURN_ACCEPT_DIFFERENCE:
            // Accept the difference as-is; recoverability decides whether to halt.
            identical = false;
            if (difference.isRecoverable()) {
                haltComparison = false;
            } else {
                similar = false;
                haltComparison = true;
            }
            break;
        case RETURN_UPGRADE_DIFFERENCE_NODES_DIFFERENT:
            // Forced upgrade to "different": halt regardless of recoverability.
            identical = similar = false;
            haltComparison = true;
            break;
        default:
            throw new IllegalArgumentException(returnValue
                + " is not a defined DifferenceListener.RETURN_... value");
        }
        // Record the outcome before appending the difference details.
        if (haltComparison) {
            messages.append("\n[different]");
        } else {
            messages.append("\n[not identical]");
        }
        appendDifference(messages, difference);
        return returnValue;
    }
}
public class BingImagesImpl { /** * The Image Detail Search API lets you search on Bing and get back insights about an image , such as webpages that include the image . This section provides technical details about the query parameters and headers that you use to request insights of images and the JSON response objects that contain them . For examples that show how to make requests , see [ Searching the Web for Images ] ( https : / / docs . microsoft . com / azure / cognitive - services / bing - image - search / search - the - web ) . * @ param query The user ' s search query term . The term cannot be empty . The term may contain [ Bing Advanced Operators ] ( http : / / msdn . microsoft . com / library / ff795620 . aspx ) . For example , to limit images to a specific domain , use the [ site : ] ( http : / / msdn . microsoft . com / library / ff795613 . aspx ) operator . To help improve relevance of an insights query ( see [ insightsToken ] ( https : / / docs . microsoft . com / en - us / rest / api / cognitiveservices / bing - images - api - v7 - reference # insightstoken ) ) , you should always include the user ' s query term . Use this parameter only with the Image Search API . Do not specify this parameter when calling the Trending Images API . * @ param detailsOptionalParameter the object representing the optional parameters to be set before calling this API * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the ImageInsights object */ public Observable < ImageInsights > detailsAsync ( String query , DetailsOptionalParameter detailsOptionalParameter ) { } }
return detailsWithServiceResponseAsync ( query , detailsOptionalParameter ) . map ( new Func1 < ServiceResponse < ImageInsights > , ImageInsights > ( ) { @ Override public ImageInsights call ( ServiceResponse < ImageInsights > response ) { return response . body ( ) ; } } ) ;
public class Frame { /** * Get the number of arguments passed to given method invocation , including * the object instance if the call is to an instance method . * @ param ins * the method invocation instruction * @ param cpg * the ConstantPoolGen for the class containing the method * @ return number of arguments , including object instance if appropriate * @ throws DataflowAnalysisException */ public int getNumArgumentsIncludingObjectInstance ( InvokeInstruction ins , ConstantPoolGen cpg ) throws DataflowAnalysisException { } }
int numConsumed = ins . consumeStack ( cpg ) ; if ( numConsumed == Const . UNPREDICTABLE ) { throw new DataflowAnalysisException ( "Unpredictable stack consumption in " + ins ) ; } return numConsumed ;
public class AbstractSequenceClassifier { /** * Makes a DocumentReaderAndWriter based on * flags . plainTextReaderAndWriter . Useful for reading in * untokenized text documents or reading plain text from the command * line . An example of a way to use this would be to return a * edu . stanford . nlp . wordseg . Sighan2005DocumentReaderAndWriter for * the Chinese Segmenter . */ public DocumentReaderAndWriter < IN > makePlainTextReaderAndWriter ( ) { } }
String readerClassName = flags . plainTextDocumentReaderAndWriter ; // We set this default here if needed because there may be models // which don ' t have the reader flag set if ( readerClassName == null ) { readerClassName = SeqClassifierFlags . DEFAULT_PLAIN_TEXT_READER ; } DocumentReaderAndWriter < IN > readerAndWriter ; try { readerAndWriter = ( ( DocumentReaderAndWriter < IN > ) Class . forName ( readerClassName ) . newInstance ( ) ) ; } catch ( Exception e ) { throw new RuntimeException ( String . format ( "Error loading flags.plainTextDocumentReaderAndWriter: '%s'" , flags . plainTextDocumentReaderAndWriter ) , e ) ; } readerAndWriter . init ( flags ) ; return readerAndWriter ;
public class SimpleAttributeListOpenRenderer { /** * { @ inheritDoc } */ @ Override public final void renderAttributeListOpen ( final StringBuilder builder , final int pad , final GedObject subObject ) { } }
if ( subObject . hasAttributes ( ) ) { GedRenderer . renderPad ( builder , pad , true ) ; builder . append ( "<ul>\n" ) ; }
public class CheckBoxMenuItemPainter { /** * Paint the check mark in enabled state . * @ param g the Graphics2D context to paint with . * @ param width the width . * @ param height the height . */ private void paintCheckIconEnabledAndSelected ( Graphics2D g , int width , int height ) { } }
Shape s = shapeGenerator . createCheckMark ( 0 , 0 , width , height ) ; g . setPaint ( iconEnabledSelected ) ; g . fill ( s ) ;
public class LiveVariablesAnalysis { /** * Parameters belong to the function scope , but variables defined in the function body belong to * the function body scope . Assign a unique index to each variable , regardless of which scope it ' s * in . */ private void addScopeVariables ( ) { } }
int num = 0 ; for ( Var v : orderedVars ) { scopeVariables . put ( v . getName ( ) , num ) ; num ++ ; }
public class PayloadSender { /** * Executed when we ' re done with the current payload * @ param payload - current payload * @ param cancelled - flag indicating if payload Http - request was cancelled * @ param errorMessage - if not < code > null < / code > - payload request failed * @ param responseCode - http - request response code * @ param responseData - http - reqeust response json ( or null if failed ) */ private synchronized void handleFinishSendingPayload ( PayloadData payload , boolean cancelled , String errorMessage , int responseCode , JSONObject responseData ) { } }
sendingFlag = false ; // mark sender as ' not busy ' try { if ( listener != null ) { listener . onFinishSending ( this , payload , cancelled , errorMessage , responseCode , responseData ) ; } } catch ( Exception e ) { ApptentiveLog . e ( e , "Exception while notifying payload listener" ) ; logException ( e ) ; }
public class DomainsInner { /** * Get a domain . * Get properties of a domain . * @ param resourceGroupName The name of the resource group within the user ' s subscription . * @ param domainName Name of the domain * @ throws IllegalArgumentException thrown if parameters fail the validation * @ throws CloudException thrown if the request is rejected by server * @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent * @ return the DomainInner object if successful . */ public DomainInner getByResourceGroup ( String resourceGroupName , String domainName ) { } }
return getByResourceGroupWithServiceResponseAsync ( resourceGroupName , domainName ) . toBlocking ( ) . single ( ) . body ( ) ;
public class AbstractCommand { /** * 执行命令 * @ param params * @ return * @ throws APPErrorException */ @ Override public Object excute ( Object ... params ) throws APPErrorException { } }
Object obj = tgtools . util . JsonParseHelper . parseToObject ( params [ 0 ] . toString ( ) , getModelClass ( ) , false ) ; invoke ( obj ) ; return true ;
public class DefaultSentryClientFactory { /** * Whether to sample events , and if so how much to allow through to the server ( from 0.0 to 1.0 ) . * @ param dsn Sentry server DSN which may contain options . * @ return The ratio of events to allow through to server , or null if sampling is disabled . */ protected Double getSampleRate ( Dsn dsn ) { } }
return Util . parseDouble ( Lookup . lookup ( SAMPLE_RATE_OPTION , dsn ) , null ) ;
public class NetworkSadnessTransformer {

    /**
     * Return the next event that is safe for delivery or null if there are no
     * safe objects to deliver. A null response could mean no events, or could
     * mean all events are scheduled for the future.
     *
     * @param systemCurrentTimeMillis The current time.
     */
    @Override
    public T next(long systemCurrentTimeMillis) {
        // Drain pending events from the source into the delay queue,
        // capping the queue at 10k entries.
        for (;;) {
            if (delayed.size() >= 10000) {
                break;
            }
            T event = source.next(systemCurrentTimeMillis);
            if (event == null) {
                break; // source drained
            }
            transformAndQueue(event, systemCurrentTimeMillis);
        }
        return delayed.nextReady(systemCurrentTimeMillis);
    }
}
public class NIOLockFile { /** * does the real work of releasing the FileLock */ private boolean releaseFileLock ( ) { } }
// Note : Closing the super class RandomAccessFile has the // side - effect of closing the file lock ' s FileChannel , // so we do not deal with this here . boolean success = false ; if ( this . fileLock == null ) { success = true ; } else { try { this . fileLock . release ( ) ; success = true ; } catch ( Exception e ) { } finally { this . fileLock = null ; } } return success ;
public class BalancerUrlRewriter { /** * Return the path within the web application for the given request . * < p > Detects include request URL if called within a RequestDispatcher include . */ public String getPathWithinApplication ( HttpServletRequest request ) { } }
String requestUri = request . getRequestURI ( ) ; if ( requestUri == null ) requestUri = "" ; String decodedRequestUri = decodeRequestString ( request , requestUri ) ; String contextPath = "" ; // getContextPath ( request ) ; String path ; if ( StringUtils . startsWithIgnoreCase ( decodedRequestUri , contextPath ) && ! conf . isUseContext ( ) ) { // Normal case : URI contains context path . path = decodedRequestUri . substring ( contextPath . length ( ) ) ; } else if ( ! StringUtils . startsWithIgnoreCase ( decodedRequestUri , contextPath ) && conf . isUseContext ( ) ) { // add the context path on path = contextPath + decodedRequestUri ; } else { path = decodedRequestUri ; } return StringUtils . isBlank ( path ) ? "/" : path ;
public class AmazonCloudFormationClient { /** * Returns the stack instance that ' s associated with the specified stack set , AWS account , and region . * For a list of stack instances that are associated with a specific stack set , use < a > ListStackInstances < / a > . * @ param describeStackInstanceRequest * @ return Result of the DescribeStackInstance operation returned by the service . * @ throws StackSetNotFoundException * The specified stack set doesn ' t exist . * @ throws StackInstanceNotFoundException * The specified stack instance doesn ' t exist . * @ sample AmazonCloudFormation . DescribeStackInstance * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / cloudformation - 2010-05-15 / DescribeStackInstance " * target = " _ top " > AWS API Documentation < / a > */ @ Override public DescribeStackInstanceResult describeStackInstance ( DescribeStackInstanceRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeDescribeStackInstance ( request ) ;
public class MSwingUtilities { /** * Démarre un composant dans une Frame sans pack ( ) ( utile pour écrire des méthodes main sur des panels en développement ) . * @ param component * JComponent * @ return la Frame créée */ public static JFrame runUnpacked ( final JComponent component ) { } }
component . setPreferredSize ( component . getSize ( ) ) ; return run ( component ) ;
public class CrnkClient { /** * Generic access using { @ link Resource } class without type mapping . */ public ResourceRepository < Resource , String > getRepositoryForPath ( String resourceType ) { } }
init ( ) ; ResourceInformation resourceInformation = new ResourceInformation ( moduleRegistry . getTypeParser ( ) , Resource . class , resourceType , null , null , PagingSpec . class ) ; return ( ResourceRepository < Resource , String > ) decorate ( new ResourceRepositoryStubImpl < > ( this , Resource . class , resourceInformation , urlBuilder ) ) ;
public class ComputeNodesImpl {

    /**
     * Gets the Remote Desktop Protocol file for the specified compute node.
     * Before you can access a node by using the RDP file, you must create a user account on the node.
     * This API can only be invoked on pools created with a cloud service configuration. For pools
     * created with a virtual machine configuration, see the GetRemoteLoginSettings API.
     *
     * @param poolId The ID of the pool that contains the compute node.
     * @param nodeId The ID of the compute node for which you want to get the Remote Desktop Protocol file.
     * @param computeNodeGetRemoteDesktopOptions Additional parameters for the operation
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the InputStream object
     */
    public Observable<ServiceResponseWithHeaders<InputStream, ComputeNodeGetRemoteDesktopHeaders>> getRemoteDesktopWithServiceResponseAsync(
            String poolId, String nodeId, ComputeNodeGetRemoteDesktopOptions computeNodeGetRemoteDesktopOptions) {
        // Required-parameter validation: fail fast before issuing the request.
        if (this.client.batchUrl() == null) {
            throw new IllegalArgumentException("Parameter this.client.batchUrl() is required and cannot be null.");
        }
        if (poolId == null) {
            throw new IllegalArgumentException("Parameter poolId is required and cannot be null.");
        }
        if (nodeId == null) {
            throw new IllegalArgumentException("Parameter nodeId is required and cannot be null.");
        }
        if (this.client.apiVersion() == null) {
            throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null.");
        }
        Validator.validate(computeNodeGetRemoteDesktopOptions);
        // Unpack each optional request setting; each stays null when options are absent.
        Integer timeout = null;
        if (computeNodeGetRemoteDesktopOptions != null) {
            timeout = computeNodeGetRemoteDesktopOptions.timeout();
        }
        UUID clientRequestId = null;
        if (computeNodeGetRemoteDesktopOptions != null) {
            clientRequestId = computeNodeGetRemoteDesktopOptions.clientRequestId();
        }
        Boolean returnClientRequestId = null;
        if (computeNodeGetRemoteDesktopOptions != null) {
            returnClientRequestId = computeNodeGetRemoteDesktopOptions.returnClientRequestId();
        }
        DateTime ocpDate = null;
        if (computeNodeGetRemoteDesktopOptions != null) {
            ocpDate = computeNodeGetRemoteDesktopOptions.ocpDate();
        }
        // Substitute the batch account URL into the parameterized host template.
        String parameterizedHost = Joiner.on(", ").join("{batchUrl}", this.client.batchUrl());
        DateTimeRfc1123 ocpDateConverted = null;
        if (ocpDate != null) {
            ocpDateConverted = new DateTimeRfc1123(ocpDate);
        }
        // Issue the request and map the raw HTTP response into a typed
        // service response (or an error) via the delegate.
        return service.getRemoteDesktop(poolId, nodeId, this.client.apiVersion(), this.client.acceptLanguage(),
                timeout, clientRequestId, returnClientRequestId, ocpDateConverted, parameterizedHost,
                this.client.userAgent())
            .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponseWithHeaders<InputStream, ComputeNodeGetRemoteDesktopHeaders>>>() {
                @Override
                public Observable<ServiceResponseWithHeaders<InputStream, ComputeNodeGetRemoteDesktopHeaders>> call(Response<ResponseBody> response) {
                    try {
                        ServiceResponseWithHeaders<InputStream, ComputeNodeGetRemoteDesktopHeaders> clientResponse = getRemoteDesktopDelegate(response);
                        return Observable.just(clientResponse);
                    } catch (Throwable t) {
                        // Propagate parse/delegate failures through the observable.
                        return Observable.error(t);
                    }
                }
            });
    }
}
public class PlaneSweepCrackerHelper { /** * Merges two coincident edges into one . The edge2 becomes invalid . */ void mergeEdges_ ( int edge1 , int edge2 ) { } }
// dbg _ check _ edge _ ( edge1 ) ; int cluster_1 = getEdgeCluster ( edge1 , 0 ) ; int cluster2 = getEdgeCluster ( edge1 , 1 ) ; int cluster21 = getEdgeCluster ( edge2 , 0 ) ; int cluster22 = getEdgeCluster ( edge2 , 1 ) ; int originVertices1 = getEdgeOriginVertices ( edge1 ) ; int originVertices2 = getEdgeOriginVertices ( edge2 ) ; m_edge_vertices . concatenateLists ( originVertices1 , originVertices2 ) ; if ( edge2 == getClusterFirstEdge ( cluster_1 ) ) setClusterFirstEdge_ ( cluster_1 , edge1 ) ; if ( edge2 == getClusterFirstEdge ( cluster2 ) ) setClusterFirstEdge_ ( cluster2 , edge1 ) ; disconnectEdge_ ( edge2 ) ; // disconnects the edge2 from the clusters . deleteEdge_ ( edge2 ) ; if ( ! ( ( cluster_1 == cluster21 && cluster2 == cluster22 ) || ( cluster2 == cluster21 && cluster_1 == cluster22 ) ) ) { // Merged edges have different clusters ( clusters have not yet been // merged ) // merge clusters before merging the edges getClusterXY ( cluster_1 , pt_1 ) ; getClusterXY ( cluster21 , pt_2 ) ; if ( pt_1 . isEqual ( pt_2 ) ) { if ( cluster_1 != cluster21 ) { mergeClusters_ ( cluster_1 , cluster21 ) ; assert ( ! m_modified_clusters . hasElement ( cluster21 ) ) ; } if ( cluster2 != cluster22 ) { mergeClusters_ ( cluster2 , cluster22 ) ; assert ( ! m_modified_clusters . hasElement ( cluster22 ) ) ; } } else { if ( cluster2 != cluster21 ) { mergeClusters_ ( cluster2 , cluster21 ) ; assert ( ! m_modified_clusters . hasElement ( cluster21 ) ) ; } if ( cluster_1 != cluster22 ) { mergeClusters_ ( cluster_1 , cluster22 ) ; assert ( ! m_modified_clusters . hasElement ( cluster22 ) ) ; } } } else { // Merged edges have equal clusters . } // dbg _ check _ edge _ ( edge1 ) ;
public class HijrahDate { /** * Obtains an instance of { @ code HijrahDate } from the era , year - of - era * month - of - year and day - of - month . * @ param era the era to represent , not null * @ param yearOfEra the year - of - era to represent , from 1 to 9999 * @ param monthOfYear the month - of - year to represent , from 1 to 12 * @ param dayOfMonth the day - of - month to represent , from 1 to 31 * @ return the Hijrah date , never null * @ throws IllegalCalendarFieldValueException if the value of any field is out of range * @ throws InvalidCalendarFieldException if the day - of - month is invalid for the month - year */ static HijrahDate of ( HijrahEra era , int yearOfEra , int monthOfYear , int dayOfMonth ) { } }
Jdk8Methods . requireNonNull ( era , "era" ) ; checkValidYearOfEra ( yearOfEra ) ; checkValidMonth ( monthOfYear ) ; checkValidDayOfMonth ( dayOfMonth ) ; long gregorianDays = getGregorianEpochDay ( era . prolepticYear ( yearOfEra ) , monthOfYear , dayOfMonth ) ; return new HijrahDate ( gregorianDays ) ;
public class ByteUtils { /** * Converts a byte array into an UTF String . * @ param bytes * used to be converted * @ return UTF String * @ throws IllegalArgumentException * if codePoint is less than 0 or greater than 0X10FFFF */ public static String toUTF ( byte [ ] bytes ) { } }
int codePoint = ByteBuffer . wrap ( bytes ) . getInt ( ) ; if ( codePoint < 0 || codePoint > 0X10FFFF ) throw new IllegalArgumentException ( "RangeError: pack(U): value out of range" ) ; return String . valueOf ( ( char ) codePoint ) ;
public class RefinePolyLineCorner { /** * If A * A + B * B = = 1 then a simplified distance formula can be used */ protected static double distance ( LineGeneral2D_F64 line , Point2D_I32 p ) { } }
return Math . abs ( line . A * p . x + line . B * p . y + line . C ) ;
public class DFSClient { /** * Get the data transfer protocol version supported in the cluster * assuming all the datanodes have the same version . * @ return the data transfer protocol version supported in the cluster */ public int getDataTransferProtocolVersion ( ) throws IOException { } }
synchronized ( dataTransferVersion ) { if ( dataTransferVersion == - 1 ) { // Get the version number from NN try { int remoteDataTransferVersion = namenode . getDataTransferProtocolVersion ( ) ; updateDataTransferProtocolVersionIfNeeded ( remoteDataTransferVersion ) ; } catch ( RemoteException re ) { IOException ioe = re . unwrapRemoteException ( IOException . class ) ; if ( ioe . getMessage ( ) . startsWith ( IOException . class . getName ( ) + ": " + NoSuchMethodException . class . getName ( ) ) ) { dataTransferVersion = 14 ; // last version not supportting this RPC } else { throw ioe ; } } if ( LOG . isDebugEnabled ( ) ) { LOG . debug ( "Data Transfer Protocal Version is " + dataTransferVersion ) ; } } return dataTransferVersion ; }
public class JingleSession { /** * Trigger a session closed event . */ protected void triggerSessionClosed ( String reason ) { } }
// for ( ContentNegotiator contentNegotiator : contentNegotiators ) { // contentNegotiator . stopJingleMediaSession ( ) ; // for ( TransportCandidate candidate : contentNegotiator . getTransportNegotiator ( ) . getOfferedCandidates ( ) ) // candidate . removeCandidateEcho ( ) ; List < JingleListener > listeners = getListenersList ( ) ; for ( JingleListener li : listeners ) { if ( li instanceof JingleSessionListener ) { JingleSessionListener sli = ( JingleSessionListener ) li ; sli . sessionClosed ( reason , this ) ; } } close ( ) ;
public class UpdateChecker { /** * Downloads the specified update as a jar - file and launches it . The jar * file will be saved at the same location as the currently executed file * but will not replace it ( unless it has the same filename but this will * never happen ) * @ param updateToInstall The { @ link UpdateInfo } - object that contains the information * about the update to download * @ param gui The reference to an { @ link UpdateProgressDialog } that displays * the current update status . * @ param launchUpdateAfterInstall If { @ code true } , the downloaded file will be launched after * the download succeeds . * @ param deleteOldVersion If { @ code true } , the old app version will be automatically * deleted once the new version is downloaded . < b > Please note < / b > * that the file can ' t delete itself on some operating systems . * Therefore , the deletion is done by the updated file . To * actually delete the file , you need to call * { @ link # completeUpdate ( String [ ] ) } in your applications main * method . * @ param inheritIO If st to { @ code true } , the new version will inherit standard in and standard out . * @ param params Additional commandline parameters to be submitted to the new application version . * @ return { @ code true } if the download finished successfully , { @ code false } * if the download was cancelled using * { @ link # cancelDownloadAndLaunch ( ) } * @ throws IllegalStateException if maven fails to download or copy the new artifact . * @ throws IOException If the updated artifact cannot be launched . * @ see # completeUpdate ( String [ ] ) */ public static boolean downloadAndInstallUpdate ( UpdateInfo updateToInstall , UpdateProgressDialog gui , boolean launchUpdateAfterInstall , @ SuppressWarnings ( "SameParameterValue" ) boolean deleteOldVersion , boolean inheritIO , String ... params ) throws IllegalStateException , IOException { } }
// Reset cancel state cancelDownloadAndLaunch = false ; if ( gui != null ) { gui . preparePhaseStarted ( ) ; } // Perform Cancel if requested if ( cancelDownloadAndLaunch ) { // noinspection ConstantConditions if ( gui != null ) { gui . operationCanceled ( ) ; } return false ; } String destFolder ; File currentSourceFolder = ( new File ( URLDecoder . decode ( UpdateChecker . class . getProtectionDomain ( ) . getCodeSource ( ) . getLocation ( ) . getPath ( ) , "UTF-8" ) ) ) ; // get up the structure if we are currently on a file while ( currentSourceFolder . isFile ( ) ) { currentSourceFolder = currentSourceFolder . getParentFile ( ) ; } destFolder = currentSourceFolder . getAbsolutePath ( ) ; String destFilename ; // Construct file name of output file if ( updateToInstall . mavenClassifier . equals ( "" ) ) { // No classifier destFilename = updateToInstall . mavenArtifactID + "-" + updateToInstall . toVersion . toString ( ) + "." + updateToInstall . packaging ; } else { destFilename = updateToInstall . mavenArtifactID + "-" + updateToInstall . toVersion . toString ( ) + "-" + updateToInstall . mavenClassifier + "." + updateToInstall . packaging ; } URL artifactURL ; // Construct the download url if ( updateToInstall . mavenClassifier . equals ( "" ) ) { artifactURL = new URL ( updateToInstall . mavenRepoBaseURL . toString ( ) + "/" + updateToInstall . mavenGroupID . replace ( '.' , '/' ) + "/" + updateToInstall . mavenArtifactID + "/" + updateToInstall . toVersion . toString ( ) + "/" + updateToInstall . mavenArtifactID + "-" + updateToInstall . toVersion . toString ( ) + "." + updateToInstall . packaging ) ; } else { artifactURL = new URL ( updateToInstall . mavenRepoBaseURL . toString ( ) + "/" + updateToInstall . mavenGroupID . replace ( '.' , '/' ) + "/" + updateToInstall . mavenArtifactID + "/" + updateToInstall . toVersion . toString ( ) + "/" + updateToInstall . mavenArtifactID + "-" + updateToInstall . toVersion . toString ( ) + "-" + updateToInstall . 
mavenClassifier + "." + updateToInstall . packaging ) ; } // Perform Cancel if requested if ( cancelDownloadAndLaunch ) { if ( gui != null ) { gui . operationCanceled ( ) ; } return false ; } // Create empty file File outputFile = new File ( destFolder + File . separator + destFilename ) ; // Download if ( gui != null ) { gui . downloadStarted ( ) ; } FOKLogger . info ( UpdateChecker . class . getName ( ) , "Downloading artifact from " + artifactURL . toString ( ) + "..." ) ; FOKLogger . info ( UpdateChecker . class . getName ( ) , "Downloading to: " + outputFile . getAbsolutePath ( ) ) ; // FileUtils . copyURLToFile ( artifactURL , outputFile ) ; try { HttpURLConnection httpConnection = ( HttpURLConnection ) ( artifactURL . openConnection ( ) ) ; long completeFileSize = httpConnection . getContentLength ( ) ; java . io . BufferedInputStream in = new java . io . BufferedInputStream ( httpConnection . getInputStream ( ) ) ; java . io . FileOutputStream fos = new java . io . FileOutputStream ( outputFile ) ; java . io . BufferedOutputStream bout = new BufferedOutputStream ( fos , 1024 ) ; byte [ ] data = new byte [ 1024 ] ; long downloadedFileSize = 0 ; int x ; while ( ( x = in . read ( data , 0 , 1024 ) ) >= 0 ) { downloadedFileSize += x ; // calculate progress // final int currentProgress = ( int ) // ( ( ( ( double ) downloadedFileSize ) / ( ( double ) completeFileSize ) ) // * 100000d ) ; // update progress bar if ( gui != null ) { gui . downloadProgressChanged ( downloadedFileSize / 1024.0 , completeFileSize / 1024.0 ) ; } bout . write ( data , 0 , x ) ; // Perform Cancel if requested if ( cancelDownloadAndLaunch ) { bout . close ( ) ; in . close ( ) ; Files . delete ( outputFile . toPath ( ) ) ; if ( gui != null ) { gui . operationCanceled ( ) ; } return false ; } } bout . close ( ) ; in . close ( ) ; } catch ( IOException e ) { FOKLogger . log ( UpdateChecker . class . getName ( ) , Level . SEVERE , FOKLogger . 
DEFAULT_ERROR_TEXT , e ) ; } // Perform Cancel if requested if ( cancelDownloadAndLaunch ) { if ( gui != null ) { gui . operationCanceled ( ) ; } return false ; } // Perform install steps ( none at the moment ) if ( gui != null ) { gui . installStarted ( ) ; } // Perform Cancel if requested if ( cancelDownloadAndLaunch ) { if ( gui != null ) { gui . operationCanceled ( ) ; } return false ; } // launch the app if ( gui != null ) { gui . launchStarted ( ) ; } if ( launchUpdateAfterInstall ) { ProcessBuilder pb ; List < String > startupArgs = new ArrayList < > ( ) ; if ( updateToInstall . packaging . equals ( "jar" ) ) { startupArgs . add ( "java" ) ; startupArgs . add ( "-jar" ) ; } startupArgs . add ( destFolder + File . separator + destFilename ) ; String decodedPath = Common . getInstance ( ) . getPathAndNameOfCurrentJar ( ) ; if ( deleteOldVersion ) { FOKLogger . info ( UpdateChecker . class . getName ( ) , "The following file will be deleted once the update completes: " + decodedPath ) ; startupArgs . add ( "deleteFile=" + decodedPath ) ; } // add the version info of this file to the startup args startupArgs . add ( "oldVersion=" + Common . getInstance ( ) . getAppVersion ( ) ) ; startupArgs . add ( "oldFile=" + decodedPath ) ; startupArgs . addAll ( Arrays . asList ( params ) ) ; FOKLogger . info ( UpdateChecker . class . getName ( ) , "Launching new version using command: " + StringUtils . join ( startupArgs . toArray ( ) , " " ) ) ; pb = new ProcessBuilder ( startupArgs ) ; // . inheritIO ( ) ; pb . start ( ) ; /* * / / Wait for process to end try { process . waitFor ( ) ; } catch * ( InterruptedException e ) { FOKLogger . log ( Level . SEVERE , * FOKLogger . DEFAULT _ ERROR _ TEXT , e ) ; } */ Platform . exit ( ) ; } // Everything went smoothly return true ;
public class Key { /** * Decrypt the payload of a Fernet token . * @ param cipherText the padded encrypted payload of a token . The length < em > must < / em > be a multiple of 16 ( 128 bits ) . * @ param initializationVector the random bytes used in the AES encryption of the token * @ return the decrypted payload * @ see Key # encrypt ( byte [ ] , IvParameterSpec ) */ @ SuppressWarnings ( "PMD.LawOfDemeter" ) public byte [ ] decrypt ( final byte [ ] cipherText , final IvParameterSpec initializationVector ) { } }
try { final Cipher cipher = Cipher . getInstance ( getCipherTransformation ( ) ) ; cipher . init ( DECRYPT_MODE , getEncryptionKeySpec ( ) , initializationVector ) ; return cipher . doFinal ( cipherText ) ; } catch ( final NoSuchAlgorithmException | NoSuchPaddingException | InvalidKeyException | InvalidAlgorithmParameterException | IllegalBlockSizeException e ) { // this should not happen as we use an algorithm ( AES ) and padding // ( PKCS5 ) that are guaranteed to exist . // in addition , we validate the encryption key and initialization vector up front throw new IllegalStateException ( e . getMessage ( ) , e ) ; } catch ( final BadPaddingException bpe ) { throw new TokenValidationException ( "Invalid padding in token: " + bpe . getMessage ( ) , bpe ) ; }
public class AWSServerlessApplicationRepositoryClient { /** * Creates an application version . * @ param createApplicationVersionRequest * @ return Result of the CreateApplicationVersion operation returned by the service . * @ throws TooManyRequestsException * The client is sending more than the allowed number of requests per unit of time . * @ throws BadRequestException * One of the parameters in the request is invalid . * @ throws InternalServerErrorException * The AWS Serverless Application Repository service encountered an internal error . * @ throws ConflictException * The resource already exists . * @ throws ForbiddenException * The client is not authenticated . * @ sample AWSServerlessApplicationRepository . CreateApplicationVersion * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / serverlessrepo - 2017-09-08 / CreateApplicationVersion " * target = " _ top " > AWS API Documentation < / a > */ @ Override public CreateApplicationVersionResult createApplicationVersion ( CreateApplicationVersionRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeCreateApplicationVersion ( request ) ;
public class StateDescriptor { /** * Configures optional activation of state time - to - live ( TTL ) . * < p > State user value will expire , become unavailable and be cleaned up in storage * depending on configured { @ link StateTtlConfig } . * @ param ttlConfig configuration of state TTL */ public void enableTimeToLive ( StateTtlConfig ttlConfig ) { } }
Preconditions . checkNotNull ( ttlConfig ) ; Preconditions . checkArgument ( ttlConfig . getUpdateType ( ) != StateTtlConfig . UpdateType . Disabled && queryableStateName == null , "Queryable state is currently not supported with TTL" ) ; this . ttlConfig = ttlConfig ;
public class RTMPConnection { /** * Dispatches event * @ param event * Event */ @ Override public void dispatchEvent ( IEvent event ) { } }
if ( log . isDebugEnabled ( ) ) { log . debug ( "Event notify: {}" , event ) ; } // determine if its an outgoing invoke or notify switch ( event . getType ( ) ) { case CLIENT_INVOKE : ClientInvokeEvent cie = ( ClientInvokeEvent ) event ; invoke ( cie . getMethod ( ) , cie . getParams ( ) , cie . getCallback ( ) ) ; break ; case CLIENT_NOTIFY : ClientNotifyEvent cne = ( ClientNotifyEvent ) event ; notify ( cne . getMethod ( ) , cne . getParams ( ) ) ; break ; default : log . warn ( "Unhandled event: {}" , event ) ; }
public class UrlUtils { /** * Create a map from String to String that represents the contents of the query * portion of a URL . For each x = y , x is the key and y is the value . * @ param s the query part of the URI . * @ return the map . */ public static Map < String , String > parseQueryString ( String s ) { } }
Map < String , String > ht = new HashMap < String , String > ( ) ; StringTokenizer st = new StringTokenizer ( s , "&" ) ; while ( st . hasMoreTokens ( ) ) { String pair = st . nextToken ( ) ; int pos = pair . indexOf ( '=' ) ; if ( pos == - 1 ) { ht . put ( pair . toLowerCase ( ) , "" ) ; } else { ht . put ( pair . substring ( 0 , pos ) . toLowerCase ( ) , pair . substring ( pos + 1 ) ) ; } } return ht ;
public class MediaFormatBuilder { /** * Builds the media format definition . * @ return Media format definition */ public @ NotNull MediaFormat build ( ) { } }
if ( this . name == null ) { throw new IllegalArgumentException ( "Name is missing." ) ; } return new MediaFormat ( name , label , description , width , minWidth , maxWidth , height , minHeight , maxHeight , ratio , ratioWidth , ratioHeight , fileSizeMax , nonNullArray ( extensions ) , renditionGroup , download , internal , ranking , ImmutableValueMap . copyOf ( properties ) ) ;
public class SnapshotNode { /** * Tries to get the most up to date lengths of files under construction . */ void updateLeasedFiles ( SnapshotStorage ssStore ) throws IOException { } }
FSNamesystem fsNamesys = ssStore . getFSNamesystem ( ) ; List < Block > blocksForNN = new ArrayList < Block > ( ) ; leaseUpdateThreadPool = new ThreadPoolExecutor ( 1 , maxLeaseUpdateThreads , 60 , TimeUnit . SECONDS , new LinkedBlockingQueue < Runnable > ( ) ) ; ( ( ThreadPoolExecutor ) leaseUpdateThreadPool ) . allowCoreThreadTimeOut ( true ) ; // Try to update lengths for leases from DN LightWeightLinkedSet < Lease > sortedLeases = fsNamesys . leaseManager . getSortedLeases ( ) ; Iterator < Lease > itr = sortedLeases . iterator ( ) ; while ( itr . hasNext ( ) ) { Lease lease = itr . next ( ) ; for ( String path : lease . getPaths ( ) ) { // Update file lengths using worker threads to increase throughput leaseUpdateThreadPool . execute ( new LeaseUpdateWorker ( conf , path , fsNamesys , blocksForNN ) ) ; } } try { leaseUpdateThreadPool . shutdown ( ) ; // Wait till update tasks finish successfully ( max 20 mins ? ) if ( ! leaseUpdateThreadPool . awaitTermination ( 1200 , TimeUnit . SECONDS ) ) { throw new IOException ( "Updating lease files failed" ) ; } } catch ( InterruptedException e ) { throw new IOException ( "Snapshot creation interrupted while updating leased files" ) ; } // Fetch block lengths for renamed / deleted leases from NN long [ ] blockIds = new long [ blocksForNN . size ( ) ] ; for ( int i = 0 ; i < blocksForNN . size ( ) ; ++ i ) { blockIds [ i ] = blocksForNN . get ( i ) . getBlockId ( ) ; } long [ ] lengths = namenode . getBlockLengths ( blockIds ) ; for ( int i = 0 ; i < blocksForNN . size ( ) ; ++ i ) { if ( lengths [ i ] == - 1 ) { // Couldn ' t update block length , keep preferred length LOG . error ( "Couldn't update length for block " + blocksForNN . get ( i ) ) ; } else { blocksForNN . get ( i ) . setNumBytes ( lengths [ i ] ) ; } }
public class SelfExtractor { /** * Display and obtain agreement for the license terms */ private static boolean obtainLicenseAgreement ( LicenseProvider licenseProvider ) { } }
// Prompt for word - wrapped display of license agreement & information boolean view ; SelfExtract . wordWrappedOut ( SelfExtract . format ( "showAgreement" , "--viewLicenseAgreement" ) ) ; view = SelfExtract . getResponse ( SelfExtract . format ( "promptAgreement" ) , "" , "xX" ) ; if ( view ) { SelfExtract . showLicenseFile ( licenseProvider . getLicenseAgreement ( ) ) ; System . out . println ( ) ; } SelfExtract . wordWrappedOut ( SelfExtract . format ( "showInformation" , "--viewLicenseInfo" ) ) ; view = SelfExtract . getResponse ( SelfExtract . format ( "promptInfo" ) , "" , "xX" ) ; if ( view ) { SelfExtract . showLicenseFile ( licenseProvider . getLicenseInformation ( ) ) ; System . out . println ( ) ; } System . out . println ( ) ; SelfExtract . wordWrappedOut ( SelfExtract . format ( "licenseOptionDescription" ) ) ; System . out . println ( ) ; boolean accept = SelfExtract . getResponse ( SelfExtract . format ( "licensePrompt" , new Object [ ] { "[1]" , "[2]" } ) , "1" , "2" ) ; System . out . println ( ) ; return accept ;
public class JobTracker { /** * Change the run - time priority of the given job . * @ param jobId job id * @ param priority new { @ link JobPriority } for the job */ synchronized void setJobPriority ( JobID jobId , JobPriority priority ) { } }
JobInProgress job = jobs . get ( jobId ) ; if ( job != null ) { synchronized ( taskScheduler ) { JobStatus oldStatus = ( JobStatus ) job . getStatus ( ) . clone ( ) ; job . setPriority ( priority ) ; JobStatus newStatus = ( JobStatus ) job . getStatus ( ) . clone ( ) ; JobStatusChangeEvent event = new JobStatusChangeEvent ( job , EventType . PRIORITY_CHANGED , oldStatus , newStatus ) ; updateJobInProgressListeners ( event ) ; } } else { LOG . warn ( "Trying to change the priority of an unknown job: " + jobId ) ; }
public class BindTypeSubProcessor { /** * / * ( non - Javadoc ) * @ see javax . annotation . processing . AbstractProcessor # process ( java . util . Set , javax . annotation . processing . RoundEnvironment ) */ @ Override public boolean process ( final Set < ? extends TypeElement > annotations , final RoundEnvironment roundEnv ) { } }
parseBindType ( roundEnv ) ; // Build model for ( Element element : roundEnv . getElementsAnnotatedWith ( BindType . class ) ) { final Element item = element ; AssertKripton . assertTrueOrInvalidKindForAnnotationException ( item . getKind ( ) == ElementKind . CLASS , item , BindType . class ) ; BindEntityBuilder . parse ( model , ( TypeElement ) item ) ; } if ( globalBeanElements . size ( ) == 0 ) { info ( "No class with @%s annotation was found" , BindType . class . getSimpleName ( ) ) ; } // Generate classes for model try { generateClasses ( ) ; } catch ( IOException e ) { e . printStackTrace ( ) ; throw ( new KriptonRuntimeException ( e ) ) ; } return true ;
public class CommonOps_DSCC { /** * Returns a diagonal matrix with the specified diagonal elements . * @ param values values of diagonal elements * @ return A diagonal matrix */ public static DMatrixSparseCSC diag ( double ... values ) { } }
int N = values . length ; return diag ( new DMatrixSparseCSC ( N , N , N ) , values , 0 , N ) ;
public class Image { /** * Deprecates this image . * @ return a global operation if the deprecation request was successfully sent , { @ code null } if * the image was not found * @ throws ComputeException upon failure or if this image is a publicly - available image */ public Operation deprecate ( DeprecationStatus < ImageId > deprecationStatus , OperationOption ... options ) { } }
return compute . deprecate ( getImageId ( ) , deprecationStatus , options ) ;
public class SVGAndroidRenderer { private void render ( SVG . Polygon obj ) { } }
debug ( "Polygon render" ) ; updateStyleForElement ( state , obj ) ; if ( ! display ( ) ) return ; if ( ! visible ( ) ) return ; if ( ! state . hasStroke && ! state . hasFill ) return ; if ( obj . transform != null ) canvas . concat ( obj . transform ) ; int numPoints = obj . points . length ; if ( numPoints < 2 ) return ; Path path = makePathAndBoundingBox ( obj ) ; updateParentBoundingBox ( obj ) ; checkForGradientsAndPatterns ( obj ) ; checkForClipPath ( obj ) ; boolean compositing = pushLayer ( ) ; if ( state . hasFill ) doFilledPath ( obj , path ) ; if ( state . hasStroke ) doStroke ( path ) ; renderMarkers ( obj ) ; if ( compositing ) popLayer ( obj ) ;
public class JsonLdProcessor { /** * Expands the given input according to the steps in the * < a href = " http : / / www . w3 . org / TR / json - ld - api / # expansion - algorithm " > Expansion * algorithm < / a > . * @ param input * The input JSON - LD object . * @ param opts * The { @ link JsonLdOptions } that are to be sent to the expansion * algorithm . * @ return The expanded JSON - LD document * @ throws JsonLdError * If there is an error while expanding . */ public static List < Object > expand ( Object input , JsonLdOptions opts ) throws JsonLdError { } }
// TODO : look into java futures / promises // 2 ) TODO : better verification of DOMString IRI if ( input instanceof String && ( ( String ) input ) . contains ( ":" ) ) { try { final RemoteDocument tmp = opts . getDocumentLoader ( ) . loadDocument ( ( String ) input ) ; input = tmp . getDocument ( ) ; // TODO : figure out how to deal with remote context } catch ( final Exception e ) { throw new JsonLdError ( Error . LOADING_DOCUMENT_FAILED , e ) ; } // if set the base in options should override the base iri in the // active context // thus only set this as the base iri if it ' s not already set in // options if ( opts . getBase ( ) == null ) { opts . setBase ( ( String ) input ) ; } } Context activeCtx = new Context ( opts ) ; if ( opts . getExpandContext ( ) != null ) { Object exCtx = opts . getExpandContext ( ) ; if ( exCtx instanceof Map && ( ( Map < String , Object > ) exCtx ) . containsKey ( JsonLdConsts . CONTEXT ) ) { exCtx = ( ( Map < String , Object > ) exCtx ) . get ( JsonLdConsts . CONTEXT ) ; } activeCtx = activeCtx . parse ( exCtx ) ; } // TODO : add support for getting a context from HTTP when content - type // is set to a jsonld compatable format Object expanded = new JsonLdApi ( opts ) . expand ( activeCtx , input ) ; // final step of Expansion Algorithm if ( expanded instanceof Map && ( ( Map ) expanded ) . containsKey ( JsonLdConsts . GRAPH ) && ( ( Map ) expanded ) . size ( ) == 1 ) { expanded = ( ( Map < String , Object > ) expanded ) . get ( JsonLdConsts . GRAPH ) ; } else if ( expanded == null ) { expanded = new ArrayList < Object > ( ) ; } // normalize to an array if ( ! ( expanded instanceof List ) ) { final List < Object > tmp = new ArrayList < Object > ( ) ; tmp . add ( expanded ) ; expanded = tmp ; } return ( List < Object > ) expanded ;
public class WorkflowClient { /** * Starts the decision task for the given workflow instance * @ param workflowId the id of the workflow instance */ public void runDecider ( String workflowId ) { } }
Preconditions . checkArgument ( StringUtils . isNotBlank ( workflowId ) , "workflow id cannot be blank" ) ; put ( "workflow/decide/{workflowId}" , null , null , workflowId ) ;
public class Interpreter { /** * FIXME : This breaks for non - numeric uses if isSubtraction */ private static Object add ( ExecutionContext context , Object lhs , Object rhs ) { } }
if ( lhs instanceof String || rhs instanceof String ) { return ( Types . toString ( context , lhs ) + Types . toString ( context , rhs ) ) ; } Number lhsNum = Types . toNumber ( context , lhs ) ; Number rhsNum = Types . toNumber ( context , rhs ) ; if ( Double . isNaN ( lhsNum . doubleValue ( ) ) || Double . isNaN ( rhsNum . doubleValue ( ) ) ) { return ( Double . NaN ) ; } if ( lhsNum instanceof Double || rhsNum instanceof Double ) { if ( lhsNum . doubleValue ( ) == 0.0 && rhsNum . doubleValue ( ) == 0.0 ) { if ( Double . compare ( lhsNum . doubleValue ( ) , 0.0 ) < 0 && Double . compare ( rhsNum . doubleValue ( ) , 0.0 ) < 0 ) { return ( - 0.0 ) ; } else { return ( 0.0 ) ; } } return ( lhsNum . doubleValue ( ) - rhsNum . doubleValue ( ) ) ; } return ( lhsNum . longValue ( ) + rhsNum . longValue ( ) ) ;
public class LocalTrustGraph { /** * create a bidirectional trust link between the nodes with ids ' a ' and ' b ' * @ param a id of node that will form a trust link with ' b ' * @ param b id of node that will form a trust link with ' a ' */ public void addRoute ( final TrustGraphNodeId a , final TrustGraphNodeId b ) { } }
addDirectedRoute ( a , b ) ; addDirectedRoute ( b , a ) ;
public class PhotosInterface { /** * Set the meta data for the photo . * This method requires authentication with ' write ' permission . * @ param photoId * The photo ID * @ param title * The new title * @ param description * The new description * @ throws FlickrException */ public void setMeta ( String photoId , String title , String description ) throws FlickrException { } }
Map < String , Object > parameters = new HashMap < String , Object > ( ) ; parameters . put ( "method" , METHOD_SET_META ) ; parameters . put ( "photo_id" , photoId ) ; parameters . put ( "title" , title ) ; parameters . put ( "description" , description ) ; Response response = transport . post ( transport . getPath ( ) , parameters , apiKey , sharedSecret ) ; if ( response . isError ( ) ) { throw new FlickrException ( response . getErrorCode ( ) , response . getErrorMessage ( ) ) ; }
public class LocalCall { /** * Calls this salt call via the async client and returns the results * as they come in via the event stream . * @ param localAsync function providing callAsync for LocalCalls * @ param runnerAsync function providing callAsync for RunnerCalls * @ param events the event stream to use * @ param cancel future to cancel the action * @ return a map from minion id to future of the result . */ public CompletionStage < Optional < Map < String , CompletionStage < Result < R > > > > > callAsync ( Function < LocalCall < R > , CompletionStage < Optional < LocalAsyncResult < R > > > > localAsync , Function < RunnerCall < Map < String , R > > , CompletionStage < RunnerAsyncResult < Map < String , R > > > > runnerAsync , EventStream events , CompletionStage < GenericError > cancel ) { } }
return localAsync . apply ( this ) . thenApply ( optLar -> { TypeToken < R > returnTypeToken = this . getReturnType ( ) ; Type result = ClientUtils . parameterizedType ( null , Result . class , returnTypeToken . getType ( ) ) ; @ SuppressWarnings ( "unchecked" ) TypeToken < Result < R > > typeToken = ( TypeToken < Result < R > > ) TypeToken . get ( result ) ; return optLar . map ( lar -> { Map < String , CompletableFuture < Result < R > > > futures = lar . getMinions ( ) . stream ( ) . collect ( Collectors . toMap ( mid -> mid , mid -> new CompletableFuture < > ( ) ) ) ; EventListener listener = new EventListener ( ) { @ Override public void notify ( Event event ) { Optional < JobReturnEvent > jobReturnEvent = JobReturnEvent . parse ( event ) ; if ( jobReturnEvent . isPresent ( ) ) { jobReturnEvent . ifPresent ( e -> onJobReturn ( lar . getJid ( ) , e , typeToken , futures ) ) ; } else { RunnerReturnEvent . parse ( event ) . ifPresent ( e -> onRunnerReturn ( lar . getJid ( ) , e , typeToken , futures ) ) ; } } @ Override public void eventStreamClosed ( int code , String phrase ) { Result < R > error = Result . error ( new GenericError ( "EventStream closed with reason " + phrase ) ) ; futures . values ( ) . forEach ( f -> f . complete ( error ) ) ; } } ; CompletableFuture < Void > allResolves = CompletableFuture . allOf ( futures . entrySet ( ) . stream ( ) . map ( entry -> // mask errors since CompletableFuture . allOf resolves on first error entry . getValue ( ) . < Integer > handle ( ( v , e ) -> 0 ) ) . toArray ( CompletableFuture [ ] :: new ) ) ; allResolves . whenComplete ( ( v , e ) -> events . removeEventListener ( listener ) ) ; cancel . whenComplete ( ( v , e ) -> { if ( v != null ) { Result < R > error = Result . error ( v ) ; futures . values ( ) . forEach ( f -> f . complete ( error ) ) ; } else if ( e != null ) { futures . values ( ) . forEach ( f -> f . completeExceptionally ( e ) ) ; } } ) ; events . 
addEventListener ( listener ) ; // fire off lookup to get a result event for minions that already finished // before we installed the listeners runnerAsync . apply ( Jobs . lookupJid ( lar ) ) ; return futures . entrySet ( ) . stream ( ) . collect ( Collectors . toMap ( Map . Entry :: getKey , e -> ( CompletionStage < Result < R > > ) e . getValue ( ) ) ) ; } ) ; } ) ;
public class SnapshotStore {

    /**
     * Creates a memory snapshot.
     *
     * Allocates a growable heap buffer (initial size of one descriptor,
     * growable up to Integer.MAX_VALUE), copies the descriptor header into
     * it, and wraps both in a MemorySnapshot backed by this store.
     *
     * @param descriptor the descriptor identifying the snapshot
     * @return the newly created in-memory snapshot
     */
    private Snapshot createMemorySnapshot(SnapshotDescriptor descriptor) {
        HeapBuffer buffer = HeapBuffer.allocate(SnapshotDescriptor.BYTES, Integer.MAX_VALUE);
        // copyTo(buffer) writes the descriptor at the start of the buffer and
        // presumably returns the buffer-bound descriptor copy -- confirm
        Snapshot snapshot = new MemorySnapshot(buffer, descriptor.copyTo(buffer), this);
        LOGGER.debug("Created memory snapshot: {}", snapshot);
        return snapshot;
    }
}
public class Symm {

    /**
     * Encode an InputStream onto an OutputStream using this instance's 6-bit
     * codeset (a Base64-style alphabet), reading one byte at a time until EOF.
     *
     * A '\n' is emitted every splitLinesAt output characters, and '=' padding
     * is appended for a partial final group when endEquals is set.
     *
     * @param is input stream to encode; read to EOF
     * @param os output stream receiving the encoded characters
     * @throws IOException on read or write failure
     */
    public void encode(InputStream is, OutputStream os) throws IOException {
        // StringBuilder sb = new StringBuilder((int)(estimate * 1.255)); // try to get the right size of StringBuilder from start.. slightly more than 1.25 times
        int prev = 0;
        int read, idx = 0, line = 0;
        boolean go;
        do {
            read = is.read();
            if (go = read >= 0) {
                // wrap the output line before emitting the next character
                if (line >= splitLinesAt) {
                    os.write('\n');
                    line = 0;
                }
                switch (++idx) { // 1 based reading, slightly faster ++
                    case 1: // ptr is the first 6 bits of read
                        os.write(codeset[read >> 2]);
                        prev = read;
                        break;
                    case 2: // ptr is the last 2 bits of prev followed by the first 4 bits of read
                        os.write(codeset[((prev & 0x03) << 4) | (read >> 4)]);
                        prev = read;
                        break;
                    default:
                        // Every third input byte completes a 24-bit group and
                        // yields TWO output characters:
                        // Char 1 is last 4 bits of prev plus the first 2 bits of read
                        // Char 2 is the last 6 bits of read
                        os.write(codeset[(((prev & 0xF) << 2) | (read >> 6))]);
                        if (line == splitLinesAt) { // deal with line splitting for two characters
                            os.write('\n');
                            line = 0;
                        }
                        os.write(codeset[(read & 0x3F)]);
                        ++line; // counts the second char; the first is counted below
                        idx = 0;
                        prev = 0;
                }
                ++line;
            } else {
                // EOF: deal with any remaining bits from prev, then pad
                switch (idx) {
                    case 1: // just the last 2 bits of prev; two bytes short of a group -> "=="
                        os.write(codeset[(prev & 0x03) << 4]);
                        if (endEquals)
                            os.write(DOUBLE_EQ);
                        break;
                    case 2: // just the last 4 bits of prev; one byte short of a group -> "="
                        os.write(codeset[(prev & 0xF) << 2]);
                        if (endEquals)
                            os.write('=');
                        break;
                }
                idx = 0;
            }
        } while (go);
    }
}
public class KeyValueDatabase {

    /**
     * Look up potentially matching records.
     *
     * Collects the buckets for all tokens of the record's lookup properties,
     * keeps the highest-scoring buckets that together account for at least
     * min_relevance of the total bucket score, accumulates per-record scores
     * from those buckets, and finally returns up to max_search_hits records
     * whose score reaches min_relevance.
     *
     * @param record the record to find candidates for
     * @return candidate records, best-scoring first when cutoffs apply
     */
    public Collection<Record> findCandidateMatches(Record record) {
        if (DEBUG)
            System.out.println("---------------------------------------------------------------------------");
        // do lookup on all tokens from all lookup properties
        // (we only identify the buckets for now. later we decide how to process them)
        List<Bucket> buckets = lookup(record);
        // preprocess the list of buckets
        Collections.sort(buckets);
        double score_sum = 0.0;
        for (Bucket b : buckets)
            score_sum += b.getScore();
        // walk from the high-scoring end (the list is sorted, highest last)
        // until the buckets above `threshold` cover min_relevance of the total
        double score_so_far = 0.0;
        int threshold = buckets.size() - 1;
        for (; (score_so_far / score_sum) < min_relevance; threshold--) {
            score_so_far += buckets.get(threshold).getScore();
            if (DEBUG)
                System.out.println("score_so_far: " + (score_so_far / score_sum) + " (" + threshold + ")");
        }
        // bucket.get(threshold) made us go over the limit, so we need to step one back
        threshold++;
        if (DEBUG)
            System.out.println("Threshold: " + threshold);
        // the collection of candidates, keyed by record id
        Map<Long, Score> candidates = new HashMap();
        // go through the buckets that we're going to collect candidates from
        int next_bucket = collectCandidates(candidates, buckets, threshold);
        // there might still be some buckets left below the threshold. for
        // these we go through the existing candidates and check if we can
        // find them in the buckets.
        bumpScores(candidates, buckets, next_bucket);
        if (DEBUG)
            System.out.println("candidates: " + candidates.size());
        // if the cutoff properties are not set we can stop right here
        // FIXME: it's possible to make this a lot cleaner
        if (max_search_hits > candidates.size() && min_relevance == 0.0) {
            Collection<Record> cands = new ArrayList(candidates.size());
            for (Long id : candidates.keySet())
                cands.add(store.findRecordById(id));
            if (DEBUG)
                System.out.println("final: " + cands.size());
            return cands;
        }
        // flatten candidates into an array, prior to sorting etc
        int ix = 0;
        Score[] scores = new Score[candidates.size()];
        double max_score = 0.0;
        for (Score s : candidates.values()) {
            scores[ix++] = s;
            if (s.score > max_score)
                max_score = s.score;
            if (DEBUG && false)
                System.out.println("" + s.id + ": " + s.score);
        }
        // allow map to be GC-ed
        candidates = null;
        // filter candidates with min_relevance and max_search_hits. do
        // this by turning the scores[] array into a priority queue (on
        // .score), then retrieving the best candidates. (gives a big
        // performance improvement over sorting the array.)
        // NOTE: this PriorityQueue (with next()) is a project-local class,
        // not java.util.PriorityQueue
        PriorityQueue pq = new PriorityQueue(scores);
        int count = Math.min(scores.length, max_search_hits);
        Collection<Record> records = new ArrayList(count);
        for (ix = 0; ix < count; ix++) {
            Score s = pq.next();
            if (s.score >= min_relevance)
                records.add(store.findRecordById(s.id));
        }
        if (DEBUG)
            System.out.println("final: " + records.size());
        return records;
    }
}
public class RedisQueue {

    /**
     * {@inheritDoc}
     *
     * Builds a connector configured with the default Jedis pool settings and
     * this queue's Redis host/port and password, initialized before return.
     */
    @Override
    protected JedisConnector buildJedisConnector() {
        JedisConnector jedisConnector = new JedisConnector();
        jedisConnector.setJedisPoolConfig(JedisUtils.defaultJedisPoolConfig())
                .setRedisHostsAndPorts(getRedisHostAndPort())
                .setRedisPassword(getRedisPassword())
                .init();
        return jedisConnector;
    }
}
public class FloatColumnsMathOpTransform {

    /**
     * Transform a sequence.
     *
     * @param sequence expected to be a {@code List<List<Float>>}, one inner
     *                 list per time step
     * @return the per-step results of {@code map(...)} collected into a list
     */
    @Override
    public Object mapSequence(Object sequence) {
        List<List<Float>> seq = (List<List<Float>>) sequence;
        List<Float> ret = new ArrayList<>();
        // NOTE(review): each step is a List<Float>, yet map(step)'s result is
        // cast to a single Float; if map(Object) returns a per-step List this
        // cast throws ClassCastException at runtime -- confirm map's contract
        for (List<Float> step : seq)
            ret.add((Float) map(step));
        return ret;
    }
}
public class ApiOvhMsServices { /** * Alter this object properties * REST : PUT / msServices / { serviceName } / account / { userPrincipalName } / exchange * @ param body [ required ] New object properties * @ param serviceName [ required ] The internal name of your Active Directory organization * @ param userPrincipalName [ required ] User Principal Name * API beta */ public void serviceName_account_userPrincipalName_exchange_PUT ( String serviceName , String userPrincipalName , OvhExchangeInformation body ) throws IOException { } }
String qPath = "/msServices/{serviceName}/account/{userPrincipalName}/exchange" ; StringBuilder sb = path ( qPath , serviceName , userPrincipalName ) ; exec ( qPath , "PUT" , sb . toString ( ) , body ) ;
public class ApiOvhTelephony { /** * List of available exchange merchandise brand * REST : GET / telephony / { billingAccount } / line / { serviceName } / phone / merchandiseAvailable * @ param billingAccount [ required ] The name of your billingAccount * @ param serviceName [ required ] */ public ArrayList < OvhHardwareOffer > billingAccount_line_serviceName_phone_merchandiseAvailable_GET ( String billingAccount , String serviceName ) throws IOException { } }
String qPath = "/telephony/{billingAccount}/line/{serviceName}/phone/merchandiseAvailable" ; StringBuilder sb = path ( qPath , billingAccount , serviceName ) ; String resp = exec ( qPath , "GET" , sb . toString ( ) , null ) ; return convertTo ( resp , t7 ) ;
public class DefaultErrorWebExceptionHandler { /** * Render the error information as a JSON payload . * @ param request the current request * @ return a { @ code Publisher } of the HTTP response */ protected Mono < ServerResponse > renderErrorResponse ( ServerRequest request ) { } }
boolean includeStackTrace = isIncludeStackTrace ( request , MediaType . ALL ) ; Map < String , Object > error = getErrorAttributes ( request , includeStackTrace ) ; return ServerResponse . status ( getHttpStatus ( error ) ) . contentType ( MediaType . APPLICATION_JSON_UTF8 ) . body ( BodyInserters . fromObject ( error ) ) ;
public class InventorySchedule { /** * Sets the frequency for producing inventory results . */ public void setFrequency ( InventoryFrequency frequency ) { } }
setFrequency ( frequency == null ? ( String ) null : frequency . toString ( ) ) ;
public class SAXProcessor {

    /**
     * Get all permutations (words) of the given alphabet of given length.
     *
     * The result has exactly alphabet.length ^ wordLength entries, ordered by
     * the alphabet order of the leading symbol, then recursively by the rest.
     *
     * @param alphabet the alphabet to use.
     * @param wordLength the word length.
     * @return array of all words of the requested length.
     */
    public static String[] getAllPermutations(String[] alphabet, int wordLength) {
        // base case: words of length 1 are just the original alphabet symbols
        if (wordLength == 1) {
            return alphabet;
        }
        // recursion: build all words of length (wordLength - 1), then prefix
        // each of them with every alphabet symbol
        String[] allSublists = getAllPermutations(alphabet, wordLength - 1);
        // allocate only on the recursive path -- the base case above returns
        // the alphabet directly and never needs this array
        String[] allLists = new String[(int) Math.pow(alphabet.length, wordLength)];
        int arrayIndex = 0;
        for (int i = 0; i < alphabet.length; i++) {
            for (int j = 0; j < allSublists.length; j++) {
                // add the newly appended combination to the list
                allLists[arrayIndex++] = alphabet[i] + allSublists[j];
            }
        }
        return allLists;
    }
}
public class AppWidgetManagerUtils { /** * Wrapper method of the { @ link android . appwidget . AppWidgetManager # getAppWidgetIds ( android . content . ComponentName ) } . * @ see android . appwidget . AppWidgetManager # getAppWidgetIds ( android . content . ComponentName ) . */ public static int [ ] getAppWidgetIds ( AppWidgetManager appWidgetManager , Context context , Class < ? > clazz ) { } }
return appWidgetManager . getAppWidgetIds ( new ComponentName ( context , clazz ) ) ;
public class AbstractDb { /** * 检查数据库是否支持事务 , 此项检查同一个数据源只检查一次 , 如果不支持抛出DbRuntimeException异常 * @ param conn Connection * @ throws SQLException 获取元数据信息失败 * @ throws DbRuntimeException 不支持事务 */ protected void checkTransactionSupported ( Connection conn ) throws SQLException , DbRuntimeException { } }
if ( null == isSupportTransaction ) { isSupportTransaction = conn . getMetaData ( ) . supportsTransactions ( ) ; } if ( false == isSupportTransaction ) { throw new DbRuntimeException ( "Transaction not supported for current database!" ) ; }
public class SystemConfiguration {

    /**
     * Returns for given parameter <i>_id</i> the instance of class
     * {@link SystemConfiguration}.
     *
     * @param _id id of the system configuration
     * @return instance of class {@link SystemConfiguration}
     * @throws CacheReloadException on error
     */
    public static SystemConfiguration get(final long _id) throws CacheReloadException {
        final Cache<Long, SystemConfiguration> cache = InfinispanCache.get().<Long, SystemConfiguration>getCache(SystemConfiguration.IDCACHE);
        // lazy load: on a cache miss the DB read presumably populates the
        // cache as a side effect -- confirm getSystemConfigurationFromDB
        if (!cache.containsKey(_id)) {
            SystemConfiguration.getSystemConfigurationFromDB(SystemConfiguration.SQL_ID, _id);
        }
        // may still be null if no configuration with this id exists
        return cache.get(_id);
    }
}
public class SipFactoryImpl {

    /**
     * (non-Javadoc)
     * @see javax.servlet.sip.SipFactory#createAddress(javax.servlet.sip.URI,
     * java.lang.String)
     */
    public Address createAddress(URI uri, String displayName) {
        try {
            if (logger.isDebugEnabled()) {
                logger.debug("Creating Address from URI[" + uri.toString() + "] with display name[" + displayName + "]");
            }
            // unwrap the servlet-layer URI into the underlying JAIN-SIP URI
            // before building the JAIN-SIP address
            javax.sip.address.Address address = SipFactoryImpl.addressFactory.createAddress(((URIImpl) uri).getURI());
            address.setDisplayName(displayName);
            return new AddressImpl(address, null, ModifiableRule.Modifiable);
        } catch (ParseException e) {
            // an unparsable display name is a caller error per the contract
            throw new IllegalArgumentException(e);
        }
    }
}
public class AbstractJdbcHelper { /** * Calculate fetch size used for streaming . * @ param hintFetchSize * @ param conn * @ return * @ throws SQLException */ protected int calcFetchSizeForStream ( int hintFetchSize , Connection conn ) throws SQLException { } }
DatabaseVendor dbVendor = DbcHelper . detectDbVendor ( conn ) ; switch ( dbVendor ) { case MYSQL : return Integer . MIN_VALUE ; default : return hintFetchSize < 0 ? 1 : hintFetchSize ; }
public class locationfile {

    /**
     * Use this API to fetch all the locationfile resources that are configured on netscaler.
     */
    public static locationfile get(nitro_service service) throws Exception {
        locationfile obj = new locationfile();
        locationfile[] response = (locationfile[]) obj.get_resources(service);
        // NOTE(review): assumes the appliance always reports at least one
        // locationfile; an empty array here would raise
        // ArrayIndexOutOfBoundsException -- confirm get_resources' contract
        return response[0];
    }
}
public class GaugeSkin {

    /**
     * Rotates the needle to the angle corresponding to the given value and
     * refreshes the value text (content and horizontal position).
     *
     * @param value the gauge value the needle should point at
     */
    private void rotateNeedle(double value) {
        double startOffsetAngle = 180 - startAngle;
        double targetAngle;
        if (NeedleBehavior.STANDARD == needleBehavior) {
            // standard: map value linearly onto the scale, clamped to the
            // scale's angular range
            if (ScaleDirection.CLOCKWISE == gauge.getScaleDirection()) {
                targetAngle = startOffsetAngle + (value - minValue) * angleStep;
                targetAngle = Helper.clamp(startOffsetAngle, startOffsetAngle + angleRange, targetAngle);
            } else {
                targetAngle = startOffsetAngle - (value - minValue) * angleStep;
                targetAngle = Helper.clamp(startOffsetAngle - angleRange, startOffsetAngle, targetAngle);
            }
        } else {
            // non-standard: wrap out-of-range values around the scale ends
            if (value < minValue)
                value = maxValue - minValue + value;
            if (value > maxValue)
                value = value - maxValue + minValue;
            if (ScaleDirection.CLOCKWISE == gauge.getScaleDirection()) {
                targetAngle = startOffsetAngle + (value - minValue) * angleStep;
                targetAngle = Helper.clamp(startOffsetAngle, startOffsetAngle + angleRange, targetAngle);
            } else {
                targetAngle = startOffsetAngle - (value - minValue) * angleStep;
                targetAngle = Helper.clamp(startOffsetAngle - angleRange, startOffsetAngle, targetAngle);
            }
        }
        needleRotate.setAngle(targetAngle);
        valueText.setText(formatNumber(gauge.getLocale(), gauge.getFormatString(), gauge.getDecimals(), value));
        // LCD mode right-aligns the text at a fixed fraction of the size;
        // otherwise the text is horizontally centered
        if (gauge.isLcdVisible()) {
            valueText.setTranslateX((0.691 * size - valueText.getLayoutBounds().getWidth()));
        } else {
            valueText.setTranslateX((size - valueText.getLayoutBounds().getWidth()) * 0.5);
        }
        if (gauge.isAverageVisible())
            drawAverage();
    }
}
public class SyntaxNum { public void init ( String st , int minsize , int maxsize ) { } }
super . init ( check ( st ) , minsize , maxsize ) ;
public class LdapHelper { /** * Returns a Value from the Default Collection . * @ param key the key of the default - collection . * @ return the default value for that key if exists otherwise an empty * string . */ public String getDefault ( final String key ) { } }
if ( defaultValues . containsKey ( key ) ) { return defaultValues . get ( key ) ; } return "" ;
public class DataUtils {

    /**
     * Checks if a given value is the default value. Default value can be
     * <ul>
     * <li>{@code null}</li>
     * <li>empty string</li>
     * <li>0</li>
     * <li>0,00</li>
     * </ul>
     *
     * @param value the value to check (may be {@code null})
     * @return {@code true} if the value is the default value, {@code false} otherwise
     */
    public static boolean isDefaultValue(final String value) {
        // single boolean expression instead of an if/return-true/return-false
        // chain; the null test must come first to avoid a NullPointerException
        return value == null
                || value.isEmpty()
                || "0".equals(value)
                || "0,00".equals(value);
    }
}
public class AnimatedImageResultBuilder {

    /**
     * Builds the {@link AnimatedImageResult}. The preview bitmap and the decoded frames are closed
     * after build is called, so this should not be called more than once or those fields will be lost
     * after the first call.
     *
     * @return the result
     */
    public AnimatedImageResult build() {
        try {
            return new AnimatedImageResult(this);
        } finally {
            // release the builder's references whether or not construction
            // succeeded; the result presumably retains its own clones of
            // these references -- see AnimatedImageResult's constructor
            CloseableReference.closeSafely(mPreviewBitmap);
            mPreviewBitmap = null;
            CloseableReference.closeSafely(mDecodedFrames);
            mDecodedFrames = null;
        }
    }
}
public class CharacterEncoder {

    /**
     * Return a byte array from the remaining bytes in this ByteBuffer.
     * The ByteBuffer's position will be advanced to ByteBuffer's limit.
     * To avoid an extra copy, the implementation will attempt to return the
     * byte array backing the ByteBuffer. If this is not possible, a
     * new byte array will be created.
     */
    private byte[] getBytes(ByteBuffer bb) {
        /*
         * This should never return a BufferOverflowException, as we're
         * careful to allocate just the right amount.
         */
        byte[] buf = null;
        /*
         * If it has a usable backing byte buffer, use it. Use only
         * if the array exactly represents the current ByteBuffer.
         */
        if (bb.hasArray()) {
            byte[] tmp = bb.array();
            // length == capacity and length == remaining together mean the
            // array covers exactly the bytes we want (presumably implies
            // arrayOffset() == 0 and position() == 0 -- confirm)
            if ((tmp.length == bb.capacity()) && (tmp.length == bb.remaining())) {
                buf = tmp;
                bb.position(bb.limit());
            }
        }
        if (buf == null) {
            /*
             * This class doesn't have a concept of encode(buf, len, off),
             * so if we have a partial buffer, we must reallocate
             * space.
             */
            buf = new byte[bb.remaining()];
            /*
             * position() automatically updated
             */
            bb.get(buf);
        }
        return buf;
    }
}
public class FreeMarkerRender { /** * Set freemarker ' s property . * The value of template _ update _ delay is 5 seconds . * Example : FreeMarkerRender . setProperty ( " template _ update _ delay " , " 1600 " ) ; */ public static void setProperty ( String propertyName , String propertyValue ) { } }
try { FreeMarkerRender . getConfiguration ( ) . setSetting ( propertyName , propertyValue ) ; } catch ( TemplateException e ) { throw new RuntimeException ( e ) ; }
public class MessageProcessorControl {

    /**
     * (non-Javadoc)
     * @see com.ibm.ws.sib.processor.runtime.MessageProcessorControllable#getLocalQueuePointControlByID(java.lang.String)
     */
    public SIMPLocalQueuePointControllable getLocalQueuePointControlByID(String id) throws SIMPInvalidRuntimeIDException, SIMPControllableNotFoundException, SIMPException {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(tc, "getLocalQueuePointControlByID", new Object[] { id });
        SIMPLocalQueuePointControllable control = null;
        // Extract destination uuid and msgStore id
        // NOTE(review): tokens[1] below assumes the id contains the separator;
        // an id without it would raise ArrayIndexOutOfBoundsException -- confirm
        String[] tokens = id.split(RuntimeControlConstants.QUEUE_ID_INSERT);
        if (tokens.length > 0) {
            DestinationHandler dest = destinationManager.getDestinationInternal(new SIBUuid12(tokens[0]), false);
            if (dest != null) {
                try {
                    control = (SIMPLocalQueuePointControllable) findControllableFromItemStream((SIMPItemStream) dest, tokens[1]);
                } catch (ClassCastException e) {
                    // FFDC
                    FFDCFilter.processException(e, "com.ibm.ws.sib.processor.runtime.MessageProcessorControl.getLocalQueuePointControlByID", "1:550:1.52", this);
                    // a wrong controllable type is reported as "not found"
                    SIMPControllableNotFoundException finalE = new SIMPControllableNotFoundException(nls.getFormattedMessage("INTERNAL_MESSAGING_ERROR_CWSIP0002", new Object[] { "MessageProcessorControl.getLocalQueuePointControlByID", "1:558:1.52", id }, null));
                    SibTr.exception(tc, finalE);
                    if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
                        SibTr.exit(tc, "getLocalQueuePointControlByID", finalE);
                    throw finalE;
                }
            }
        }
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(tc, "getLocalQueuePointControlByID");
        // may be null when the destination or item stream was not found
        return control;
    }
}
public class AbstractStringBasedEbeanQuery {

    /**
     * (non-Javadoc)
     * @see org.springframework.data.ebean.repository.query.AbstractEbeanQuery#doCreateQuery(java.lang.Object[])
     */
    @Override
    public EbeanQueryWrapper doCreateQuery(Object[] values) {
        // NOTE(review): `accessor` is never read afterwards; it is kept here
        // because the ParametersParameterAccessor constructor may validate the
        // invocation arguments as a side effect -- confirm before removing
        ParameterAccessor accessor = new ParametersParameterAccessor(getQueryMethod().getParameters(), values);
        EbeanQueryWrapper query = createEbeanQuery(this.query.getQueryString());
        // bind the invocation values to the query's parameters and prepare it
        return createBinder(values).bindAndPrepare(query);
    }
}
public class RestorePhoneNumberRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( RestorePhoneNumberRequest restorePhoneNumberRequest , ProtocolMarshaller protocolMarshaller ) { } }
if ( restorePhoneNumberRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( restorePhoneNumberRequest . getPhoneNumberId ( ) , PHONENUMBERID_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class KillBillHttpClient {

    /**
     * HEAD
     *
     * Convenience overload: issues a HEAD request using the client-wide
     * request timeout.
     *
     * @param uri target URI
     * @param requestOptions per-request options
     * @return the HTTP response
     * @throws KillBillClientException on request failure
     */
    public Response doHead(final String uri, final RequestOptions requestOptions) throws KillBillClientException {
        return doHead(uri, requestOptions, this.requestTimeoutSec);
    }
}
public class NetworkConverter {

    /**
     * Convert a JSON link object into a Java Link object.
     *
     * @param mo the model used to resolve the attached physical element
     * @param net the network to populate
     * @param o the JSON object to convert
     * @throws JSONConverterException if a required field is missing or invalid
     */
    public void linkFromJSON(Model mo, Network net, JSONObject o) throws JSONConverterException {
        // connect(id, capacity, switch, physicalElement): the switch is looked
        // up by its id and the attached element is decoded from the nested
        // "physicalElement" object
        net.connect(requiredInt(o, "id"), readCapacity(o), getSwitch(net, requiredInt(o, SWITCH_LABEL)), physicalElementFromJSON(mo, net, (JSONObject) o.get("physicalElement")));
    }
}
public class AbstractCassandraStorage {

    /**
     * Convert object to ByteBuffer.
     *
     * Dispatches on the runtime type of the value to the matching Cassandra
     * type serializer; tuples encode either a collection (when tagged with a
     * leading "set"/"list"/"map" marker string) or a composite.
     */
    protected ByteBuffer objToBB(Object o) {
        if (o == null)
            return nullToBB();
        if (o instanceof java.lang.String)
            return ByteBuffer.wrap(new DataByteArray((String) o).get());
        if (o instanceof Integer)
            return Int32Type.instance.decompose((Integer) o);
        if (o instanceof Long)
            return LongType.instance.decompose((Long) o);
        if (o instanceof Float)
            return FloatType.instance.decompose((Float) o);
        if (o instanceof Double)
            return DoubleType.instance.decompose((Double) o);
        if (o instanceof UUID)
            return ByteBuffer.wrap(UUIDGen.decompose((UUID) o));
        if (o instanceof Tuple) {
            List<Object> objects = ((Tuple) o).getAll();
            // collections: a tuple whose first element is one of the marker
            // strings "set", "list" or "map" carries the collection elements
            // in the remaining positions
            if (objects.size() > 0 && objects.get(0) instanceof String) {
                String collectionType = (String) objects.get(0);
                if ("set".equalsIgnoreCase(collectionType) || "list".equalsIgnoreCase(collectionType))
                    return objToListOrSetBB(objects.subList(1, objects.size()));
                else if ("map".equalsIgnoreCase(collectionType))
                    return objToMapBB(objects.subList(1, objects.size()));
            }
            // any other tuple is serialized as a composite
            return objToCompositeBB(objects);
        }
        // fallback: assumes a DataByteArray (raw bytes); any other type would
        // fail here with a ClassCastException
        return ByteBuffer.wrap(((DataByteArray) o).get());
    }
}
public class DecimalConvertor { /** * Produces decimal digits for non - zero , finite floating point values . * The sign of the value is discarded . Passing in Infinity or NaN produces * invalid digits . The maximum number of decimal digits that this * function will likely produce is 9. * @ param v value * @ param digits buffer to receive decimal digits * @ param offset offset into digit buffer * @ param maxDigits maximum number of digits to produce * @ param maxFractDigits maximum number of fractional digits to produce * @ param roundMode i . e . ROUND _ HALF _ UP * @ return Upper 16 bits : decimal point offset ; lower 16 bits : number of * digits produced */ public static int toDecimalDigits ( float v , char [ ] digits , int offset , int maxDigits , int maxFractDigits , int roundMode ) { } }
int bits = Float . floatToIntBits ( v ) ; int f = bits & 0x7fffff ; int e = ( bits >> 23 ) & 0xff ; if ( e != 0 ) { // Normalized number . return toDecimalDigits ( f + 0x800000 , e - 126 , 24 , digits , offset , maxDigits , maxFractDigits , roundMode ) ; } else { // Denormalized number . return toDecimalDigits ( f , - 125 , 23 , digits , offset , maxDigits , maxFractDigits , roundMode ) ; }
public class SubjectHelper {

    /**
     * Gets a Hashtable of values from the Subject, but do not trace the hashtable.
     *
     * Searches the public credentials first and falls back to the private
     * credentials only when no matching hashtable was found.
     *
     * @param subject {@code null} is not supported.
     * @param properties The properties to get.
     * @return the hashtable containing the properties, or {@code null} when
     *         neither credential set holds a match.
     */
    @Sensitive
    public Hashtable<String, ?> getSensitiveHashtableFromSubject(@Sensitive final Subject subject, @Sensitive final String[] properties) {
        // run privileged: reading a Subject's credential sets requires
        // permissions the direct caller may not hold
        return AccessController.doPrivileged(new PrivilegedAction<Hashtable<String, ?>>() {
            @Override
            public Hashtable<String, ?> run() {
                if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                    Tr.debug(tc, "Looking for custom properties in public cred list.");
                }
                Set<Object> list_public = subject.getPublicCredentials();
                Hashtable<String, ?> hashtableFromPublic = getSensitiveHashtable(list_public, properties);
                if (hashtableFromPublic != null) {
                    return hashtableFromPublic;
                }
                if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                    Tr.debug(tc, "Looking for custom properties in private cred list.");
                }
                Set<Object> list_private = subject.getPrivateCredentials();
                Hashtable<String, ?> hashtableFromPrivate = getSensitiveHashtable(list_private, properties);
                if (hashtableFromPrivate != null) {
                    return hashtableFromPrivate;
                }
                return null;
            }
        });
    }
}
public class ChineseLengthValidator {

    /**
     * Gets the weighted length of a string: each Chinese (CJK) character
     * counts as N characters (N is given by
     * {@link ChineseLengthConstrant#cnHoldLength()}, default 2) and every
     * other character counts as 1.
     *
     * @param value the string to measure
     * @return the weighted length
     */
    private long getChineseLength(String value) {
        long valueLength = 0;
        for (int i = 0; i < value.length(); i++) {
            /* fetch one character */
            char temp = value.charAt(i);
            /* check whether it is a Chinese character: CJK Unified Ideographs
               (U+4E00..U+9FA5) or the compatibility forms / half-width range
               (U+FE30..U+FFA0) */
            if ((temp >= '\u4e00' && temp <= '\u9fa5') || (temp >= '\ufe30' && temp <= '\uffa0')) {
                /* weight applied to a Chinese character */
                valueLength += this.chineseHoldLength;
            } else {
                /* any other character counts as 1 */
                valueLength += 1;
            }
        }
        return valueLength;
    }
}
public class ChronoFormatter { /** * / * [ deutsch ] * < p > Konstruiert ein Hilfsobjekt zum Bauen eines globalen Zeitformats mit Verwendung * des angegebenen Kalendertyps . < / p > * < p > Zum Formatieren ist es notwendig , die Zeitzone am fertiggestellten Formatierer zu setzen . * Beim Parsen ist die Kalendervariante notwendig . < / p > * @ param < C > generic calendrical type with variant * @ param locale format locale * @ param overrideCalendar formattable calendar chronology * @ return new { @ code Builder } - instance applicable on { @ code Moment } * @ since 3.11/4.8 */ public static < C extends CalendarVariant < C > > ChronoFormatter . Builder < Moment > setUpWithOverride ( Locale locale , CalendarFamily < C > overrideCalendar ) { } }
if ( overrideCalendar == null ) { throw new NullPointerException ( "Missing override calendar." ) ; } return new Builder < > ( Moment . axis ( ) , locale , overrideCalendar ) ;
public class SolutionStackDescription { /** * The permitted file types allowed for a solution stack . * @ return The permitted file types allowed for a solution stack . */ public java . util . List < String > getPermittedFileTypes ( ) { } }
if ( permittedFileTypes == null ) { permittedFileTypes = new com . amazonaws . internal . SdkInternalList < String > ( ) ; } return permittedFileTypes ;
public class SheetBindingErrors { /** * パスを指定してフィールドエラーを取得する 。 * < p > 検索する際には 、 引数 「 path 」 に現在のパス ( { @ link # getCurrentPath ( ) } ) を付与して処理します 。 < / p > * @ param path 最後に ' * ' を付けるとワイルドカードが指定可能 。 * @ return エラーがない場合は空のリストを返す */ public List < FieldError > getFieldErrors ( final String path ) { } }
final String fullPath = buildFieldPath ( path ) ; return getFieldErrors ( ) . stream ( ) . filter ( e -> isMatchingFieldError ( fullPath , e ) ) . collect ( Collectors . toList ( ) ) ;
public class AbstractRadialBargraph {

    /**
     * Returns the bargraph track image with the given width and height.
     *
     * Renders the dark track (frame ring + main ring) behind the LEDs and
     * then stamps the inactive LED shape around the arc every 5 degrees.
     *
     * @param WIDTH image size (the image is square); <= 0 returns null
     * @param START_ANGLE start angle of the track arc (degrees)
     * @param ANGLE_EXTEND angular extent of the track arc (degrees)
     * @param APEX_ANGLE angular span covered by the LEDs (degrees)
     * @param BARGRAPH_OFFSET angular offset applied to every LED (degrees)
     * @param image optional image to reuse; allocated when null
     * @return buffered image containing the bargraph track image
     */
    protected BufferedImage create_BARGRAPH_TRACK_Image(final int WIDTH, final double START_ANGLE, final double ANGLE_EXTEND, final double APEX_ANGLE, final double BARGRAPH_OFFSET, BufferedImage image) {
        if (WIDTH <= 0) {
            return null;
        }
        if (image == null) {
            image = UTIL.createImage(WIDTH, WIDTH, Transparency.TRANSLUCENT);
        }
        final Graphics2D G2 = image.createGraphics();
        G2.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON);
        // G2.setRenderingHint(RenderingHints.KEY_RENDERING, RenderingHints.VALUE_RENDER_QUALITY);
        // G2.setRenderingHint(RenderingHints.KEY_DITHERING, RenderingHints.VALUE_DITHER_ENABLE);
        // G2.setRenderingHint(RenderingHints.KEY_ALPHA_INTERPOLATION, RenderingHints.VALUE_ALPHA_INTERPOLATION_QUALITY);
        // G2.setRenderingHint(RenderingHints.KEY_COLOR_RENDERING, RenderingHints.VALUE_COLOR_RENDER_QUALITY);
        // G2.setRenderingHint(RenderingHints.KEY_STROKE_CONTROL, RenderingHints.VALUE_STROKE_NORMALIZE);
        final int IMAGE_WIDTH = image.getWidth();
        final int IMAGE_HEIGHT = image.getHeight();
        // Create led track: outer pie arc (slightly inset at both angular
        // ends) minus an inner ellipse leaves the frame ring
        final java.awt.geom.Arc2D BACK = new java.awt.geom.Arc2D.Double(java.awt.geom.Arc2D.PIE);
        BACK.setFrame(IMAGE_WIDTH * 0.1074766355, IMAGE_HEIGHT * 0.1074766355, IMAGE_WIDTH * 0.785046729, IMAGE_HEIGHT * 0.785046729);
        BACK.setAngleStart(START_ANGLE + 2);
        BACK.setAngleExtent(ANGLE_EXTEND - 5);
        final Ellipse2D BACK_SUB = new Ellipse2D.Double(IMAGE_WIDTH * 0.1822429907, IMAGE_HEIGHT * 0.1822429907, IMAGE_WIDTH * 0.6355140187, IMAGE_HEIGHT * 0.6355140187);
        final java.awt.geom.Area LED_TRACK_FRAME = new java.awt.geom.Area(BACK);
        LED_TRACK_FRAME.subtract(new java.awt.geom.Area(BACK_SUB));
        // vertical gradient across the frame ring
        final Point2D LED_TRACK_FRAME_START = new Point2D.Double(0, LED_TRACK_FRAME.getBounds2D().getMinY());
        final Point2D LED_TRACK_FRAME_STOP = new Point2D.Double(0, LED_TRACK_FRAME.getBounds2D().getMaxY());
        final float[] LED_TRACK_FRAME_FRACTIONS = { 0.0f, 0.22f, 0.76f, 1.0f };
        final Color[] LED_TRACK_FRAME_COLORS = { new Color(0, 0, 0, 255), new Color(51, 51, 51, 255), new Color(51, 51, 51, 255), new Color(100, 100, 100, 255) };
        Util.INSTANCE.validateGradientPoints(LED_TRACK_FRAME_START, LED_TRACK_FRAME_STOP);
        final LinearGradientPaint LED_TRACK_FRAME_GRADIENT = new LinearGradientPaint(LED_TRACK_FRAME_START, LED_TRACK_FRAME_STOP, LED_TRACK_FRAME_FRACTIONS, LED_TRACK_FRAME_COLORS);
        G2.setPaint(LED_TRACK_FRAME_GRADIENT);
        G2.fill(LED_TRACK_FRAME);
        // NOTE(review): FRONT is built but never used below -- the main ring
        // is derived from BACK and FRONT_SUB only; confirm this is intended
        final java.awt.geom.Arc2D FRONT = new java.awt.geom.Arc2D.Double(java.awt.geom.Arc2D.PIE);
        FRONT.setFrame(IMAGE_WIDTH * 0.1121495327, IMAGE_HEIGHT * 0.1121495327, IMAGE_WIDTH * 0.7803738318, IMAGE_HEIGHT * 0.7803738318);
        FRONT.setAngleStart(START_ANGLE);
        FRONT.setAngleExtent(ANGLE_EXTEND);
        final Ellipse2D FRONT_SUB = new Ellipse2D.Double(IMAGE_WIDTH * 0.1822429907, IMAGE_HEIGHT * 0.1822429907, IMAGE_WIDTH * 0.6448598131, IMAGE_HEIGHT * 0.6448598131);
        final java.awt.geom.Area LED_TRACK_MAIN = new java.awt.geom.Area(BACK);
        LED_TRACK_MAIN.subtract(new java.awt.geom.Area(FRONT_SUB));
        // vertical gradient across the main (inner) ring
        final Point2D LED_TRACK_MAIN_START = new Point2D.Double(0, LED_TRACK_MAIN.getBounds2D().getMinY());
        final Point2D LED_TRACK_MAIN_STOP = new Point2D.Double(0, LED_TRACK_MAIN.getBounds2D().getMaxY());
        final float[] LED_TRACK_MAIN_FRACTIONS = { 0.0f, 1.0f };
        final Color[] LED_TRACK_MAIN_COLORS = { new Color(17, 17, 17, 255), new Color(51, 51, 51, 255) };
        Util.INSTANCE.validateGradientPoints(LED_TRACK_MAIN_START, LED_TRACK_MAIN_STOP);
        final LinearGradientPaint LED_TRACK_MAIN_GRADIENT = new LinearGradientPaint(LED_TRACK_MAIN_START, LED_TRACK_MAIN_STOP, LED_TRACK_MAIN_FRACTIONS, LED_TRACK_MAIN_COLORS);
        G2.setPaint(LED_TRACK_MAIN_GRADIENT);
        G2.fill(LED_TRACK_MAIN);
        // Draw the inactive leds: one LED rectangle is rotated around the
        // center every 5 degrees up to APEX_ANGLE
        final Point2D CENTER = new Point2D.Double(WIDTH / 2.0, WIDTH / 2.0);
        final Rectangle2D LED = new Rectangle2D.Double(WIDTH * 0.1168224299, WIDTH * 0.4859813084, WIDTH * 0.06074766355140187, WIDTH * 0.023364486);
        final Point2D LED_CENTER = new Point2D.Double(LED.getCenterX(), LED.getCenterY());
        final Color[] LED_COLORS = new Color[] { new Color(60, 60, 60, 255), new Color(50, 50, 50, 255) };
        final RadialGradientPaint LED_GRADIENT = new RadialGradientPaint(LED_CENTER, (float) (0.030373831775700934 * IMAGE_WIDTH), LED_FRACTIONS, LED_COLORS);
        G2.setPaint(LED_GRADIENT);
        final AffineTransform OLD_TRANSFORM = G2.getTransform();
        for (double angle = 0; angle <= APEX_ANGLE; angle += 5.0) {
            G2.rotate(Math.toRadians(angle + BARGRAPH_OFFSET), CENTER.getX(), CENTER.getY());
            G2.fill(LED);
            // restore the transform so each rotation is absolute, not cumulative
            G2.setTransform(OLD_TRANSFORM);
        }
        G2.dispose();
        return image;
    }
}
public class AccessorUtils { /** * Validates that the method annotated with the specified annotation has a single * argument of the expected type . */ public static void validateArgument ( Method method , Class < ? extends Annotation > annotationType , Class < ? > expectedParameterType ) { } }
if ( method . getParameterTypes ( ) . length != 1 || ! method . getParameterTypes ( ) [ 0 ] . equals ( expectedParameterType ) ) { throw new BeanCreationException ( String . format ( "Method %s with @%s MUST take a single argument of type %s" , method , annotationType . getName ( ) , expectedParameterType . getName ( ) ) ) ; }