signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class InequalityRule { /** * { @ inheritDoc } */ public boolean evaluate ( final LoggingEvent event , Map matches ) { } }
long first = 0 ; try { first = new Long ( RESOLVER . getValue ( field , event ) . toString ( ) ) . longValue ( ) ; } catch ( NumberFormatException nfe ) { return false ; } long second = 0 ; try { second = new Long ( value ) . longValue ( ) ; } catch ( NumberFormatException nfe ) { return false ; } boolean result = false ; if ( "<" . equals ( inequalitySymbol ) ) { result = first < second ; } else if ( ">" . equals ( inequalitySymbol ) ) { result = first > second ; } else if ( "<=" . equals ( inequalitySymbol ) ) { result = first <= second ; } else if ( ">=" . equals ( inequalitySymbol ) ) { result = first >= second ; } if ( result && matches != null ) { Set entries = ( Set ) matches . get ( field . toUpperCase ( ) ) ; if ( entries == null ) { entries = new HashSet ( ) ; matches . put ( field . toUpperCase ( ) , entries ) ; } entries . add ( String . valueOf ( first ) ) ; } return result ;
public class MultiViewOps { /** * Creates a trifocal tensor from two camera matrices . T < sub > i < / sub > < sup > jk < / sup > = a [ j , i ] * b [ k , 3 ] - a [ j , 3 ] * b [ k , i ] , * where a = P2 and b = P3. * IMPORTANT : It is assumed that the first camera has the following camera matrix P1 = [ I | 0 ] , * where I is an identify matrix . * @ param P2 Camera matrix for view 2 . 3x4 matrix * @ param P3 Camera matrix for view 3 . 3x4 matrix * @ param ret Storage for trifocal tensor . If null a new instance will be created . * @ return The trifocal tensor */ public static TrifocalTensor createTrifocal ( DMatrixRMaj P2 , DMatrixRMaj P3 , @ Nullable TrifocalTensor ret ) { } }
if ( ret == null ) ret = new TrifocalTensor ( ) ; for ( int i = 0 ; i < 3 ; i ++ ) { DMatrixRMaj T = ret . getT ( i ) ; int index = 0 ; for ( int j = 0 ; j < 3 ; j ++ ) { double a_left = P2 . get ( j , i ) ; double a_right = P2 . get ( j , 3 ) ; for ( int k = 0 ; k < 3 ; k ++ ) { T . data [ index ++ ] = a_left * P3 . get ( k , 3 ) - a_right * P3 . get ( k , i ) ; } } } return ret ;
public class AbstractListPreference { /** * Obtains the scrollable area of the preference ' s dialog from a specific typed array . * @ param typedArray * The typed array , the scrollable area should be obtained from , as an instance of the * class { @ link TypedArray } . The typed array may not be null */ private void obtainDialogScrollableArea ( @ NonNull final TypedArray typedArray ) { } }
int topIndex = typedArray . getInt ( R . styleable . DialogPreference_dialogScrollableAreaTop , - 1 ) ; ScrollableArea . Area top = null ; ScrollableArea . Area bottom = null ; if ( topIndex != - 1 ) { top = ScrollableArea . Area . fromIndex ( topIndex ) ; int bottomIndex = typedArray . getInt ( R . styleable . DialogPreference_dialogScrollableAreaBottom , - 1 ) ; if ( bottomIndex != - 1 ) { bottom = ScrollableArea . Area . fromIndex ( bottomIndex ) ; } } setDialogScrollableArea ( top , bottom != null ? bottom : top ) ;
public class SimpleGroovyClassDocAssembler { /** * hack warning ! fragile ! TODO find a better way */ private String getDefaultValue ( GroovySourceAST t ) { } }
GroovySourceAST child = ( GroovySourceAST ) t . getFirstChild ( ) ; if ( t . getNumberOfChildren ( ) != 4 ) return null ; for ( int i = 1 ; i < t . getNumberOfChildren ( ) ; i ++ ) { child = ( GroovySourceAST ) child . getNextSibling ( ) ; } GroovySourceAST nodeToProcess = child ; if ( child . getType ( ) != ANNOTATION_ARRAY_INIT && child . getNumberOfChildren ( ) > 0 ) { nodeToProcess = ( GroovySourceAST ) child . getFirstChild ( ) ; } return getChildTextFromSource ( nodeToProcess , ";" ) ;
public class UpdateIdentityPoolResult { /** * A list representing an Amazon Cognito user pool and its client ID . * @ param cognitoIdentityProviders * A list representing an Amazon Cognito user pool and its client ID . */ public void setCognitoIdentityProviders ( java . util . Collection < CognitoIdentityProvider > cognitoIdentityProviders ) { } }
if ( cognitoIdentityProviders == null ) { this . cognitoIdentityProviders = null ; return ; } this . cognitoIdentityProviders = new java . util . ArrayList < CognitoIdentityProvider > ( cognitoIdentityProviders ) ;
public class ComputeNodeReimageHeaders { /** * Set the time at which the resource was last modified . * @ param lastModified the lastModified value to set * @ return the ComputeNodeReimageHeaders object itself . */ public ComputeNodeReimageHeaders withLastModified ( DateTime lastModified ) { } }
if ( lastModified == null ) { this . lastModified = null ; } else { this . lastModified = new DateTimeRfc1123 ( lastModified ) ; } return this ;
public class InstanceAdminClient { /** * Sets the access control policy on an instance resource . Replaces any existing policy . * < p > Authorization requires ` spanner . instances . setIamPolicy ` on * [ resource ] [ google . iam . v1 . SetIamPolicyRequest . resource ] . * < p > Sample code : * < pre > < code > * try ( InstanceAdminClient instanceAdminClient = InstanceAdminClient . create ( ) ) { * String formattedResource = InstanceName . format ( " [ PROJECT ] " , " [ INSTANCE ] " ) ; * Policy policy = Policy . newBuilder ( ) . build ( ) ; * Policy response = instanceAdminClient . setIamPolicy ( formattedResource , policy ) ; * < / code > < / pre > * @ param resource REQUIRED : The resource for which the policy is being specified . ` resource ` is * usually specified as a path . For example , a Project resource is specified as * ` projects / { project } ` . * @ param policy REQUIRED : The complete policy to be applied to the ` resource ` . The size of the * policy is limited to a few 10s of KB . An empty policy is a valid policy but certain Cloud * Platform services ( such as Projects ) might reject them . * @ throws com . google . api . gax . rpc . ApiException if the remote call fails */ public final Policy setIamPolicy ( String resource , Policy policy ) { } }
SetIamPolicyRequest request = SetIamPolicyRequest . newBuilder ( ) . setResource ( resource ) . setPolicy ( policy ) . build ( ) ; return setIamPolicy ( request ) ;
public class Query {
    /**
     * Adds a single URL parameter to this query.
     * <code>See {@link Query#setQueryUrlParams(Map)}.</code>
     *
     * @param key the key of the url parameter
     * @param value the value of the url parameter
     * @return this query, for call chaining
     */
    public Query addQueryUrlParams(String key, String value) {
        // Mutates the existing parameter map in place; an existing key is overwritten.
        this.queryUrlParams.put(key, value);
        return this;
    }
}
public class CarrierAccount {
    /**
     * Update this carrier account.
     *
     * @param params the fields to update
     * @return the updated carrier account
     * @throws EasyPostException if the API request fails
     */
    public CarrierAccount update(Map<String, Object> params) throws EasyPostException {
        // Delegates to the two-argument overload with a null API key
        // (presumably falls back to the configured default key — verify in overload).
        return this.update(params, null);
    }
}
public class EventCollector { /** * Execute all filters for the event . * @ param event the event * @ return true , if successful */ private boolean filter ( Event event ) { } }
if ( null == filters ) return false ; for ( EventFilter filter : filters ) { if ( filter . filter ( event ) ) { return true ; } } return false ;
public class SrcGen4J { /** * Incremental parse and generate . The class loader of this class will be used . * @ param files * Set of files to parse for the model . * @ throws ParseException * Error during parse process . * @ throws GenerateException * Error during generation process . */ public final void execute ( @ NotNull final Set < File > files ) throws ParseException , GenerateException { } }
Contract . requireArgNotNull ( "files" , files ) ; LOG . info ( "Executing incremental build ({} files)" , files . size ( ) ) ; if ( LOG . isDebugEnabled ( ) ) { for ( final File file : files ) { LOG . debug ( file . toString ( ) ) ; } } if ( files . size ( ) == 0 ) { // Nothing to do . . . return ; } // Parse models & generate final Parsers parsers = config . getParsers ( ) ; if ( parsers == null ) { LOG . warn ( "No parsers element" ) ; } else { final List < ParserConfig > parserConfigs = parsers . getList ( ) ; if ( parserConfigs == null ) { LOG . warn ( "No parsers configured" ) ; } else { for ( final ParserConfig pc : parserConfigs ) { final Parser < Object > pars = pc . getParser ( ) ; if ( pars instanceof IncrementalParser ) { final IncrementalParser < ? > parser = ( IncrementalParser < ? > ) pars ; final Object model = parser . parse ( files ) ; final List < GeneratorConfig > generatorConfigs = config . findGeneratorsForParser ( pc . getName ( ) ) ; for ( final GeneratorConfig gc : generatorConfigs ) { final Generator < Object > generator = gc . getGenerator ( ) ; generator . generate ( model , true ) ; } } else { LOG . debug ( "No incremental parser: {}" , pars . getClass ( ) . getName ( ) ) ; } } } }
public class HttpServletResponseWrapper {
    /**
     * The default behavior of this method is to call addHeader(String name, String value)
     * on the wrapped response object.
     */
    @Override
    public void addHeader(String name, String value) {
        // Pure delegation to the wrapped HttpServletResponse.
        this._getHttpServletResponse().addHeader(name, value);
    }
}
public class AbstractClassFileWriter { /** * Pushes type arguments onto the stack . * @ param generatorAdapter The generator adapter * @ param types The type references */ protected static void pushTypeArguments ( GeneratorAdapter generatorAdapter , Map < String , Object > types ) { } }
if ( types == null || types . isEmpty ( ) ) { generatorAdapter . visitInsn ( ACONST_NULL ) ; return ; } int len = types . size ( ) ; // Build calls to Argument . create ( . . . ) pushNewArray ( generatorAdapter , Argument . class , len ) ; int i = 0 ; for ( Map . Entry < String , Object > entry : types . entrySet ( ) ) { // the array index generatorAdapter . push ( i ) ; String typeParameterName = entry . getKey ( ) ; Object value = entry . getValue ( ) ; if ( value instanceof Map ) { buildArgumentWithGenerics ( generatorAdapter , typeParameterName , ( Map ) value ) ; } else { buildArgument ( generatorAdapter , typeParameterName , value ) ; } // store the type reference generatorAdapter . visitInsn ( AASTORE ) ; // if we are not at the end of the array duplicate array onto the stack if ( i != ( len - 1 ) ) { generatorAdapter . visitInsn ( DUP ) ; } i ++ ; }
public class SortedBugCollection { /** * Write the BugCollection to given output stream as XML . The output stream * will be closed , even if an exception is thrown . * @ param out * the OutputStream to write to */ @ Override public void writeXML ( @ WillClose Writer out ) throws IOException { } }
assert project != null ; bugsPopulated ( ) ; XMLOutput xmlOutput ; // if ( project = = null ) throw new NullPointerException ( " No project " ) ; xmlOutput = new OutputStreamXMLOutput ( out ) ; writeXML ( xmlOutput ) ;
public class PoolListHeaders { /** * Set the time at which the resource was last modified . * @ param lastModified the lastModified value to set * @ return the PoolListHeaders object itself . */ public PoolListHeaders withLastModified ( DateTime lastModified ) { } }
if ( lastModified == null ) { this . lastModified = null ; } else { this . lastModified = new DateTimeRfc1123 ( lastModified ) ; } return this ;
public class DataTable {
    /**
     * getter for body - gets the body of the table that contains data
     *
     * @generated
     * @return value of the feature
     */
    public StringArray getBody() {
        // Generated UIMA accessor: validate the feature is present in the type system
        // before reading it through the low-level CAS API.
        if (DataTable_Type.featOkTst && ((DataTable_Type) jcasType).casFeat_body == null)
            jcasType.jcas.throwFeatMissing("body", "ch.epfl.bbp.uima.types.DataTable");
        // Resolve the feature-structure reference stored at this annotation's address.
        return (StringArray) (jcasType.ll_cas.ll_getFSForRef(
                jcasType.ll_cas.ll_getRefValue(addr, ((DataTable_Type) jcasType).casFeatCode_body)));
    }
}
public class Config { /** * Sets the map of map event journal configurations , mapped by config name . * The config name may be a pattern with which the configuration will be * obtained in the future . * @ param eventJournalConfigs the map event journal configuration map to set * @ return this config instance */ public Config setMapEventJournalConfigs ( Map < String , EventJournalConfig > eventJournalConfigs ) { } }
this . mapEventJournalConfigs . clear ( ) ; this . mapEventJournalConfigs . putAll ( eventJournalConfigs ) ; for ( Entry < String , EventJournalConfig > entry : eventJournalConfigs . entrySet ( ) ) { entry . getValue ( ) . setMapName ( entry . getKey ( ) ) ; } return this ;
public class TimeParser { /** * returns the instance . * @ return Parser */ public static final Parser < Date > instance ( ) { } }
// NOPMD it ' s thread save ! if ( TimeParser . instanceParser == null ) { synchronized ( TimeParser . class ) { if ( TimeParser . instanceParser == null ) { TimeParser . instanceParser = new TimeParser ( "HH:mm:ss" ) ; } } } return TimeParser . instanceParser ;
public class PDFDomTree { /** * Parses a PDF document and serializes the resulting DOM tree to an output . This requires * a DOM Level 3 capable implementation to be available . */ @ Override public void writeText ( PDDocument doc , Writer outputStream ) throws IOException { } }
try { DOMImplementationRegistry registry = DOMImplementationRegistry . newInstance ( ) ; DOMImplementationLS impl = ( DOMImplementationLS ) registry . getDOMImplementation ( "LS" ) ; LSSerializer writer = impl . createLSSerializer ( ) ; LSOutput output = impl . createLSOutput ( ) ; writer . getDomConfig ( ) . setParameter ( "format-pretty-print" , true ) ; output . setCharacterStream ( outputStream ) ; createDOM ( doc ) ; writer . write ( getDocument ( ) , output ) ; } catch ( ClassCastException e ) { throw new IOException ( "Error: cannot initialize the DOM serializer" , e ) ; } catch ( ClassNotFoundException e ) { throw new IOException ( "Error: cannot initialize the DOM serializer" , e ) ; } catch ( InstantiationException e ) { throw new IOException ( "Error: cannot initialize the DOM serializer" , e ) ; } catch ( IllegalAccessException e ) { throw new IOException ( "Error: cannot initialize the DOM serializer" , e ) ; }
public class PathParamArgs { /** * # hope jflute similer to UrlPatternAnalyzer ' s one so want to refactor ( 2018/10/30) */ public boolean isNumberTypeParameter ( int index ) { } }
// contains optional generic type if ( pathParamTypeList . size ( ) <= index ) { // avoid out of bounds return false ; } final Class < ? > parameterType = pathParamTypeList . get ( index ) ; if ( Number . class . isAssignableFrom ( parameterType ) ) { return true ; } if ( parameterType . isPrimitive ( ) && Stream . of ( long . class , int . class , short . class , byte . class , double . class , float . class ) . anyMatch ( numType -> numType . isAssignableFrom ( parameterType ) ) ) { // from pull request # 55 ( thanks ! ) return true ; } final Class < ? > genericType = optionalGenericTypeMap . get ( index ) ; return genericType != null && Number . class . isAssignableFrom ( genericType ) ;
public class HerokuAPI {
    /**
     * Update the list of buildpacks installed on an app.
     *
     * @param appName See {@link #listApps} for a list of apps that can be used.
     * @param buildpacks the new list of buildpack names or URLs.
     */
    public void updateBuildpackInstallations(String appName, List<String> buildpacks) {
        // Executes the update request against the API using the stored key.
        connection.execute(new BuildpackInstallationUpdate(appName, buildpacks), apiKey);
    }
}
public class ProxyFilter { /** * Get the { @ link ProxyLogicHandler } for a given session . * @ param session the session object * @ return the handler which will handle handshaking with the proxy */ private ProxyLogicHandler getProxyHandler ( final IoSession session ) { } }
ProxyLogicHandler handler = ( ( ProxyIoSession ) session . getAttribute ( ProxyIoSession . PROXY_SESSION ) ) . getHandler ( ) ; if ( handler == null ) { throw new IllegalStateException ( ) ; } // Sanity check if ( handler . getProxyIoSession ( ) . getProxyFilter ( ) != this ) { throw new IllegalArgumentException ( "Not managed by this filter." ) ; } return handler ;
public class PBaseNumber {
    /**
     * Between lower and upper values.
     *
     * @param lower the lower bind value
     * @param upper the upper bind value
     * @return the root query bean instance
     */
    public R between(int lower, int upper) {
        // Append the BETWEEN expression for this property, then return the root for chaining.
        expr().between(_name, lower, upper);
        return _root;
    }
}
public class ReconfigurableClient { /** * Closes the connection of a messaging client and terminates it properly . * @ param client the client ( never null ) * @ param errorMessage the error message to log in case of problem * @ param logger a logger */ static void terminateClient ( IMessagingClient client , String errorMessage , Logger logger ) { } }
try { logger . fine ( "The reconfigurable client is requesting its internal connection to be closed." ) ; if ( client != null ) client . closeConnection ( ) ; } catch ( Exception e ) { logger . warning ( errorMessage + " " + e . getMessage ( ) ) ; Utils . logException ( logger , e ) ; } finally { // " unregisterService " was not merged with " closeConnection " // on purpose . What is specific to JMX is restricted to this class // and this bundle . Sub - classes may use " closeConnection " without // any side effect on the JMX part . if ( client instanceof JmxWrapperForMessagingClient ) ( ( JmxWrapperForMessagingClient ) client ) . unregisterService ( ) ; }
public class BillableRevenueOverrides {
    /**
     * Sets the netBillableRevenueOverride value for this BillableRevenueOverrides.
     *
     * @param netBillableRevenueOverride
     *     The overridden {@link ReconciliationLineItemReport#netBillableRevenue}.
     *     <p>If the {@link ReconciliationLineItemReport} data is for a {@link ProposalLineItem}
     *     and the {@link ReconciliationLineItemReport#pricingModel} is {@link PricingModel#GROSS},
     *     then this value will be calculated using the {@link #billableRevenueOverride} and the
     *     proposal line item's billing settings. Otherwise, the value of this field will be the
     *     same as the {@link #billableRevenueOverride}.</p>
     *     <p>This value is read-only.</p>
     */
    public void setNetBillableRevenueOverride(
            com.google.api.ads.admanager.axis.v201902.Money netBillableRevenueOverride) {
        // Plain bean setter (generated Axis-style model class).
        this.netBillableRevenueOverride = netBillableRevenueOverride;
    }
}
public class BundleProcessor { /** * Creates the bundles in the destination directory * @ param servlet * the servlet * @ param bundleHandler * the bundles handler * @ param destDirPath * the destination directory path * @ param servletMapping * the mapping of the servlet * @ param keepUrlMapping * the flag indicating if we must keep the URL mapping * @ throws IOException * if an IO exception occurs * @ throws ServletException * if a servlet exception occurs */ protected void createBundles ( HttpServlet servlet , ResourceBundlesHandler bundleHandler , String destDirPath , String servletMapping , boolean keepUrlMapping ) throws IOException , ServletException { } }
List < JoinableResourceBundle > bundles = bundleHandler . getContextBundles ( ) ; Iterator < JoinableResourceBundle > bundleIterator = bundles . iterator ( ) ; MockServletResponse response = new MockServletResponse ( ) ; MockServletRequest request = new MockServletRequest ( JAWR_BUNDLE_PROCESSOR_CONTEXT_PATH ) ; MockServletSession session = new MockServletSession ( servlet . getServletContext ( ) ) ; request . setSession ( session ) ; String resourceType = servlet . getServletConfig ( ) . getInitParameter ( TYPE_INIT_PARAMETER ) ; if ( resourceType == null ) { resourceType = JawrConstant . JS_TYPE ; } // For the list of bundle defines , create the file associated while ( bundleIterator . hasNext ( ) ) { JoinableResourceBundle bundle = ( JoinableResourceBundle ) bundleIterator . next ( ) ; // Check if there is a resource file , which could be in conflict // with the bundle name URL url = servlet . getServletContext ( ) . getResource ( bundle . getId ( ) ) ; if ( url != null ) { logger . error ( "It is not recommended to use a bundle name which could be in conflict with a resource.\n" + "Please rename your bundle '" + bundle . getId ( ) + "' to avoid any issue" ) ; } List < Map < String , String > > allVariants = VariantUtils . getAllVariants ( bundle . getVariants ( ) ) ; if ( allVariants == null ) { allVariants = new ArrayList < Map < String , String > > ( ) ; } if ( allVariants . isEmpty ( ) ) { allVariants . add ( new HashMap < String , String > ( ) ) ; } // Creates the bundle file for each local variant for ( Iterator < Map < String , String > > it = allVariants . iterator ( ) ; it . hasNext ( ) ; ) { Map < String , String > variantMap = ( Map < String , String > ) it . next ( ) ; List < RenderedLink > linksToBundle = createLinkToBundle ( bundleHandler , bundle . getId ( ) , resourceType , variantMap ) ; for ( Iterator < RenderedLink > iteratorLinks = linksToBundle . iterator ( ) ; iteratorLinks . hasNext ( ) ; ) { RenderedLink renderedLink = iteratorLinks . 
next ( ) ; String path = renderedLink . getLink ( ) ; // Force the debug mode of the config to match what was used // in the generated link JawrConfig config = bundleHandler . getConfig ( ) ; config . setDebugModeOn ( renderedLink . isDebugMode ( ) ) ; String finalBundlePath = null ; if ( keepUrlMapping ) { finalBundlePath = path ; } else { finalBundlePath = getFinalBundlePath ( path , config , variantMap ) ; } // Sets the request URL setRequestUrl ( request , variantMap , path , config ) ; // We can ' t use path for generated resources because it ' s // not a valid file path ( / jawr _ generator . js ? xxx . . . . ) if ( ! ( path . indexOf ( "?" ) != - 1 ) || ! keepUrlMapping ) { File bundleFile = new File ( destDirPath , finalBundlePath ) ; createBundleFile ( servlet , response , request , path , bundleFile , servletMapping ) ; } } } }
public class JavaFxSubscriptions { /** * Create a Disposable that always runs < code > unsubscribe < / code > in the event dispatch thread . * @ param unsubscribe the runnable to be performed in the ui thread at un - subscription * @ return an Disposable that always runs < code > unsubscribe < / code > in the event dispatch thread . */ public static Disposable unsubscribeInEventDispatchThread ( final Runnable unsubscribe ) { } }
return Disposables . fromRunnable ( ( ) -> { if ( Platform . isFxApplicationThread ( ) ) { unsubscribe . run ( ) ; } else { final Scheduler . Worker inner = JavaFxScheduler . platform ( ) . createWorker ( ) ; inner . schedule ( ( ) -> { unsubscribe . run ( ) ; inner . dispose ( ) ; } ) ; } } ) ;
public class EmbeddedNeo4jEntityQueries { /** * Find the node corresponding to an entity . * @ param executionEngine the { @ link GraphDatabaseService } used to run the query * @ param columnValues the values in { @ link org . hibernate . ogm . model . key . spi . EntityKey # getColumnValues ( ) } * @ return the corresponding node */ public Node findEntity ( GraphDatabaseService executionEngine , Object [ ] columnValues ) { } }
Map < String , Object > params = params ( columnValues ) ; Result result = executionEngine . execute ( getFindEntityQuery ( ) , params ) ; return singleResult ( result ) ;
public class BasicAnnotationProcessor { /** * Adds { @ code element } and its enclosed elements to { @ code annotatedElements } if they are * annotated with any annotations in { @ code annotationClasses } . Does not traverse to member types * of { @ code element } , so that if { @ code Outer } is passed in the example below , looking for * { @ code @ X } , then { @ code Outer } , { @ code Outer . foo } , and { @ code Outer . foo ( ) } will be added to the * multimap , but neither { @ code Inner } nor its members will . * < pre > < code > * { @ literal @ } X class Outer { * { @ literal @ } X Object foo ; * { @ literal @ } X void foo ( ) { } * { @ literal @ } X static class Inner { * { @ literal @ } X Object bar ; * { @ literal @ } X void bar ( ) { } * < / code > < / pre > */ private static void findAnnotatedElements ( Element element , Set < ? extends Class < ? extends Annotation > > annotationClasses , ImmutableSetMultimap . Builder < Class < ? extends Annotation > , Element > annotatedElements ) { } }
for ( Element enclosedElement : element . getEnclosedElements ( ) ) { if ( ! enclosedElement . getKind ( ) . isClass ( ) && ! enclosedElement . getKind ( ) . isInterface ( ) ) { findAnnotatedElements ( enclosedElement , annotationClasses , annotatedElements ) ; } } // element . getEnclosedElements ( ) does NOT return parameter elements if ( element instanceof ExecutableElement ) { for ( Element parameterElement : ( ( ExecutableElement ) element ) . getParameters ( ) ) { findAnnotatedElements ( parameterElement , annotationClasses , annotatedElements ) ; } } for ( Class < ? extends Annotation > annotationClass : annotationClasses ) { if ( isAnnotationPresent ( element , annotationClass ) ) { annotatedElements . put ( annotationClass , element ) ; } }
public class IntegerRendererWithoutSeparator {
    /**
     * Returns the lazily-created singleton instance.
     *
     * @return Renderer
     */
    public static final Renderer<Integer> instance() {
        // The original used double-checked locking, which is unsafe unless
        // instanceRenderer is declared volatile (not visible here — TODO confirm).
        // Performing the whole check under the class lock is race-free, and the
        // lock cost on this accessor is negligible.
        synchronized (IntegerRendererWithoutSeparator.class) {
            if (IntegerRendererWithoutSeparator.instanceRenderer == null) {
                IntegerRendererWithoutSeparator.instanceRenderer = new IntegerRendererWithoutSeparator();
            }
            return IntegerRendererWithoutSeparator.instanceRenderer;
        }
    }
}
public class FeedCommProcessor { /** * Call this when you know the feed comm processor object will never be used again * and thus the ping executor will also never be used again . */ private void destroyPingExecutor ( ) { } }
synchronized ( pingExecutor ) { if ( ! pingExecutor . isShutdown ( ) ) { try { log . debugf ( "Shutting down WebSocket ping executor" ) ; pingExecutor . shutdown ( ) ; if ( ! pingExecutor . awaitTermination ( 1 , TimeUnit . SECONDS ) ) { pingExecutor . shutdownNow ( ) ; } } catch ( Throwable t ) { log . warnf ( "Cannot shut down WebSocket ping executor. Cause=%s" , t . toString ( ) ) ; } } }
public class Anima { /** * Batch delete model * @ param model model class type * @ param ids mode primary id array * @ param < T > * @ param < S > */ @ SafeVarargs public static < T extends Model , S extends Serializable > void deleteBatch ( Class < T > model , S ... ids ) { } }
atomic ( ( ) -> Arrays . stream ( ids ) . forEach ( new AnimaQuery < > ( model ) :: deleteById ) ) . catchException ( e -> log . error ( "Batch save model error, message: {}" , e ) ) ;
public class CurationManager { /** * Get the stored harvest configuration from storage for the indicated * object . * @ param oid * The object we want config for */ private JsonSimple getConfigFromStorage ( String oid ) { } }
String configOid = null ; String configPid = null ; // Get our object and look for its config info try { DigitalObject object = storage . getObject ( oid ) ; Properties metadata = object . getMetadata ( ) ; configOid = metadata . getProperty ( "jsonConfigOid" ) ; configPid = metadata . getProperty ( "jsonConfigPid" ) ; } catch ( StorageException ex ) { log . error ( "Error accessing object '{}' in storage: " , oid , ex ) ; return null ; } // Validate if ( configOid == null || configPid == null ) { log . error ( "Unable to find configuration for OID '{}'" , oid ) ; return null ; } // Grab the config from storage try { DigitalObject object = storage . getObject ( configOid ) ; Payload payload = object . getPayload ( configPid ) ; try { return new JsonSimple ( payload . open ( ) ) ; } catch ( IOException ex ) { log . error ( "Error accessing config '{}' in storage: " , configOid , ex ) ; } finally { payload . close ( ) ; } } catch ( StorageException ex ) { log . error ( "Error accessing object in storage: " , ex ) ; } // Something screwed the pooch return null ;
public class MetricZipperTransform { /** * Merges data points . * @ param originalDatapoints The original data points . * @ param baseDatapoints The base data points . * @ return The merged data points . */ public Map < Long , Double > zip ( Map < Long , Double > originalDatapoints , Map < Long , Double > baseDatapoints ) { } }
SystemAssert . requireArgument ( baseDatapoints != null && ! baseDatapoints . isEmpty ( ) , "Zipper transform requires valid baseDatapoints from base metric!" ) ; Map < Long , Double > zippedDP = new HashMap < > ( ) ; for ( Map . Entry < Long , Double > originalDP : originalDatapoints . entrySet ( ) ) { Long originalKey = originalDP . getKey ( ) ; Double originalVal = originalDP . getValue ( ) ; // if base datapoints doesn ' t have the key , give it null Double baseVal = baseDatapoints . containsKey ( originalKey ) ? baseDatapoints . get ( originalKey ) : null ; zippedDP . put ( originalKey , this . valueZipper . zip ( originalVal , baseVal ) ) ; } // if a point exists in the baseDP but does not exist in the original set , // then only add it to the result when fullJoinIndicator is true . if ( fulljoinIndicator ) { for ( Map . Entry < Long , Double > baseDP : baseDatapoints . entrySet ( ) ) { Long baseDPKey = baseDP . getKey ( ) ; if ( ! zippedDP . containsKey ( baseDPKey ) ) { zippedDP . put ( baseDPKey , this . valueZipper . zip ( null , baseDP . getValue ( ) ) ) ; } } } return zippedDP ;
public class AsciiSequenceView { /** * { @ inheritDoc } */ public char charAt ( final int index ) { } }
if ( index < 0 || index >= length ) { throw new StringIndexOutOfBoundsException ( "index=" + index + " length=" + length ) ; } return ( char ) buffer . getByte ( offset + index ) ;
public class JdbcControlImpl { /** * Get a connection from a DataSource . * @ param jndiName Specifed in the subclasse ' s ConnectionDataSource annotation * @ param jndiFactory Specified in the subclasse ' s ConnectionDataSource Annotation . * @ return null if a connection cannot be established * @ throws SQLException */ private Connection getConnectionFromDataSource ( String jndiName , Class < ? extends JdbcControl . JndiContextFactory > jndiFactory ) throws SQLException { } }
Connection con = null ; try { JndiContextFactory jf = ( JndiContextFactory ) jndiFactory . newInstance ( ) ; Context jndiContext = jf . getContext ( ) ; _dataSource = ( DataSource ) jndiContext . lookup ( jndiName ) ; con = _dataSource . getConnection ( ) ; } catch ( IllegalAccessException iae ) { throw new ControlException ( "IllegalAccessException:" , iae ) ; } catch ( InstantiationException ie ) { throw new ControlException ( "InstantiationException:" , ie ) ; } catch ( NamingException ne ) { throw new ControlException ( "NamingException:" , ne ) ; } return con ;
public class Json {
    /**
     * Quicker method for loading a json string into a <code>Map</code> of <code>String</code>
     * keys and <code>Object</code> values. This method does not use information about
     * {@link JsonSerializable} and instead loads into basic data types.
     *
     * @param json the json to load from
     * @return the map representing the json object in the string
     * @throws IOException Something went wrong in loading
     */
    public static Map<String, Object> qloads(@NonNull Resource json) throws IOException {
        // Read the resource fully, trim surrounding whitespace, delegate to the String overload.
        return qloads(json.readToString().trim());
    }
}
public class ConfigUtil { /** * Convert the value to an integer . * @ param value The value instance . * @ return The integer value . */ public static int asInteger ( Object value ) { } }
if ( value instanceof Number ) { return ( ( Number ) value ) . intValue ( ) ; } else if ( value instanceof Numeric ) { return ( ( Numeric ) value ) . asInteger ( ) ; } else if ( value instanceof Boolean ) { return ( ( Boolean ) value ) ? 1 : 0 ; } else if ( value instanceof CharSequence ) { try { return Integer . parseInt ( value . toString ( ) ) ; } catch ( NumberFormatException nfe ) { throw new IncompatibleValueException ( "Unable to parse string \"" + Strings . escape ( value . toString ( ) ) + "\" to an int" , nfe ) ; } } else if ( value instanceof Date ) { // Convert date timestamp to seconds since epoch . return ( int ) ( ( ( Date ) value ) . getTime ( ) / 1000 ) ; } throw new IncompatibleValueException ( "Unable to convert " + value . getClass ( ) . getSimpleName ( ) + " to an int" ) ;
public class ArrayUtils { /** * Searches the array for the first element accepted by the { @ link Filter } . * @ param < T > Class type of the elements in the array . * @ param array array to search . * @ param filter { @ link Filter } used to find the first element in the array accepted by the { @ link Filter } . * @ return the first element from the array accepted by the { @ link Filter } or null if no such element is found . * @ throws IllegalArgumentException if { @ link Filter } is null . * @ see org . cp . elements . lang . Filter * @ see # findAll ( Object [ ] , Filter ) */ public static < T > T findOne ( T [ ] array , Filter < T > filter ) { } }
Assert . notNull ( filter , "Filter is required" ) ; return stream ( nullSafeArray ( array ) ) . filter ( filter :: accept ) . findFirst ( ) . orElse ( null ) ;
public class AbstractGraph { /** * { @ inheritDoc } */ public int degree ( int vertex ) { } }
EdgeSet < T > e = getEdgeSet ( vertex ) ; return ( e == null ) ? 0 : e . size ( ) ;
public class RuntimeUtil { /** * 获取命令执行结果 , 获取后销毁进程 * @ param process { @ link Process } 进程 * @ param charset 编码 * @ return 命令执行结果列表 * @ since 3.1.2 */ public static String getResult ( Process process , Charset charset ) { } }
InputStream in = null ; try { in = process . getInputStream ( ) ; return IoUtil . read ( in , charset ) ; } finally { IoUtil . close ( in ) ; destroy ( process ) ; }
public class EditDistance {
    /**
     * Computes the Damerau-Levenshtein distance (edits including adjacent
     * transpositions) between the stored {@code baseString} and {@code string2},
     * or returns -1 if the distance exceeds {@code maxDistance}.
     *
     * <p>Uses a banded single-row dynamic program over the class-level buffers
     * {@code v0} (previous row) and {@code v2} (transposition row). Appears
     * ported from a C# implementation (see the surviving "??" comment).
     * NOTE(review): not thread-safe — {@code v0}/{@code v2} are shared instance
     * state; confirm single-threaded use by callers.</p>
     *
     * @param string2     the string to compare against {@code baseString}
     * @param maxDistance maximum distance of interest; larger results yield -1.
     *                    Values &lt; 0 or &gt; the longer length mean "no limit".
     * @return the edit distance, or -1 if it is greater than {@code maxDistance}
     */
    public int DamerauLevenshteinDistance(String string2, int maxDistance) {
        // Null handling: distance to null is the other string's length.
        if (baseString == null) return string2 == null ? 0 : string2.length(); // string2 ?? "").Length;
        if (string2 == null || string2.isEmpty()) return baseString.length();
        if (maxDistance == 0) return baseString.equals(string2) ? 0 : -1;
        // if strings of different lengths, ensure shorter string is in string1. This can result in a little
        // faster speed by spending more time spinning just the inner loop during the main processing.
        String string1;
        if (baseString.length() > string2.length()) {
            string1 = string2;
            string2 = baseString;
        } else {
            string1 = baseString;
        }
        int sLen = string1.length(); // this is also the minimum length of the two strings
        int tLen = string2.length();
        // suffix common to both strings can be ignored
        while ((sLen > 0) && (string1.charAt(sLen - 1) == string2.charAt(tLen - 1))) {
            sLen--;
            tLen--;
        }
        int start = 0;
        if ((string1.charAt(0) == string2.charAt(0)) || (sLen == 0)) {
            // if there's a shared prefix, or all of string1 matches string2's suffix
            // prefix common to both strings can be ignored
            while ((start < sLen) && (string1.charAt(start) == string2.charAt(start))) start++;
            sLen -= start; // length of the part excluding common prefix and suffix
            tLen -= start;
            // if all of shorter string matches prefix and/or suffix of longer string, then
            // edit distance is just the delete of additional characters present in longer string
            if (sLen == 0) return tLen;
            string2 = string2.substring(start, start + tLen); // faster than string2[start + j] in inner loop below
        }
        int lenDiff = tLen - sLen;
        if ((maxDistance < 0) || (maxDistance > tLen)) {
            maxDistance = tLen;
        } else if (lenDiff > maxDistance) return -1;
        // (Re)size or reset the shared row buffers.
        if (tLen > v0.length) {
            v0 = new int[tLen];
            v2 = new int[tLen];
        } else {
            for (int i = 0; i < tLen; i++) v2[i] = 0; // substituting Array.Clear(v2, 0, tLen)
        }
        // Initialize the first DP row, saturating cells beyond the band.
        int j;
        for (j = 0; j < maxDistance; j++) v0[j] = j + 1;
        for (; j < tLen; j++) v0[j] = maxDistance + 1;
        int jStartOffset = maxDistance - (tLen - sLen);
        boolean haveMax = maxDistance < tLen;
        int jStart = 0;
        int jEnd = maxDistance;
        char sChar = string1.charAt(0);
        int current = 0;
        for (int i = 0; i < sLen; i++) {
            char prevsChar = sChar;
            sChar = string1.charAt(start + i);
            char tChar = string2.charAt(0);
            int left = i;
            current = left + 1;
            int nextTransCost = 0;
            // no need to look beyond window of lower right diagonal - maxDistance cells (lower right diag is i - lenDiff)
            // and the upper left diagonal + maxDistance cells (upper left is i)
            jStart += (i > jStartOffset) ? 1 : 0;
            jEnd += (jEnd < tLen) ? 1 : 0;
            for (j = jStart; j < jEnd; j++) {
                int above = current;
                int thisTransCost = nextTransCost;
                nextTransCost = v2[j];
                v2[j] = current = left; // cost of diagonal (substitution)
                left = v0[j]; // left now equals current cost (which will be diagonal at next iteration)
                char prevtChar = tChar;
                tChar = string2.charAt(j);
                if (sChar != tChar) {
                    if (left < current) current = left; // insertion
                    if (above < current) current = above; // deletion
                    current++;
                    if ((i != 0) && (j != 0) && (sChar == prevtChar) && (prevsChar == tChar)) {
                        thisTransCost++;
                        if (thisTransCost < current) current = thisTransCost; // transposition
                    }
                }
                v0[j] = current;
            }
            // Early bail-out: the band's best cell already exceeds the limit.
            if (haveMax && (v0[i + lenDiff] > maxDistance)) return -1;
        }
        return (current <= maxDistance) ? current : -1;
    }
}
public class UserAPI { /** * 标签管理 获取公众号已创建的标签 * @ since 2.8.1 * @ param access _ token access _ token * @ return result */ public static TagsGetResult tagsGet ( String access_token ) { } }
HttpUriRequest httpUriRequest = RequestBuilder . get ( ) . setUri ( BASE_URI + "/cgi-bin/tags/get" ) . addParameter ( PARAM_ACCESS_TOKEN , API . accessToken ( access_token ) ) . build ( ) ; return LocalHttpClient . executeJsonResult ( httpUriRequest , TagsGetResult . class ) ;
public class BigComplex { /** * Calculates the subtraction of the given complex value from this complex number . * < p > This methods < strong > does not < / strong > modify this instance . < / p > * @ param value the { @ link BigComplex } value to subtract * @ return the calculated { @ link BigComplex } result */ public BigComplex subtract ( BigComplex value ) { } }
return valueOf ( re . subtract ( value . re ) , im . subtract ( value . im ) ) ;
public class ObjectFactory { /** * Create an instance of { @ link JAXBElement } { @ code < } { @ link String } { @ code > } } */ @ XmlElementDecl ( namespace = "http://www.ibm.com/websphere/wim" , name = "roomNumber" ) public JAXBElement < String > createRoomNumber ( String value ) { } }
return new JAXBElement < String > ( _RoomNumber_QNAME , String . class , null , value ) ;
public class DoradusClient { /** * Set credentials such as tenant , username , password for use with a Doradus application . * @ param tenant tenant name * @ param username user name use when accessing applications within the specified tenant . * @ param userpassword user password */ public void setCredentials ( String tenant , String username , String userpassword ) { } }
Credentials credentials = new Credentials ( tenant , username , userpassword ) ; restClient . setCredentials ( credentials ) ;
public class InfluxMeterRegistry { /** * VisibleForTesting */ Stream < String > writeGauge ( Meter . Id id , Double value ) { } }
if ( Double . isFinite ( value ) ) { return Stream . of ( influxLineProtocol ( id , "gauge" , Stream . of ( new Field ( "value" , value ) ) ) ) ; } return Stream . empty ( ) ;
public class UndoCommand { /** * { @ inheritDoc } */ @ Override protected void perform ( final Wave wave ) throws CommandException { } }
this . stackName = wave . get ( UndoRedoWaves . STACK_NAME ) ; getService ( UndoRedoService . class , this . stackName ) . undo ( ) ;
public class GinjectorFragmentOutputter {
    /**
     * Outputs all the top-level methods and fields of the class, and commits the
     * writer. Must be the last method invoked on this object; a second call is
     * logged as an error and ignored.
     */
    void commit() {
        // Guard against double commit: emitting the members twice would
        // produce invalid source.
        if (committed) {
            errorManager.logError("Committed the fragment for %s twice.", fragmentPackageName);
            return;
        }
        committed = true;
        // Write the field where the enclosing injector is stored.
        writer.beginJavaDocComment();
        writer.print("Field for the enclosing injector.");
        writer.endJavaDocComment();
        writer.println("private final %s injector;", ginjectorClassName);
        // Write the constructor, which takes the enclosing injector and does
        // nothing but store it in a field. It's important that the constructor has
        // no other side-effects; in particular, it must not call any injector
        // methods, since the injector might not be fully constructed.
        sourceWriteUtil.writeMethod(writer,
                String.format("public %s(%s injector)", fragmentClassName, ginjectorClassName),
                "this.injector = injector;");
        if (hasEagerSingletonInitialization()) {
            // Write a method to initialize eager singletons.
            sourceWriteUtil.writeMethod(writer,
                    "public void initializeEagerSingletons()",
                    initializeEagerSingletonsBody.toString());
        }
        if (hasStaticInjectionInitialization()) {
            // Write a method to initialize static injection.
            sourceWriteUtil.writeMethod(writer,
                    "public void initializeStaticInjections()",
                    initializeStaticInjectionsBody.toString());
        }
        // Flush everything that was written above.
        writer.commit(logger);
    }
}
public class WrappingUtils { /** * Wraps the given drawable with a new { @ link MatrixDrawable } . * < p > If the provided drawable or matrix is null , the given drawable is returned without * being wrapped . * @ return the wrapping matrix drawable , or the original drawable if the wrapping didn ' t * take place */ @ Nullable static Drawable maybeWrapWithMatrix ( @ Nullable Drawable drawable , @ Nullable Matrix matrix ) { } }
if ( drawable == null || matrix == null ) { return drawable ; } return new MatrixDrawable ( drawable , matrix ) ;
public class StaticLog {
    /**
     * Log a message at ERROR level.
     *
     * @param log       the logger to write to
     * @param format    message template where {} marks a placeholder
     * @param arguments values substituted for the placeholders
     */
    public static void error(Log log, String format, Object... arguments) {
        // Delegate to the overload that also takes a throwable, passing none.
        error(log, null, format, arguments);
    }
}
public class DataChecksum { /** * This constructucts a DataChecksum by reading HEADER _ LEN bytes from * input stream < i > in < / i > */ public static DataChecksum newDataChecksum ( DataInputStream in ) throws IOException { } }
int type = in . readByte ( ) ; int bpc = in . readInt ( ) ; DataChecksum summer = newDataChecksum ( type , bpc ) ; if ( summer == null ) { throw new IOException ( "Could not create DataChecksum of type " + type + " with bytesPerChecksum " + bpc ) ; } return summer ;
public class ArrayTypes { /** * / * @ Nullable */ private ArrayTypeReference doTryConvertToArray ( ParameterizedTypeReference typeReference ) { } }
LightweightTypeReference parameterizedIterable = typeReference . getSuperType ( Iterable . class ) ; if ( parameterizedIterable != null ) { ITypeReferenceOwner owner = typeReference . getOwner ( ) ; if ( parameterizedIterable . isRawType ( ) ) { // return Object [ ] List < LightweightTypeReference > superTypes = parameterizedIterable . getSuperTypes ( ) ; if ( superTypes . isEmpty ( ) ) { return null ; } LightweightTypeReference objectType = superTypes . get ( 0 ) ; ArrayTypeReference array = owner . newArrayTypeReference ( objectType ) ; return array ; } else { LightweightTypeReference componentType = parameterizedIterable . getTypeArguments ( ) . get ( 0 ) . getUpperBoundSubstitute ( ) ; ArrayTypeReference array = owner . newArrayTypeReference ( componentType ) ; return array ; } } return null ;
public class ComponentsInner {
    /**
     * Purges data in an Application Insights component by a set of user-defined filters.
     *
     * @param resourceGroupName The name of the resource group.
     * @param resourceName The name of the Application Insights component resource.
     * @param body Describes the body of a request to purge data in a single table of an Application Insights component
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the ComponentPurgeResponseInner object
     */
    public Observable<ComponentPurgeResponseInner> purgeAsync(String resourceGroupName, String resourceName, ComponentPurgeBody body) {
        // Delegate to the ServiceResponse variant and unwrap the payload for
        // callers that do not need HTTP-level response details.
        return purgeWithServiceResponseAsync(resourceGroupName, resourceName, body).map(new Func1<ServiceResponse<ComponentPurgeResponseInner>, ComponentPurgeResponseInner>() {
            @Override
            public ComponentPurgeResponseInner call(ServiceResponse<ComponentPurgeResponseInner> response) {
                return response.body();
            }
        });
    }
}
public class ViewMgr { /** * We may have to come out a better method . */ public Collection < String > getViewNamesByTable ( String tblName , Transaction tx ) { } }
Collection < String > result = new LinkedList < String > ( ) ; TableInfo ti = tblMgr . getTableInfo ( VCAT , tx ) ; RecordFile rf = ti . open ( tx , true ) ; rf . beforeFirst ( ) ; while ( rf . next ( ) ) { Parser parser = new Parser ( ( String ) rf . getVal ( VCAT_VDEF ) . asJavaVal ( ) ) ; if ( parser . queryCommand ( ) . tables ( ) . contains ( tblName ) ) result . add ( ( String ) rf . getVal ( VCAT_VNAME ) . asJavaVal ( ) ) ; } rf . close ( ) ; return result ;
public class Util {
    /**
     * Get this image's full (relative) filename.
     *
     * <p>NOTE(review): the javadoc parameter order below follows the original
     * source and does not fully match the signature — confirm against callers.</p>
     *
     * @param strFilename      the filename of this image (if no path, the sub-directory is prepended)
     * @param strSubDirectory  the sub-directory
     * @param fileLocation     the base location prepended to relative names
     * @param fixRelativePaths whether to collapse "/.." segments in the result
     * @return the full (relative) filename for this image
     */
    public static String getFullFilename(String strFilename, String strSubDirectory, String fileLocation, boolean fixRelativePaths) {
        // Last path segment of fileLocation (the text after the final '/').
        String localLocation = fileLocation.substring(fileLocation.lastIndexOf('/') + 1);
        // Case 1: bare filename with a sub-directory -> location/subdir/name.
        if (((strFilename.indexOf(File.separator) == -1) && (strFilename.indexOf('/') == -1)) && (strSubDirectory != null))
            strFilename = fileLocation + File.separator + strSubDirectory + File.separator + strFilename;
        // Case 2: path that does not mention the local location -> prefix it.
        else if ((strFilename.indexOf(localLocation + File.separator) == -1) && (strFilename.indexOf(localLocation + "/") == -1))
            strFilename = fileLocation + File.separator + strFilename;
        // Case 3: path starting with the local location -> replace that prefix
        // with the full fileLocation.
        else if ((strFilename.indexOf(localLocation + File.separator) == 0) || (strFilename.indexOf(localLocation + "/") == 0))
            strFilename = fileLocation + strFilename.substring(localLocation.length());
        if (fixRelativePaths) {
            // Remove '/..' segments together with the preceding path component.
            while (strFilename.indexOf("/..") != -1) {
                int startPath = strFilename.indexOf("/..");
                int startPrev = strFilename.lastIndexOf("/", startPath - 1);
                if (startPrev == -1)
                    break;
                strFilename = strFilename.substring(0, startPrev) + strFilename.substring(startPath + 3);
            }
        }
        return strFilename;
    }
}
public class MultiHashTable {
    /**
     * Collapses this multi-table into a plain {@link HashTable}, converting
     * each key's list of values with the given function. The key itself is
     * irrelevant to the conversion.
     *
     * @param conversion converts a key's value list into the table entry
     * @return the converted hash table
     */
    @Nonnull
    public <B> HashTable<K, B> toHashTable(@Nonnull F<ImmutableList<V>, B> conversion) {
        // Delegate to the (key, values) overload, ignoring the key.
        return this.toHashTable((key, values) -> conversion.apply(values));
    }
}
public class SyncServerGroupOperationHandler {
    /**
     * For the local model we include both the original as well as the remote model. The diff will automatically remove
     * not used configuration.
     *
     * @param context the operation context
     * @param remote the remote model
     * @param remoteExtensions the extension registry
     * @return the ignored-resource registry combining required configuration from both models
     */
    @Override
    Transformers.ResourceIgnoredTransformationRegistry createRegistry(OperationContext context, Resource remote, Set<String> remoteExtensions) {
        final ReadMasterDomainModelUtil.RequiredConfigurationHolder rc = new ReadMasterDomainModelUtil.RequiredConfigurationHolder();
        final PathElement host = PathElement.pathElement(HOST, localHostName);
        final Resource hostModel = context.readResourceFromRoot(PathAddress.EMPTY_ADDRESS.append(host));
        final Resource original = this.originalModel;
        // Process the required using the remote model to include content which may not be available locally
        ReadMasterDomainModelUtil.processHostModel(rc, remote, hostModel, parameters.getExtensionRegistry());
        // Process the original
        ReadMasterDomainModelUtil.processHostModel(rc, original, original.getChild(host), parameters.getExtensionRegistry());
        // Delegate "ignored" decisions to the excluded-resource registry.
        final Transformers.ResourceIgnoredTransformationRegistry delegate = new Transformers.ResourceIgnoredTransformationRegistry() {
            @Override
            public boolean isResourceTransformationIgnored(PathAddress address) {
                return parameters.getIgnoredResourceRegistry().isResourceExcluded(address);
            }
        };
        return ReadMasterDomainModelUtil.createServerIgnoredRegistry(rc, delegate);
    }
}
public class OobChannel {
    /**
     * Wires this channel to its single backing subchannel. Must be called only
     * once, right after the OobChannel is created; it also installs a picker
     * that always selects that subchannel and reprocesses any buffered calls.
     */
    void setSubchannel(final InternalSubchannel subchannel) {
        log.log(Level.FINE, "[{0}] Created with [{1}]", new Object[]{this, subchannel});
        this.subchannel = subchannel;
        // Adapter exposing the InternalSubchannel through the Subchannel API.
        subchannelImpl = new AbstractSubchannel() {
            @Override
            public void shutdown() {
                subchannel.shutdown(Status.UNAVAILABLE.withDescription("OobChannel is shutdown"));
            }

            @Override
            ClientTransport obtainActiveTransport() {
                return subchannel.obtainActiveTransport();
            }

            @Override
            InternalInstrumented<ChannelStats> getInternalSubchannel() {
                return subchannel;
            }

            @Override
            public void requestConnection() {
                subchannel.obtainActiveTransport();
            }

            @Override
            public List<EquivalentAddressGroup> getAllAddresses() {
                return subchannel.getAddressGroups();
            }

            @Override
            public Attributes getAttributes() {
                return Attributes.EMPTY;
            }
        };
        // A constant picker: every RPC goes to the single subchannel.
        subchannelPicker = new SubchannelPicker() {
            final PickResult result = PickResult.withSubchannel(subchannelImpl);

            @Override
            public PickResult pickSubchannel(PickSubchannelArgs args) {
                return result;
            }
        };
        // Flush calls that were buffered before the subchannel existed.
        delayedTransport.reprocess(subchannelPicker);
    }
}
public class ModelsImpl {
    /**
     * Delete an entity role.
     *
     * @param appId The application ID.
     * @param versionId The version ID.
     * @param entityId The entity ID.
     * @param roleId The entity role Id.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the OperationStatus object
     */
    public Observable<OperationStatus> deletePatternAnyEntityRoleAsync(UUID appId, String versionId, UUID entityId, UUID roleId) {
        // Delegate to the ServiceResponse variant and unwrap the payload for
        // callers that do not need HTTP-level response details.
        return deletePatternAnyEntityRoleWithServiceResponseAsync(appId, versionId, entityId, roleId).map(new Func1<ServiceResponse<OperationStatus>, OperationStatus>() {
            @Override
            public OperationStatus call(ServiceResponse<OperationStatus> response) {
                return response.body();
            }
        });
    }
}
public class WorkflowClient { /** * Paginated search for workflows based on payload * @ param start start value of page * @ param size number of workflows to be returned * @ param sort sort order * @ param freeText additional free text query * @ param query the search query * @ return the { @ link SearchResult } containing the { @ link WorkflowSummary } that match the query */ public SearchResult < WorkflowSummary > search ( Integer start , Integer size , String sort , String freeText , String query ) { } }
Object [ ] params = new Object [ ] { "start" , start , "size" , size , "sort" , sort , "freeText" , freeText , "query" , query } ; return getForEntity ( "workflow/search" , params , searchResultWorkflowSummary ) ;
public class LottieDrawable { /** * Create a composition with { @ link LottieCompositionFactory } * @ return True if the composition is different from the previously set composition , false otherwise . */ public boolean setComposition ( LottieComposition composition ) { } }
if ( this . composition == composition ) { return false ; } isDirty = false ; clearComposition ( ) ; this . composition = composition ; buildCompositionLayer ( ) ; animator . setComposition ( composition ) ; setProgress ( animator . getAnimatedFraction ( ) ) ; setScale ( scale ) ; updateBounds ( ) ; // We copy the tasks to a new ArrayList so that if this method is called from multiple threads , // then there won ' t be two iterators iterating and removing at the same time . Iterator < LazyCompositionTask > it = new ArrayList < > ( lazyCompositionTasks ) . iterator ( ) ; while ( it . hasNext ( ) ) { LazyCompositionTask t = it . next ( ) ; t . run ( composition ) ; it . remove ( ) ; } lazyCompositionTasks . clear ( ) ; composition . setPerformanceTrackingEnabled ( performanceTrackingEnabled ) ; return true ;
public class ServiceRefTypeImpl { /** * Returns all < code > port - component - ref < / code > elements * @ return list of < code > port - component - ref < / code > */ public List < PortComponentRefType < ServiceRefType < T > > > getAllPortComponentRef ( ) { } }
List < PortComponentRefType < ServiceRefType < T > > > list = new ArrayList < PortComponentRefType < ServiceRefType < T > > > ( ) ; List < Node > nodeList = childNode . get ( "port-component-ref" ) ; for ( Node node : nodeList ) { PortComponentRefType < ServiceRefType < T > > type = new PortComponentRefTypeImpl < ServiceRefType < T > > ( this , "port-component-ref" , childNode , node ) ; list . add ( type ) ; } return list ;
public class ReadOnlyStorageEngine { /** * Time since last time the store was swapped * @ return Time in milliseconds since the store was swapped */ @ JmxGetter ( name = "lastSwapped" , description = "Time in milliseconds since the store was swapped" ) public long getLastSwapped ( ) { } }
long timeSinceLastSwap = System . currentTimeMillis ( ) - lastSwapped ; return timeSinceLastSwap > 0 ? timeSinceLastSwap : 0 ;
public class AbstractReady { /** * { @ inheritDoc } */ @ Override public final < C extends Command > C getCommand ( final UniqueKey < C > commandKey ) { } }
localFacade ( ) . globalFacade ( ) . trackEvent ( JRebirthEventType . ACCESS_COMMAND , this . getClass ( ) , commandKey . classField ( ) ) ; return localFacade ( ) . globalFacade ( ) . commandFacade ( ) . retrieve ( commandKey ) ;
public class DateOffsetScanListener { /** * FormatDate Method . */ public String formatDate ( Date date , int type ) { } }
String string = null ; if ( type == DBConstants . DATE_TIME_FORMAT ) string = XmlUtilities . dateTimeFormat . format ( date ) ; else if ( type == DBConstants . DATE_ONLY_FORMAT ) string = XmlUtilities . dateFormat . format ( date ) ; else if ( type == DBConstants . TIME_ONLY_FORMAT ) string = XmlUtilities . timeFormat . format ( date ) ; return string ;
public class StorageTierAssoc {
    /**
     * Interprets a tier ordinal given the number of tiers.
     *
     * <p>Non-negative values identify tiers from the top down (0 is the first
     * tier); values beyond the last tier clamp to the last tier. Negative
     * values identify tiers from the bottom up (-1 is the last tier); values
     * beyond the first tier clamp to the first tier.</p>
     *
     * @param ordinal  the storage tier ordinal to interpret
     * @param numTiers the number of storage tiers
     * @return a valid tier ordinal in {@code [0, numTiers - 1]}
     */
    static int interpretOrdinal(int ordinal, int numTiers) {
        if (ordinal < 0) {
            // Count up from the bottom tier, clamping at the first tier.
            return Math.max(numTiers + ordinal, 0);
        }
        // Count down from the top tier, clamping at the last tier.
        return Math.min(ordinal, numTiers - 1);
    }
}
public class DaoHelper { /** * 转为单列查询语句 * @ param sql * @ return */ public static String toSingleColumnSql ( String sql ) { } }
String tempSql = sql . trim ( ) ; String regex = "(?i)^select(.*?)from" ; Matcher m = Pattern . compile ( regex ) . matcher ( tempSql ) ; String columnAlias = SqlTemplate . SINGLE_COLUMN_ALIAS ; if ( m . find ( ) ) { columnAlias = m . group ( 1 ) . trim ( ) . replaceAll ( "\\(\\s+" , "(" ) . replaceAll ( "\\s+\\)" , ")" ) . split ( "\\s+" ) [ 0 ] + " AS " + columnAlias ; } return tempSql . replaceFirst ( regex , "select " + columnAlias + " from " ) ;
public class MtasRBTree { /** * ( non - Javadoc ) * @ see mtas . codec . tree . MtasTree # addRange ( int , int , java . lang . Integer , * java . lang . Long ) */ @ Override final protected void addRange ( int left , int right , int additionalId , long additionalRef , Integer id , Long ref ) { } }
String key = left + "_" + right ; if ( index . containsKey ( key ) ) { index . get ( key ) . addIdAndRef ( id , ref , additionalId , additionalRef ) ; } else { root = addRange ( root , left , right , additionalId , additionalRef , id , ref ) ; root . color = MtasRBTreeNode . BLACK ; }
public class BigtableDataGrpcClient {
    /**
     * {@inheritDoc}
     *
     * <p>Applies the client's default app profile when the request does not
     * set one, then adapts the {@code FlatRow} scanner into a {@code Row}
     * scanner by converting each row on the fly.</p>
     */
    @Override
    public ResultScanner<Row> readRows(ReadRowsRequest request) {
        // Fill in the default app profile if the caller left it unset.
        if (shouldOverrideAppProfile(request.getAppProfileId())) {
            request = request.toBuilder().setAppProfileId(clientDefaultAppProfileId).build();
        }
        // Delegate all resumable operations to the scanner. It will request a non-resumable scanner
        // during operation.
        // TODO(sduskis): Figure out a way to perform operation level metrics with the
        // AbstractBigtableResultScanner implementations.
        final ResultScanner<FlatRow> delegate = readFlatRows(request);
        // Adapter: converts each FlatRow from the delegate into a Row.
        return new ResultScanner<Row>() {
            @Override
            public void close() throws IOException {
                delegate.close();
            }

            @Override
            public Row[] next(int count) throws IOException {
                FlatRow[] flatRows = delegate.next(count);
                Row[] rows = new Row[flatRows.length];
                for (int i = 0; i < flatRows.length; i++) {
                    rows[i] = FlatRowConverter.convert(flatRows[i]);
                }
                return rows;
            }

            @Override
            public Row next() throws IOException {
                return FlatRowConverter.convert(delegate.next());
            }

            @Override
            public int available() {
                return delegate.available();
            }
        };
    }
}
public class ContainerInstanceMarshaller {
    /**
     * Marshall the given parameter object, writing each ContainerInstance
     * field through the protocol marshaller with its field binding.
     *
     * @throws SdkClientException if the instance is null or marshalling fails
     */
    public void marshall(ContainerInstance containerInstance, ProtocolMarshaller protocolMarshaller) {
        if (containerInstance == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // Marshall each field with its corresponding binding, one per attribute.
            protocolMarshaller.marshall(containerInstance.getContainerInstanceArn(), CONTAINERINSTANCEARN_BINDING);
            protocolMarshaller.marshall(containerInstance.getEc2InstanceId(), EC2INSTANCEID_BINDING);
            protocolMarshaller.marshall(containerInstance.getVersion(), VERSION_BINDING);
            protocolMarshaller.marshall(containerInstance.getVersionInfo(), VERSIONINFO_BINDING);
            protocolMarshaller.marshall(containerInstance.getRemainingResources(), REMAININGRESOURCES_BINDING);
            protocolMarshaller.marshall(containerInstance.getRegisteredResources(), REGISTEREDRESOURCES_BINDING);
            protocolMarshaller.marshall(containerInstance.getStatus(), STATUS_BINDING);
            protocolMarshaller.marshall(containerInstance.getAgentConnected(), AGENTCONNECTED_BINDING);
            protocolMarshaller.marshall(containerInstance.getRunningTasksCount(), RUNNINGTASKSCOUNT_BINDING);
            protocolMarshaller.marshall(containerInstance.getPendingTasksCount(), PENDINGTASKSCOUNT_BINDING);
            protocolMarshaller.marshall(containerInstance.getAgentUpdateStatus(), AGENTUPDATESTATUS_BINDING);
            protocolMarshaller.marshall(containerInstance.getAttributes(), ATTRIBUTES_BINDING);
            protocolMarshaller.marshall(containerInstance.getRegisteredAt(), REGISTEREDAT_BINDING);
            protocolMarshaller.marshall(containerInstance.getAttachments(), ATTACHMENTS_BINDING);
            protocolMarshaller.marshall(containerInstance.getTags(), TAGS_BINDING);
        } catch (Exception e) {
            // Wrap any failure, preserving the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class SelectParser { /** * STDDEV _ POP | STDDEV _ SAMP } ( [ ALL | DISTINCT ] [ 1 ] ] Expression ) } [ [ AS ] label ] */ private Expression clip ( ParserString raw ) throws SQLParserException { } }
Expression exp = column ( raw ) ; // if ( exp = = null ) exp = brackedColumn ( raw ) ; if ( exp == null ) exp = date ( raw ) ; if ( exp == null ) exp = bracked ( raw ) ; if ( exp == null ) exp = number ( raw ) ; if ( exp == null ) exp = string ( raw ) ; return exp ;
public class AutoElasticsearch { /** * Utility methods */ private void putCred ( String key , EsToken token , Map < String , String > credentials ) { } }
FastByteArrayOutputStream stream = new FastByteArrayOutputStream ( ) ; DataOutput output = new DataOutputStream ( stream ) ; try { token . writeOut ( output ) ; } catch ( IOException e ) { throw new EsHadoopException ( "Could not serialize EsToken" , e ) ; } String credential = new String ( Base64 . encodeBase64 ( stream . bytes ( ) . bytes ( ) ) , StringUtils . UTF_8 ) ; credentials . put ( key , credential ) ;
public class ModifyDBClusterSnapshotAttributeRequest { /** * A list of DB cluster snapshot attributes to add to the attribute specified by < code > AttributeName < / code > . * To authorize other AWS accounts to copy or restore a manual DB cluster snapshot , set this list to include one or * more AWS account IDs , or < code > all < / code > to make the manual DB cluster snapshot restorable by any AWS account . * Do not add the < code > all < / code > value for any manual DB cluster snapshots that contain private information that * you don ' t want available to all AWS accounts . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setValuesToAdd ( java . util . Collection ) } or { @ link # withValuesToAdd ( java . util . Collection ) } if you want to * override the existing values . * @ param valuesToAdd * A list of DB cluster snapshot attributes to add to the attribute specified by < code > AttributeName < / code > * To authorize other AWS accounts to copy or restore a manual DB cluster snapshot , set this list to include * one or more AWS account IDs , or < code > all < / code > to make the manual DB cluster snapshot restorable by any * AWS account . Do not add the < code > all < / code > value for any manual DB cluster snapshots that contain * private information that you don ' t want available to all AWS accounts . * @ return Returns a reference to this object so that method calls can be chained together . */ public ModifyDBClusterSnapshotAttributeRequest withValuesToAdd ( String ... valuesToAdd ) { } }
if ( this . valuesToAdd == null ) { setValuesToAdd ( new com . amazonaws . internal . SdkInternalList < String > ( valuesToAdd . length ) ) ; } for ( String ele : valuesToAdd ) { this . valuesToAdd . add ( ele ) ; } return this ;
public class GroovyObjectWrapper { /** * / * ( non - Javadoc ) * @ see groovy . lang . GroovyObject # invokeMethod ( java . lang . String , java . lang . Object ) */ public Object invokeMethod ( final String name , final Object args ) { } }
return this . wrapped . invokeMethod ( name , args ) ;
public class LocalDate { /** * Returns a copy of this { @ code LocalDate } with the specified number of months subtracted . * This method subtracts the specified amount from the months field in three steps : * < ol > * < li > Subtract the input months from the month - of - year field < / li > * < li > Check if the resulting date would be invalid < / li > * < li > Adjust the day - of - month to the last valid day if necessary < / li > * < / ol > * For example , 2007-03-31 minus one month would result in the invalid date * 2007-02-31 . Instead of returning an invalid result , the last valid day * of the month , 2007-02-28 , is selected instead . * This instance is immutable and unaffected by this method call . * @ param monthsToSubtract the months to subtract , may be negative * @ return a { @ code LocalDate } based on this date with the months subtracted , not null * @ throws DateTimeException if the result exceeds the supported date range */ public LocalDate minusMonths ( long monthsToSubtract ) { } }
return ( monthsToSubtract == Long . MIN_VALUE ? plusMonths ( Long . MAX_VALUE ) . plusMonths ( 1 ) : plusMonths ( - monthsToSubtract ) ) ;
public class AVDefaultConnectionListener { /** * 处理 v2 版本中 message 的 rcp 消息 * @ param msgId * @ param conversationId * @ param timestamp */ private void processMessageReceipt ( String msgId , String conversationId , int convType , long timestamp ) { } }
Object messageCache = MessageReceiptCache . get ( session . getSelfPeerId ( ) , msgId ) ; if ( messageCache == null ) { return ; } Message m = ( Message ) messageCache ; AVIMMessage msg = new AVIMMessage ( conversationId , session . getSelfPeerId ( ) , m . timestamp , timestamp ) ; msg . setMessageId ( m . id ) ; msg . setContent ( m . msg ) ; msg . setMessageStatus ( AVIMMessage . AVIMMessageStatus . AVIMMessageStatusReceipt ) ; AVConversationHolder conversation = session . getConversationHolder ( conversationId , convType ) ; conversation . onMessageReceipt ( msg ) ;
public class ReceiveListenerDispatcher {
    /**
     * Queues any type of invocation entry into an appropriate queue. The queue is selected first
     * by determining if a request's associated conversation is already associated with a queue. If
     * it is not, then a list of empty queues is consulted before finally reverting to using the
     * modulus of a monotonically incrementing counter (associated with the conversation instance).
     * <p>
     * Note: this method contains some reasonably complex synchronization. One underlying assumption
     * is that it cannot be invoked concurrently to queue an invocation for the same conversation.
     * This pre-condition holds because the channel framework will not concurrently read data from a
     * single socket — and this method must be executed prior to making another read request.
     *
     * @param invocation   the work item to dispatch
     * @param conversation the conversation the invocation belongs to
     */
    private void queueInvocationCommon(AbstractInvocation invocation, ConversationImpl conversation) {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(this, tc, "queueInvocationCommon", new Object[] { invocation, conversation });
        if (dispatcherEnabled) {
            try {
                // First job is to ask the ConversationReceiveListener for the object which may
                // carry a dispatch queue. If it returns null, fall back to the Conversation itself.
                // NOTE: getThreadContext() may throw a RuntimeException indicating a problem; in
                // that case we must NOT dispatch this segment as the conversation will be closed.
                Dispatchable dispatchable = invocation.getThreadContext();
                if (dispatchable == null)
                    dispatchable = conversation;
                // Remember the dispatchable in the invocation for later stages.
                invocation.setDispatchable(dispatchable);
                // Track how many requests are outstanding on this Conversation. This is needed
                // when processing errorOccurred notifications: the callback must come after all
                // data for the Conversation has been processed.
                synchronized (conversation.getTotalOutstandingRequestCountLock()) {
                    conversation.incrementTotalOutstandingCount();
                }
                ReceiveListenerDispatchQueue dispatchQueue = null;
                Object dispatchableLock = dispatchable.getDispatchLockObject();
                // Lock the dispatchable while checking for an associated queue and bumping its
                // reference count. Once the count is non-zero other threads will not
                // disassociate the queue, so we may drop the lock afterwards.
                synchronized (dispatchableLock) {
                    dispatchQueue = (ReceiveListenerDispatchQueue) dispatchable.getDispatchQueue();
                    if (dispatchQueue != null)
                        dispatchable.incrementDispatchQueueRefCount();
                }
                if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                    SibTr.debug(this, tc, "queueInvocationCommon", "dispatchQueue=" + dispatchQueue);
                if (dispatchQueue == null) {
                    // No queue associated yet: prefer an idle queue from the empty-queues list.
                    // Synchronizing on the list both protects the data structure and prevents a
                    // race where another thread adds/removes queues until we have chosen one.
                    synchronized (emptyDispatchQueues) {
                        if (!emptyDispatchQueues.isEmpty())
                            dispatchQueue = emptyDispatchQueues.remove(emptyDispatchQueues.size() - 1);
                        else {
                            // No idle queue: pick one deterministically via the conversation's
                            // monotonically incrementing counter, modulo the pool size. Must be
                            // done inside this synchronized block so the chosen queue cannot be
                            // concurrently re-added to the empty list.
                            int queueNumber = conversation.getInstanceCounterValue();
                            dispatchQueue = dispatchQueues[queueNumber % maxConcurrentDispatches];
                        }
                    }
                    if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                        SibTr.debug(this, tc, "dispatchQueue=" + dispatchQueue);
                    // Associate the chosen queue with the dispatchable and give it a non-zero
                    // reference count, kept consistent across threads via the dispatchable lock.
                    synchronized (dispatchableLock) {
                        dispatchable.setDispatchQueue(dispatchQueue);
                        dispatchable.incrementDispatchQueueRefCount();
                    }
                }
                // Reset prior to enqueue to prevent the reference count getting corrupted.
                invocation.resetReferenceCounts();
                // Finally enqueue the work to our chosen dispatch queue.
                dispatchQueue.enqueue(invocation);
            } catch (RuntimeException e) {
                // No FFDC code needed.
                // The connection has already been blown away by this point — no further action.
                if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                    SibTr.debug(this, tc, "Looks like getThreadContext failed:", e);
            }
            // It's important that we carry on here and do not barf back up to the TCP channel,
            // as we do not want to knacker any threads that will service user requests.
            catch (Throwable t) {
                FFDCFilter.processException(t,
                    "com.ibm.ws.sib.jfapchannel.impl.rldispatcher.ReceiveListenerDispatcher",
                    JFapChannelConstants.RLDISPATCHER_QUEUEINVOCCOMMON_01, this);
                if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                    SibTr.debug(this, tc, "RL Dispatcher threw an exception: ", t);
            }
        } else // dispatcherEnabled == false
        {
            // Dispatching disabled: invoke the appropriate method directly on this thread.
            invocation.invoke();
            invocation.repool();
            invocation.resetReferenceCounts();
        }
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(this, tc, "queueInvocationCommon");
    }
}
public class CmsSitemapController {
    /**
     * Helper method for looking up a value in a map which may be null.
     *
     * @param <A> the key type
     * @param <B> the value type
     * @param map the map (which may be null)
     * @param key the map key
     * @return the value of the map at the given key, or null if the map is null
     */
    public static <A, B> B safeLookup(Map<A, B> map, A key) {
        // A null map simply yields no value.
        return (map == null) ? null : map.get(key);
    }
}
public class NodeListener { /** * Inform listeners that node is being created . * @ param node A node being created . */ public static void fireOnCreated ( @ Nonnull Node node ) { } }
for ( NodeListener nl : all ( ) ) { try { nl . onCreated ( node ) ; } catch ( Throwable ex ) { LOGGER . log ( Level . WARNING , "Listener invocation failed" , ex ) ; } }
public class TagTypeImpl { /** * Returns all < code > variable < / code > elements * @ return list of < code > variable < / code > */ public List < VariableType < TagType < T > > > getAllVariable ( ) { } }
List < VariableType < TagType < T > > > list = new ArrayList < VariableType < TagType < T > > > ( ) ; List < Node > nodeList = childNode . get ( "variable" ) ; for ( Node node : nodeList ) { VariableType < TagType < T > > type = new VariableTypeImpl < TagType < T > > ( this , "variable" , childNode , node ) ; list . add ( type ) ; } return list ;
public class ParosDatabase { /** * / * ( non - Javadoc ) * @ see org . parosproxy . paros . db . DatabaseIF # deleteSession ( java . lang . String ) */ @ Override public void deleteSession ( String sessionName ) { } }
logger . debug ( "deleteSession " + sessionName ) ; if ( databaseServer == null ) { return ; } try { databaseServer . shutdown ( false ) ; } catch ( SQLException e ) { logger . error ( e . getMessage ( ) , e ) ; } deleteDbFile ( new File ( sessionName ) ) ; deleteDbFile ( new File ( sessionName + ".data" ) ) ; deleteDbFile ( new File ( sessionName + ".script" ) ) ; deleteDbFile ( new File ( sessionName + ".properties" ) ) ; deleteDbFile ( new File ( sessionName + ".backup" ) ) ; deleteDbFile ( new File ( sessionName + ".lobs" ) ) ; databaseServer = null ;
public class AbstractLazyLoadRunMap {
    /**
     * Finds the build #M where M is nearby the given 'n'.
     *
     * @param n the index to start the search from
     * @param d defines what we mean by "nearby" above:
     *          If EXACT, find #N or return null.
     *          If ASC, finds the closest #M that satisfies M &ge; N.
     *          If DESC, finds the closest #M that satisfies M &le; N.
     * @return the matching record, or null if no build satisfies the constraint
     */
    public @CheckForNull R search(final int n, final Direction d) {
        switch (d) {
        case EXACT:
            return getByNumber(n);
        case ASC:
            // numberOnDisk iterates in ascending order, so the first m >= n whose record
            // actually loads is the closest match.
            for (int m : numberOnDisk) {
                if (m < n) { // TODO could be made more efficient with numberOnDisk.find
                    continue;
                }
                R r = getByNumber(m);
                if (r != null) {
                    return r;
                }
            }
            return null;
        case DESC: // TODO again could be made more efficient
            // Walk the on-disk numbers backwards so the first m <= n whose record loads
            // is the closest match from below.
            ListIterator<Integer> iterator = numberOnDisk.listIterator(numberOnDisk.size());
            while (iterator.hasPrevious()) {
                int m = iterator.previous();
                if (m > n) {
                    continue;
                }
                R r = getByNumber(m);
                if (r != null) {
                    return r;
                }
            }
            return null;
        default:
            // Direction is a closed enum; any other value is a programming error.
            throw new AssertionError();
        }
    }
}
public class EndpointsResponse { /** * The list of endpoints . * @ param item * The list of endpoints . */ public void setItem ( java . util . Collection < EndpointResponse > item ) { } }
if ( item == null ) { this . item = null ; return ; } this . item = new java . util . ArrayList < EndpointResponse > ( item ) ;
public class Launcher {
    /**
     * Expands the list of environment variables by inheriting current env variables.
     *
     * @param env entries of the form {@code NAME=value}; may be null, meaning "nothing extra"
     * @return the merged environment
     */
    private static EnvVars inherit(@CheckForNull String[] env) {
        // Convert the String[] form into a map first.
        EnvVars vars = new EnvVars();
        if (env != null) {
            for (String entry : env) {
                // NOTE(review): assumes every entry contains '=' — an entry without one
                // would make indexOf return -1 and substring throw; confirm with callers.
                int eq = entry.indexOf('=');
                vars.put(entry.substring(0, eq), entry.substring(eq + 1));
            }
        }
        // Then delegate to the map-based overload to do the inheritance.
        return inherit(vars);
    }
}
public class Ssh2DsaPublicKey { /** * Encode the public key into a blob of binary data , the encoded result will * be passed into init to recreate the key . * @ return an encoded byte array * @ throws SshException * @ todo Implement this com . sshtools . ssh . SshPublicKey method */ public byte [ ] getEncoded ( ) throws SshException { } }
ByteArrayWriter baw = new ByteArrayWriter ( ) ; try { baw . writeString ( getAlgorithm ( ) ) ; baw . writeBigInteger ( pubkey . getParams ( ) . getP ( ) ) ; baw . writeBigInteger ( pubkey . getParams ( ) . getQ ( ) ) ; baw . writeBigInteger ( pubkey . getParams ( ) . getG ( ) ) ; baw . writeBigInteger ( pubkey . getY ( ) ) ; return baw . toByteArray ( ) ; } catch ( IOException ioe ) { throw new SshException ( "Failed to encoded DSA key" , SshException . INTERNAL_ERROR , ioe ) ; } finally { try { baw . close ( ) ; } catch ( IOException e ) { } }
public class TreeBuilder { /** * Listen to events on all paths of the tree * @ param listener listener * @ return builder */ public TreeBuilder < T > listen ( Listener < T > listener ) { } }
return listen ( listener , PathUtil . allpathSelector ( ) ) ;
public class DataSetUtil { /** * Extract out the specified column , and merge the specified label and label mask arrays * ( i . e . , concatenate the examples ) * @ param labelsToMerge Features to merge . Will use featuresToMerge [ all ] [ inOutIdx ] * @ param labelMasksToMerge Mask arrays to merge . May be null * @ return Merged features and mask . Mask may be null */ public static Pair < INDArray , INDArray > mergeLabels ( @ NonNull INDArray [ ] [ ] labelsToMerge , INDArray [ ] [ ] labelMasksToMerge , int inOutIdx ) { } }
Pair < INDArray [ ] , INDArray [ ] > p = selectColumnFromMDSData ( labelsToMerge , labelMasksToMerge , inOutIdx ) ; return mergeLabels ( p . getFirst ( ) , p . getSecond ( ) ) ;
public class Utils4J { /** * Create an instance with Class . forName ( . . ) and wrap all exceptions into RuntimeExceptions . * @ param className * Full qualified class name - Cannot be < code > null < / code > . * @ param classLoader * Dedicated class loader to use - Cannot be < code > NULL < / code > . * @ return New instance of the class . */ public static Object createInstance ( final String className , final ClassLoader classLoader ) { } }
checkNotNull ( "className" , className ) ; checkNotNull ( "classLoader" , classLoader ) ; try { final Class < ? > clasz = Class . forName ( className , true , classLoader ) ; return clasz . newInstance ( ) ; } catch ( final ClassNotFoundException e ) { throw new RuntimeException ( "Unknown class!" , e ) ; } catch ( final InstantiationException e ) { throw new RuntimeException ( "Error instanciating class!" , e ) ; } catch ( final IllegalAccessException e ) { throw new RuntimeException ( "Error accessing class!" , e ) ; }
public class VisWindow {
    /**
     * Adds a close button to the window, next to the window title. Pressing the button calls
     * {@link #close()}. If nothing else was added to the title table and the current title
     * alignment is center, then the title is automatically re-centered by padding.
     */
    public void addCloseButton() {
        Label titleLabel = getTitleLabel();
        Table titleTable = getTitleTable();
        VisImageButton closeButton = new VisImageButton("close-window");
        // Negative right pad pulls the button flush with the window edge (+0.7f fudge).
        titleTable.add(closeButton).padRight(-getPadRight() + 0.7f);
        // Trigger close() when the button fires its change event.
        closeButton.addListener(new ChangeListener() {
            @Override
            public void changed(ChangeEvent event, Actor actor) {
                close();
            }
        });
        // Cancel touch-down events so pressing the button does not also drag/activate the window.
        closeButton.addListener(new ClickListener() {
            @Override
            public boolean touchDown(InputEvent event, float x, float y, int pointer, int button) {
                event.cancel();
                return true;
            }
        });
        // Only the label and the freshly added button present (size == 2) and centered title:
        // offset the label left by the button width to keep the title visually centered.
        if (titleLabel.getLabelAlign() == Align.center && titleTable.getChildren().size == 2)
            titleTable.getCell(titleLabel).padLeft(closeButton.getWidth() * 2);
    }
}
public class JsDocInfoParser { /** * Extracts the text found on the current line and all subsequent until either an annotation , end * of comment or end of file is reached . Note that if this method detects an end of line as the * first token , it will quit immediately ( indicating that there is no text where it was expected ) . * Note that token = info . token ; should be called after this method is used to update the token * properly in the parser . * @ param token The start token . * @ param option How to handle whitespace . * @ param includeAnnotations Whether the extracted text may include annotations . If set to false , * text extraction will stop on the first encountered annotation token . * @ return The extraction information . */ private ExtractionInfo extractMultilineTextualBlock ( JsDocToken token , WhitespaceOption option , boolean includeAnnotations ) { } }
if ( token == JsDocToken . EOC || token == JsDocToken . EOL || token == JsDocToken . EOF ) { return new ExtractionInfo ( "" , token ) ; } return extractMultilineComment ( token , option , true , includeAnnotations ) ;
public class CassandraDataHandlerBase { /** * Scroll over super column . * @ param m * the m * @ param relationNames * the relation names * @ param isWrapReq * the is wrap req * @ param relations * the relations * @ param entityType * the entity type * @ param superColumn * the super column * @ param embeddedObject * the embedded object * @ param isCql3Enabled * the is cql3 enabled * @ throws InstantiationException * the instantiation exception * @ throws IllegalAccessException * the illegal access exception */ private void scrollOverSuperColumn ( EntityMetadata m , List < String > relationNames , boolean isWrapReq , Map < String , Object > relations , EntityType entityType , SuperColumn superColumn , Object embeddedObject , boolean isCql3Enabled ) throws InstantiationException , IllegalAccessException { } }
for ( Column column : superColumn . getColumns ( ) ) { embeddedObject = onColumn ( column , m , embeddedObject , entityType , relationNames , isWrapReq , relations , isCql3Enabled ) ; }
public class GeoShapeMapperBuilder { /** * Returns the { @ link GeoShapeMapper } represented by this { @ link MapperBuilder } . * @ param field the name of the field to be built * @ return the { @ link GeoShapeMapper } represented by this */ @ Override public GeoShapeMapper build ( String field ) { } }
try { return new GeoShapeMapper ( field , column , validated , maxLevels , transformations ) ; } catch ( NoClassDefFoundError e ) { throw new JTSNotFoundException ( ) ; }
public class CommerceTaxFixedRateAddressRelLocalServiceBaseImpl { /** * Performs a dynamic query on the database and returns the matching rows . * @ param dynamicQuery the dynamic query * @ return the matching rows */ @ Override public < T > List < T > dynamicQuery ( DynamicQuery dynamicQuery ) { } }
return commerceTaxFixedRateAddressRelPersistence . findWithDynamicQuery ( dynamicQuery ) ;
public class VpnTunnelClient { /** * Retrieves an aggregated list of VPN tunnels . * < p > Sample code : * < pre > < code > * try ( VpnTunnelClient vpnTunnelClient = VpnTunnelClient . create ( ) ) { * ProjectName project = ProjectName . of ( " [ PROJECT ] " ) ; * for ( VpnTunnelsScopedList element : vpnTunnelClient . aggregatedListVpnTunnels ( project . toString ( ) ) . iterateAll ( ) ) { * / / doThingsWith ( element ) ; * < / code > < / pre > * @ param project Project ID for this request . * @ throws com . google . api . gax . rpc . ApiException if the remote call fails */ @ BetaApi public final AggregatedListVpnTunnelsPagedResponse aggregatedListVpnTunnels ( String project ) { } }
AggregatedListVpnTunnelsHttpRequest request = AggregatedListVpnTunnelsHttpRequest . newBuilder ( ) . setProject ( project ) . build ( ) ; return aggregatedListVpnTunnels ( request ) ;
public class FnObject { /** * Creates a list of the specified type with only the target object in it . * @ param < T > the type of the list elements * @ param type the type of the list * @ return the resulting list . */ public static final < T > Function < T , List < T > > intoSingletonListOf ( final Type < T > type ) { } }
return new IntoSingletonList < T > ( ) ;
public class ImgUtil { /** * 给图片添加文字水印 < br > * 此方法并不关闭流 * @ param srcImage 源图像 * @ param to 目标流 * @ param pressText 水印文字 * @ param color 水印的字体颜色 * @ param font { @ link Font } 字体相关信息 , 如果默认则为 { @ code null } * @ param x 修正值 。 默认在中间 , 偏移量相对于中间偏移 * @ param y 修正值 。 默认在中间 , 偏移量相对于中间偏移 * @ param alpha 透明度 : alpha 必须是范围 [ 0.0 , 1.0 ] 之内 ( 包含边界值 ) 的一个浮点数字 * @ throws IORuntimeException IO异常 * @ since 3.2.2 */ public static void pressText ( Image srcImage , OutputStream to , String pressText , Color color , Font font , int x , int y , float alpha ) throws IORuntimeException { } }
pressText ( srcImage , getImageOutputStream ( to ) , pressText , color , font , x , y , alpha ) ;
public class Sort {
    /**
     * Checks if the float array is reverse sorted (non-increasing). It loops through the
     * entire float array once, checking that each element is not smaller than its successor.
     * <br><br>
     * <i>Runtime:</i> O(n)
     *
     * @param floatArray the float array to check
     * @return <i>true</i> if the float array is reverse sorted, else <i>false</i>
     */
    public static boolean isReverseSorted(float[] floatArray) {
        // Compare each element with its predecessor; any increase breaks the order.
        for (int i = 1; i < floatArray.length; i++) {
            if (floatArray[i - 1] < floatArray[i]) {
                return false;
            }
        }
        return true;
    }
}
public class MultiColumnRelation { /** * Creates a multi - column IN relation with a list of IN values or markers . * For example : " SELECT . . . WHERE ( a , b ) IN ( ( 0 , 1 ) , ( 2 , 3 ) ) " * @ param entities the columns on the LHS of the relation * @ param inValues a list of Tuples . Literal instances or a Tuples . Raw markers */ public static MultiColumnRelation createInRelation ( List < ColumnIdentifier . Raw > entities , List < ? extends Term . MultiColumnRaw > inValues ) { } }
return new MultiColumnRelation ( entities , Operator . IN , null , inValues , null ) ;
public class Pool {
    /**
     * Adds a new connection if needed. Only one thread creates new connections, so a new
     * connection request will wait either for the newly created connection or for a
     * released connection.
     */
    private void addConnectionRequest() {
        // Only queue creation work while the pool is below its cap and healthy.
        if (totalConnection.get() < options.maxPoolSize && poolState.get() == POOL_STATE_OK) {
            // ensure to have one worker if was timeout
            connectionAppender.prestartCoreThread();
            connectionAppenderQueue.offer(() -> {
                // Re-check the conditions at execution time: the pool may have grown or
                // demand may have drained between enqueue and execution. Create only if
                // we are under the minimum or there are still pending requests, and we
                // remain below the maximum.
                if ((totalConnection.get() < options.minPoolSize || pendingRequestNumber.get() > 0)
                    && totalConnection.get() < options.maxPoolSize) {
                    try {
                        addConnection();
                    } catch (SQLException sqle) {
                        // eat — best-effort growth; waiters will retry or time out.
                    }
                }
            });
        }
    }
}