signature stringlengths 43 39.1k | implementation stringlengths 0 450k |
|---|---|
public class GeoPackageCoreConnection { /** * Query for typed values in a single ( first ) row
* @ param < T >
* result value type
* @ param sql
* sql statement
* @ param args
* arguments
* @ return single row results
* @ since 3.1.0 */
public < T > List < T > querySingleRowTypedResults ( String sql , String [ ] args ) { } } | @ SuppressWarnings ( "unchecked" ) List < T > result = ( List < T > ) querySingleRowResults ( sql , args ) ; return result ; |
public class JavacState { /** * Recursively delete a directory and all its contents . */
private void deleteContents ( File dir ) { } } | if ( dir != null && dir . exists ( ) ) { for ( File f : dir . listFiles ( ) ) { if ( f . isDirectory ( ) ) { deleteContents ( f ) ; } if ( ! options . isUnidentifiedArtifactPermitted ( f . getAbsolutePath ( ) ) ) { Log . debug ( "Removing " + f . getAbsolutePath ( ) ) ; f . delete ( ) ; } } } |
public class AbstractPostgreSQLQuery {

    /**
     * Adds a PostgreSQL {@code DISTINCT ON (...)} clause to the query.
     *
     * @param exprs the expressions that determine row distinctness
     * @return the current query instance for chaining
     */
    @WithBridgeMethods(value = PostgreSQLQuery.class, castRequired = true)
    public C distinctOn(Expression<?>... exprs) {
        // Injected as a raw SQL flag right after SELECT; the trailing space in the
        // template keeps it separated from the projection that follows.
        return addFlag(Position.AFTER_SELECT, Expressions.template(Object.class, "distinct on({0}) ", ExpressionUtils.list(Object.class, exprs)));
    }
}
public class JwtTokenActions { /** * anyone calling this method needs to add upn to the extraClaims that it passes in ( if they need it ) */
public String getJwtTokenUsingBuilder ( String testName , LibertyServer server , String builderId , List < NameValuePair > extraClaims ) throws Exception { } } | String jwtBuilderUrl = SecurityFatHttpUtils . getServerUrlBase ( server ) + "/jwtbuilder/build" ; List < NameValuePair > requestParams = setRequestParms ( builderId , extraClaims ) ; WebClient webClient = new WebClient ( ) ; Page response = invokeUrlWithParameters ( testName , webClient , jwtBuilderUrl , requestParams ) ; Log . info ( thisClass , testName , "JWT builder app response: " + WebResponseUtils . getResponseText ( response ) ) ; Cookie jwtCookie = webClient . getCookieManager ( ) . getCookie ( "JWT" ) ; Log . info ( thisClass , testName , "Built JWT cookie: " + jwtCookie ) ; Log . info ( thisClass , testName , "Cookie value: " + jwtCookie . getValue ( ) ) ; return jwtCookie . getValue ( ) ; |
public class JournalCreator { /** * Create a journal entry , add the arguments , and invoke the method . */
public Date setDatastreamVersionable ( Context context , String pid , String dsID , boolean versionable , String logMessage ) throws ServerException { } } | try { CreatorJournalEntry cje = new CreatorJournalEntry ( METHOD_SET_DATASTREAM_VERSIONABLE , context ) ; cje . addArgument ( ARGUMENT_NAME_PID , pid ) ; cje . addArgument ( ARGUMENT_NAME_DS_ID , dsID ) ; cje . addArgument ( ARGUMENT_NAME_VERSIONABLE , versionable ) ; cje . addArgument ( ARGUMENT_NAME_LOG_MESSAGE , logMessage ) ; return ( Date ) cje . invokeAndClose ( delegate , writer ) ; } catch ( JournalException e ) { throw new GeneralException ( "Problem creating the Journal" , e ) ; } |
public class ReflectionUtil { /** * Create a ORCSchemaProvider from it ' s fully qualified class name . The
* class passed in by name must be assignable to ORCSchemaProvider and have
* 1 - parameter constructor accepting a SecorConfig . Allows the ORCSchemaProvider
* to be pluggable by providing the class name of a desired ORCSchemaProvider in
* config .
* See the secor . orc . schema . provider config option .
* @ param className class name
* @ param config secor config
* @ return ORCSchemaProvider
* @ throws Exception on error */
public static ORCSchemaProvider createORCSchemaProvider ( String className , SecorConfig config ) throws Exception { } } | Class < ? > clazz = Class . forName ( className ) ; if ( ! ORCSchemaProvider . class . isAssignableFrom ( clazz ) ) { throw new IllegalArgumentException ( String . format ( "The class '%s' is not assignable to '%s'." , className , ORCSchemaProvider . class . getName ( ) ) ) ; } return ( ORCSchemaProvider ) clazz . getConstructor ( SecorConfig . class ) . newInstance ( config ) ; |
public class OAuth20UsernamePasswordAuthenticator { /** * Gets client id and client secret .
* @ param context the context
* @ return the client id and client secret */
protected Pair < String , String > getClientIdAndClientSecret ( final WebContext context ) { } } | val extractor = new BasicAuthExtractor ( ) ; val upc = extractor . extract ( context ) ; if ( upc != null ) { return Pair . of ( upc . getUsername ( ) , upc . getPassword ( ) ) ; } val clientId = context . getRequestParameter ( OAuth20Constants . CLIENT_ID ) ; val clientSecret = context . getRequestParameter ( OAuth20Constants . CLIENT_SECRET ) ; return Pair . of ( clientId , clientSecret ) ; |
public class DefaultGroovyStaticMethods { /** * Works exactly like ResourceBundle . getBundle ( String , Locale ) . This is needed
* because the java method depends on a particular stack configuration that
* is not guaranteed in Groovy when calling the Java method .
* @ param self placeholder variable used by Groovy categories ; ignored for default static methods
* @ param bundleName the name of the bundle .
* @ param locale the specific locale
* @ return the resource bundle
* @ see java . util . ResourceBundle # getBundle ( java . lang . String , java . util . Locale )
* @ since 1.6.0 */
public static ResourceBundle getBundle ( ResourceBundle self , String bundleName , Locale locale ) { } } | Class c = ReflectionUtils . getCallingClass ( ) ; ClassLoader targetCL = c != null ? c . getClassLoader ( ) : null ; if ( targetCL == null ) targetCL = ClassLoader . getSystemClassLoader ( ) ; return ResourceBundle . getBundle ( bundleName , locale , targetCL ) ; |
public class h2odriver {

    /**
     * Brute-force convert a byte array into its lowercase hexadecimal string.
     * The length of the returned string is byteArr.length * 2.
     *
     * @param byteArr byte array to convert
     * @return hexadecimal string
     */
    static private String convertByteArrToString(byte[] byteArr) {
        // Two hex characters per byte; presize accordingly.
        StringBuilder hex = new StringBuilder(byteArr.length * 2);
        for (byte b : byteArr) {
            // Mask to treat the byte as unsigned before formatting.
            hex.append(String.format("%02x", b & 0xff));
        }
        return hex.toString();
    }
}
public class TokenizerBagOfWordsTermSequenceIndexTransform { /** * Convert the given text
* in to an { @ link INDArray }
* using the { @ link TokenizerFactory }
* specified in the constructor .
* @ param text the text to transform
* @ return the created { @ link INDArray }
* based on the { @ link # wordIndexMap } for the column indices
* of the word . */
public INDArray convert ( String text ) { } } | Tokenizer tokenizer = tokenizerFactory . create ( text ) ; List < String > tokens = tokenizer . getTokens ( ) ; INDArray create = Nd4j . create ( 1 , wordIndexMap . size ( ) ) ; Counter < String > tokenizedCounter = new Counter < > ( ) ; for ( int i = 0 ; i < tokens . size ( ) ; i ++ ) { tokenizedCounter . incrementCount ( tokens . get ( i ) , 1.0 ) ; } for ( int i = 0 ; i < tokens . size ( ) ; i ++ ) { if ( wordIndexMap . containsKey ( tokens . get ( i ) ) ) { int idx = wordIndexMap . get ( tokens . get ( i ) ) ; int count = ( int ) tokenizedCounter . getCount ( tokens . get ( i ) ) ; double weight = tfidfWord ( tokens . get ( i ) , count , tokens . size ( ) ) ; create . putScalar ( idx , weight ) ; } } return create ; |
public class BoxAPIConnection { /** * Refresh ' s this connection ' s access token using its refresh token .
* @ throws IllegalStateException if this connection ' s access token cannot be refreshed . */
public void refresh ( ) { } } | this . refreshLock . writeLock ( ) . lock ( ) ; if ( ! this . canRefresh ( ) ) { this . refreshLock . writeLock ( ) . unlock ( ) ; throw new IllegalStateException ( "The BoxAPIConnection cannot be refreshed because it doesn't have a " + "refresh token." ) ; } URL url = null ; try { url = new URL ( this . tokenURL ) ; } catch ( MalformedURLException e ) { this . refreshLock . writeLock ( ) . unlock ( ) ; assert false : "An invalid refresh URL indicates a bug in the SDK." ; throw new RuntimeException ( "An invalid refresh URL indicates a bug in the SDK." , e ) ; } String urlParameters = String . format ( "grant_type=refresh_token&refresh_token=%s&client_id=%s&client_secret=%s" , this . refreshToken , this . clientID , this . clientSecret ) ; BoxAPIRequest request = new BoxAPIRequest ( this , url , "POST" ) ; request . shouldAuthenticate ( false ) ; request . setBody ( urlParameters ) ; String json ; try { BoxJSONResponse response = ( BoxJSONResponse ) request . send ( ) ; json = response . getJSON ( ) ; } catch ( BoxAPIException e ) { this . notifyError ( e ) ; this . refreshLock . writeLock ( ) . unlock ( ) ; throw e ; } JsonObject jsonObject = JsonObject . readFrom ( json ) ; this . accessToken = jsonObject . get ( "access_token" ) . asString ( ) ; this . refreshToken = jsonObject . get ( "refresh_token" ) . asString ( ) ; this . lastRefresh = System . currentTimeMillis ( ) ; this . expires = jsonObject . get ( "expires_in" ) . asLong ( ) * 1000 ; this . notifyRefresh ( ) ; this . refreshLock . writeLock ( ) . unlock ( ) ; |
public class AbstractAtomFeedParser { /** * Parse the feed and return a new parsed instance of the feed type . This method can be skipped if
* all you want are the items .
* @ throws IOException I / O exception
* @ throws XmlPullParserException XML pull parser exception */
public T parseFeed ( ) throws IOException , XmlPullParserException { } } | boolean close = true ; try { this . feedParsed = true ; T result = Types . newInstance ( feedClass ) ; Xml . parseElement ( parser , result , namespaceDictionary , Atom . StopAtAtomEntry . INSTANCE ) ; close = false ; return result ; } finally { if ( close ) { close ( ) ; } } |
public class GramAttributes { /** * Returns type of the job .
* @ return job type . - 1 if not set or job type
* is unknown . */
public int getJobType ( ) { } } | String jobType = getSingle ( "jobtype" ) ; if ( jobType == null ) return - 1 ; if ( jobType . equalsIgnoreCase ( "single" ) ) { return JOBTYPE_SINGLE ; } else if ( jobType . equalsIgnoreCase ( "multiple" ) ) { return JOBTYPE_MULTIPLE ; } else if ( jobType . equalsIgnoreCase ( "mpi" ) ) { return JOBTYPE_MPI ; } else if ( jobType . equalsIgnoreCase ( "condor" ) ) { return JOBTYPE_CONDOR ; } else { return - 1 ; } |
public class RegistryCredentialMarshaller {

    /**
     * Marshall the given RegistryCredential into the protocol representation.
     *
     * @param registryCredential the object to marshall; must not be null
     * @param protocolMarshaller the marshaller that writes each field binding
     * @throws SdkClientException if the argument is null or marshalling fails
     */
    public void marshall(RegistryCredential registryCredential, ProtocolMarshaller protocolMarshaller) {
        if (registryCredential == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(registryCredential.getCredential(), CREDENTIAL_BINDING);
            protocolMarshaller.marshall(registryCredential.getCredentialProvider(), CREDENTIALPROVIDER_BINDING);
        } catch (Exception e) {
            // Wrap any marshalling failure in the SDK's client exception, keeping the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class Primitives { /** * Returns the boxed default value for a primitive or a primitive wrapper .
* @ param primitiveOrWrapperType The type to lookup the default value
* @ return The boxed default values as defined in Java Language Specification ,
* < code > null < / code > if the type is neither a primitive nor a wrapper */
public static < T > T defaultValue ( Class < T > primitiveOrWrapperType ) { } } | return ( T ) PRIMITIVE_OR_WRAPPER_DEFAULT_VALUES . get ( primitiveOrWrapperType ) ; |
public class BsCrawlingInfoParamCB { public CrawlingInfoParamCB acceptPK ( String id ) { } } | assertObjectNotNull ( "id" , id ) ; BsCrawlingInfoParamCB cb = this ; cb . query ( ) . docMeta ( ) . setId_Equal ( id ) ; return ( CrawlingInfoParamCB ) this ; |
public class SqlSubstitutionFragment {

    /**
     * Return the text for a PreparedStatement from this fragment. This type of
     * fragment typically evaluates any reflection parameters at this point. The
     * exception is if the reflected result is a ComplexSqlFragment; in that case
     * the SQL text is retrieved from the fragment in this method. The parameter
     * values are retrieved in the getParameterValues method of this class.
     *
     * @param context a ControlBeanContext instance
     * @param m       the annotated method
     * @param args    the method parameters
     * @return a String containing the value of this fragment and its children
     */
    protected String getPreparedStatementText(ControlBeanContext context, Method m, Object[] args) {
        StringBuilder sb = new StringBuilder();
        for (SqlFragment frag : _children) {
            boolean complexFragment = frag.hasComplexValue(context, m, args);
            if (frag.hasParamValue() && !complexFragment) {
                // Simple parameterized fragment: substitute each value inline.
                Object[] pValues = frag.getParameterValues(context, m, args);
                for (Object o : pValues) {
                    sb.append(processSqlParams(o));
                }
            } else {
                // Complex (or non-parameter) fragment: recurse for its SQL text.
                // NOTE(review): side effect — records whether any child was complex;
                // presumably consumed later when collecting parameter values.
                _hasParamValue |= complexFragment;
                sb.append(frag.getPreparedStatementText(context, m, args));
            }
        }
        return sb.toString();
    }
}
public class ServerManager {

    /**
     * Starts a Tango server. The system property TANGO_HOST is mandatory if
     * using the tango database. If the tango db is not used the system property
     * OAPort (for jacorb) must be set. Errors that occur are only logged.
     *
     * <pre>
     * ServerManager.getInstance().start(new String[] { "1" }, JTangoTest.class);
     * </pre>
     *
     * @param args        the arguments to pass: instanceName [-v[trace level]]
     *                    [-nodb [-dlist &lt;device name list&gt;]]
     * @param deviceClass the class of the device. The server name and class name
     *                    must be defined in tango db with deviceClass.getSimpleName
     *                    to be started with this method.
     * @see ServerManager#addClass(String, Class)
     */
    public synchronized void start(final String[] args, final Class<?> deviceClass) {
        // No-op when already started. NOTE(review): isStarted is not set here —
        // presumably init(...) flips it; confirm to rule out double-start races.
        if (!isStarted.get()) {
            addClass(deviceClass.getSimpleName(), deviceClass);
            try {
                init(args, deviceClass.getSimpleName());
            } catch (final DevFailed e) {
                // Errors are deliberately only logged, per the contract above.
                DevFailedUtils.printDevFailed(e);
            }
        }
    }
}
public class InternalXbaseWithAnnotationsParser {

    // NOTE: ANTLR-generated parser code — do not hand-edit; regenerate from the grammar.
    /**
     * InternalXbaseWithAnnotations.g:6823:1: ruleQualifiedNameInStaticImport returns
     * [AntlrDatatypeRuleToken current=new AntlrDatatypeRuleToken()] :
     * (this_ValidID_0= ruleValidID kw= '.' )+ ;
     */
    public final AntlrDatatypeRuleToken ruleQualifiedNameInStaticImport() throws RecognitionException {
        AntlrDatatypeRuleToken current = new AntlrDatatypeRuleToken();
        Token kw = null;
        AntlrDatatypeRuleToken this_ValidID_0 = null;
        enterRule();
        try {
            // InternalXbaseWithAnnotations.g:6829:2: ( (this_ValidID_0= ruleValidID kw= '.' )+ )
            // InternalXbaseWithAnnotations.g:6830:2: (this_ValidID_0= ruleValidID kw= '.' )+
            {
                // InternalXbaseWithAnnotations.g:6830:2: (this_ValidID_0= ruleValidID kw= '.' )+
                int cnt122 = 0;
                loop122: do {
                    int alt122 = 2;
                    int LA122_0 = input.LA(1);
                    // One-or-more loop: continue only while an ID followed by '.' (token 52) is ahead.
                    if ((LA122_0 == RULE_ID)) {
                        int LA122_2 = input.LA(2);
                        if ((LA122_2 == 52)) {
                            alt122 = 1;
                        }
                    }
                    switch (alt122) {
                        case 1:
                            // InternalXbaseWithAnnotations.g:6831:3: this_ValidID_0= ruleValidID kw= '.'
                            {
                                if (state.backtracking == 0) {
                                    newCompositeNode(grammarAccess.getQualifiedNameInStaticImportAccess().getValidIDParserRuleCall_0());
                                }
                                pushFollow(FOLLOW_80);
                                this_ValidID_0 = ruleValidID();
                                state._fsp--;
                                if (state.failed) return current;
                                if (state.backtracking == 0) {
                                    current.merge(this_ValidID_0);
                                }
                                if (state.backtracking == 0) {
                                    afterParserOrEnumRuleCall();
                                }
                                kw = (Token) match(input, 52, FOLLOW_86);
                                if (state.failed) return current;
                                if (state.backtracking == 0) {
                                    current.merge(kw);
                                    newLeafNode(kw, grammarAccess.getQualifiedNameInStaticImportAccess().getFullStopKeyword_1());
                                }
                            }
                            break;
                        default:
                            // Exit once at least one iteration has matched; otherwise report/flag failure.
                            if (cnt122 >= 1) break loop122;
                            if (state.backtracking > 0) {
                                state.failed = true;
                                return current;
                            }
                            EarlyExitException eee = new EarlyExitException(122, input);
                            throw eee;
                    }
                    cnt122++;
                } while (true);
            }
            if (state.backtracking == 0) {
                leaveRule();
            }
        } catch (RecognitionException re) {
            recover(input, re);
            appendSkippedTokens();
        } finally {
        }
        return current;
    }
}
public class SvgPathData {

    /**
     * Checks an 'l' (relative line-to) command.
     *
     * @throws DatatypeException if the path data is invalid
     * @throws IOException on read failure
     */
    private void checkl() throws DatatypeException, IOException {
        // Ensure the command character itself is in the context buffer before
        // consuming its arguments. NOTE(review): semantics of _checkl's boolean
        // flag are not visible here — presumably "relative"; confirm at its definition.
        if (context.length() == 0) {
            appendToContext(current);
        }
        current = reader.read();
        appendToContext(current);
        skipSpaces();
        _checkl('l', true);
    }
}
public class RTMPUtils {

    /**
     * Calculates the delta between two time stamps, adjusting for time stamp wrapping.
     *
     * @param a first time stamp
     * @param b second time stamp
     * @return the distance between a and b, negative if a is less than b
     *         (both interpreted as unsigned 32-bit values)
     */
    public static long diffTimestamps(final int a, final int b) {
        // Widen each timestamp to a non-negative long (unsigned 32-bit view),
        // then the plain subtraction yields the wrap-aware distance.
        final long ua = a & 0xFFFFFFFFL;
        final long ub = b & 0xFFFFFFFFL;
        return ua - ub;
    }
}
public class FastHessianFeatureDetector { /** * Sees if the best score in the current layer is greater than all the scores in a 3x3 neighborhood
* in another layer . */
protected static boolean checkMax ( ImageBorder_F32 inten , float bestScore , int c_x , int c_y ) { } } | for ( int y = c_y - 1 ; y <= c_y + 1 ; y ++ ) { for ( int x = c_x - 1 ; x <= c_x + 1 ; x ++ ) { if ( inten . get ( x , y ) >= bestScore ) { return false ; } } } return true ; |
public class InBinding { /** * Appends the value to the matching value list .
* @ param value
* the matching value to be appended . */
public void addValue ( final Object value ) { } } | if ( value == null ) { setNullContained ( true ) ; } else { _values . add ( String . valueOf ( value ) ) ; } |
public class ClassInfoCache { /** * ' [ b ] oolean ' and ' [ b ] yte ' . */
public static Class < ? > getPrimitiveClass ( Type type ) { } } | switch ( type . getDescriptor ( ) . charAt ( 0 ) ) { case 'B' : return byte . class ; case 'C' : return char . class ; case 'D' : return double . class ; case 'F' : return float . class ; case 'I' : return int . class ; case 'J' : return long . class ; case 'S' : return short . class ; case 'V' : return void . class ; case 'Z' : return boolean . class ; default : throw new IllegalArgumentException ( "Unrecognized type [ " + type . getDescriptor ( ) + " ]" ) ; } |
public class QuoteManager { /** * Remove the a tick callback
* @ param symbol
* @ param callback
* @ return
* @ throws BitfinexClientException */
public boolean removeTickCallback ( final BitfinexTickerSymbol symbol , final BiConsumer < BitfinexTickerSymbol , BitfinexTick > callback ) throws BitfinexClientException { } } | return tickerCallbacks . removeCallback ( symbol , callback ) ; |
public class Utility {

    /**
     * Copy the application properties into the given properties map, after
     * stripping keys that must not be propagated (app name, free-if-done flag,
     * message filter).
     *
     * @param properties    destination map; existing entries win over appProperties
     *                      (presumably — depends on putAllIfNew semantics; confirm)
     * @param appProperties source application properties; may be null
     * @return the destination map with the filtered app properties merged in
     */
    public static Map<String, Object> copyAppProperties(Map<String, Object> properties, Map<String, Object> appProperties) {
        if (appProperties != null) {
            // Work on a copy so the caller's map is never mutated.
            appProperties = Utility.putAllIfNew(new HashMap<String, Object>(), appProperties);
            // get()!=null (rather than containsKey) deliberately leaves
            // null-valued entries in place; only non-null values are stripped.
            if (appProperties.get(Params.APP_NAME) != null)
                appProperties.remove(Params.APP_NAME);
            // if (appProperties.get(Params.MESSAGE_SERVER) != null)
            //     appProperties.remove(Params.MESSAGE_SERVER);
            if (appProperties.get(DBParams.FREEIFDONE) != null)
                appProperties.remove(DBParams.FREEIFDONE);
            if (appProperties.get(MessageConstants.MESSAGE_FILTER) != null)
                appProperties.remove(MessageConstants.MESSAGE_FILTER);
        }
        return Utility.putAllIfNew(properties, appProperties);
    }
}
public class ApiOvhOrder {

    /**
     * Get prices and contracts information.
     *
     * REST: GET /order/dedicatedCloud/{serviceName}/upgradeRessource/{duration}
     *
     * @param upgradedRessourceType [required] the type of resource to upgrade
     * @param upgradedRessourceId   [required] the id of a particular resource to upgrade
     *                              in your Private Cloud (unused for the "all" UpgradeRessourceTypeEnum)
     * @param upgradeType           [required] the type of upgrade to process on the resource(s)
     * @param serviceName           [required] the service name
     * @param duration              [required] duration
     */
    public OvhOrder dedicatedCloud_serviceName_upgradeRessource_duration_GET(String serviceName, String duration, OvhUpgradeTypeEnum upgradeType, Long upgradedRessourceId, OvhUpgradeRessourceTypeEnum upgradedRessourceType) throws IOException {
        // Build the path from the template, then append each query parameter.
        String qPath = "/order/dedicatedCloud/{serviceName}/upgradeRessource/{duration}";
        StringBuilder sb = path(qPath, serviceName, duration);
        query(sb, "upgradeType", upgradeType);
        query(sb, "upgradedRessourceId", upgradedRessourceId);
        query(sb, "upgradedRessourceType", upgradedRessourceType);
        String resp = exec(qPath, "GET", sb.toString(), null);
        return convertTo(resp, OvhOrder.class);
    }
}
public class WrappedByteBuffer { /** * Puts segment of a single - byte array into the buffer at the current position .
* @ param v
* the source single - byte array
* @ offset
* the start position of source array
* @ length
* the length to put into WrappedByteBuffer
* @ return the buffer */
public WrappedByteBuffer put ( byte [ ] v , int offset , int length ) { } } | _autoExpand ( length ) ; for ( int i = 0 ; i < length ; i ++ ) { _buf . put ( v [ offset + i ] ) ; } return this ; |
public class MongoDB { /** * Retrieves a list of mapped Morphia objects from MongoDB
* @ param clazz The mapped Morphia class
* @ param < T > JavaDoc requires this - please ignore
* @ return A list of mapped Morphia objects or an empty list if none found */
public < T extends Object > List < T > findAll ( Class < T > clazz ) { } } | Preconditions . checkNotNull ( clazz , "Tryed to get all morphia objects of a given object, but given object is null" ) ; return this . datastore . find ( clazz ) . asList ( ) ; |
public class FFDC { /** * Gets and formats the specified thread ' s information .
* @ param thread The thread to obtain the information from .
* @ return A formatted string for the thread information . */
private static String getThreadInfo ( Thread thread ) { } } | final StringBuilder sb = new StringBuilder ( ) ; sb . append ( "Thread: " ) ; sb . append ( thread . getId ( ) ) ; sb . append ( " (" ) ; sb . append ( thread . getName ( ) ) ; sb . append ( ")" ) ; sb . append ( lineSeparator ) ; final StackTraceElement [ ] stack = thread . getStackTrace ( ) ; if ( stack . length == 0 ) { sb . append ( " No Java callstack associated with this thread" ) ; sb . append ( lineSeparator ) ; } else { for ( StackTraceElement element : stack ) { sb . append ( " at " ) ; sb . append ( element . getClassName ( ) ) ; sb . append ( "." ) ; sb . append ( element . getMethodName ( ) ) ; sb . append ( "(" ) ; final int lineNumber = element . getLineNumber ( ) ; if ( lineNumber == - 2 ) { sb . append ( "Native Method" ) ; } else if ( lineNumber >= 0 ) { sb . append ( element . getFileName ( ) ) ; sb . append ( ":" ) ; sb . append ( element . getLineNumber ( ) ) ; } else { sb . append ( element . getFileName ( ) ) ; } sb . append ( ")" ) ; sb . append ( lineSeparator ) ; } } sb . append ( lineSeparator ) ; return sb . toString ( ) ; |
public class GosucUtil {

    /**
     * Special handling for the unusual structure of the IBM JDK.
     *
     * @return a list containing the special 'vm.jar' absolute path if we are
     *         running on an IBM JDK; otherwise an empty list
     */
    protected static List<String> getIbmClasspath() {
        List<String> result = new ArrayList<>();
        // Only the IBM JDK needs the extra entry.
        if (!"IBM Corporation".equals(System.getProperty("java.vendor"))) {
            return result;
        }
        String vmJarSuffix = System.getProperty("file.separator") + "vm.jar";
        String[] bootEntries = System.getProperty("sun.boot.class.path").split(System.getProperty("path.separator"));
        for (String entry : bootEntries) {
            if (entry.endsWith(vmJarSuffix)) {
                result.add(entry);
                break;
            }
        }
        return result;
    }
}
public class CSSParserVisitorImpl { /** * check if rule context contains error node
* @ param ctx rule context
* @ return contains context error node */
private boolean ctxHasErrorNode ( ParserRuleContext ctx ) { } } | for ( int i = 0 ; i < ctx . children . size ( ) ; i ++ ) { if ( ctx . getChild ( i ) instanceof ErrorNode ) { return true ; } } return false ; |
public class UnifiedResponseDefaultSettings {

    /**
     * Sets a response header to the response according to the passed name and
     * value. An existing header entry with the same name is overridden.
     *
     * @param sName  name of the header; may neither be <code>null</code> nor empty
     * @param sValue value of the header; may neither be <code>null</code> nor empty
     */
    public static void setResponseHeader(@Nonnull @Nonempty final String sName, @Nonnull @Nonempty final String sValue) {
        // Validate both arguments before taking the lock.
        ValueEnforcer.notEmpty(sName, "Name");
        ValueEnforcer.notEmpty(sValue, "Value");
        // Mutation of the shared default-header map happens under the write lock.
        s_aRWLock.writeLocked(() -> s_aResponseHeaderMap.setHeader(sName, sValue));
    }
}
public class CmsXmlContainerPageFactory {

    /**
     * Factory method to unmarshal (read) a container page instance from an OpenCms
     * VFS file that contains XML data, using either the encoding set in the XML
     * file header or the encoding set in the VFS file property.<p>
     *
     * If you are not sure about the implications of the encoding issues, use
     * {@link #unmarshal(CmsObject, CmsFile)} instead.<p>
     *
     * <b>Warning:</b><br/>
     * This method does not support requested historic versions; it always loads
     * the most recent version. Use
     * <code>{@link #unmarshal(CmsObject, CmsResource, ServletRequest)}</code>
     * for history support.<p>
     *
     * @param cms          the current cms object
     * @param file         the file with the XML data to unmarshal
     * @param keepEncoding if <code>true</code>, the encoding specified in the XML
     *                     header is used, otherwise the encoding from the VFS file
     *                     property is used
     * @return a container page instance unmarshalled from the provided file
     * @throws CmsXmlException if something goes wrong
     */
    public static CmsXmlContainerPageFactory unmarshal_javadoc_placeholder() { throw new AssertionError(); }
}
public class InnerMetricContext {

    /**
     * Register a given metric under a given name.
     *
     * This method does not support registering
     * {@link com.codahale.metrics.MetricSet}s — see
     * {@link #registerAll(com.codahale.metrics.MetricSet)}. It will not register
     * a metric with the same name in the parent context (if it exists).
     *
     * @throws IllegalArgumentException      if a metric with the name already exists
     * @throws UnsupportedOperationException if the metric is not a ContextAwareMetric
     */
    @Override
    public synchronized <T extends com.codahale.metrics.Metric> T register(String name, T metric) throws IllegalArgumentException {
        if (!(metric instanceof ContextAwareMetric)) {
            throw new UnsupportedOperationException("Can only register ContextAwareMetrics");
        }
        // Atomically claim the name; a non-null return means it was already taken.
        if (this.contextAwareMetrics.putIfAbsent(name, ((ContextAwareMetric) metric).getInnerMetric()) != null) {
            throw new IllegalArgumentException("A metric named " + name + " already exists");
        }
        // Propagate to the enclosing context when one is still reachable.
        MetricContext metricContext = this.metricContext.get();
        if (metricContext != null) {
            metricContext.addToMetrics((ContextAwareMetric) metric);
        }
        // Also register the metric with the MetricRegistry using its fully-qualified name
        // NOTE(review): the comment above precedes a bare return — the registry
        // registration it describes appears to be missing; confirm intent.
        return metric;
    }
}
public class SchemaServer { /** * For a given version and type ( Iced class simpleName ) return an appropriate new Schema object , if any .
* If a higher version is asked for than is available ( e . g . , if the highest version of
* Frame is FrameV2 and the client asks for the schema for ( Frame , 17 ) then an instance
* of FrameV2 will be returned . This compatibility lookup is cached .
* @ throws H2ONotFoundArgumentException if an appropriate schema is not found
* @ deprecated */
private static Schema schema ( int version , String type ) { } } | Class < ? extends Schema > clz = schemaClass ( version , type ) ; if ( clz == null ) clz = schemaClass ( EXPERIMENTAL_VERSION , type ) ; if ( clz == null ) throw new H2ONotFoundArgumentException ( "Failed to find schema for version: " + version + " and type: " + type , "Failed to find schema for version: " + version + " and type: " + type + "\n" + "Did you forget to add an entry into META-INF/services/water.api.Schema?" ) ; return Schema . newInstance ( clz ) ; |
public class UntagResourceRequest { /** * The keys of the tags to remove from the user pool .
* < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use
* { @ link # setTagKeys ( java . util . Collection ) } or { @ link # withTagKeys ( java . util . Collection ) } if you want to override
* the existing values .
* @ param tagKeys
* The keys of the tags to remove from the user pool .
* @ return Returns a reference to this object so that method calls can be chained together . */
public UntagResourceRequest withTagKeys ( String ... tagKeys ) { } } | if ( this . tagKeys == null ) { setTagKeys ( new java . util . ArrayList < String > ( tagKeys . length ) ) ; } for ( String ele : tagKeys ) { this . tagKeys . add ( ele ) ; } return this ; |
public class Tags {

    /**
     * Resolves all the tag IDs (name followed by value) into a map.
     * This function is the opposite of {@link #resolveAll}.
     *
     * @param tsdb the TSDB to use for UniqueId lookups
     * @param tags the tag IDs to resolve
     * @return a map mapping tag names to tag values
     * @throws NoSuchUniqueId           if one of the elements in the array contained
     *                                  an invalid ID
     * @throws IllegalArgumentException if one of the elements in the array had the
     *                                  wrong number of bytes
     */
    public static HashMap<String, String> resolveIds(final TSDB tsdb, final ArrayList<byte[]> tags) throws NoSuchUniqueId {
        try {
            // Synchronous wrapper over the async variant.
            return resolveIdsAsync(tsdb, tags).joinUninterruptibly();
        } catch (NoSuchUniqueId e) {
            // Pass the documented exception straight through.
            throw e;
        } catch (DeferredGroupException e) {
            // Unwrap the grouped deferred to re-throw the real NoSuchUniqueId cause.
            final Throwable ex = Exceptions.getCause(e);
            if (ex instanceof NoSuchUniqueId) {
                throw (NoSuchUniqueId) ex;
            }
            // TODO process e.results()
            throw new RuntimeException("Shouldn't be here", e);
        } catch (Exception e) {
            throw new RuntimeException("Shouldn't be here", e);
        }
    }
}
public class BaseClassFinderService { /** * Find the currently installed bundle that exports this package .
* @ param bundleContext
* @ param resource
* @ return */
public Bundle findBundle ( Object objResource , Object bundleContext , String packageName , String versionRange ) { } } | if ( bundleContext == null ) bundleContext = this . bundleContext ; if ( bundleContext == null ) return null ; if ( objResource == null ) return BaseClassFinderService . findBundle ( ( BundleContext ) bundleContext , packageName , versionRange ) ; Bundle [ ] bundles = ( ( BundleContext ) bundleContext ) . getBundles ( ) ; for ( Bundle bundle : bundles ) { if ( objResource != null ) { if ( this . isResourceBundleMatch ( objResource , bundle ) ) return bundle ; } } return null ; |
public class CalendarView {

    /**
     * Sends the request to the calendar view to display the given date and time.
     * The view switches to the first non-hidden page (day, week, month, then
     * year — note: not unconditionally the {@link DayPage}, despite the original
     * wording) and sets {@link #dateProperty()} to the date and
     * {@link #requestedTimeProperty()} to the time.
     *
     * @param dateTime the date and time to show in the view; must not be null
     */
    public final void showDateTime(LocalDateTime dateTime) {
        requireNonNull(dateTime);
        // Pick the most fine-grained page that is currently visible.
        if (!dayPage.isHidden()) {
            selectedPage.set(getDayPage());
        } else if (!weekPage.isHidden()) {
            selectedPage.set(getWeekPage());
        } else if (!monthPage.isHidden()) {
            selectedPage.set(getMonthPage());
        } else if (!yearPage.isHidden()) {
            selectedPage.set(getYearPage());
        }
        setDate(dateTime.toLocalDate());
        setRequestedTime(dateTime.toLocalTime());
    }
}
public class VirtualMachineScaleSetRollingUpgradesInner { /** * Starts a rolling upgrade to move all virtual machine scale set instances to the latest available Platform Image OS version . Instances which are already running the latest available OS version are not affected .
* @ param resourceGroupName The name of the resource group .
* @ param vmScaleSetName The name of the VM scale set .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the observable for the request */
public Observable < OperationStatusResponseInner > startOSUpgradeAsync ( String resourceGroupName , String vmScaleSetName ) { } } | return startOSUpgradeWithServiceResponseAsync ( resourceGroupName , vmScaleSetName ) . map ( new Func1 < ServiceResponse < OperationStatusResponseInner > , OperationStatusResponseInner > ( ) { @ Override public OperationStatusResponseInner call ( ServiceResponse < OperationStatusResponseInner > response ) { return response . body ( ) ; } } ) ; |
public class MatchmakingRuleSetMarshaller {
    /**
     * Marshall the given parameter object.
     *
     * @param matchmakingRuleSet the model object to serialize; must not be null
     * @param protocolMarshaller the protocol-specific marshaller receiving each field
     * @throws SdkClientException if the argument is null or any field fails to marshal
     */
    public void marshall(MatchmakingRuleSet matchmakingRuleSet, ProtocolMarshaller protocolMarshaller) {
        if (matchmakingRuleSet == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // Emit each field against its protocol binding; order mirrors the model.
            protocolMarshaller.marshall(matchmakingRuleSet.getRuleSetName(), RULESETNAME_BINDING);
            protocolMarshaller.marshall(matchmakingRuleSet.getRuleSetBody(), RULESETBODY_BINDING);
            protocolMarshaller.marshall(matchmakingRuleSet.getCreationTime(), CREATIONTIME_BINDING);
        } catch (Exception e) {
            // Wrap any marshalling failure in the SDK's client exception type.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class ParameterUtil { /** * Get child dependent parameters
* @ param report next report object
* @ param p current parameter
* @ return a map of all parameters that use the current parameter in theirs source definition */
public static Map < String , QueryParameter > getChildDependentParameters ( Report report , QueryParameter p ) { } } | if ( report == null ) { return new HashMap < String , QueryParameter > ( ) ; } return getChildDependentParameters ( report . getParameters ( ) , p ) ; |
public class Permission { /** * Add a Group to the particular role
* @ param group
* @ param role */
protected void addGroupToRole ( String group , String role ) { } } | if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) { SibTr . entry ( tc , CLASS_NAME + "addGroupToRole" , new Object [ ] { group , role } ) ; } Set < String > groupsForTheRole = roleToGroupMap . get ( role ) ; if ( groupsForTheRole != null ) { groupsForTheRole . add ( group ) ; } else { groupsForTheRole = new HashSet < String > ( ) ; groupsForTheRole . add ( group ) ; } roleToGroupMap . put ( role , groupsForTheRole ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) { SibTr . exit ( tc , CLASS_NAME + "addGroupToRole" ) ; } |
public class SQLCommand { /** * Application entry point */
public static void main ( String args [ ] ) { } } | System . setProperty ( "voltdb_no_logging" , "true" ) ; int exitCode = mainWithReturnCode ( args ) ; System . exit ( exitCode ) ; |
public class InjectorConfiguration {
    /**
     * Returns a factory instance to instantiate a certain class.
     *
     * @param scope the scope whose factory registry is consulted
     * @param classDefinition the class that is to be instantiated.
     * @param <T> the type of the class to be instantiated.
     * @return a factory method for the specified class; {@code @Nullable}
     *         indicates the lookup may yield no factory
     */
    @Nullable
    public <T> Provider<T> getScopedFactory(Class scope, Class<T> classDefinition) {
        // noinspection unchecked
        return factories.getScope(scope).get(classDefinition);
    }
}
public class ApplicationsImpl {
    /**
     * Lists all of the applications available in the specified account.
     * This operation returns only applications and versions that are available for use on compute
     * nodes; that is, that can be used in an application package reference. For administrator
     * information about applications and versions that are not yet available to compute nodes,
     * use the Azure portal or the Azure Resource Manager API.
     *
     * @param applicationListOptions Additional parameters for the operation
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the PagedList&lt;ApplicationSummary&gt; object
     */
    public Observable<ServiceResponseWithHeaders<Page<ApplicationSummary>, ApplicationListHeaders>> listWithServiceResponseAsync(final ApplicationListOptions applicationListOptions) {
        // Fetch the first page, then recursively chain follow-up pages until the
        // service stops returning a nextPageLink.
        return listSinglePageAsync(applicationListOptions)
                .concatMap(new Func1<ServiceResponseWithHeaders<Page<ApplicationSummary>, ApplicationListHeaders>, Observable<ServiceResponseWithHeaders<Page<ApplicationSummary>, ApplicationListHeaders>>>() {
                    @Override
                    public Observable<ServiceResponseWithHeaders<Page<ApplicationSummary>, ApplicationListHeaders>> call(ServiceResponseWithHeaders<Page<ApplicationSummary>, ApplicationListHeaders> page) {
                        String nextPageLink = page.body().nextPageLink();
                        if (nextPageLink == null) {
                            // Last page: terminate the chain.
                            return Observable.just(page);
                        }
                        // Carry the caller's per-request options forward to the
                        // next-page call.
                        ApplicationListNextOptions applicationListNextOptions = null;
                        if (applicationListOptions != null) {
                            applicationListNextOptions = new ApplicationListNextOptions();
                            applicationListNextOptions.withClientRequestId(applicationListOptions.clientRequestId());
                            applicationListNextOptions.withReturnClientRequestId(applicationListOptions.returnClientRequestId());
                            applicationListNextOptions.withOcpDate(applicationListOptions.ocpDate());
                        }
                        // Emit this page followed (lazily) by the rest of the pages.
                        return Observable.just(page).concatWith(listNextWithServiceResponseAsync(nextPageLink, applicationListNextOptions));
                    }
                });
    }
}
public class SummernoteFocusEvent { /** * Fires a summernote focus event on all registered handlers in the handler
* manager . If no such handlers exist , this method will do nothing .
* @ param source the source of the handlers */
public static void fire ( final HasSummernoteFocusHandlers source ) { } } | if ( TYPE != null ) { SummernoteFocusEvent event = new SummernoteFocusEvent ( ) ; source . fireEvent ( event ) ; } |
public class ResolveSource {
    /**
     * Conceptually, this is key.value().resolveSubstitutions() but using the
     * replacement for key.value() if any.
     *
     * @param context the active resolve context
     * @param original the value whose (possibly replaced) substitutions are resolved
     * @return the fully resolved value
     * @throws NotPossibleToResolve if resolution cannot proceed
     */
    AbstractConfigValue resolveCheckingReplacement(ResolveContext context, AbstractConfigValue original) throws NotPossibleToResolve {
        AbstractConfigValue replacement;
        replacement = replacement(context, original);
        // Identity comparison is deliberate: replacement() returns the same
        // instance when no replacement exists.
        if (replacement != original) {
            // start over, checking if replacement was memoized
            return context.resolve(replacement);
        } else {
            // No replacement: resolve the original's own substitutions.
            AbstractConfigValue resolved;
            resolved = original.resolveSubstitutions(context);
            return resolved;
        }
    }
}
public class InventoryGroupMarshaller {
    /**
     * Marshall the given parameter object.
     *
     * @param inventoryGroup the model object to serialize; must not be null
     * @param protocolMarshaller the protocol-specific marshaller receiving each field
     * @throws SdkClientException if the argument is null or any field fails to marshal
     */
    public void marshall(InventoryGroup inventoryGroup, ProtocolMarshaller protocolMarshaller) {
        if (inventoryGroup == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // Emit each field against its protocol binding.
            protocolMarshaller.marshall(inventoryGroup.getName(), NAME_BINDING);
            protocolMarshaller.marshall(inventoryGroup.getFilters(), FILTERS_BINDING);
        } catch (Exception e) {
            // Wrap any marshalling failure in the SDK's client exception type.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class StatementTable {
    /**
     * {@inheritDoc}
     *
     * Reads this table back from the stream in the exact order written by the
     * serializer: statement count, then for each entry the table statement,
     * its document index, and the statement itself.
     */
    @Override
    protected void _from(ObjectInput in) throws IOException, ClassNotFoundException {
        // 1: read number of stmts
        final int stmts = in.readInt();
        statements = sizedArrayList(stmts);
        for (int i = 0; i < stmts; i++) {
            TableStatement ts = new TableStatement();
            // 1: read each table statement
            ts.readExternal(in);
            // 2: read the relevant document index
            final int did = in.readInt();
            // 3: read the statement
            final Statement statement = readStatement(in);
            // Re-associate the triple in this table's indexes.
            addStatement(ts, statement, did);
        }
    }
}
public class ObjectWritableCodec { /** * Decode Hadoop Writable object from a byte array .
* @ param buffer serialized version of the Writable object ( as a byte array ) .
* @ return a Writable object .
* @ throws RemoteRuntimeException if deserialization fails . */
@ Override public T decode ( final byte [ ] buffer ) { } } | try ( final ByteArrayInputStream bis = new ByteArrayInputStream ( buffer ) ; final DataInputStream dis = new DataInputStream ( bis ) ) { final T writable = this . writableClass . newInstance ( ) ; writable . readFields ( dis ) ; return writable ; } catch ( final IOException | InstantiationException | IllegalAccessException ex ) { LOG . log ( Level . SEVERE , "Cannot decode class " + this . writableClass , ex ) ; throw new RemoteRuntimeException ( ex ) ; } |
public class LockStrategy {
    /**
     * PQ53065
     *
     * No-op lock acquisition: this strategy always reports success without
     * acquiring anything (see the intentionally blank body below).
     *
     * @return always {@code true}
     * @throws LockException declared for interface compatibility; never thrown here
     */
    public boolean lock(EJSContainer c, ContainerTx tx, Object lockName, int mode) throws LockException {
        // This method intentionally left blank.
        return true;
    }
}
public class AbstractAdminstrationDao { /** * Closes all open idle connections . The current data source
* must have superuser rights .
* This can be used if a another database action needs full access to a database ,
* e . g . when deleting and then creating it
* @ param databasename */
protected void closeAllConnections ( String databasename ) { } } | String sql = "SELECT pg_terminate_backend(pg_stat_activity.pid)\n" + "FROM pg_stat_activity\n" + "WHERE pg_stat_activity.datname = ?\n" + " AND pid <> pg_backend_pid();" ; try ( Connection conn = getDataSource ( ) . getConnection ( ) ) { DatabaseMetaData meta = conn . getMetaData ( ) ; if ( meta . getDatabaseMajorVersion ( ) == 9 && meta . getDatabaseMinorVersion ( ) <= 1 ) { sql = "SELECT pg_terminate_backend(pg_stat_activity.procpid)\n" + "FROM pg_stat_activity\n" + "WHERE pg_stat_activity.datname = ?\n" + " AND procpid <> pg_backend_pid();" ; } } catch ( SQLException ex ) { log . warn ( "Could not get the PostgreSQL version" , ex ) ; } getJdbcTemplate ( ) . queryForRowSet ( sql , databasename ) ; |
public class JobQueuesManager { /** * Method removes the jobs from both running and waiting job queue in
* job queue manager . */
private void jobCompleted ( JobInProgress job , JobSchedulingInfo oldInfo , QueueInfo qi ) { } } | LOG . info ( "Job " + job . getJobID ( ) . toString ( ) + " submitted to queue " + job . getProfile ( ) . getQueueName ( ) + " has completed" ) ; // remove jobs from both queue ' s a job can be in
// running and waiting queue at the same time .
qi . removeRunningJob ( oldInfo ) ; qi . removeWaitingJob ( oldInfo ) ; // let scheduler know
scheduler . jobCompleted ( job ) ; |
public class Syslog {
    /**
     * When installed, System.out and System.err are redirected to Syslog.log.
     * System.out produces info events, and System.err produces error events.
     */
    public static void install() {
        synchronized (log()) {
            // Idempotent: only hook the streams on the first call.
            if (!cInstalled) {
                cInstalled = true;
                // Keep the originals (presumably so they can be restored later
                // — TODO confirm an uninstall path exists).
                cOriginalOut = System.out;
                cOriginalErr = System.err;
                // Route stdout through a parsing stream that emits INFO events,
                // gated on the logger's info level.
                cSystemOut = new LogEventParsingOutputStream(log(), LogEvent.INFO_TYPE) {
                    public boolean isEnabled() {
                        return log().isInfoEnabled();
                    }
                };
                cSystemOut.addLogListener(log());
                System.setOut(new PrintStream(cSystemOut, true));
                // Route stderr the same way, but as ERROR events.
                cSystemErr = new LogEventParsingOutputStream(log(), LogEvent.ERROR_TYPE) {
                    public boolean isEnabled() {
                        return log().isErrorEnabled();
                    }
                };
                cSystemErr.addLogListener(log());
                System.setErr(new PrintStream(cSystemErr, true));
            }
        }
    }
}
public class JsEventBusImpl {
    /**
     * Add a handler for feature selection.
     *
     * If the map model is already initialized the handler is registered
     * immediately; otherwise registration is deferred until the model reports
     * a change, and a placeholder registration is returned that is wired up
     * once the deferred registration happens.
     */
    public JsHandlerRegistration addFeatureSelectionHandler(final FeatureSelectedHandler selectedHandler, final FeatureDeselectedHandler deselectedHandler) {
        if (((MapImpl) map).getMapWidget().getMapModel().isInitialized()) {
            // Model ready: register directly.
            return addFeatureSelectionHandler2(selectedHandler, deselectedHandler);
        }
        // Model not ready: hand back a callback-backed registration now and
        // fill it in when the model signals a change.
        final CallbackHandlerRegistration callbackRegistration = new CallbackHandlerRegistration();
        ((MapImpl) map).getMapWidget().getMapModel().addMapModelChangedHandler(new MapModelChangedHandler() {
            public void onMapModelChanged(MapModelChangedEvent event) {
                JsHandlerRegistration temp = addFeatureSelectionHandler2(selectedHandler, deselectedHandler);
                callbackRegistration.setRegistration(temp);
            }
        });
        return callbackRegistration;
    }
}
public class Main {
    /**
     * Computes the perimeter of a triangle from the lengths of its three sides.
     *
     * Examples:
     *   calculatePerimeter(10, 20, 30) -> 60
     *   calculatePerimeter(3, 4, 5)    -> 12
     *   calculatePerimeter(25, 35, 45) -> 105
     *
     * @param a the length of the first side of the triangle
     * @param b the length of the second side of the triangle
     * @param c the length of the third side of the triangle
     * @return the perimeter (sum of the three side lengths)
     */
    public static double calculatePerimeter(double a, double b, double c) {
        return a + b + c;
    }

    public static void main(String[] args) {
        System.out.println(calculatePerimeter(10, 20, 30));
        System.out.println(calculatePerimeter(3, 4, 5));
        System.out.println(calculatePerimeter(25, 35, 45));
    }
}
public class SecretBox {
    /**
     * Decrypt a ciphertext using the given key and nonce.
     *
     * @param nonce a 24-byte nonce
     * @param ciphertext the encrypted message (MAC-prefixed)
     * @return an {@link Optional} of the original plaintext, or if either the key, nonce, or
     * ciphertext was modified, an empty {@link Optional}
     * @see #nonce(byte[])
     * @see #nonce()
     */
    public Optional<byte[]> open(byte[] nonce, byte[] ciphertext) {
        final XSalsa20Engine xsalsa20 = new XSalsa20Engine();
        final Poly1305 poly1305 = new Poly1305();
        // initialize XSalsa20 with the box key and the caller's nonce
        xsalsa20.init(false, new ParametersWithIV(new KeyParameter(key), nonce));
        // generate mac subkey: the first KEY_LEN bytes of keystream become the
        // one-time Poly1305 key (encrypting a zero buffer in place)
        final byte[] sk = new byte[Keys.KEY_LEN];
        xsalsa20.processBytes(sk, 0, sk.length, sk, 0);
        // hash ciphertext: authenticate everything after the MAC prefix;
        // len is clamped to 0 for short/truncated inputs
        poly1305.init(new KeyParameter(sk));
        final int len = Math.max(ciphertext.length - poly1305.getMacSize(), 0);
        poly1305.update(ciphertext, poly1305.getMacSize(), len);
        final byte[] calculatedMAC = new byte[poly1305.getMacSize()];
        poly1305.doFinal(calculatedMAC, 0);
        // extract mac: the presented MAC is the ciphertext prefix (zero-padded
        // when the input is shorter than a full MAC)
        final byte[] presentedMAC = new byte[poly1305.getMacSize()];
        System.arraycopy(ciphertext, 0, presentedMAC, 0, Math.min(ciphertext.length, poly1305.getMacSize()));
        // compare macs in constant time; any mismatch means tampering or a
        // wrong key/nonce, and yields an empty result
        if (!MessageDigest.isEqual(calculatedMAC, presentedMAC)) {
            return Optional.empty();
        }
        // decrypt ciphertext: the XSalsa20 stream continues from where the
        // subkey generation left off
        final byte[] plaintext = new byte[len];
        xsalsa20.processBytes(ciphertext, poly1305.getMacSize(), plaintext.length, plaintext, 0);
        return Optional.of(plaintext);
    }
}
public class LaContainerBuilderUtils { public static boolean resourceExists ( String path , AbstractLaContainerBuilder builder ) { } } | InputStream is ; try { is = builder . getResourceResolver ( ) . getInputStream ( path ) ; } catch ( IORuntimeException ex ) { if ( ex . getCause ( ) instanceof FileNotFoundException ) { return false ; } else { throw ex ; } } if ( is == null ) { return false ; } else { try { is . close ( ) ; } catch ( IOException ignore ) { } return true ; } |
public class WebSecurityScannerClient { /** * Gets a Finding .
* < p > Sample code :
* < pre > < code >
* try ( WebSecurityScannerClient webSecurityScannerClient = WebSecurityScannerClient . create ( ) ) {
* FindingName name = FindingName . of ( " [ PROJECT ] " , " [ SCAN _ CONFIG ] " , " [ SCAN _ RUN ] " , " [ FINDING ] " ) ;
* Finding response = webSecurityScannerClient . getFinding ( name . toString ( ) ) ;
* < / code > < / pre >
* @ param name Required . The resource name of the Finding to be returned . The name follows the
* format of
* ' projects / { projectId } / scanConfigs / { scanConfigId } / scanRuns / { scanRunId } / findings / { findingId } ' .
* @ throws com . google . api . gax . rpc . ApiException if the remote call fails */
public final Finding getFinding ( String name ) { } } | GetFindingRequest request = GetFindingRequest . newBuilder ( ) . setName ( name ) . build ( ) ; return getFinding ( request ) ; |
public class Img { /** * 缩放图像 ( 按长宽缩放 ) < br >
* 注意 : 目标长宽与原图不成比例会变形
* @ param width 目标宽度
* @ param height 目标高度
* @ return this */
public Img scale ( int width , int height ) { } } | final BufferedImage srcImg = getValidSrcImg ( ) ; int srcHeight = srcImg . getHeight ( ) ; int srcWidth = srcImg . getWidth ( ) ; int scaleType ; if ( srcHeight == height && srcWidth == width ) { // 源与目标长宽一致返回原图
this . targetImage = srcImg ; return this ; } else if ( srcHeight < height || srcWidth < width ) { // 放大图片使用平滑模式
scaleType = Image . SCALE_SMOOTH ; } else { scaleType = Image . SCALE_DEFAULT ; } final Image image = srcImg . getScaledInstance ( width , height , scaleType ) ; this . targetImage = ImgUtil . toBufferedImage ( image ) ; return this ; |
public class BufferedWriter {
    /**
     * Prints a string, buffering the characters and honoring the configured
     * content-length limit.
     *
     * @exception IOException if an I/O error has occurred
     */
    public void print(String s) throws IOException {
        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) { // 306998.15
            Tr.debug(tc, "print --> " + s);
        }
        // Alert the observer exactly once, on the very first write.
        if (!_hasWritten && obs != null) {
            _hasWritten = true;
            obs.alertFirstWrite();
        }
        int len = s.length();
        // Clamp to the content-length limit; the overflow is recorded as a
        // pending exception rather than thrown here (check() below presumably
        // reports it — TODO confirm).
        if (limit > -1) {
            if (total + len > limit) {
                len = (int) (limit - total);
                except = new WriteBeyondContentLengthException();
            }
        }
        // Copy the (possibly truncated) string into the buffer in chunks,
        // flushing whenever the buffer fills.
        int off = 0;
        while (len > 0) {
            int n = buf.length - count;
            if (n == 0) {
                // Flush the chars without triggering a response-level flush.
                response.setFlushMode(false);
                flushChars();
                response.setFlushMode(true);
                n = buf.length - count;
            }
            if (n > len) {
                n = len;
            }
            s.getChars(off, off + n, buf, count);
            count += n;
            total += n;
            off += n;
            len -= n;
        }
        check();
    }
}
public class FSDataset {
    /**
     * {@inheritDoc}
     *
     * Looks up the stored replica for the given block id under the dataset
     * read lock and materializes a Block with its length and generation stamp.
     * Returns null when the replica, its data file, or (for checksum-file
     * replicas) its meta file cannot be found.
     */
    public Block getStoredBlock(int namespaceId, long blkid, boolean useOnDiskLength) throws IOException {
        lock.readLock().lock();
        try {
            ReplicaToRead replica = getReplicaToRead(namespaceId, new Block(blkid));
            if (replica == null) {
                return null;
            }
            File blockfile = replica.getDataFileToRead();
            if (blockfile == null) {
                return null;
            }
            // Only replicas with a separate checksum file have a meta file;
            // inline-checksum replicas carry everything in the block file.
            File metafile = null;
            if (!replica.isInlineChecksum()) {
                metafile = BlockWithChecksumFileWriter.findMetaFile(blockfile, true);
                if (metafile == null) {
                    return null;
                }
            }
            Block block = new Block(blkid);
            // Choose between bytes actually written to disk and bytes visible
            // to readers, per the caller's request.
            if (useOnDiskLength) {
                block.setNumBytes(replica.getBytesWritten());
            } else {
                block.setNumBytes(replica.getBytesVisible());
            }
            // The generation stamp lives in the block file name (inline
            // checksum) or in the meta file (separate checksum).
            if (replica.isInlineChecksum()) {
                block.setGenerationStamp(BlockInlineChecksumReader.getGenerationStampFromInlineChecksumFile(blockfile.getName()));
            } else {
                block.setGenerationStamp(BlockWithChecksumFileReader.parseGenerationStampInMetaFile(blockfile, metafile));
            }
            return block;
        } finally {
            lock.readLock().unlock();
        }
    }
}
public class AbstractUserObject { /** * Returns for given parameter < i > _ name < / i > the instance of class
* { @ link AbstractUserObject } . The returned AbstractUserObject can be a
* { @ link Role } , { @ link Group } , { @ link Company } , { @ link Consortium }
* or { @ link Person } . It is searched in the given sequence . User is searched
* last due to the reason that it is the only object that is not always stored
* in a cache an might produce queries against the DataBase
* @ param _ name name to search in the cache
* @ return instance of class { @ link AbstractUserObject }
* @ throws EFapsException on error */
public static AbstractUserObject getUserObject ( final String _name ) throws EFapsException { } } | AbstractUserObject ret = UUIDUtil . isUUID ( _name ) ? Role . get ( UUID . fromString ( _name ) ) : Role . get ( _name ) ; if ( ret == null ) { ret = UUIDUtil . isUUID ( _name ) ? Group . get ( UUID . fromString ( _name ) ) : Group . get ( _name ) ; } if ( ret == null ) { ret = UUIDUtil . isUUID ( _name ) ? Company . get ( UUID . fromString ( _name ) ) : Company . get ( _name ) ; } if ( ret == null ) { ret = UUIDUtil . isUUID ( _name ) ? Consortium . get ( UUID . fromString ( _name ) ) : Consortium . get ( _name ) ; } if ( ret == null ) { ret = UUIDUtil . isUUID ( _name ) ? Person . get ( UUID . fromString ( _name ) ) : Person . get ( _name ) ; } return ret ; |
public class DefaultServiceRegistry {
    /**
     * --- GET LOCAL SERVICE ---
     *
     * Looks up a registered service by name, first via a lock-free optimistic
     * read and, if that read was invalidated by a concurrent writer, via a
     * full read lock.
     *
     * @param name the service name to look up
     * @return the registered service; never null
     * @throws ServiceNotFoundError if no service is registered under the name
     */
    @Override
    public Service getService(String name) {
        Service service = null;
        // Optimistic read: attempt the lookup without blocking writers.
        long stamp = lock.tryOptimisticRead();
        if (stamp != 0) {
            try {
                service = services.get(name);
            } catch (Exception modified) {
                // The map was mutated mid-read; force the locked fallback below.
                stamp = 0;
            }
        }
        // A write intervened (validate failed) or no optimistic stamp was
        // available: redo the lookup under a proper read lock.
        if (!lock.validate(stamp) || stamp == 0) {
            stamp = lock.readLock();
            try {
                service = services.get(name);
            } finally {
                lock.unlockRead(stamp);
            }
        }
        if (service == null) {
            throw new ServiceNotFoundError(nodeID, name);
        }
        return service;
    }
}
public class JcrGroovyCompiler { /** * Compile Groovy source that located in < code > files < / code > . Compiled sources
* can be dependent to each other and dependent to Groovy sources that are
* accessible for this compiler and with additional Groovy sources
* < code > src < / code > . < b > NOTE < / b > To be able load Groovy source files from
* specified folders the following rules must be observed :
* < ul >
* < li > Groovy source files must be located in folder with respect to package
* structure < / li >
* < li > Name of Groovy source files must be the same as name of class located
* in file < / li >
* < li > Groovy source file must have extension ' . groovy ' < / li >
* < / ul >
* @ param src additional Groovy source location that should be added in
* class - path when compile < code > files < / code >
* @ param files Groovy sources to be compiled
* @ return result of compilation
* @ throws IOException if any i / o errors occurs */
public Class < ? > [ ] compile ( SourceFolder [ ] src , SourceFile [ ] files ) throws IOException { } } | return doCompile ( ( JcrGroovyClassLoader ) classLoaderProvider . getGroovyClassLoader ( src ) , files ) ; |
public class LocationDirector {
    /**
     * Requests that this client be moved to the specified place. A request will be made and when
     * the response is received, the location observers will be notified of success or failure.
     *
     * @param placeId the id of the place to move to; must be non-negative
     * @return true if the move to request was issued, false if it was rejected by a location
     * observer or because we have another request outstanding.
     */
    public boolean moveTo(int placeId) {
        // make sure the placeId is valid
        if (placeId < 0) {
            log.warning("Refusing moveTo(): invalid placeId " + placeId + ".");
            return false;
        }
        // first check to see if our observers are happy with this move request
        if (!mayMoveTo(placeId, null)) {
            return false;
        }
        // we need to call this both to mark that we're issuing a move request and to check to see
        // if the last issued request should be considered stale
        boolean refuse = checkRepeatMove();
        // complain if we're over-writing a pending request
        if (_pendingPlaceId != -1) {
            // if the pending request has been outstanding more than a minute, go ahead and let
            // this new one through in an attempt to recover from dropped moveTo requests
            if (refuse) {
                log.warning("Refusing moveTo; We have a request outstanding", "ppid", _pendingPlaceId, "npid", placeId);
                return false;
            } else {
                log.warning("Overriding stale moveTo request", "ppid", _pendingPlaceId, "npid", placeId);
            }
        }
        // make a note of our pending place id
        _pendingPlaceId = placeId;
        // issue a moveTo request; the listener clears the pending id on either outcome
        log.info("Issuing moveTo(" + placeId + ").");
        _lservice.moveTo(placeId, new LocationService.MoveListener() {
            public void moveSucceeded(PlaceConfig config) {
                // handle the successful move
                didMoveTo(_pendingPlaceId, config);
                // and clear out the tracked pending oid
                _pendingPlaceId = -1;
                handlePendingForcedMove();
            }
            public void requestFailed(String reason) {
                // clear out our pending request oid before notifying anyone
                int placeId = _pendingPlaceId;
                _pendingPlaceId = -1;
                log.info("moveTo failed", "pid", placeId, "reason", reason);
                // let our observers know that something has gone horribly awry
                handleFailure(placeId, reason);
                handlePendingForcedMove();
            }
        });
        return true;
    }
}
public class RebindOperationRecorder { /** * @ see org . springframework . ldap . support . transaction . CompensatingTransactionOperationRecorder # recordOperation ( java . lang . Object [ ] ) */
public CompensatingTransactionOperationExecutor recordOperation ( Object [ ] args ) { } } | if ( args == null || args . length != 3 ) { throw new IllegalArgumentException ( "Invalid arguments for bind operation" ) ; } Name dn = LdapTransactionUtils . getFirstArgumentAsName ( args ) ; Object object = args [ 1 ] ; Attributes attributes = null ; if ( args [ 2 ] != null && ! ( args [ 2 ] instanceof Attributes ) ) { throw new IllegalArgumentException ( "Invalid third argument to bind operation" ) ; } else if ( args [ 2 ] != null ) { attributes = ( Attributes ) args [ 2 ] ; } Name temporaryName = renamingStrategy . getTemporaryName ( dn ) ; return new RebindOperationExecutor ( ldapOperations , dn , temporaryName , object , attributes ) ; |
public class ns_conf_upgrade_history { /** * < pre >
* Performs generic data validation for the operation to be performed
* < / pre > */
protected void validate ( String operationType ) throws Exception { } } | super . validate ( operationType ) ; MPSString ns_conf_upgradefile_validator = new MPSString ( ) ; ns_conf_upgradefile_validator . validate ( operationType , ns_conf_upgradefile , "\"ns_conf_upgradefile\"" ) ; MPSIPAddress ns_ip_address_validator = new MPSIPAddress ( ) ; ns_ip_address_validator . validate ( operationType , ns_ip_address , "\"ns_ip_address\"" ) ; |
public class Scope { /** * Attempt to bind variable reference to a variable in this scope or a
* parent scope . If the variable to bind to isn ' t available or doesn ' t
* exist , false is returned .
* @ return true if reference has been bound */
public boolean bindToVariable ( VariableRef ref ) { } } | String name = ref . getName ( ) ; Variable var = getDeclaredVariable ( name ) ; if ( var != null ) { ref . setType ( null ) ; ref . setVariable ( var ) ; mVariableRefs . add ( ref ) ; return true ; } else { return false ; } |
public class JsonArray { /** * Adds the specified character to self .
* @ param character the character that needs to be added to the array . */
public void add ( Character character ) { } } | elements . add ( character == null ? JsonNull . INSTANCE : new JsonPrimitive ( character ) ) ; |
public class BaseMonetaryConversionsSingletonSpi { /** * Access the current registered { @ link javax . money . convert . ExchangeRateProvider } instances . If no provider
* names are passed ALL current registered providers are returned in undefined order .
* @ param providers the provider names of hte providers to be accessed
* @ return the list of providers , in the same order as requested .
* @ throws javax . money . MonetaryException if a provider could not be resolved . */
public List < ExchangeRateProvider > getExchangeRateProviders ( String ... providers ) { } } | List < ExchangeRateProvider > provInstances = new ArrayList < > ( ) ; Collection < String > providerNames = Arrays . asList ( providers ) ; if ( providerNames . isEmpty ( ) ) { providerNames = getProviderNames ( ) ; } for ( String provName : providerNames ) { ExchangeRateProvider provider = getExchangeRateProvider ( provName ) ; if ( provider == null ) { throw new MonetaryException ( "Unsupported conversion/rate provider: " + provName ) ; } provInstances . add ( provider ) ; } return provInstances ; |
public class InterceptorMetaDataHelper {
    /**
     * Populate the InterceptorMap for the EJB module with the metadata from the
     * WCCM objects created from ejb-jar.xml file of the EJB module. See {@link com.ibm.ejs.csi.EJBModuleMetaDataImpl#ivInterceptorsMap} for details
     * about this map.
     *
     * @param classLoader class loader used to load each interceptor class named
     *            in the deployment descriptor.
     * @param interceptors is the Interceptors object obtained from WCCM for the
     *            WCCM EJBJar object that represents metadata from ejb-jar.xml file.
     * @param interceptorsMap is the map object to be populated with metadata
     *            obtained from WCCM objects.
     * @param name module identity, passed through to per-method population
     *            (presumably for error reporting — TODO confirm).
     * @return a map where the key is the fully qualified name of an interceptor class that is
     *         defined in the ejb-jar.xml file for the module and the value is the WCCM
     *         Interceptor object that is created for the interceptor class in the EJB module.
     * @throws EJBConfigurationException if unable to load an interceptor class,
     *         find one of its interceptor methods, or other configuration errors
     *         such as invalid method signature.
     */
    public static Map<String, Interceptor> // d468919
                    populateInterceptorsMap(ClassLoader classLoader, Interceptors interceptors,
                                            IdentityHashMap<Class<?>, EnumMap<InterceptorMethodKind, List<Method>>> interceptorsMap,
                                            J2EEName name) throws EJBConfigurationException {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) {
            Tr.entry(tc, "populateInterceptorsMap");
        }
        // Get the list of Interceptor objects from the WCCM Interceptors object
        // passed as argument to this method.
        List<Interceptor> interceptorList = interceptors.getInterceptorList();
        HashMap<String, Interceptor> interceptorMap = new HashMap<String, Interceptor>(interceptorList.size()); // d468919
        // For each Interceptor object in the list.
        for (Interceptor interceptor : interceptorList) {
            String className = interceptor.getInterceptorClassName();
            if (TraceComponent.isAnyTracingEnabled() && tc.isEventEnabled()) {
                Tr.event(tc, "Loading EJB 3.0 Interceptor class: " + className);
            }
            // Load the interceptor class; a failure is logged via FFDC and the
            // translated CNTR0237E message, then surfaced as a config exception.
            Class<?> c;
            try {
                c = classLoader.loadClass(className);
            } catch (ClassNotFoundException ex) {
                FFDCFilter.processException(ex, CLASS_NAME + ".initializeInterceptorMD", "5352");
                if (TraceComponent.isAnyTracingEnabled() && tc.isEventEnabled()) {
                    Tr.event(tc, "Load of EJB 3.0 Interceptor class failed: " + className, ex);
                }
                EJBConfigurationException ecex;
                ecex = new EJBConfigurationException("CNTR0237E: The user-provided EJB 3.0 interceptor class \"" + className + "\" could not be found or loaded.", ex);
                // CNTR0237E: The user-provided enterprise bean level 3.0 {0} interceptor class cannot be found or loaded.
                Tr.error(tc, "INTERCEPTOR_CLASS_NOT_FOUND_CNTR0237E", new Object[] { className });
                throw ecex;
            }
            // Update the interceptor map with this WCCM Interceptor object.
            interceptorMap.put(className, interceptor); // d468919
            // Create a LIFO list of Class object where most generic super class of interceptor
            // is first out and the interceptor class itself is last out.
            LinkedList<Class<?>> lifoClasses = InterceptorMetaDataHelper.getLIFOSuperClassesList(c);
            // Create EnumMap for this Interceptor class and populate it with any
            // interceptor methods configured for this Interceptor class.
            EnumMap<InterceptorMethodKind, List<Method>> methodMap = new EnumMap<InterceptorMethodKind, List<Method>>(InterceptorMethodKind.class);
            // F743-17763 - Iterate over all interceptor kinds, and find the
            // interceptor methods defined in the deployment descriptor.
            for (InterceptorMethodKind kind : InterceptorMethodKind.values()) {
                // Get list of methods and populate EnumMap with the list of Method
                // objects for these methods.
                List<? extends InterceptorCallback> methodMetaDataList = kind.getMethods(interceptor);
                if (methodMetaDataList != null && !methodMetaDataList.isEmpty()) {
                    populateInterceptorMethodMap(c, lifoClasses, kind, PARM_TYPES, methodMetaDataList, methodMap, false, name);
                }
            }
            // Create a map entry for this Interceptor class.
            interceptorsMap.put(c, methodMap);
        } // end for each interceptor in list
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) {
            Tr.exit(tc, "populateInterceptorsMap");
        }
        return interceptorMap; // d468919
    }
}
public class NIODirectorySocket { /** * { @ inheritDoc } */
@ Override public void cleanup ( ) { } } | if ( nioThread != null ) { nioThread . toStop ( ) ; } if ( selectionKey != null ) { SocketChannel sock = ( SocketChannel ) selectionKey . channel ( ) ; selectionKey . cancel ( ) ; try { sock . socket ( ) . shutdownInput ( ) ; } catch ( IOException e ) { if ( LOGGER . isDebugEnabled ( ) ) { LOGGER . debug ( "Ignoring exception during shutdown input" , e ) ; } } try { sock . socket ( ) . shutdownOutput ( ) ; } catch ( IOException e ) { if ( LOGGER . isDebugEnabled ( ) ) { LOGGER . debug ( "Ignoring exception during shutdown output" , e ) ; } } try { sock . socket ( ) . close ( ) ; } catch ( IOException e ) { if ( LOGGER . isDebugEnabled ( ) ) { LOGGER . debug ( "Ignoring exception during socket close" , e ) ; } } try { sock . close ( ) ; } catch ( IOException e ) { if ( LOGGER . isDebugEnabled ( ) ) { LOGGER . debug ( "Ignoring exception during channel close" , e ) ; } } } try { Thread . sleep ( 100 ) ; } catch ( InterruptedException e ) { if ( LOGGER . isDebugEnabled ( ) ) { LOGGER . debug ( "SendThread interrupted during sleep, ignoring" ) ; } } selectionKey = null ; |
public class GobblinClusterManager { /** * Stop the application launcher then any services that were started outside of the application launcher */
private void stopAppLauncherAndServices ( ) { } } | try { this . applicationLauncher . stop ( ) ; } catch ( ApplicationException ae ) { LOGGER . error ( "Error while stopping Gobblin Cluster application launcher" , ae ) ; } if ( this . jobCatalog instanceof Service ) { ( ( Service ) this . jobCatalog ) . stopAsync ( ) . awaitTerminated ( ) ; } |
public class SqlConnRunner { /** * 查询 < br >
* 此方法不会关闭Connection
* @ param < T > 结果对象类型
* @ param conn 数据库连接对象
* @ param query { @ link Query }
* @ param rsh 结果集处理对象
* @ return 结果对象
* @ throws SQLException SQL执行异常 */
public < T > T find ( Connection conn , Query query , RsHandler < T > rsh ) throws SQLException { } } | checkConn ( conn ) ; Assert . notNull ( query , "[query] is null !" ) ; PreparedStatement ps = null ; try { ps = dialect . psForFind ( conn , query ) ; return SqlExecutor . query ( ps , rsh ) ; } catch ( SQLException e ) { throw e ; } finally { DbUtil . close ( ps ) ; } |
public class ResourceAssignment { /** * Retrieves the calendar used for this resource assignment .
* @ return ProjectCalendar instance */
public ProjectCalendar getCalendar ( ) { } } | ProjectCalendar calendar = null ; Resource resource = getResource ( ) ; if ( resource != null ) { calendar = resource . getResourceCalendar ( ) ; } Task task = getTask ( ) ; if ( calendar == null || task . getIgnoreResourceCalendar ( ) ) { calendar = task . getEffectiveCalendar ( ) ; } return calendar ; |
public class SignatureGenerator { /** * FormalTypeParameters :
* < FormalTypeParameter + >
* FormalTypeParameter :
* Identifier ClassBound InterfaceBound * */
private void genOptFormalTypeParameters ( List < ? extends TypeParameterElement > typeParameters , StringBuilder sb ) { } } | if ( ! typeParameters . isEmpty ( ) ) { sb . append ( '<' ) ; for ( TypeParameterElement typeParam : typeParameters ) { genFormalTypeParameter ( typeParam , sb ) ; } sb . append ( '>' ) ; } |
public class PluginValueExtender { /** * Create all getter
* @ param aOutline
* JAXB outline
* @ param aAllCtorClasses
* Map from class with value ( direct and derived ) to value type */
private static void _addValueGetter ( @ Nonnull final Outline aOutline , @ Nonnull final Map < JClass , JType > aAllCtorClasses ) { } } | final JCodeModel aCodeModel = aOutline . getCodeModel ( ) ; // For all generated classes
for ( final ClassOutline aClassOutline : aOutline . getClasses ( ) ) { // Get the implementation class
final JDefinedClass jClass = aClassOutline . implClass ; // For all methods in the class ( copy ! )
for ( final JMethod aMethod : new CommonsArrayList < > ( jClass . methods ( ) ) ) if ( aMethod . name ( ) . startsWith ( "get" ) && aMethod . params ( ) . isEmpty ( ) ) { final JType aReturnType = aMethod . type ( ) ; final JType aValueType = aAllCtorClasses . get ( aReturnType ) ; if ( aValueType != null ) { final boolean bIsBoolean = aValueType == aCodeModel . BOOLEAN ; final String sMethodName ; if ( bIsBoolean ) sMethodName = "is" + aMethod . name ( ) . substring ( 3 ) + "Value" ; else sMethodName = aMethod . name ( ) + "Value" ; if ( _containsMethodWithoutParams ( jClass . methods ( ) , sMethodName ) ) { // This can happen if an XSD contains the element " X " and
// " XValue " in the same type .
// Noticed in CII D16B for BasicWorkItemType with " Index " and
// " IndexValue " elements
LOGGER . error ( "Another method with name '" + sMethodName + "' and no parameters is already present in class '" + jClass . name ( ) + "' - not creating it." ) ; continue ; } // The return type is a generated class
if ( aValueType . isPrimitive ( ) ) { final JMethod aGetter ; final JVar aParam ; if ( bIsBoolean ) { // Create the boolean is . . . Value ( ) method
aGetter = jClass . method ( JMod . PUBLIC , aValueType , sMethodName ) ; aParam = aGetter . param ( JMod . FINAL , aValueType , "nullValue" ) ; final JVar aObj = aGetter . body ( ) . decl ( aReturnType , "aObj" , JExpr . invoke ( aMethod ) ) ; aGetter . body ( ) . _return ( JOp . cond ( aObj . eq ( JExpr . _null ( ) ) , aParam , aObj . invoke ( "isValue" ) ) ) ; } else { // Create the byte / char / double / float / int / long / short
// get . . . Value ( ) method
aGetter = jClass . method ( JMod . PUBLIC , aValueType , sMethodName ) ; aParam = aGetter . param ( JMod . FINAL , aValueType , "nullValue" ) ; final JVar aObj = aGetter . body ( ) . decl ( aReturnType , "aObj" , JExpr . invoke ( aMethod ) ) ; aGetter . body ( ) . _return ( JOp . cond ( aObj . eq ( JExpr . _null ( ) ) , aParam , aObj . invoke ( "getValue" ) ) ) ; } // Javadoc
aGetter . javadoc ( ) . add ( "Get the value of the contained " + aReturnType . name ( ) + " object" ) ; aGetter . javadoc ( ) . addParam ( aParam ) . add ( "The value to be returned, if the owning object is <code>null</code>" ) ; aGetter . javadoc ( ) . addReturn ( ) . add ( "Either the value of the contained " + aReturnType . name ( ) + " object or the passed " + aParam . name ( ) ) ; aGetter . javadoc ( ) . add ( AUTHOR ) ; } else { // Create the Object get . . . Value ( ) method
final JMethod aGetter = jClass . method ( JMod . PUBLIC , aValueType , sMethodName ) ; aGetter . annotate ( Nullable . class ) ; final JVar aObj = aGetter . body ( ) . decl ( aReturnType , "aObj" , JExpr . invoke ( aMethod ) ) ; aGetter . body ( ) . _return ( JOp . cond ( aObj . eq ( JExpr . _null ( ) ) , JExpr . _null ( ) , aObj . invoke ( "getValue" ) ) ) ; aGetter . javadoc ( ) . add ( "Get the value of the contained " + aReturnType . name ( ) + " object" ) ; aGetter . javadoc ( ) . addReturn ( ) . add ( "Either the value of the contained " + aReturnType . name ( ) + " object or <code>null</code>" ) ; aGetter . javadoc ( ) . add ( AUTHOR ) ; } } } } |
public class GetMaintenanceWindowExecutionTaskRequestMarshaller { /** * Marshall the given parameter object . */
public void marshall ( GetMaintenanceWindowExecutionTaskRequest getMaintenanceWindowExecutionTaskRequest , ProtocolMarshaller protocolMarshaller ) { } } | if ( getMaintenanceWindowExecutionTaskRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( getMaintenanceWindowExecutionTaskRequest . getWindowExecutionId ( ) , WINDOWEXECUTIONID_BINDING ) ; protocolMarshaller . marshall ( getMaintenanceWindowExecutionTaskRequest . getTaskId ( ) , TASKID_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; } |
public class WebcamUtils { /** * Get resource bundle for specific class .
* @ param clazz the class for which resource bundle should be found
* @ param locale the { @ link Locale } object
* @ return Resource bundle */
public static final ResourceBundle loadRB ( Class < ? > clazz , Locale locale ) { } } | String pkg = WebcamUtils . class . getPackage ( ) . getName ( ) . replaceAll ( "\\." , "/" ) ; return PropertyResourceBundle . getBundle ( String . format ( "%s/i18n/%s" , pkg , clazz . getSimpleName ( ) ) ) ; |
public class ScannerParam { /** * Sets whether or not the HTTP Headers of all requests should be scanned , not just requests that send parameters , through
* the query or request body .
* @ param scanAllRequests { @ code true } if the HTTP Headers of all requests should be scanned , { @ code false } otherwise
* @ since 2.5.0
* @ see # isScanHeadersAllRequests ( ) */
public void setScanHeadersAllRequests ( boolean scanAllRequests ) { } } | if ( scanAllRequests == scanHeadersAllRequests ) { return ; } this . scanHeadersAllRequests = scanAllRequests ; getConfig ( ) . setProperty ( SCAN_HEADERS_ALL_REQUESTS , this . scanHeadersAllRequests ) ; |
public class DatastoreException { /** * Translate RetryHelperException to the DatastoreException that caused the error . This method
* will always throw an exception .
* @ throws DatastoreException when { @ code ex } was caused by a { @ code DatastoreException } */
static DatastoreException translateAndThrow ( RetryHelperException ex ) { } } | BaseServiceException . translate ( ex ) ; throw new DatastoreException ( UNKNOWN_CODE , ex . getMessage ( ) , null , ex . getCause ( ) ) ; |
public class PluralRulesLoader { /** * Returns the locales for which we have plurals data . Utility for testing . */
public ULocale [ ] getAvailableULocales ( ) { } } | Set < String > keys = getLocaleIdToRulesIdMap ( PluralType . CARDINAL ) . keySet ( ) ; ULocale [ ] locales = new ULocale [ keys . size ( ) ] ; int n = 0 ; for ( Iterator < String > iter = keys . iterator ( ) ; iter . hasNext ( ) ; ) { locales [ n ++ ] = ULocale . createCanonical ( iter . next ( ) ) ; } return locales ; |
public class PhotosetsApi { /** * Get the list of photos in a set .
* < br >
* This method does not require authentication .
* @ param photosetId id of the photoset to return photos for . Required .
* @ param photoExtras Optional . A list of extra information to fetch for the primary photo . Currently supported fields are :
* license , date _ upload , date _ taken , owner _ name , icon _ server , original _ format , last _ update ,
* geo , tags , machine _ tags , o _ dims , views , media , path _ alias , url _ sq , url _ t , url _ s , url _ m , url _ o
* @ param privacyFilter Optional . Return photos only matching a certain privacy level . This only applies when making an authenticated call to view a photoset you own .
* @ param perPage Optional . Number of photos to return per page . If this argument is zero , it defaults to 500 . The maximum allowed value is 500.
* @ param page Optional . The page of results to return . If this argument is zero , it defaults to 1.
* @ param mediaType Optional . Filter results by media type .
* @ return object containing some basic photoset metadata information , along with a list of photos in the photoset .
* @ throws JinxException if required parameters are null or empty , or if there are any errors .
* @ see < a href = " https : / / www . flickr . com / services / api / flickr . photosets . getPhotos . html " > flickr . photosets . getPhotos < / a > */
public PhotosetPhotos getPhotos ( String photosetId , EnumSet < JinxConstants . PhotoExtras > photoExtras , JinxConstants . PrivacyFilter privacyFilter , int perPage , int page , JinxConstants . MediaType mediaType ) throws JinxException { } } | JinxUtils . validateParams ( photosetId ) ; Map < String , String > params = new TreeMap < > ( ) ; params . put ( "method" , "flickr.photosets.getPhotos" ) ; params . put ( "photoset_id" , photosetId ) ; if ( ! JinxUtils . isNullOrEmpty ( photoExtras ) ) { params . put ( "extras" , JinxUtils . buildCommaDelimitedList ( photoExtras ) ) ; } if ( privacyFilter != null ) { params . put ( "privacy_filter" , Integer . toString ( JinxUtils . privacyFilterToFlickrPrivacyFilterId ( privacyFilter ) ) ) ; } if ( perPage > 0 ) { params . put ( "per_page" , Integer . toString ( perPage ) ) ; } if ( page > 0 ) { params . put ( "page" , Integer . toString ( page ) ) ; } if ( mediaType != null ) { params . put ( "media" , mediaType . toString ( ) ) ; } return jinx . flickrGet ( params , PhotosetPhotos . class ) ; |
public class LinuxResourceCalculatorPlugin { /** * Read / proc / meminfo , parse and compute memory information
* @ param readAgain if false , read only on the first time */
private void readProcMemInfoFile ( boolean readAgain ) { } } | if ( readMemInfoFile && ! readAgain ) { return ; } // Read " / proc / memInfo " file
BufferedReader in = null ; FileReader fReader = null ; try { fReader = new FileReader ( procfsMemFile ) ; in = new BufferedReader ( fReader ) ; } catch ( FileNotFoundException f ) { // shouldn ' t happen . . . .
return ; } Matcher mat = null ; try { String str = in . readLine ( ) ; while ( str != null ) { mat = PROCFS_MEMFILE_FORMAT . matcher ( str ) ; if ( mat . find ( ) ) { if ( mat . group ( 1 ) . equals ( MEMTOTAL_STRING ) ) { ramSize = Long . parseLong ( mat . group ( 2 ) ) ; } else if ( mat . group ( 1 ) . equals ( SWAPTOTAL_STRING ) ) { swapSize = Long . parseLong ( mat . group ( 2 ) ) ; } else if ( mat . group ( 1 ) . equals ( MEMFREE_STRING ) ) { ramSizeFree = Long . parseLong ( mat . group ( 2 ) ) ; } else if ( mat . group ( 1 ) . equals ( SWAPFREE_STRING ) ) { swapSizeFree = Long . parseLong ( mat . group ( 2 ) ) ; } else if ( mat . group ( 1 ) . equals ( INACTIVE_STRING ) ) { inactiveSize = Long . parseLong ( mat . group ( 2 ) ) ; } } str = in . readLine ( ) ; } } catch ( IOException io ) { LOG . warn ( "Error reading the stream " + io ) ; } finally { // Close the streams
try { fReader . close ( ) ; try { in . close ( ) ; } catch ( IOException i ) { LOG . warn ( "Error closing the stream " + in ) ; } } catch ( IOException i ) { LOG . warn ( "Error closing the stream " + fReader ) ; } } readMemInfoFile = true ; |
public class druidGLexer { /** * $ ANTLR start " WHERE " */
public final void mWHERE ( ) throws RecognitionException { } } | try { int _type = WHERE ; int _channel = DEFAULT_TOKEN_CHANNEL ; // druidG . g : 629:8 : ( ( ' WHERE ' | ' where ' ) )
// druidG . g : 629:10 : ( ' WHERE ' | ' where ' )
{ // druidG . g : 629:10 : ( ' WHERE ' | ' where ' )
int alt18 = 2 ; int LA18_0 = input . LA ( 1 ) ; if ( ( LA18_0 == 'W' ) ) { alt18 = 1 ; } else if ( ( LA18_0 == 'w' ) ) { alt18 = 2 ; } else { NoViableAltException nvae = new NoViableAltException ( "" , 18 , 0 , input ) ; throw nvae ; } switch ( alt18 ) { case 1 : // druidG . g : 629:11 : ' WHERE '
{ match ( "WHERE" ) ; } break ; case 2 : // druidG . g : 629:19 : ' where '
{ match ( "where" ) ; } break ; } } state . type = _type ; state . channel = _channel ; } finally { // do for sure before leaving
} |
public class ConfigValidator { /** * Validates the given { @ link ReplicatedMapConfig } .
* @ param replicatedMapConfig the { @ link ReplicatedMapConfig } to check
* @ param mergePolicyProvider the { @ link com . hazelcast . replicatedmap . merge . MergePolicyProvider }
* to resolve merge policy classes */
public static void checkReplicatedMapConfig ( ReplicatedMapConfig replicatedMapConfig , com . hazelcast . replicatedmap . merge . MergePolicyProvider mergePolicyProvider ) { } } | checkReplicatedMapMergePolicy ( replicatedMapConfig , mergePolicyProvider ) ; |
public class XNElement { /** * Parse an XML from the supplied URL .
* @ param url the target URL
* @ return the parsed XML
* @ throws IOException if an I / O error occurs
* @ throws XMLStreamException if a parser error occurs */
public static XNElement parseXML ( URL url ) throws IOException , XMLStreamException { } } | try ( InputStream in = new BufferedInputStream ( url . openStream ( ) ) ) { return parseXML ( in ) ; } |
public class DefaultJobProgressStep { /** * Add children to the step and return the first one .
* @ param steps the number of step
* @ param newLevelSource who asked to create this new level
* @ param levelStep the new level can contains only one step
* @ return the new step */
public DefaultJobProgressStep addLevel ( int steps , Object newLevelSource , boolean levelStep ) { } } | assertModifiable ( ) ; this . maximumChildren = steps ; this . levelSource = newLevelSource ; if ( steps > 0 ) { this . childSize = 1.0D / steps ; } if ( this . maximumChildren > 0 ) { this . children = new ArrayList < > ( this . maximumChildren ) ; } else { this . children = new ArrayList < > ( ) ; } this . levelStep = levelStep ; // Create a virtual child
return new DefaultJobProgressStep ( null , newLevelSource , this ) ; |
public class MemberBuilder { /** * Sets the member host / port .
* @ param host the host name
* @ param port the port number
* @ return the member builder
* @ throws io . atomix . utils . net . MalformedAddressException if a valid { @ link Address } cannot be constructed from the arguments
* @ deprecated since 3.1 . Use { @ link # withHost ( String ) } and { @ link # withPort ( int ) } instead */
@ Deprecated public MemberBuilder withAddress ( String host , int port ) { } } | return withAddress ( Address . from ( host , port ) ) ; |
public class P2sVpnGatewaysInner { /** * Generates VPN profile for P2S client of the P2SVpnGateway in the specified resource group .
* @ param resourceGroupName The name of the resource group .
* @ param gatewayName The name of the P2SVpnGateway .
* @ param authenticationMethod VPN client Authentication Method . Possible values are : ' EAPTLS ' and ' EAPMSCHAPv2 ' . Possible values include : ' EAPTLS ' , ' EAPMSCHAPv2'
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ throws CloudException thrown if the request is rejected by server
* @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
* @ return the VpnProfileResponseInner object if successful . */
public VpnProfileResponseInner beginGenerateVpnProfile ( String resourceGroupName , String gatewayName , AuthenticationMethod authenticationMethod ) { } } | return beginGenerateVpnProfileWithServiceResponseAsync ( resourceGroupName , gatewayName , authenticationMethod ) . toBlocking ( ) . single ( ) . body ( ) ; |
public class HikariDataSource { /** * { @ inheritDoc } */
@ Override public boolean isWrapperFor ( Class < ? > iface ) throws SQLException { } } | if ( iface . isInstance ( this ) ) { return true ; } HikariPool p = pool ; if ( p != null ) { final DataSource unwrappedDataSource = p . getUnwrappedDataSource ( ) ; if ( iface . isInstance ( unwrappedDataSource ) ) { return true ; } if ( unwrappedDataSource != null ) { return unwrappedDataSource . isWrapperFor ( iface ) ; } } return false ; |
public class BlackBox { private void build_info_as_str ( int index ) { } } | // Convert time to a string
SimpleDateFormat sdf = new SimpleDateFormat ( "dd/MM/yyyy HH:mm:ss:SS" ) ; sdf . setTimeZone ( TimeZone . getTimeZone ( "ECT" ) ) ; String da = sdf . format ( box [ index ] . when ) ; elt_str = new StringBuffer ( da ) ; // Add request type and command name in case of
elt_str . append ( " : " ) ; if ( box [ index ] . req_type == Req_Operation ) { elt_str . append ( "Operation " ) ; switch ( box [ index ] . op_type ) { case Op_Command_inout : elt_str . append ( "command_inout (cmd = " ) ; elt_str . append ( box [ index ] . cmd_name ) ; elt_str . append ( ") " ) ; break ; case Op_Ping : elt_str . append ( "ping " ) ; break ; case Op_Info : elt_str . append ( "info " ) ; break ; case Op_BlackBox : elt_str . append ( "blackbox " ) ; break ; case Op_Command_list : elt_str . append ( "command_list_query " ) ; break ; case Op_Command : elt_str . append ( "command_query " ) ; break ; case Op_Get_Attr_Config : elt_str . append ( "get_attribute_config " ) ; break ; case Op_Set_Attr_Config : elt_str . append ( "set_attribute_config " ) ; break ; case Op_Read_Attr : elt_str . append ( "read_attributes " ) ; break ; case Op_Write_Attr : elt_str . append ( "write_attributes " ) ; break ; case Op_Command_inout_2 : elt_str . append ( "command_inout_2 (cmd = " ) . append ( box [ index ] . cmd_name ) . append ( ") " ) ; break ; case Op_Command_list_2 : elt_str . append ( "command_list_query_2 " ) ; break ; case Op_Command_2 : elt_str . append ( "command_query_2 " ) ; break ; case Op_Get_Attr_Config_2 : elt_str . append ( "get_attribute_config_2 " ) ; break ; case Op_Read_Attr_2 : elt_str . append ( "read_attributes_2 " ) ; break ; case Op_Command_inout_history_2 : elt_str . append ( "command_inout_history_2 " ) ; break ; case Op_Read_Attr_history_2 : elt_str . append ( "read_attribute_history_2" ) ; break ; case Op_Unknown : elt_str . append ( "unknown operation !!!!!" ) ; return ; } } else if ( box [ index ] . req_type == Req_Attribute ) { elt_str . append ( "Attribute " ) ; switch ( box [ index ] . attr_type ) { case Attr_Name : elt_str . append ( "name " ) ; break ; case Attr_Description : elt_str . append ( "description " ) ; break ; case Attr_Status : elt_str . append ( "status " ) ; break ; case Attr_State : elt_str . 
append ( "state " ) ; break ; case Attr_AdmName : elt_str . append ( "adm_name " ) ; break ; case Attr_Unknown : elt_str . append ( "unknown attribute !!!!!" ) ; return ; } } else { elt_str . append ( "Unknown CORBA request type !!!!!" ) ; return ; } // Add client host if defined
if ( box [ index ] . host . equals ( "Unknown" ) != true ) { int [ ] conv_addr = new int [ 4 ] ; for ( int i = 0 ; i < 4 ; i ++ ) { if ( box [ index ] . host_ip [ i ] < 0 ) conv_addr [ i ] = 0xff + ( int ) ( box [ index ] . host_ip [ i ] ) + 1 ; else conv_addr [ i ] = ( int ) ( box [ index ] . host_ip [ i ] ) ; } StringBuffer host_str = new StringBuffer ( ) ; host_str . append ( conv_addr [ 0 ] ) ; host_str . append ( "." ) ; host_str . append ( conv_addr [ 1 ] ) ; host_str . append ( "." ) ; host_str . append ( conv_addr [ 2 ] ) ; host_str . append ( "." ) ; host_str . append ( conv_addr [ 3 ] ) ; elt_str . append ( "requested from " ) ; InetAddress ad ; try { ad = InetAddress . getByName ( host_str . toString ( ) ) ; elt_str . append ( ad . getHostName ( ) ) ; } catch ( UnknownHostException ex ) { elt_str . append ( host_str . toString ( ) ) ; } } |
public class CommunityGatewayConfigTranslatorFactorySpi { /** * Given an incoming namespace , return the translator pipeline
* to translate a document with that namespace up to the ' current ' format .
* @ param ns
* @ return */
@ Override public GatewayConfigTranslator getTranslator ( GatewayConfigNamespace ns ) { } } | // First , we create our pipeline composite
GatewayConfigTranslatorPipeline result = null ; if ( ns . equals ( GatewayConfigNamespace . SEPTEMBER_2014 ) ) { result = new GatewayConfigTranslatorPipeline ( ) ; GatewayConfigTranslator september2014Translator = new September2014ToNovember2015Translator ( ) ; result . addTranslator ( september2014Translator ) ; ns = GatewayConfigNamespace . NOVEMBER_2015 ; } if ( ns . equals ( GatewayConfigNamespace . NOVEMBER_2015 ) ) { if ( result == null ) { result = new GatewayConfigTranslatorPipeline ( ) ; } GatewayConfigTranslator november2015Validator = new November2015ToJune2016Translator ( ) ; result . addTranslator ( november2015Validator ) ; ns = GatewayConfigNamespace . CURRENT_NS ; } if ( ns . equals ( GatewayConfigNamespace . CURRENT_NS ) ) { if ( result == null ) { result = new GatewayConfigTranslatorPipeline ( ) ; } GatewayConfigTranslator june2016Validator = new June2016Validator ( ) ; result . addTranslator ( june2016Validator ) ; } return result ; |
public class HttpJsonSerializer { /** * Parses a single UIDMeta object
* @ throws JSONException if parsing failed
* @ throws BadRequestException if the content was missing or parsing failed */
public UIDMeta parseUidMetaV1 ( ) { } } | final String json = query . getContent ( ) ; if ( json == null || json . isEmpty ( ) ) { throw new BadRequestException ( HttpResponseStatus . BAD_REQUEST , "Missing message content" , "Supply valid JSON formatted data in the body of your request" ) ; } try { return JSON . parseToObject ( json , UIDMeta . class ) ; } catch ( IllegalArgumentException iae ) { throw new BadRequestException ( "Unable to parse the given JSON" , iae ) ; } |
public class UriTemplate { /** * Expand the string with the given parameters .
* @ param parameters The parameters
* @ return The expanded URI */
public String expand ( Map < String , Object > parameters ) { } } | StringBuilder builder = new StringBuilder ( ) ; boolean previousHasContent = false ; boolean anyPreviousHasOperator = false ; for ( PathSegment segment : segments ) { String result = segment . expand ( parameters , previousHasContent , anyPreviousHasOperator ) ; if ( result == null ) { break ; } if ( segment instanceof UriTemplateParser . VariablePathSegment ) { if ( result . contains ( String . valueOf ( ( ( UriTemplateParser . VariablePathSegment ) segment ) . getOperator ( ) ) ) ) { anyPreviousHasOperator = true ; } } previousHasContent = result . length ( ) > 0 ; builder . append ( result ) ; } return builder . toString ( ) ; |
public class CommandHelper { /** * Convert the value according the type of DeviceData .
* @ param shortValues
* the value to insert on DeviceData
* @ param deviceDataArgin
* the DeviceData attribute to write
* @ param dataType
* the type of inserted data
* @ throws DevFailed */
public static void insertFromShortArray ( final short [ ] shortValues , final DeviceData deviceDataArgin , final int dataType ) throws DevFailed { } } | // by default for xdim = 1 , send the sum .
Double doubleSum = new Double ( 0 ) ; for ( final short shortValue : shortValues ) { doubleSum = doubleSum + shortValue ; } switch ( dataType ) { case TangoConst . Tango_DEV_SHORT : deviceDataArgin . insert ( doubleSum . shortValue ( ) ) ; break ; case TangoConst . Tango_DEV_USHORT : deviceDataArgin . insert_us ( doubleSum . intValue ( ) ) ; break ; case TangoConst . Tango_DEV_CHAR : Except . throw_exception ( "TANGO_WRONG_DATA_ERROR" , "input type Tango_DEV_CHAR not supported" , "CommandHelper.insertFromShortArray(short[] values,deviceDataArgin)" ) ; break ; case TangoConst . Tango_DEV_UCHAR : deviceDataArgin . insert ( doubleSum . shortValue ( ) ) ; break ; case TangoConst . Tango_DEV_LONG : deviceDataArgin . insert ( doubleSum . intValue ( ) ) ; break ; case TangoConst . Tango_DEV_ULONG : deviceDataArgin . insert_ul ( doubleSum . longValue ( ) ) ; break ; case TangoConst . Tango_DEV_LONG64 : deviceDataArgin . insert ( doubleSum . intValue ( ) ) ; break ; case TangoConst . Tango_DEV_ULONG64 : deviceDataArgin . insert_u64 ( doubleSum . longValue ( ) ) ; break ; case TangoConst . Tango_DEV_INT : deviceDataArgin . insert ( doubleSum . intValue ( ) ) ; break ; case TangoConst . Tango_DEV_FLOAT : deviceDataArgin . insert ( doubleSum . floatValue ( ) ) ; break ; case TangoConst . Tango_DEV_DOUBLE : deviceDataArgin . insert ( doubleSum . doubleValue ( ) ) ; break ; case TangoConst . Tango_DEV_STRING : deviceDataArgin . insert ( doubleSum . toString ( ) ) ; break ; case TangoConst . Tango_DEV_BOOLEAN : if ( doubleSum . doubleValue ( ) == 1 ) { deviceDataArgin . insert ( true ) ; } else { deviceDataArgin . insert ( false ) ; } break ; case TangoConst . Tango_DEV_STATE : DevState devStateValue = DevState . UNKNOWN ; if ( shortValues . length > 0 ) { try { devStateValue = DevState . from_int ( Short . valueOf ( shortValues [ 0 ] ) . intValue ( ) ) ; } catch ( final org . omg . CORBA . BAD_PARAM badParam ) { devStateValue = DevState . UNKNOWN ; } } deviceDataArgin . 
insert ( devStateValue ) ; break ; // Array input type
case TangoConst . Tango_DEVVAR_SHORTARRAY : deviceDataArgin . insert ( shortValues ) ; break ; case TangoConst . Tango_DEVVAR_USHORTARRAY : deviceDataArgin . insert_us ( shortValues ) ; break ; case TangoConst . Tango_DEVVAR_CHARARRAY : final byte [ ] byteValues = new byte [ shortValues . length ] ; for ( int i = 0 ; i < shortValues . length ; i ++ ) { byteValues [ i ] = Short . valueOf ( shortValues [ i ] ) . byteValue ( ) ; } deviceDataArgin . insert ( byteValues ) ; break ; case TangoConst . Tango_DEVVAR_LONGARRAY : final int [ ] longValues = new int [ shortValues . length ] ; for ( int i = 0 ; i < shortValues . length ; i ++ ) { longValues [ i ] = Short . valueOf ( shortValues [ i ] ) . intValue ( ) ; } deviceDataArgin . insert ( longValues ) ; break ; case TangoConst . Tango_DEVVAR_ULONGARRAY : final long [ ] ulongValues = new long [ shortValues . length ] ; for ( int i = 0 ; i < shortValues . length ; i ++ ) { ulongValues [ i ] = Short . valueOf ( shortValues [ i ] ) . longValue ( ) ; } deviceDataArgin . insert_ul ( ulongValues ) ; break ; case TangoConst . Tango_DEVVAR_LONG64ARRAY : final int [ ] long64Values = new int [ shortValues . length ] ; for ( int i = 0 ; i < shortValues . length ; i ++ ) { long64Values [ i ] = Short . valueOf ( shortValues [ i ] ) . intValue ( ) ; } deviceDataArgin . insert ( long64Values ) ; break ; case TangoConst . Tango_DEVVAR_ULONG64ARRAY : final long [ ] uLong64Values = new long [ shortValues . length ] ; for ( int i = 0 ; i < shortValues . length ; i ++ ) { uLong64Values [ i ] = Short . valueOf ( shortValues [ i ] ) . longValue ( ) ; } deviceDataArgin . insert_u64 ( uLong64Values ) ; break ; case TangoConst . Tango_DEVVAR_FLOATARRAY : final float [ ] floatValues = new float [ shortValues . length ] ; for ( int i = 0 ; i < shortValues . length ; i ++ ) { floatValues [ i ] = Short . valueOf ( shortValues [ i ] ) . floatValue ( ) ; } deviceDataArgin . insert ( floatValues ) ; break ; case TangoConst . 
Tango_DEVVAR_DOUBLEARRAY : final double [ ] doubleValues = new double [ shortValues . length ] ; for ( int i = 0 ; i < shortValues . length ; i ++ ) { doubleValues [ i ] = Short . valueOf ( shortValues [ i ] ) . doubleValue ( ) ; } deviceDataArgin . insert ( doubleValues ) ; break ; case TangoConst . Tango_DEVVAR_STRINGARRAY : final String [ ] stringValues = new String [ shortValues . length ] ; for ( int i = 0 ; i < shortValues . length ; i ++ ) { stringValues [ i ] = Short . valueOf ( shortValues [ i ] ) . toString ( ) ; } deviceDataArgin . insert ( stringValues ) ; break ; default : Except . throw_exception ( "TANGO_WRONG_DATA_ERROR" , "input type " + deviceDataArgin . getType ( ) + " not supported" , "CommandHelper.insertFromShortArray(short[] values,deviceDataArgin)" ) ; break ; } |
public class AmazonSimpleEmailServiceClient { /** * Provides sending statistics for the current AWS Region . The result is a list of data points , representing the
* last two weeks of sending activity . Each data point in the list contains statistics for a 15 - minute period of
* time .
* You can execute this operation no more than once per second .
* @ param getSendStatisticsRequest
* @ return Result of the GetSendStatistics operation returned by the service .
* @ sample AmazonSimpleEmailService . GetSendStatistics
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / email - 2010-12-01 / GetSendStatistics " target = " _ top " > AWS API
* Documentation < / a > */
@ Override public GetSendStatisticsResult getSendStatistics ( GetSendStatisticsRequest request ) { } } | request = beforeClientExecution ( request ) ; return executeGetSendStatistics ( request ) ; |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.