signature
stringlengths 43
39.1k
| implementation
stringlengths 0
450k
|
|---|---|
public class DisconfFileCoreProcessorImpl { /** * 更新 一個配置文件 , 下载 、 注入到仓库 、 Watch 三步骤 */
private void updateOneConfFile ( String fileName , DisconfCenterFile disconfCenterFile ) throws Exception { } }
|
if ( disconfCenterFile == null ) { throw new Exception ( "cannot find disconfCenterFile " + fileName ) ; } String filePath = fileName ; Map < String , Object > dataMap = new HashMap < String , Object > ( ) ; // 开启disconf才需要远程下载 , 否则就本地就好
if ( DisClientConfig . getInstance ( ) . ENABLE_DISCONF ) { // 下载配置
try { String url = disconfCenterFile . getRemoteServerUrl ( ) ; filePath = fetcherMgr . downloadFileFromServer ( url , fileName , disconfCenterFile . getFileDir ( ) ) ; } catch ( Exception e ) { // 下载失败了 , 尝试使用本地的配置
LOGGER . error ( e . toString ( ) , e ) ; LOGGER . warn ( "using local properties in class path: " + fileName ) ; // change file path
filePath = fileName ; } LOGGER . debug ( "download ok." ) ; } try { dataMap = FileTypeProcessorUtils . getKvMap ( disconfCenterFile . getSupportFileTypeEnum ( ) , disconfCenterFile . getFilePath ( ) ) ; } catch ( Exception e ) { LOGGER . error ( "cannot get kv data for " + filePath , e ) ; } // 注入到仓库中
disconfStoreProcessor . inject2Store ( fileName , new DisconfValue ( null , dataMap ) ) ; LOGGER . debug ( "inject ok." ) ; // 开启disconf才需要进行watch
if ( DisClientConfig . getInstance ( ) . ENABLE_DISCONF ) { // Watch
DisConfCommonModel disConfCommonModel = disconfStoreProcessor . getCommonModel ( fileName ) ; if ( watchMgr != null ) { watchMgr . watchPath ( this , disConfCommonModel , fileName , DisConfigTypeEnum . FILE , GsonUtils . toJson ( disconfCenterFile . getKV ( ) ) ) ; LOGGER . debug ( "watch ok." ) ; } else { LOGGER . warn ( "cannot monitor {} because watch mgr is null" , fileName ) ; } }
|
public class MicroMetaDao { /** * 锟斤拷锟絪ql锟斤拷询 */
public List < Map < String , Object > > queryObjJoinByCondition ( String sql ) { } }
|
/* JdbcTemplate jdbcTemplate = ( JdbcTemplate ) MicroDbHolder
. getDbSource ( dbName ) ; */
JdbcTemplate jdbcTemplate = getMicroJdbcTemplate ( ) ; logger . debug ( sql ) ; List < Map < String , Object > > retList0 = jdbcTemplate . queryForList ( sql ) ; // add 201807 ning
// List < Map < String , Object > > retList = changeOutKeyCase4List ( retList0 ) ;
// add 201902 ning
List < Map < String , Object > > retList = ignoreKeyCase ( ( List ) retList0 ) ; return retList ;
|
public class Bzip2Archiver { /** * Creates Stream for decompression
* @ param path
* path to file to uncompress
* @ param encoding
* ecoding to use
* @ return decompression stream
* @ throws IOException */
public InputStreamReader getDecompressionStream ( String path , String encoding ) throws IOException { } }
|
File fileToUncompress = new File ( path ) ; BufferedInputStream fileStream = new BufferedInputStream ( new FileInputStream ( fileToUncompress ) ) ; // read bzip2 prefix : BZ
fileStream . read ( ) ; fileStream . read ( ) ; BufferedInputStream bufferedStream = new BufferedInputStream ( fileStream ) ; CBZip2InputStream input = new CBZip2InputStream ( bufferedStream ) ; return new InputStreamReader ( input , encoding ) ;
|
public class AbstractCommandLineRunner { /** * Create a writer with the newer output charset . */
@ GwtIncompatible ( "Unnecessary" ) private Writer streamToOutputWriter2 ( OutputStream stream ) { } }
|
if ( outputCharset2 == null ) { return new BufferedWriter ( new OutputStreamWriter ( stream , UTF_8 ) ) ; } else { return new BufferedWriter ( new OutputStreamWriter ( stream , outputCharset2 ) ) ; }
|
public class LatencyMetric { /** * Exports this object to a plain map structure which can be easily converted into other
* target formats .
* @ return the exported structure . */
public Map < String , Object > export ( ) { } }
|
Map < String , Object > result = new HashMap < String , Object > ( ) ; result . put ( "min" , min ( ) ) ; result . put ( "max" , max ( ) ) ; result . put ( "count" , count ( ) ) ; result . put ( "percentiles" , percentiles ( ) ) ; result . put ( "timeUnit" , timeUnit ( ) . toString ( ) ) ; return result ;
|
public class TriggerConfig { /** * The event type or types for which notifications are triggered .
* @ return The event type or types for which notifications are triggered .
* @ see TriggerEventType */
public java . util . List < String > getTriggerEvents ( ) { } }
|
if ( triggerEvents == null ) { triggerEvents = new com . amazonaws . internal . SdkInternalList < String > ( ) ; } return triggerEvents ;
|
public class DeviceManagerClient { /** * Modifies the configuration for the device , which is eventually sent from the Cloud IoT Core
* servers . Returns the modified configuration version and its metadata .
* < p > Sample code :
* < pre > < code >
* try ( DeviceManagerClient deviceManagerClient = DeviceManagerClient . create ( ) ) {
* DeviceName name = DeviceName . of ( " [ PROJECT ] " , " [ LOCATION ] " , " [ REGISTRY ] " , " [ DEVICE ] " ) ;
* ByteString binaryData = ByteString . copyFromUtf8 ( " " ) ;
* DeviceConfig response = deviceManagerClient . modifyCloudToDeviceConfig ( name . toString ( ) , binaryData ) ;
* < / code > < / pre >
* @ param name The name of the device . For example ,
* ` projects / p0 / locations / us - central1 / registries / registry0 / devices / device0 ` or
* ` projects / p0 / locations / us - central1 / registries / registry0 / devices / { num _ id } ` .
* @ param binaryData The configuration data for the device .
* @ throws com . google . api . gax . rpc . ApiException if the remote call fails */
public final DeviceConfig modifyCloudToDeviceConfig ( String name , ByteString binaryData ) { } }
|
ModifyCloudToDeviceConfigRequest request = ModifyCloudToDeviceConfigRequest . newBuilder ( ) . setName ( name ) . setBinaryData ( binaryData ) . build ( ) ; return modifyCloudToDeviceConfig ( request ) ;
|
public class ResourceController { /** * Of the resource in the given resource list , return the one that ' s
* currently in use .
* If more than one such resource exists , one is chosen and returned .
* This method is used for reporting what ' s causing the blockage . */
public Resource getMissingResource ( final ResourceList resources ) { } }
|
try { return _withLock ( new Callable < Resource > ( ) { @ Override public Resource call ( ) { return resources . getConflict ( inUse ) ; } } ) ; } catch ( Exception e ) { throw new IllegalStateException ( "Inner callable does not throw exception" ) ; }
|
public class RemoteTopicSpaceControl { /** * / * ( non - Javadoc )
* @ see com . ibm . ws . sib . processor . runtime . SIMPRemoteTopicSpaceControllable # getRemoteSubscriptions ( ) */
public SIMPIterator getRemoteSubscriptions ( ) { } }
|
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "getRemoteSubscriptions" ) ; List durableConsumers = new LinkedList ( ) ; if ( _anycastInputHandler != null ) { // we have a durable consumer
durableConsumers . add ( _anycastInputHandler ) ; } AttachedRemoteSubscriberIterator remoteSubscriptionItr = new AttachedRemoteSubscriberIterator ( durableConsumers ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "getRemoteSubscriptions" , remoteSubscriptionItr ) ; return remoteSubscriptionItr ;
|
public class VoldemortBuildAndPushJob { /** * Get the sanitized input path . At the moment of writing , this means the
* # LATEST tag is expanded . */
private synchronized Path getInputPath ( ) throws IOException { } }
|
if ( sanitizedInputPath == null ) { // No need to query Hadoop more than once as this shouldn ' t change mid - run ,
// thus , we can lazily initialize and cache the result .
Path path = new Path ( props . getString ( BUILD_INPUT_PATH ) ) ; sanitizedInputPath = HadoopUtils . getSanitizedPath ( path ) ; } return sanitizedInputPath ;
|
public class ForField { /** * { @ inheritDoc } */
public FieldDescription transform ( TypeDescription instrumentedType , FieldDescription fieldDescription ) { } }
|
return new TransformedField ( instrumentedType , fieldDescription . getDeclaringType ( ) , transformer . transform ( instrumentedType , fieldDescription . asToken ( none ( ) ) ) , fieldDescription . asDefined ( ) ) ;
|
public class EntryController { /** * Called when a patient context change has been requested .
* @ param silent = If true , user interaction is not permitted . */
@ Override public String pending ( boolean silent ) { } }
|
if ( modified && ! warned ) { if ( silent || ! PromptDialog . confirm ( TX_PROMPT ) ) { return "Vital entry in progress." ; } } return null ;
|
public class WakeUpSbb { /** * HELPERS */
private void sendResponse ( RequestEvent event , int responseCode ) throws SipException , InvalidArgumentException , ParseException { } }
|
event . getServerTransaction ( ) . sendResponse ( sipProvider . getMessageFactory ( ) . createResponse ( responseCode , event . getRequest ( ) ) ) ;
|
public class SSLConnectionLink { /** * @ see com . ibm . wsspi . channelfw . base . OutboundProtocolLink # connectAsynch ( java . lang . Object ) */
@ Override public void connectAsynch ( Object address ) { } }
|
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) { Tr . entry ( tc , "connectAsynch, vc=" + getVCHash ( ) ) ; } // Determine if this is a redundant connect .
if ( connected ) { handleRedundantConnect ( ) ; } this . targetAddress = ( TCPConnectRequestContext ) address ; // Nothing specific to SSL on connect . Pass through .
( ( OutboundConnectionLink ) getDeviceLink ( ) ) . connectAsynch ( address ) ; // The SSL handshake will happen in the ready method path .
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) { Tr . exit ( tc , "connectAsynch" ) ; }
|
public class FormatterRequest { /** * Sets the { @ link # textRegionAccess } . If the region has syntax errors and no explicit { @ link ExceptionAcceptor } is
* configured yet , the { @ link ExceptionAcceptor # IGNORING ignoring acceptor } will be configured . */
public FormatterRequest setTextRegionAccess ( ITextRegionAccess tokens ) { } }
|
if ( tokens . hasSyntaxError ( ) && this . exceptionHandler == null ) this . exceptionHandler = ExceptionAcceptor . IGNORING ; this . textRegionAccess = tokens ; return this ;
|
public class MatchParserImpl { /** * ( < ESCAPE > < STRING _ LITERAL > ) ? */
final public Selector LikePredicate ( ) throws ParseException { } }
|
Selector id ; Token pat ; Token esc = null ; boolean neg = false ; id = FieldRef ( ) ; switch ( ( jj_ntk == - 1 ) ? jj_ntk ( ) : jj_ntk ) { case NOT : jj_consume_token ( NOT ) ; neg = true ; break ; default : jj_la1 [ 17 ] = jj_gen ; ; } jj_consume_token ( LIKE ) ; pat = jj_consume_token ( STRING_LITERAL ) ; switch ( ( jj_ntk == - 1 ) ? jj_ntk ( ) : jj_ntk ) { case ESCAPE : jj_consume_token ( ESCAPE ) ; esc = jj_consume_token ( STRING_LITERAL ) ; break ; default : jj_la1 [ 18 ] = jj_gen ; ; } Selector ans = ParseUtil . convertLike ( id , pat . image , ( esc == null ) ? null : esc . image ) ; if ( ans == null ) generateParseException ( ) ; if ( neg ) { if ( true ) return new OperatorImpl ( Operator . NOT , ans ) ; } else { if ( true ) return ans ; } throw new Error ( "Missing return statement in function" ) ;
|
public class Importer { /** * Imports the engine from the given file
* @ param file is the file containing the engine to import
* @ return the engine represented by the file
* @ throws IOException if any error occurs upon reading the file */
public Engine fromFile ( File file ) throws IOException { } }
|
BufferedReader reader = new BufferedReader ( new InputStreamReader ( new FileInputStream ( file ) , FuzzyLite . UTF_8 ) ) ; String line ; StringBuilder textEngine = new StringBuilder ( ) ; try { while ( ( line = reader . readLine ( ) ) != null ) { textEngine . append ( line ) . append ( "\n" ) ; } } catch ( IOException ex ) { throw ex ; } finally { reader . close ( ) ; } return fromString ( textEngine . toString ( ) ) ;
|
public class ObservableAdapterBuilder { /** * Defines the function for evaluating whether two objects have the same identity , for the purpose of determining
* notifications . */
@ NonNull public ObservableAdapterBuilder < T > identityEquality ( @ Nullable EqualityFunction < ? super T > identityEqualityFunction ) { } }
|
mIdentityEqualityFunction = identityEqualityFunction ; return this ;
|
public class HiveSerDeManager { /** * Get an instance of { @ link HiveSerDeManager } .
* @ param props A { @ link State } object . To get a specific implementation of { @ link HiveSerDeManager } , specify either
* one of the values in { @ link Implementation } ( e . g . , AVRO or ORC ) or the name of a class that implements
* { @ link HiveSerDeManager } in property { @ link # HIVE _ ROW _ FORMAT } . The { @ link State } object is also used to
* instantiate the { @ link HiveSerDeManager } . */
public static HiveSerDeManager get ( State props ) { } }
|
String type = props . getProp ( HIVE_ROW_FORMAT , Implementation . AVRO . name ( ) ) ; Optional < Implementation > implementation = Enums . getIfPresent ( Implementation . class , type . toUpperCase ( ) ) ; try { if ( implementation . isPresent ( ) ) { return ( HiveSerDeManager ) ConstructorUtils . invokeConstructor ( Class . forName ( implementation . get ( ) . toString ( ) ) , props ) ; } return ( HiveSerDeManager ) ConstructorUtils . invokeConstructor ( Class . forName ( type ) , props ) ; } catch ( ReflectiveOperationException e ) { throw new RuntimeException ( "Unable to instantiate " + HiveSerDeManager . class . getSimpleName ( ) + " with type " + type , e ) ; }
|
public class DeleteCorpusDao { /** * Deletes a top level corpus , when it is already exists .
* @ param corpusName */
@ Transactional ( readOnly = false , propagation = Propagation . REQUIRES_NEW , isolation = Isolation . READ_COMMITTED ) public void checkAndRemoveTopLevelCorpus ( String corpusName ) { } }
|
if ( existConflictingTopLevelCorpus ( corpusName ) ) { log . info ( "delete conflicting corpus: {}" , corpusName ) ; List < String > corpusNames = new LinkedList < > ( ) ; corpusNames . add ( corpusName ) ; deleteCorpora ( getQueryDao ( ) . mapCorpusNamesToIds ( corpusNames ) , false ) ; }
|
public class ApiOvhTelephony { /** * Operations on a telephony service
* REST : GET / telephony / { billingAccount } / service / { serviceName } / task
* @ param action [ required ] Filter the value of action property ( = )
* @ param status [ required ] Filter the value of status property ( = )
* @ param serviceType [ required ] Filter the value of serviceType property ( = )
* @ param billingAccount [ required ] The name of your billingAccount
* @ param serviceName [ required ] */
public ArrayList < Long > billingAccount_service_serviceName_task_GET ( String billingAccount , String serviceName , String action , String serviceType , OvhTaskStatusEnum status ) throws IOException { } }
|
String qPath = "/telephony/{billingAccount}/service/{serviceName}/task" ; StringBuilder sb = path ( qPath , billingAccount , serviceName ) ; query ( sb , "action" , action ) ; query ( sb , "serviceType" , serviceType ) ; query ( sb , "status" , status ) ; String resp = exec ( qPath , "GET" , sb . toString ( ) , null ) ; return convertTo ( resp , t2 ) ;
|
public class AESUtils { /** * Encrypt the plain text .
* @ param salt
* salt is used together with the password to construct a 256 bit SecretKey
* @ param password
* the secret key
* @ param plainText
* unencrypted text
* @ return the iv as a hex string followed by ' $ ' followed by the encrypted text . */
public static String encrypt ( String salt , String password , String plainText ) { } }
|
SecretKey secret = getKey ( salt , password ) ; return encryptBouncyCastle ( secret , plainText ) ;
|
public class Normalizer { /** * Normalizes a < tt > String < / tt > using the given normalization operation .
* The < tt > options < / tt > parameter specifies which optional
* < tt > Normalizer < / tt > features are to be enabled for this operation .
* Currently the only available option is { @ link # UNICODE _ 3_2 } .
* If you want the default behavior corresponding to one of the standard
* Unicode Normalization Forms , use 0 for this argument .
* @ param str the input string to be normalized .
* @ param mode the normalization mode
* @ param options the optional features to be enabled .
* @ return String the normalized string
* @ deprecated ICU 56 Use { @ link Normalizer2 } instead .
* @ hide original deprecated declaration */
@ Deprecated public static String normalize ( String str , Mode mode , int options ) { } }
|
return mode . getNormalizer2 ( options ) . normalize ( str ) ;
|
public class EnvironmentResourceDescription { /** * The Auto Scaling launch configurations in use by this environment .
* < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use
* { @ link # setLaunchConfigurations ( java . util . Collection ) } or { @ link # withLaunchConfigurations ( java . util . Collection ) }
* if you want to override the existing values .
* @ param launchConfigurations
* The Auto Scaling launch configurations in use by this environment .
* @ return Returns a reference to this object so that method calls can be chained together . */
public EnvironmentResourceDescription withLaunchConfigurations ( LaunchConfiguration ... launchConfigurations ) { } }
|
if ( this . launchConfigurations == null ) { setLaunchConfigurations ( new com . amazonaws . internal . SdkInternalList < LaunchConfiguration > ( launchConfigurations . length ) ) ; } for ( LaunchConfiguration ele : launchConfigurations ) { this . launchConfigurations . add ( ele ) ; } return this ;
|
public class SortUtil { /** * UTF - 8 supports bytes comparison . */
public static void putStringNormalizedKey ( BinaryString value , MemorySegment target , int offset , int numBytes ) { } }
|
final int limit = offset + numBytes ; final int end = value . getSizeInBytes ( ) ; for ( int i = 0 ; i < end && offset < limit ; i ++ ) { target . put ( offset ++ , value . getByte ( i ) ) ; } for ( int i = offset ; i < limit ; i ++ ) { target . put ( i , ( byte ) 0 ) ; }
|
public class JMLambda { /** * Consume if true .
* @ param < T > the type parameter
* @ param < U > the type parameter
* @ param bool the bool
* @ param target1 the target 1
* @ param target2 the target 2
* @ param biConsumer the bi consumer */
public static < T , U > void consumeIfTrue ( boolean bool , T target1 , U target2 , BiConsumer < T , U > biConsumer ) { } }
|
if ( bool ) biConsumer . accept ( target1 , target2 ) ;
|
public class ResponseParser { /** * Find the index matching the end of the header data .
* @ param data the request data
* @ return the index if found , - 1 otherwise */
private static int findEndOfHeaders ( byte [ ] data ) { } }
|
for ( int i = 0 ; i < data . length - 3 ; i ++ ) { if ( data [ i ] != CRLF [ 0 ] || data [ i + 1 ] != CRLF [ 1 ] || data [ i + 2 ] != CRLF [ 0 ] || data [ i + 3 ] != CRLF [ 1 ] ) { continue ; } // Headers finish here
return i ; } return - 1 ;
|
public class InodeLockList { /** * Unlocks the last locked inode . */
public void unlockLastInode ( ) { } }
|
Preconditions . checkState ( endsInInode ( ) ) ; Preconditions . checkState ( ! mEntries . isEmpty ( ) ) ; mLockedInodes . remove ( mLockedInodes . size ( ) - 1 ) ; mEntries . remove ( mEntries . size ( ) - 1 ) . mLock . close ( ) ; mLockMode = LockMode . READ ;
|
public class EasyRandomParameters { /** * Register a custom randomizer for a given type .
* @ param type class of the type to randomize
* @ param randomizer the custom { @ link Randomizer } to use
* @ param < T > The field type
* @ return the current { @ link EasyRandomParameters } instance for method chaining */
public < T > EasyRandomParameters randomize ( Class < T > type , Randomizer < T > randomizer ) { } }
|
Objects . requireNonNull ( type , "Type must not be null" ) ; Objects . requireNonNull ( randomizer , "Randomizer must not be null" ) ; customRandomizerRegistry . registerRandomizer ( type , randomizer ) ; return this ;
|
public class Calc { /** * Multiply elements of a by s
* @ param a
* @ param s
* @ return A new Atom with s * a */
public static Atom scale ( Atom a , double s ) { } }
|
double x = a . getX ( ) ; double y = a . getY ( ) ; double z = a . getZ ( ) ; Atom b = new AtomImpl ( ) ; b . setX ( x * s ) ; b . setY ( y * s ) ; b . setZ ( z * s ) ; return b ;
|
public class MaterialComboBox { /** * Build the Option Element with provided params */
protected Option buildOption ( String text , T value ) { } }
|
Option option = new Option ( ) ; option . setText ( text ) ; option . setValue ( keyFactory . generateKey ( value ) ) ; return option ;
|
public class XmlEscape { /** * Perform a ( configurable ) XML 1.0 < strong > escape < / strong > operation on a < tt > Reader < / tt > input
* meant to be an XML attribute value , writing results to a < tt > Writer < / tt > .
* This method will perform an escape operation according to the specified
* { @ link org . unbescape . xml . XmlEscapeType } and { @ link org . unbescape . xml . XmlEscapeLevel }
* argument values .
* Besides , being an attribute value also < tt > & # 92 ; t < / tt > , < tt > & # 92 ; n < / tt > and < tt > & # 92 ; r < / tt > will
* be escaped to avoid white - space normalization from removing line feeds ( turning them into white
* spaces ) during future parsing operations .
* All other < tt > Reader < / tt > / < tt > Writer < / tt > - based < tt > escapeXml10 * ( . . . ) < / tt > methods call this one with preconfigured
* < tt > type < / tt > and < tt > level < / tt > values .
* This method is < strong > thread - safe < / strong > .
* @ param reader the < tt > Reader < / tt > reading the text to be escaped .
* @ param writer the < tt > java . io . Writer < / tt > to which the escaped result will be written . Nothing will
* be written at all to this writer if input is < tt > null < / tt > .
* @ param type the type of escape operation to be performed , see { @ link org . unbescape . xml . XmlEscapeType } .
* @ param level the escape level to be applied , see { @ link org . unbescape . xml . XmlEscapeLevel } .
* @ throws IOException if an input / output exception occurs
* @ since 1.1.5 */
public static void escapeXml10Attribute ( final Reader reader , final Writer writer , final XmlEscapeType type , final XmlEscapeLevel level ) throws IOException { } }
|
escapeXml ( reader , writer , XmlEscapeSymbols . XML10_ATTRIBUTE_SYMBOLS , type , level ) ;
|
public class SeleniumDriverFixture { /** * < p > < code >
* | start driver | < i > $ Driver < / i > | on url | < i > http : / / localhost < / i > |
* < / code > < / p >
* @ param webDriver a WebDriver instance
* @ param browserUrl */
public void startDriverOnUrl ( final WebDriver webDriver , final String browserUrl ) { } }
|
setCommandProcessor ( startWebDriverCommandProcessor ( browserUrl , webDriver ) ) ; setTimeoutOnSelenium ( ) ; LOG . debug ( "Started command processor" ) ;
|
public class Name { /** * Parse the name string from GEDCOM normal into its component parts . */
public void init ( ) { } }
|
final String string = getString ( ) ; final int bpos = string . indexOf ( '/' ) ; if ( bpos == - 1 ) { prefix = string ; surname = "" ; suffix = "" ; } else { final int epos = string . indexOf ( '/' , bpos + 1 ) ; prefix = string . substring ( 0 , bpos ) ; surname = string . substring ( bpos + 1 , epos ) ; suffix = string . substring ( epos + 1 ) ; }
|
public class BuildState { /** * Load a module from the javac state file . */
public Module loadModule ( String l ) { } }
|
Module m = Module . load ( l ) ; modules . put ( m . name ( ) , m ) ; return m ;
|
public class UrlPatternAnalyzer { public UrlPatternRegexpBox toRegexp ( Method executeMethod , String urlPattern , List < Class < ? > > pathParamTypeList , Map < Integer , Class < ? > > optionalGenericTypeMap ) { } }
|
final StringBuilder sb = new StringBuilder ( 32 ) ; final char [ ] chars = urlPattern . toCharArray ( ) ; final int length = chars . length ; List < String > varList = null ; int parameterIndex = - 1 ; int index = - 1 ; for ( int i = 0 ; i < length ; i ++ ) { final char currentChar = chars [ i ] ; if ( currentChar == '{' ) { // begin brace
index = i ; } else if ( currentChar == '}' ) { // end brace
assertBeginBraceExists ( executeMethod , urlPattern , index , i ) ; ++ parameterIndex ; setupParameterPattern ( sb , pathParamTypeList , optionalGenericTypeMap , parameterIndex ) ; final String elementName = urlPattern . substring ( index + 1 , i ) ; assertNoNameParameter ( executeMethod , urlPattern , elementName ) ; if ( varList == null ) { varList = new ArrayList < String > ( 4 ) ; } varList . add ( buildParamName ( executeMethod , urlPattern , varList , elementName ) ) ; index = - 1 ; } else if ( index < 0 ) { sb . append ( currentChar ) ; } } assertEndBraceExists ( executeMethod , urlPattern , index ) ; return new UrlPatternRegexpBox ( buildRegexpPattern ( sb . toString ( ) ) , varList ) ;
|
public class ApiContext { /** * Fetch a new access token . */
private synchronized void fetchAccessToken ( ) throws ConnectionException , HTTPException { } }
|
if ( accessToken != null ) { return ; } Map < String , String > params = new HashMap < > ( ) ; params . put ( Constants . PARAM_CLIENT_ID , clientId ) ; params . put ( Constants . PARAM_CLIENT_SECRET , clientSecret ) ; if ( isUserAuthFlow ( ) ) { params . put ( Constants . PARAM_GRANT_TYPE , Constants . PARAM_PASSWORD ) ; params . put ( Constants . PARAM_USERNAME , username ) ; params . put ( Constants . PARAM_PASSWORD , password ) ; } else { params . put ( Constants . PARAM_GRANT_TYPE , Constants . GRANT_TYPE_CLIENT_CREDENTIALS ) ; } loadAccessToken ( params ) ;
|
public class MariaDbDataSource { /** * Attempts to establish a connection with the data source that this < code > DataSource < / code >
* object represents .
* @ return a connection to the data source
* @ throws SQLException if a database access error occurs */
public Connection getConnection ( ) throws SQLException { } }
|
try { if ( urlParser == null ) { initialize ( ) ; } return MariaDbConnection . newConnection ( urlParser , null ) ; } catch ( SQLException e ) { throw ExceptionMapper . getException ( e , null , null , false ) ; }
|
public class ObjectFactory { /** * Create an instance of { @ link JAXBElement } { @ code < }
* { @ link CmisExtensionType } { @ code > } */
@ XmlElementDecl ( namespace = "http://docs.oasis-open.org/ns/cmis/messaging/200908/" , name = "extension" , scope = GetContentStream . class ) public JAXBElement < CmisExtensionType > createGetContentStreamExtension ( CmisExtensionType value ) { } }
|
return new JAXBElement < CmisExtensionType > ( _GetPropertiesExtension_QNAME , CmisExtensionType . class , GetContentStream . class , value ) ;
|
public class LogNormalDistribution { /** * Cumulative probability density function ( CDF ) of a normal distribution .
* @ param x value to evaluate CDF at
* @ param mu Mean value
* @ param sigma Standard deviation .
* @ return The CDF of the given normal distribution at x . */
public static double cdf ( double x , double mu , double sigma ) { } }
|
if ( x <= 0. ) { return 0. ; } return .5 * ( 1 + NormalDistribution . erf ( ( FastMath . log ( x ) - mu ) / ( MathUtil . SQRT2 * sigma ) ) ) ;
|
public class StringUtil { /** * Returns a new string based on < code > source < / code > with all instances of < code > before < / code >
* replaced with < code > after < / code > .
* @ deprecated java . lang . String . replace ( ) was added in 1.5 */
@ Deprecated @ ReplacedBy ( value = "java.lang.String.replace()" , reason = "since 1.5" ) public static String replace ( String source , String before , String after ) { } }
|
int pos = source . indexOf ( before ) ; if ( pos == - 1 ) { return source ; } StringBuilder sb = new StringBuilder ( source . length ( ) + 32 ) ; int blength = before . length ( ) ; int start = 0 ; while ( pos != - 1 ) { sb . append ( source . substring ( start , pos ) ) ; sb . append ( after ) ; start = pos + blength ; pos = source . indexOf ( before , start ) ; } sb . append ( source . substring ( start ) ) ; return sb . toString ( ) ;
|
public class JobTracker { /** * Check the job if it has invalid requirements and throw and IOException if does have .
* @ param job
* @ throws IOException */
private void checkMemoryRequirements ( JobInProgress job ) throws IOException { } }
|
if ( ! perTaskMemoryConfigurationSetOnJT ( ) ) { LOG . debug ( "Per-Task memory configuration is not set on JT. " + "Not checking the job for invalid memory requirements." ) ; return ; } boolean invalidJob = false ; String msg = "" ; long maxMemForMapTask = job . getMemoryForMapTask ( ) ; long maxMemForReduceTask = job . getMemoryForReduceTask ( ) ; if ( maxMemForMapTask == JobConf . DISABLED_MEMORY_LIMIT || maxMemForReduceTask == JobConf . DISABLED_MEMORY_LIMIT ) { invalidJob = true ; msg = "Invalid job requirements." ; } if ( maxMemForMapTask > limitMaxMemForMapTasks || maxMemForReduceTask > limitMaxMemForReduceTasks ) { invalidJob = true ; msg = "Exceeds the cluster's max-memory-limit." ; } if ( invalidJob ) { StringBuilder jobStr = new StringBuilder ( ) . append ( job . getJobID ( ) . toString ( ) ) . append ( "(" ) . append ( maxMemForMapTask ) . append ( " memForMapTasks " ) . append ( maxMemForReduceTask ) . append ( " memForReduceTasks): " ) ; LOG . warn ( jobStr . toString ( ) + msg ) ; throw new IOException ( jobStr . toString ( ) + msg ) ; }
|
public class ListRoutingProfilesResult { /** * An array of < code > RoutingProfileSummary < / code > objects that include the ARN , Id , and Name of the routing profile .
* @ param routingProfileSummaryList
* An array of < code > RoutingProfileSummary < / code > objects that include the ARN , Id , and Name of the routing
* profile . */
public void setRoutingProfileSummaryList ( java . util . Collection < RoutingProfileSummary > routingProfileSummaryList ) { } }
|
if ( routingProfileSummaryList == null ) { this . routingProfileSummaryList = null ; return ; } this . routingProfileSummaryList = new java . util . ArrayList < RoutingProfileSummary > ( routingProfileSummaryList ) ;
|
public class MapStoredSessionProviderService { /** * ( non - Javadoc )
* @ see
* org . exoplatform . services . jcr . ext . app . SessionProviderService # getSystemSessionProvider ( java . lang
* . Object ) */
public SessionProvider getSystemSessionProvider ( Object key ) { } }
|
if ( systemProviders . containsKey ( key ) ) { return systemProviders . get ( key ) ; } else { final SessionProvider ssp = SessionProvider . createSystemProvider ( ) ; systemProviders . put ( key , ssp ) ; return ssp ; }
|
public class FieldReaderWriter {
    /**
     * Return the value of the field this reader-writer manages. To improve performance a
     * state manager can be supplied; if it is missing the code will discover it via reflection.
     *
     * @param instance the instance from which the field should be fetched
     * @param stateManager an optional state manager containing the map of values (discovered if not supplied)
     * @return the value of the field
     * @throws IllegalAccessException if there is a problem accessing the field value
     */
    public Object getValue(Object instance, ISMgr stateManager) throws IllegalAccessException, IllegalArgumentException {
        Object result = null;
        String fieldname = theField.getName();
        if (typeDescriptor.isReloadable()) {
            if (stateManager == null) {
                // No state manager supplied: locate it on the instance using reflection.
                stateManager = findInstanceStateManager(instance);
            }
            String declaringTypeName = typeDescriptor.getName();
            // Values are stored per declaring type: typeName -> (fieldName -> value).
            Map<String, Object> typeLevelValues = stateManager.getMap().get(declaringTypeName);
            boolean knownField = false;
            if (typeLevelValues != null) {
                knownField = typeLevelValues.containsKey(fieldname);
            }
            if (knownField) {
                result = typeLevelValues.get(fieldname);
            }
            // If a field has been deleted it may 'reveal' a field in a supertype. The revealed
            // field may live on a type not yet seen, so typeLevelValues may be null (type not
            // seen before) or may not contain our field name. In those cases we must find the
            // field and 'relocate' it into our map, where it is managed from now on.
            if (typeLevelValues == null || !knownField) {
                FieldMember fieldOnOriginalType = typeDescriptor.getReloadableType().getTypeRegistry().getReloadableType(declaringTypeName).getTypeDescriptor().getField(fieldname);
                if (fieldOnOriginalType != null) {
                    // Copy the field's current value into the map - that is where it lives from now on.
                    ReloadableType rt = typeDescriptor.getReloadableType();
                    try {
                        Field f = rt.getClazz().getDeclaredField(fieldname);
                        f.setAccessible(true);
                        result = f.get(instance);
                        if (typeLevelValues == null) {
                            typeLevelValues = new HashMap<String, Object>();
                            stateManager.getMap().put(declaringTypeName, typeLevelValues);
                        }
                        typeLevelValues.put(fieldname, result);
                    } catch (Exception e) {
                        throw new IllegalStateException("Unexpectedly unable to access field " + fieldname + " on class " + rt.getClazz(), e);
                    }
                } else {
                    // The field was not on the original type. Never seen before, so default it
                    // (type-appropriate zero/null) and record it in the map.
                    result = Utils.toResultCheckIfNull(null, theField.getDescriptor());
                    if (typeLevelValues == null) {
                        typeLevelValues = new HashMap<String, Object>();
                        stateManager.getMap().put(declaringTypeName, typeLevelValues);
                    }
                    typeLevelValues.put(fieldname, result);
                    return result;
                }
            }
            if (result != null) {
                // Verify the stored value is still assignable to the (possibly reloaded) field type.
                result = Utils.checkCompatibility(typeDescriptor.getTypeRegistry(), result, theField.getDescriptor());
                if (result == null) {
                    // Value was not compatible with the new field type - discard it.
                    typeLevelValues.remove(fieldname);
                }
            }
            result = Utils.toResultCheckIfNull(result, theField.getDescriptor());
        } else {
            // The type is not reloadable; fall back to plain reflection.
            // TODO measure how often we hit this path - should only happen when reflection is already on the frame.
            if (typeDescriptor.isInterface()) {
                // Cannot be an instance field if it is found to be on an interface.
                throw new IncompatibleClassChangeError("Expected non-static field " + instance.getClass().getName() + "." + fieldname);
            } else {
                result = findAndGetFieldValueInHierarchy(instance);
            }
        }
        if (GlobalConfiguration.isRuntimeLogging && log.isLoggable(Level.FINER)) {
            log.finer("<getValue() value of " + theField + " is " + result);
        }
        return result;
    }
}
|
public class CredentialsInner { /** * Retrieve the credential identified by credential name .
* @ param resourceGroupName Name of an Azure Resource group .
* @ param automationAccountName The name of the automation account .
* @ param credentialName The name of credential .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the observable to the CredentialInner object */
public Observable < CredentialInner > getAsync ( String resourceGroupName , String automationAccountName , String credentialName ) { } }
|
return getWithServiceResponseAsync ( resourceGroupName , automationAccountName , credentialName ) . map ( new Func1 < ServiceResponse < CredentialInner > , CredentialInner > ( ) { @ Override public CredentialInner call ( ServiceResponse < CredentialInner > response ) { return response . body ( ) ; } } ) ;
|
public class ControlBean { /** * Returns any formal type parameters that should be bound for the bean ' s superclass ,
* based upon any type bindings that occur on the original interface . */
public String getSuperTypeBinding ( ) { } }
|
InterfaceType superType = _controlIntf . getSuperType ( ) ; if ( superType != null ) { String typeStr = superType . toString ( ) ; int paramIndex = typeStr . indexOf ( '<' ) ; if ( paramIndex > 0 ) return typeStr . substring ( paramIndex ) ; } return "" ;
|
public class GitlabAPI { /** * Get events for a project .
* @ param action If not null , include only events of a particular action type
* @ param targetType If not null , include only events of a particular target type
* @ param before If not null , include only events created before a particular date .
* @ param after If not null , include only events created before a
* particular date .
* @ param sort If null , uses the server ' s default , which is " desc " */
public List < GitlabEvent > getProjectEvents ( Serializable projectId , GitlabEvent . ActionType action , GitlabEvent . TargetType targetType , GitlabDate before , GitlabDate after , SortOrder sort ) throws IOException { } }
|
return getProjectEvents ( projectId , action , targetType , before , after , sort , new Pagination ( ) ) ;
|
public class Pegasos { /** * Sets the regularization constant used for learning . The regularization
* must be positive , and the learning rate is proportional to the
* regularization value . This means regularizations very near zero will
* take a long time to converge .
* @ param reg the regularization to apply */
public void setRegularization ( double reg ) { } }
|
if ( Double . isInfinite ( reg ) || Double . isNaN ( reg ) || reg <= 0.0 ) throw new ArithmeticException ( "Pegasos requires a positive regularization cosntant" ) ; this . reg = reg ;
|
public class WickedChartsWicketAppFixturesService { @ Action ( semantics = SemanticsOf . SAFE ) @ MemberOrder ( sequence = "20" ) public Object installFixturesAndReturnFirst ( ) { } }
|
final List < FixtureResult > run = findFixtureScriptFor ( WickedChartsWicketAppSetUpFixture . class ) . run ( null ) ; return run . get ( 0 ) . getObject ( ) ;
|
public class KTypeArrayDeque {
    /**
     * Creates a new deque containing the given elements; the first argument
     * becomes the head of the deque (elements are appended in order via addLast).
     */
    /*! #if ($TemplateOptions.KTypeGeneric) !*/
    @SafeVarargs
    /*! #end !*/
    public static <KType> KTypeArrayDeque<KType> from(KType... elements) {
        // Pre-size to avoid growth while appending.
        final KTypeArrayDeque<KType> coll = new KTypeArrayDeque<KType>(elements.length);
        coll.addLast(elements);
        return coll;
    }
}
|
public class ActivityMgr { /** * 反注册activity onPause事件回调 | unregistration activity OnPause Event Callback
* @ param callback 已经注册的 activity onPause事件回调 | Registered Activity OnPause Event callback */
public void unRegisterActivitPauseEvent ( IActivityPauseCallback callback ) { } }
|
HMSAgentLog . d ( "unRegisterOnPause:" + StrUtils . objDesc ( callback ) ) ; pauseCallbacks . remove ( callback ) ;
|
public class BitmapCacheLoader { /** * Load Steps : < br / >
* 1 . load from caches . < br / >
* 2 . if file not exist in caches , then load it from local storage . < br / >
* 3 . if local storage not exist , then load it from temporary directory . < br / >
* 4 . if file not exist in temporary directory . then download it . < br / >
* 5 . after download , then callback will be added . < br / >
* @ param fileName file name , local or URL 可以是带路径名 , 也可以不带路径名 . 当带路径名时 , 先去查找指定目录 . .
* 然后再找傳入的臨時目錄 .
* @ param tmpDirectory 临时目录 , 下载之后的保存目录 . 默认路径 : mnt / sdcard / cache
* @ param imageCallBack 下载完成时的毁掉
* @ param canRemove 是否可以移除 . 当等待的个数超过最大下载数目时 , 当前条目是否可以清除 .
* @ return the bitmap loaded . if from network , this will return null ,
* return the result from callback */
public Bitmap loadFile ( String fileName , String tmpDirectory , ImageCallBack imageCallBack , boolean canRemove ) { } }
|
return loadFile ( fileName , tmpDirectory , imageCallBack , canRemove , - 1 , - 1 ) ;
|
public class CmsSolrSpellchecker { /** * Sends the JSON - formatted spellchecking results to the client .
* @ param res The HttpServletResponse object .
* @ param request The spellchecking request object .
* @ throws IOException in case writing the response fails */
private void sendResponse ( final HttpServletResponse res , final CmsSpellcheckingRequest request ) throws IOException { } }
|
final PrintWriter pw = res . getWriter ( ) ; final JSONObject response = getJsonFormattedSpellcheckResult ( request ) ; pw . println ( response . toString ( ) ) ; pw . close ( ) ;
|
public class AbstractCache { /** * This implementation of { @ code getAllPresent } lacks any insight into the internal cache data
* structure , and is thus forced to return the query keys instead of the cached keys . This is only
* possible with an unsafe cast which requires { @ code keys } to actually be of type { @ code K } .
* { @ inheritDoc }
* @ since 11.0 */
@ Override public ImmutableMap < K , V > getAllPresent ( Iterable < ? > keys ) { } }
|
Map < K , V > result = Maps . newLinkedHashMap ( ) ; for ( Object key : keys ) { if ( ! result . containsKey ( key ) ) { @ SuppressWarnings ( "unchecked" ) K castKey = ( K ) key ; V value = getIfPresent ( key ) ; if ( value != null ) { result . put ( castKey , value ) ; } } } return ImmutableMap . copyOf ( result ) ;
|
public class AwsHostNameUtils { /** * Attempts to parse the region name from an endpoint based on conventions
* about the endpoint format .
* @ param host the hostname to parse
* @ param serviceHint an optional hint about the service for the endpoint
* @ return the region parsed from the hostname , or
* & quot ; us - east - 1 & quot ; if no region information
* could be found .
* @ deprecatedin favor of { @ link # parseRegion ( String , String ) } . */
@ Deprecated public static String parseRegionName ( final String host , final String serviceHint ) { } }
|
String region = parseRegion ( host , serviceHint ) ; // If region is null , then endpoint is totally non - standard ;
// guess us - east - 1 for lack of a better option .
return region == null ? "us-east-1" : region ;
|
public class Category { /** * Log a message object with the < code > ERROR < / code > level including the
* stack trace of the { @ link Throwable } < code > t < / code > passed as parameter .
* See { @ link # error ( Object ) } form for more detailed information .
* @ param message the message object to log .
* @ param t the exception to log , including its stack trace . */
public void error ( final Object message , final Throwable t ) { } }
|
if ( m_delegate . isErrorEnabled ( ) ) { if ( message != null ) { m_delegate . error ( message . toString ( ) , t ) ; } else { m_delegate . error ( null , t ) ; } }
|
public class CloneUsability { /** * overrides the visitor to look for a CloneNotSupported being thrown
* @ param seen
* the currently parsed opcode */
@ Override public void sawOpcode ( int seen ) { } }
|
try { if ( ( seen == Const . ATHROW ) && ( stack . getStackDepth ( ) > 0 ) ) { OpcodeStack . Item item = stack . getStackItem ( 0 ) ; if ( "Ljava/lang/CloneNotSupportedException;" . equals ( item . getSignature ( ) ) ) { throwsCNFE = true ; } } } finally { stack . sawOpcode ( this , seen ) ; }
|
public class MappingAssistantPanel {
    /**
     * Focus-lost handler for the class URI template text field: pushes the edited
     * template into the predicate/subject map (creating it on demand) and re-validates.
     */
    // GEN-LAST:event_txtClassUriTemplateFocusGained
    private void txtClassUriTemplateFocusLost(java.awt.event.FocusEvent evt) {
        // GEN-FIRST:event_txtClassUriTemplateFocusLost
        String uriTemplate = txtClassUriTemplate.getText();
        if (predicateSubjectMap == null) {
            // Lazily create the mapping the first time the field is edited.
            predicateSubjectMap = createPredicateSubjectMap();
        }
        predicateSubjectMap.setTargetMapping(uriTemplate);
        validateClassUri();
    }
}
|
public class AbstractBlockBasedDataStore {
    /**
     * Deletes the block chain starting at {@code handle}, unlinking it from its
     * neighbours and persisting the updated allocation table.
     *
     * @param handle the handle of the first block of the object to delete
     * @return the handle of the start block of the previous object, or -1 if none
     * @throws DataStoreException if the underlying store update fails
     * @see net.timewalker.ffmq4.utils.store.ChainedDataStore#delete(int)
     */
    @Override
    public final int delete(int handle) throws DataStoreException {
        if (SAFE_MODE) checkHandle(handle);
        int previousHandle = previousBlock[handle];
        int current = handle;
        int nextHandle = -1;
        // Walk the chain from the start block, clearing every block until the end marker.
        while (current != -1) {
            nextHandle = nextBlock[current];
            boolean isEndBlock = (flags[current] & FLAG_END_BLOCK) > 0;
            // Clear this block's allocation entry.
            flags[current] = 0;
            allocatedSize[current] = -1;
            previousBlock[current] = -1;
            nextBlock[current] = -1;
            locks.clear(current);
            // Update used blocks count and persist the cleared entry.
            blocksInUse--;
            writeAllocationBlock(current);
            if (isEndBlock) break;
            current = nextHandle;
        }
        // Reconnect the chain around the removed run of blocks.
        if (nextHandle != -1) {
            previousBlock[nextHandle] = previousHandle;
            writeAllocationBlock(nextHandle);
        }
        if (previousHandle != -1) {
            nextBlock[previousHandle] = nextHandle;
            writeAllocationBlock(previousHandle);
        }
        // Update the first-block pointer if we just deleted the head object.
        if (firstBlock == handle) {
            firstBlock = nextHandle;
            writeFirstBlock();
        }
        size--;
        flush();
        // Rewind from the previous block handle to the start block of the previous object.
        while (previousHandle != -1 && (flags[previousHandle] & FLAG_START_BLOCK) == 0)
            previousHandle = previousBlock[previousHandle];
        return previousHandle;
    }
}
|
public class DNFTransformer { /** * Transforms an AQL query to the Disjunctive Normal Form .
* @ param topNode
* @ return */
public static void toDNF ( LogicClause topNode ) { } }
|
while ( makeDNF ( topNode ) == false ) { // do nothing , just repeat
} cleanEmptyLeafs ( topNode ) ; flattenDNF ( topNode ) ;
|
public class XMLFilterBase { /** * Look up the value of a property .
* @ param name The property name .
* @ return The current value of the property .
* @ exception org . xml . sax . SAXNotRecognizedException When the
* XMLReader does not recognize the feature name .
* @ exception org . xml . sax . SAXNotSupportedException When the
* XMLReader recognizes the property name but
* cannot determine its value at this time .
* @ see org . xml . sax . XMLReader # setFeature */
public Object getProperty ( String name ) throws SAXNotRecognizedException , SAXNotSupportedException { } }
|
for ( int i = 0 ; i < LEXICAL_HANDLER_NAMES . length ; i ++ ) { if ( LEXICAL_HANDLER_NAMES [ i ] . equals ( name ) ) { return getLexicalHandler ( ) ; } } return super . getProperty ( name ) ;
|
public class AmazonMTurkClient { /** * The < code > CreateHITType < / code > operation creates a new HIT type . This operation allows you to define a standard
* set of HIT properties to use when creating HITs . If you register a HIT type with values that match an existing
* HIT type , the HIT type ID of the existing type will be returned .
* @ param createHITTypeRequest
* @ return Result of the CreateHITType operation returned by the service .
* @ throws ServiceException
* Amazon Mechanical Turk is temporarily unable to process your request . Try your call again .
* @ throws RequestErrorException
* Your request is invalid .
* @ sample AmazonMTurk . CreateHITType
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / mturk - requester - 2017-01-17 / CreateHITType " target = " _ top " > AWS
* API Documentation < / a > */
@ Override public CreateHITTypeResult createHITType ( CreateHITTypeRequest request ) { } }
|
request = beforeClientExecution ( request ) ; return executeCreateHITType ( request ) ;
|
public class AlignmentCoverageCalculatorTask {
    /**
     * Resizes the region coverage buffer, rounding the requested size up to the
     * nearest power of two so that modulus can be done with a bit mask, and copies
     * the existing coverage values into the new buffer.
     *
     * @param size requested coverage size; negative values are ignored
     */
    public void setRegionCoverageSize(int size) {
        if (size < 0) {
            return;
        }
        // NOTE(review): size == 0 is not rejected here; Math.log(0) is -Infinity and the
        // shift below ends up producing a size of 1 - confirm that is intended.
        // lg = ceil(log2(size)): exponent of the nearest power of two >= size.
        int lg = (int) Math.ceil(Math.log(size) / Math.log(2));
        // int lg = 31 - Integer.numberOfLeadingZeros(size);
        int newRegionCoverageSize = 1 << lg;
        int newRegionCoverageMask = newRegionCoverageSize - 1; // power-of-two size => mask works as modulus
        RegionCoverage newCoverage = new RegionCoverage(newRegionCoverageSize);
        if (coverage != null) {
            // Re-map the [start, end) window from the old ring buffer into the new one.
            for (int i = 0; i < (end - start); i++) {
                newCoverage.getA()[(int) ((start + i) & newRegionCoverageMask)] = coverage.getA()[(int) ((start + i) & regionCoverageMask)];
            }
        }
        regionCoverageSize = newRegionCoverageSize;
        regionCoverageMask = newRegionCoverageMask;
        coverage = newCoverage;
        // System.out.println("Region Coverage Mask: " + regionCoverageMask);
    }
}
|
public class ReadJobRequestMarshaller {
    /**
     * Marshalls the given request object into the protocol representation.
     *
     * @param readJobRequest the request to marshall; must not be null
     * @param protocolMarshaller the marshaller that receives the bound fields
     * @throws SdkClientException if the request is null or marshalling fails
     */
    public void marshall(ReadJobRequest readJobRequest, ProtocolMarshaller protocolMarshaller) {
        if (readJobRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // Only the job id is bound for this request.
            protocolMarshaller.marshall(readJobRequest.getId(), ID_BINDING);
        } catch (Exception e) {
            // Wrap any failure, preserving the cause for diagnostics.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
|
public class Partial { /** * Does this partial match the specified instant .
* A match occurs when all the fields of this partial are the same as the
* corresponding fields on the specified instant .
* @ param instant an instant to check against , null means now in default zone
* @ return true if this partial matches the specified instant */
public boolean isMatch ( ReadableInstant instant ) { } }
|
long millis = DateTimeUtils . getInstantMillis ( instant ) ; Chronology chrono = DateTimeUtils . getInstantChronology ( instant ) ; for ( int i = 0 ; i < iTypes . length ; i ++ ) { int value = iTypes [ i ] . getField ( chrono ) . get ( millis ) ; if ( value != iValues [ i ] ) { return false ; } } return true ;
|
public class AbstractUniqueServiceLoader {
    /**
     * Extends the contract for the {@link AbstractServiceLoader#add(Object)} method to
     * require that services have unique keys.
     *
     * @param service the service to register; must not be null and must yield a non-null key
     * @throws NullPointerException if the service or its key is null
     * @throws ServiceConfigurationError if another service is already registered under the same key
     */
    @Override
    public void add(final S service) {
        if (service == null) {
            throw new NullPointerException("Service must not be null");
        }
        final K key = this.getKey(service);
        if (key == null) {
            throw new NullPointerException("Key for service must not be null");
        }
        // NOTE(review): assumes services.get(key) never returns null (e.g. a multimap-style
        // store that auto-creates collections) - confirm, otherwise this NPEs on a new key.
        Collection<S> set = this.services.get(key);
        set.add(service);
        // Uniqueness check: after adding, the key's collection must hold exactly this one service.
        if (set.size() != 1) {
            this.log.error("Failing due to a duplicate service with key: {} for class {}", key, service);
            throw new ServiceConfigurationError("Found a duplicate service with key: " + key + " for class: " + service);
        }
        if (this.log.isDebugEnabled()) {
            this.log.debug("Added service with key: {}", key);
        }
    }
}
|
public class ParquetGroupConverter {
    /**
     * Converts a deprecated Parquet INT96 nanosecond timestamp to a long, based on
     * https://github.com/prestodb/presto/blob/master/presto-hive/src/main/java/com/facebook/presto/hive/parquet/ParquetTimestampUtils.java#L56
     *
     * @param value 12-byte INT96 binary: 8 bytes time-of-day nanos + 4 bytes Julian day,
     *        both little-endian
     * @return the timestamp as epoch milliseconds (presumably - derived via the
     *         MILLIS_IN_DAY / NANOS_PER_MILLISECOND constants declared elsewhere)
     */
    private static long convertInt96BinaryToTimestamp(Binary value) {
        // Based on prestodb's parquet INT96 timestamp conversion.
        byte[] bytes = value.getBytes();
        // Little-endian encoding - byte order must be inverted when assembling the values.
        long timeOfDayNanos = Longs.fromBytes(bytes[7], bytes[6], bytes[5], bytes[4], bytes[3], bytes[2], bytes[1], bytes[0]);
        int julianDay = Ints.fromBytes(bytes[11], bytes[10], bytes[9], bytes[8]);
        // Shift the Julian day to the epoch, convert days to millis, add the intra-day part.
        long ts = ((julianDay - JULIAN_EPOCH_OFFSET_DAYS) * MILLIS_IN_DAY) + (timeOfDayNanos / NANOS_PER_MILLISECOND);
        return ts;
    }
}
|
public class UpdateForClause { /** * Adds a " < code > variable WITHIN path < / code > " section to the clause .
* @ param variable the next variable to add to the clause .
* @ param path the path for the variable , a WITHIN path .
* @ return the clause , for chaining . See { @ link # when ( Expression ) } and { @ link # end ( ) } to complete the clause . */
public UpdateForClause within ( String variable , String path ) { } }
|
vars . add ( x ( variable + " WITHIN " + path ) ) ; return this ;
|
public class ValidatingAction {
    /**
     * This method is called if errors are found from validation.
     * It can be overridden, but don't forget to call super.executeOnError if you
     * still want error messages to be displayed.
     *
     * @param event the action event which triggered this action
     * @param diags the list into which any validation diagnostics were added
     */
    public void executeOnError(final ActionEvent event, final List<Diagnostic> diags) {
        // Show the diagnostics and move keyboard focus to the error box.
        errorsBox.setErrors(diags);
        errorsBox.setFocussed();
    }
}
|
public class RandomAccessStorageModule { /** * { @ inheritDoc } */
@ Override public void read ( byte [ ] bytes , long storageIndex ) throws IOException { } }
|
randomAccessFile . seek ( storageIndex ) ; randomAccessFile . read ( bytes , 0 , bytes . length ) ;
|
public class SpringComponent {
    /**
     * Instantiates the non-serializable state: resolves this component's id, the
     * bean method matching the input signature, and the shared Spring context.
     *
     * @param stormConf the Storm configuration map (raw type as required by Storm's API)
     * @param topologyContext context for the component within the running topology
     * @throws IllegalStateException if the input signature does not resolve to a bean method
     */
    protected void init(Map stormConf, TopologyContext topologyContext) {
        setId(topologyContext.getThisComponentId());
        try {
            // Resolve the target method on the bean type from the declared input signature.
            method = inputSignature.findMethod(beanType);
            logger.info("{} uses {}", this, method.toGenericString());
        } catch (ReflectiveOperationException e) {
            throw new IllegalStateException("Unusable input signature", e);
        }
        // Lazily acquire the shared application context for this topology.
        if (spring == null) spring = SingletonApplicationContext.get(stormConf, topologyContext);
        // Eagerly look up the bean so a missing definition fails here, not at runtime.
        spring.getBean(beanType);
        logger.debug("Bean lookup successful");
    }
}
|
public class DaoHelper { /** * 设置分页参数
* @ param query
* @ param pageable
* @ return */
public static Query setPageParameter ( Query query , Pageable pageable ) { } }
|
if ( notNull ( pageable ) ) { query . setFirstResult ( ( pageable . getPageNumber ( ) - 1 ) * pageable . getPageSize ( ) ) ; query . setMaxResults ( pageable . getPageSize ( ) ) ; } return query ;
|
public class H2GISFunctions { /** * Register all H2GIS functions
* @ param connection JDBC Connection
* @ param packagePrepend For OSGi environment only , use
* Bundle - SymbolicName : Bundle - Version :
* @ throws SQLException */
private static void registerH2GISFunctions ( Connection connection , String packagePrepend ) throws SQLException { } }
|
Statement st = connection . createStatement ( ) ; for ( Function function : getBuiltInsFunctions ( ) ) { try { registerFunction ( st , function , packagePrepend ) ; } catch ( SQLException ex ) { // Catch to register other functions
ex . printStackTrace ( System . err ) ; } }
|
public class ZeroLeggedOAuthInterceptor { /** * Get the oauth Authorization string .
* @ param req the request
* @ return the Authorization string */
private String getOAuthAuthString ( HttpRequest req ) throws OAuthException , IOException , URISyntaxException { } }
|
RealmOAuthConsumer consumer = getConsumer ( ) ; OAuthAccessor accessor = new OAuthAccessor ( consumer ) ; String method = req . getMethod ( ) . name ( ) ; URI uri = req . getURI ( ) ; OAuthMessage msg = accessor . newRequestMessage ( method , uri . toString ( ) , null ) ; return msg . getAuthorizationHeader ( consumer . getRealm ( ) ) ;
|
public class XMLAssert { /** * Assert that two XML documents are NOT similar
* @ param control XML to be compared against
* @ param test XML to be tested
* @ throws SAXException
* @ throws IOException */
public static void assertXMLNotEqual ( InputSource control , InputSource test ) throws SAXException , IOException { } }
|
assertXMLNotEqual ( null , control , test ) ;
|
public class AssignmentData { /** * Gets a list of AssignmentRows based on the current Assignments
* @ return */
public List < AssignmentRow > getAssignmentRows ( VariableType varType ) { } }
|
List < AssignmentRow > rows = new ArrayList < AssignmentRow > ( ) ; List < Variable > handledVariables = new ArrayList < Variable > ( ) ; // Create an AssignmentRow for each Assignment
for ( Assignment assignment : assignments ) { if ( assignment . getVariableType ( ) == varType ) { String dataType = getDisplayNameFromDataType ( assignment . getDataType ( ) ) ; AssignmentRow row = new AssignmentRow ( assignment . getName ( ) , assignment . getVariableType ( ) , dataType , assignment . getCustomDataType ( ) , assignment . getProcessVarName ( ) , assignment . getConstant ( ) ) ; rows . add ( row ) ; handledVariables . add ( assignment . getVariable ( ) ) ; } } List < Variable > vars = null ; if ( varType == VariableType . INPUT ) { vars = inputVariables ; } else { vars = outputVariables ; } // Create an AssignmentRow for each Variable that doesn ' t have an Assignment
for ( Variable var : vars ) { if ( ! handledVariables . contains ( var ) ) { AssignmentRow row = new AssignmentRow ( var . getName ( ) , var . getVariableType ( ) , var . getDataType ( ) , var . getCustomDataType ( ) , null , null ) ; rows . add ( row ) ; } } return rows ;
|
public class HadoopUtils { /** * A wrapper around { @ link FileSystem # rename ( Path , Path ) } which throws { @ link IOException } if
* { @ link FileSystem # rename ( Path , Path ) } returns False . */
public static void renamePath ( FileSystem fs , Path oldName , Path newName ) throws IOException { } }
|
renamePath ( fs , oldName , newName , false ) ;
|
public class KriptonContentValues { /** * Adds a value to the set .
* @ param key
* the name of the value to put
* @ param value
* the data for the value to put */
public void put ( String key , Short value ) { } }
|
if ( this . compiledStatement != null ) { if ( value == null ) { this . compiledStatement . bindNull ( compiledStatementBindIndex ++ ) ; } else { compiledStatement . bindLong ( compiledStatementBindIndex ++ , ( short ) value ) ; } } else if ( values != null ) { values . put ( key , value ) ; return ; } names . add ( key ) ; // values . put ( key , value ) ;
args . add ( value ) ; valueType . add ( ParamType . SHORT ) ;
|
public class CmsHtmlImportConverter {
    /**
     * Transforms element nodes and creates start tags in the output buffer.<p>
     *
     * @param node the element node being visited
     * @param properties the file properties, filled from TITLE/META nodes
     */
    private void transformStartElement(Node node, Hashtable properties) {
        // Check what kind of node we have.
        String nodeName = node.getNodeName();
        // The <HTML> and <BODY> nodes must be skipped entirely.
        if (nodeName.equals(NODE_HTML) || nodeName.equals(NODE_BODY)) {
            // The <TITLE> node must be read and its value set as a property on the imported file.
        } else if (nodeName.equals(NODE_TITLE)) {
            writeTitleProperty(node, properties);
        } else if (nodeName.equals(NODE_META)) {
            writeMetaTagProperty(node, properties);
        } else if (nodeName.equals(NODE_HREF)) {
            // Only produce output if we are in writing mode.
            if (m_write) {
                m_tempString.append("<");
                m_tempString.append(nodeName);
                NamedNodeMap attrs = node.getAttributes();
                // Look through all attributes to find the reference.
                for (int i = attrs.getLength() - 1; i >= 0; i--) {
                    String name = attrs.item(i).getNodeName();
                    String value = attrs.item(i).getNodeValue();
                    if (name.equals(ATTRIB_HREF)) {
                        // Check if this is an external link.
                        if (value.indexOf("://") > 0) {
                            // Store it for later creation of an entry in the link gallery.
                            String externalLinkFile = m_htmlImport.storeExternalLink(value);
                            if (externalLinkFile != null) {
                                value = m_htmlImport.getLinkGallery() + externalLinkFile;
                            }
                        } else if (!value.startsWith("mailto:") && !value.startsWith("javascript:")) {
                            // Internal link: resolve it relative to the current file and
                            // translate it into the VFS.
                            // if (value.indexOf("#") > 0) {
                            //     String anchor = value.substring(value.indexOf("#"), value.length());
                            String internalUri = m_htmlImport.getAbsoluteUri(value, m_filename.substring(0, m_filename.lastIndexOf("/") + 1));
                            value = m_htmlImport.translateLink(internalUri);
                        }
                    }
                    m_tempString.append(" ");
                    m_tempString.append(name);
                    m_tempString.append("=\"");
                    m_tempString.append(value);
                    m_tempString.append("\"");
                }
                m_tempString.append(">");
            }
            // This is an image; its reference must be converted.
        } else if (nodeName.equals(NODE_IMG)) {
            // Only produce output if we are in writing mode.
            if (m_write) {
                m_tempString.append("<");
                m_tempString.append(nodeName);
                NamedNodeMap attrs = node.getAttributes();
                // Look through all attributes to find the src and alt attributes.
                String imagename = "";
                String altText = "";
                for (int i = attrs.getLength() - 1; i >= 0; i--) {
                    String name = attrs.item(i).getNodeName();
                    String value = attrs.item(i).getNodeValue();
                    if (name.equals(ATTRIB_SRC)) {
                        // We found the src; check whether it refers to an external image.
                        // If not, resolve the correct location in the VFS.
                        if (value.indexOf("://") <= 0) {
                            imagename = m_htmlImport.getAbsoluteUri(value, m_filename.substring(0, m_filename.lastIndexOf("/") + 1));
                            value = m_htmlImport.translateLink(imagename);
                        }
                    } else if (name.equals(ATTRIB_ALT)) {
                        altText = value;
                    }
                    m_tempString.append(" ");
                    m_tempString.append(name);
                    m_tempString.append("=\"");
                    m_tempString.append(value);
                    m_tempString.append("\"");
                }
                // Store the alt text of this image for later use.
                m_htmlImport.storeImageInfo(imagename, altText);
                m_tempString.append(">");
            }
        } else {
            // Any other element: emit it verbatim with all attributes, but only in writing mode.
            if (m_write) {
                m_tempString.append("<");
                m_tempString.append(nodeName);
                NamedNodeMap attrs = node.getAttributes();
                for (int i = attrs.getLength() - 1; i >= 0; i--) {
                    m_tempString.append(" " + attrs.item(i).getNodeName() + "=" + "\"");
                    /* scan attribute values and replace subStrings */
                    m_tempString.append(attrs.item(i).getNodeValue() + "\"");
                }
                m_tempString.append(">");
            }
        }
    }
}
|
public class RuleRegressionNode { /** * Get learner mode */
public int getLearnerToUse ( int predMode ) { } }
|
int predictionMode = predMode ; if ( predictionMode == 0 ) { double perceptronError = this . perceptron . getCurrentError ( ) ; double meanTargetError = this . targetMean . getCurrentError ( ) ; if ( perceptronError < meanTargetError ) predictionMode = 1 ; // PERCEPTRON
else predictionMode = 2 ; // TARGET MEAN
} return predictionMode ;
|
public class ConsumerGenericFilter { /** * 是否自动加载
* @ param invoker 调用器
* @ return 是否加载本过滤器 */
@ Override public boolean needToLoad ( FilterInvoker invoker ) { } }
|
ConsumerConfig consumerConfig = ( ConsumerConfig ) invoker . getConfig ( ) ; return consumerConfig . isGeneric ( ) ;
|
public class FnBigInteger { /** * Determines whether the target object is null or not .
* @ return false if the target object is null , true if not . */
public static final Function < BigInteger , Boolean > isNotNull ( ) { } }
|
return ( Function < BigInteger , Boolean > ) ( ( Function ) FnObject . isNotNull ( ) ) ;
|
public class ConfluenceGreenPepper { /** * < p > saveExecuteChildren . < / p >
* @ param page a { @ link com . atlassian . confluence . pages . Page } object .
* @ param doExecuteChildren a { @ link java . lang . Boolean } object . */
public void saveExecuteChildren ( Page page , Boolean doExecuteChildren ) { } }
|
ContentEntityObject entityObject = getContentEntityManager ( ) . getById ( page . getId ( ) ) ; getContentPropertyManager ( ) . setStringProperty ( entityObject , ConfluenceGreenPepper . EXECUTE_CHILDREN , doExecuteChildren != null ? String . valueOf ( doExecuteChildren ) : null ) ;
|
public class CachedCounters { /** * Increments counter by given value .
* @ param name a counter name of Increment type .
* @ param value a value to add to the counter . */
public void increment ( String name , int value ) { } }
|
Counter counter = get ( name , CounterType . Increment ) ; counter . setCount ( counter . getCount ( ) != null ? counter . getCount ( ) + value : value ) ; update ( ) ;
|
public class TracingChannelInterceptor { /** * Use this to create a span for processing the given message . Note : the result has no
* name and is not started .
* This creates a child from identifiers extracted from the message headers , or a new
* span if one couldn ' t be extracted .
* @ param message message to use for span creation
* @ return span to be created */
public Span nextSpan ( Message < ? > message ) { } }
|
MessageHeaderAccessor headers = mutableHeaderAccessor ( message ) ; TraceContextOrSamplingFlags extracted = this . extractor . extract ( headers ) ; headers . setImmutable ( ) ; Span result = this . tracer . nextSpan ( extracted ) ; if ( extracted . context ( ) == null && ! result . isNoop ( ) ) { addTags ( message , result , null ) ; } if ( log . isDebugEnabled ( ) ) { log . debug ( "Created a new span " + result ) ; } return result ;
|
public class DescribeNetworkInterfacesRequest { /** * One or more network interface IDs .
* Default : Describes all your network interfaces .
* @ param networkInterfaceIds
* One or more network interface IDs . < / p >
* Default : Describes all your network interfaces . */
public void setNetworkInterfaceIds ( java . util . Collection < String > networkInterfaceIds ) { } }
|
if ( networkInterfaceIds == null ) { this . networkInterfaceIds = null ; return ; } this . networkInterfaceIds = new com . amazonaws . internal . SdkInternalList < String > ( networkInterfaceIds ) ;
|
public class ResourceUtil { /** * Returns the { @ link Byte } array message digest of { @ link # getResourceInputStream ( ) } using the given
* { @ link MessageDigest } . */
public static byte [ ] getDigest ( Resource < ? > resource , MessageDigest digest ) { } }
|
try ( InputStream stream = resource . getResourceInputStream ( ) ; DigestInputStream digestStream = new DigestInputStream ( stream , digest ) ) { byte [ ] buffer = new byte [ 16384 ] ; while ( digestStream . read ( buffer , 0 , buffer . length ) != - 1 ) { } } catch ( IOException e ) { throw new IllegalStateException ( "Error calculating digest for resource [" + resource . getFullyQualifiedName ( ) + "]" , e ) ; } return digest . digest ( ) ;
|
public class RocksDBOperationUtils { /** * Creates a column descriptor for sate column family .
* < p > Sets TTL compaction filter if { @ code ttlCompactFiltersManager } is not { @ code null } . */
public static ColumnFamilyDescriptor createColumnFamilyDescriptor ( RegisteredStateMetaInfoBase metaInfoBase , Function < String , ColumnFamilyOptions > columnFamilyOptionsFactory , @ Nullable RocksDbTtlCompactFiltersManager ttlCompactFiltersManager ) { } }
|
ColumnFamilyOptions options = createColumnFamilyOptions ( columnFamilyOptionsFactory , metaInfoBase . getName ( ) ) ; if ( ttlCompactFiltersManager != null ) { ttlCompactFiltersManager . setAndRegisterCompactFilterIfStateTtl ( metaInfoBase , options ) ; } byte [ ] nameBytes = metaInfoBase . getName ( ) . getBytes ( ConfigConstants . DEFAULT_CHARSET ) ; Preconditions . checkState ( ! Arrays . equals ( RocksDB . DEFAULT_COLUMN_FAMILY , nameBytes ) , "The chosen state name 'default' collides with the name of the default column family!" ) ; return new ColumnFamilyDescriptor ( nameBytes , options ) ;
|
public class MathUtil {
    /**
     * Scores a pair of adjacent terms by summing their overall frequencies
     * ({@code termNatures().allFreq}).
     * (Original comment mentioned POS/frequency/length; only the frequency sum
     * is visible here.)
     *
     * @param from the preceding term
     * @param term the current term
     * @return the combined frequency score
     */
    public static double compuScoreFreq(Term from, Term term) {
        return from.termNatures().allFreq + term.termNatures().allFreq;
    }
}
|
public class UserDistributionDataPointTypeImpl {
    /**
     * <!-- begin-user-doc -->
     * Basic-sets the contained ParameterValue by delegating to the feature map
     * for the parameter-value group, chaining any resulting notifications.
     * <!-- end-user-doc -->
     * @generated
     */
    public NotificationChain basicSetParameterValue(ParameterValue newParameterValue, NotificationChain msgs) {
        return ((FeatureMap.Internal) getParameterValueGroup()).basicAdd(BpsimPackage.Literals.USER_DISTRIBUTION_DATA_POINT_TYPE__PARAMETER_VALUE, newParameterValue, msgs);
    }
}
|
public class AmazonCloudDirectoryClient {
    /**
     * Retrieves a JSON representation of the schema. See <a
     * href="https://docs.aws.amazon.com/clouddirectory/latest/developerguide/schemas_jsonformat.html#schemas_json">JSON
     * Schema Format</a> for more information.
     *
     * @param request the GetSchemaAsJson request
     * @return Result of the GetSchemaAsJson operation returned by the service.
     * @throws InternalServiceException Indicates a problem that must be resolved by Amazon Web Services. This might be
     *         a transient error in which case you can retry your request until it succeeds. Otherwise, go to the <a
     *         href="http://status.aws.amazon.com/">AWS Service Health Dashboard</a> site to see if there are any
     *         operational issues with the service.
     * @throws InvalidArnException Indicates that the provided ARN value is not valid.
     * @throws RetryableConflictException Occurs when a conflict with a previous successful write is detected. For
     *         example, if a write operation occurs on an object and then an attempt is made to read the object using
     *         "SERIALIZABLE" consistency, this exception may result. This generally occurs when the previous write did
     *         not have time to propagate to the host serving the current request. A retry (with appropriate backoff
     *         logic) is the recommended response to this exception.
     * @throws ValidationException Indicates that your request is malformed in some manner. See the exception message.
     * @throws LimitExceededException Indicates that limits are exceeded. See <a
     *         href="https://docs.aws.amazon.com/clouddirectory/latest/developerguide/limits.html">Limits</a> for more
     *         information.
     * @throws AccessDeniedException Access denied. Check your permissions.
     * @throws ResourceNotFoundException The specified resource could not be found.
     * @sample AmazonCloudDirectory.GetSchemaAsJson
     * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/clouddirectory-2017-01-11/GetSchemaAsJson" target="_top">AWS
     *      API Documentation</a>
     */
    @Override
    public GetSchemaAsJsonResult getSchemaAsJson(GetSchemaAsJsonRequest request) {
        // Standard SDK client pattern: run pre-execution handlers, then dispatch.
        request = beforeClientExecution(request);
        return executeGetSchemaAsJson(request);
    }
}
|
public class PropertyBuilder {
    /**
     * Build the property documentation.
     *
     * @param node the XML element that specifies which components to document
     * @param memberDetailsTree the content tree to which the documentation will be added
     * @throws DocletException if there is a problem while building the documentation
     */
    public void buildPropertyDoc(XMLNode node, Content memberDetailsTree) throws DocletException {
        // No property writer configured: nothing to emit.
        if (writer == null) {
            return;
        }
        if (hasMembersToDocument()) {
            Content propertyDetailsTree = writer.getPropertyDetailsTreeHeader(typeElement, memberDetailsTree);
            // Remember the final property so the writer knows when it is
            // rendering the last entry (second arg of getPropertyDoc).
            Element lastElement = properties.get(properties.size() - 1);
            for (Element property : properties) {
                // currentProperty is shared builder state read by buildChildren.
                currentProperty = (ExecutableElement) property;
                Content propertyDocTree = writer.getPropertyDocTreeHeader(currentProperty, propertyDetailsTree);
                buildChildren(node, propertyDocTree);
                propertyDetailsTree.addContent(writer.getPropertyDoc(propertyDocTree, currentProperty == lastElement));
            }
            memberDetailsTree.addContent(writer.getPropertyDetails(propertyDetailsTree));
        }
    }
}
|
public class JavacHandlerUtil {
    /**
     * Adds the given new field declaration to the provided type AST node.
     * The field carries the {@code @SuppressWarnings("all")} annotation.
     * Also takes care of updating the JavacAST.
     */
    public static JavacNode injectFieldAndMarkGenerated(JavacNode typeNode, JCVariableDecl field) {
        // true => the "mark generated" variant, per this method's name;
        // confirm against injectField's parameter list.
        return injectField(typeNode, field, true);
    }
}
|
public class SqlConnRunner { /** * 插入数据 < br >
* 此方法不会关闭Connection
* @ param conn 数据库连接
* @ param record 记录
* @ return 插入行数
* @ throws SQLException SQL执行异常 */
public int insert ( Connection conn , Entity record ) throws SQLException { } }
|
checkConn ( conn ) ; if ( CollectionUtil . isEmpty ( record ) ) { throw new SQLException ( "Empty entity provided!" ) ; } PreparedStatement ps = null ; try { ps = dialect . psForInsert ( conn , record ) ; return ps . executeUpdate ( ) ; } catch ( SQLException e ) { throw e ; } finally { DbUtil . close ( ps ) ; }
|
public class Users {
    /**
     * Create a new user using his e-mail and phone. It uses the USA country code by default.
     *
     * @param email the user's e-mail address
     * @param phone the user's phone number
     * @return a User instance
     */
    public com.authy.api.User createUser(String email, String phone) {
        // Delegates to the three-argument overload with DEFAULT_COUNTRY_CODE.
        return createUser(email, phone, DEFAULT_COUNTRY_CODE);
    }
}
|
public class QuantilesHelper { /** * Compute an array of evenly spaced normalized ranks from 0 to 1 inclusive .
* A value of 1 will result in [ 0 ] , 2 will result in [ 0 , 1 ] ,
* 3 will result in [ 0 , . 5 , 1 ] and so on .
* @ param n number of ranks needed ( must be greater than 0)
* @ return array of ranks */
public static double [ ] getEvenlySpacedRanks ( final int n ) { } }
|
if ( n <= 0 ) { throw new SketchesArgumentException ( "n must be > 0" ) ; } final double [ ] fractions = new double [ n ] ; fractions [ 0 ] = 0.0 ; for ( int i = 1 ; i < n ; i ++ ) { fractions [ i ] = ( double ) i / ( n - 1 ) ; } if ( n > 1 ) { fractions [ n - 1 ] = 1.0 ; } return fractions ;
|
public class GVRResourceVolume { /** * Adapt a file path to the current file system .
* @ param filePath The input file path string .
* @ return File path compatible with the file system of this { @ link GVRResourceVolume } . */
protected String adaptFilePath ( String filePath ) { } }
|
// Convert windows file path to target FS
String targetPath = filePath . replaceAll ( "\\\\" , volumeType . getSeparator ( ) ) ; return targetPath ;
|
public class WebContainerBase { /** * { @ inheritDoc }
* @ see org . jboss . shrinkwrap . api . container . WebContainer # addAsWebResource ( org . jboss . shrinkwrap . api . asset . Asset ,
* org . jboss . shrinkwrap . api . ArchivePath ) */
@ Override public T addAsWebResource ( Asset resource , ArchivePath target ) throws IllegalArgumentException { } }
|
Validate . notNull ( resource , "Resource should be specified" ) ; Validate . notNull ( target , "Target should be specified" ) ; ArchivePath location = new BasicPath ( getWebPath ( ) , target ) ; return add ( resource , location ) ;
|
public class CommerceAvailabilityEstimateUtil {
    /**
     * Returns the first commerce availability estimate in the ordered set where groupId = &#63;.
     *
     * @param groupId the group ID
     * @param orderByComparator the comparator to order the set by (optionally <code>null</code>)
     * @return the first matching commerce availability estimate
     * @throws NoSuchAvailabilityEstimateException if a matching commerce availability estimate could not be found
     */
    public static CommerceAvailabilityEstimate findByGroupId_First(long groupId, OrderByComparator<CommerceAvailabilityEstimate> orderByComparator) throws com.liferay.commerce.exception.NoSuchAvailabilityEstimateException {
        // Static facade: forwards to the underlying persistence implementation.
        return getPersistence().findByGroupId_First(groupId, orderByComparator);
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.