signature stringlengths 43 39.1k | implementation stringlengths 0 450k |
|---|---|
public class CampaignResponse { /** * Treatments that are defined in addition to the default treatment .
* @ param additionalTreatments
* Treatments that are defined in addition to the default treatment . */
public void setAdditionalTreatments ( java . util . Collection < TreatmentResource > additionalTreatments ) { } } | if ( additionalTreatments == null ) { this . additionalTreatments = null ; return ; } this . additionalTreatments = new java . util . ArrayList < TreatmentResource > ( additionalTreatments ) ; |
public class CalendarIntervalScheduleBuilder { /** * Specify an interval in the IntervalUnit . HOUR that the produced Trigger will
* repeat at .
* @ param intervalInHours
* the number of hours at which the trigger should repeat .
* @ return the updated CalendarIntervalScheduleBuilder
* @ see ICalendarIntervalTrigger # getRepeatInterval ( )
* @ see ICalendarIntervalTrigger # getRepeatIntervalUnit ( ) */
@ Nonnull public CalendarIntervalScheduleBuilder withIntervalInHours ( final int intervalInHours ) { } } | _validateInterval ( intervalInHours ) ; m_nInterval = intervalInHours ; m_eIntervalUnit = EIntervalUnit . HOUR ; return this ; |
public class LocalDateEditor { /** * Parse the value from the given text , using the specified format . */
public void setAsText ( String text ) throws IllegalArgumentException { } } | if ( this . allowEmpty && ! StringUtils . hasText ( text ) ) { // Treat empty String as null value .
setValue ( null ) ; } else { setValue ( new LocalDate ( this . formatter . parseDateTime ( text ) ) ) ; } |
public class AESAuthRequest { /** * Creates an auth request , secured if able , unsecured if not . */
public static AuthRequest createAuthRequest ( Credentials creds , String version , String [ ] bootGroups , boolean requireSecureAuth , PublicKeyCredentials pkcreds , SecureResponse resp ) { } } | byte [ ] secret = resp == null ? null : resp . getCodeBytes ( pkcreds ) ; if ( pkcreds == null || secret == null ) { return new AuthRequest ( requireSecureAuth ? null : creds , version , bootGroups ) ; } return new AESAuthRequest ( secret , creds , version , bootGroups ) ; |
public class DynamicListener { /** * Looks for a method that matches the supplied signature . */
protected Method resolveMethod ( String name , Object [ ] arguments ) { } } | Class < ? > [ ] ptypes = new Class < ? > [ arguments . length ] ; for ( int ii = 0 ; ii < arguments . length ; ii ++ ) { ptypes [ ii ] = arguments [ ii ] == null ? null : arguments [ ii ] . getClass ( ) ; } try { return _finder . findMethod ( name , ptypes ) ; } catch ( Exception e ) { return null ; } |
public class FutureParser { /** * Parses all the CharSequence .
* @ param in
* The CharSequence to parse
* @ param out
* The Writable to write the result to
* @ throws IOException
* @ throws HttpErrorPage */
public void parse ( CharSequence in , FutureAppendable out ) throws IOException , HttpErrorPage { } } | FutureParserContextImpl ctx = new FutureParserContextImpl ( out , this . httpRequest , this . httpResponse , this . data ) ; Matcher matcher = this . pattern . matcher ( in ) ; int currentPosition = 0 ; while ( matcher . find ( ) ) { String tag = matcher . group ( ) ; ctx . characters ( new CharSequenceFuture ( in . subSequence ( currentPosition , matcher . start ( ) ) ) ) ; currentPosition = matcher . end ( ) ; if ( ctx . isCurrentTagEnd ( tag ) ) { // check if this is the end tag for current element
LOG . info ( "Processing end tag {}" , tag ) ; ctx . endElement ( tag ) ; } else { // if not , it is an opening tag for a new element
LOG . info ( "Processing start tag {}" , tag ) ; FutureElementType type = null ; for ( FutureElementType t : this . elementTypes ) { if ( t . isStartTag ( tag ) ) { type = t ; break ; } } FutureElement element = type . newInstance ( ) ; ctx . startElement ( type , element , tag ) ; if ( type . isSelfClosing ( tag ) ) { ctx . endElement ( tag ) ; } } } // we reached the end of input
ctx . characters ( new CharSequenceFuture ( in . subSequence ( currentPosition , in . length ( ) ) ) ) ; |
public class AmazonLightsailClient { /** * Starts a specific database from a stopped state in Amazon Lightsail . To restart a database , use the
* < code > reboot relational database < / code > operation .
* The < code > start relational database < / code > operation supports tag - based access control via resource tags applied
* to the resource identified by relationalDatabaseName . For more information , see the < a
* href = " https : / / lightsail . aws . amazon . com / ls / docs / en / articles / amazon - lightsail - controlling - access - using - tags "
* > Lightsail Dev Guide < / a > .
* @ param startRelationalDatabaseRequest
* @ return Result of the StartRelationalDatabase operation returned by the service .
* @ throws ServiceException
* A general service exception .
* @ throws InvalidInputException
* Lightsail throws this exception when user input does not conform to the validation rules of an input
* field . < / p > < note >
* Domain - related APIs are only available in the N . Virginia ( us - east - 1 ) Region . Please set your AWS Region
* configuration to us - east - 1 to create , view , or edit these resources .
* @ throws NotFoundException
* Lightsail throws this exception when it cannot find a resource .
* @ throws OperationFailureException
* Lightsail throws this exception when an operation fails to execute .
* @ throws AccessDeniedException
* Lightsail throws this exception when the user cannot be authenticated or uses invalid credentials to
* access a resource .
* @ throws AccountSetupInProgressException
* Lightsail throws this exception when an account is still in the setup in progress state .
* @ throws UnauthenticatedException
* Lightsail throws this exception when the user has not been authenticated .
* @ sample AmazonLightsail . StartRelationalDatabase
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / lightsail - 2016-11-28 / StartRelationalDatabase "
* target = " _ top " > AWS API Documentation < / a > */
@ Override public StartRelationalDatabaseResult startRelationalDatabase ( StartRelationalDatabaseRequest request ) { } } | request = beforeClientExecution ( request ) ; return executeStartRelationalDatabase ( request ) ; |
public class AbstractEntityReader { /** * Invokes parseRelations for relation entity and set relational entity
* within entity
* @ param entity
* @ param pd
* @ param targetEntityMetadata
* @ param relationEntity
* @ param relation
* @ param lazilyloaded
* @ param relationStack */
private void onParseRelation ( Object entity , final PersistenceDelegator pd , EntityMetadata targetEntityMetadata , Object relationEntity , Relation relation , boolean lazilyloaded , Map < Object , Object > relationStack ) { } } | parseRelations ( entity , getEntity ( relationEntity ) , getPersistedRelations ( relationEntity ) , pd , targetEntityMetadata , lazilyloaded , relationStack ) ; // if relation ship is unary , no problem else we need to add
setRelationToEntity ( entity , relationEntity , relation ) ; |
public class ProfileIndexFrameWriter { /** * Gets each profile name as a separate link .
* @ param profileName the profile being documented
* @ return content for the profile link */
protected Content getProfile ( String profileName ) { } } | Content profileLinkContent ; Content profileLabel ; profileLabel = new StringContent ( profileName ) ; profileLinkContent = getHyperLink ( DocPaths . profileFrame ( profileName ) , profileLabel , "" , "packageListFrame" ) ; Content li = HtmlTree . LI ( profileLinkContent ) ; return li ; |
public class SocketChannelStream { /** * Reads bytes from the socket .
* @ param buf byte buffer receiving the bytes
* @ param offset offset into the buffer
* @ param length number of bytes to read
* @ return number of bytes read or - 1
* @ exception throws ClientDisconnectException if the connection is dropped */
@ Override public int read ( byte [ ] buf , int offset , int length ) throws IOException { } } | try { SocketChannel s = _s ; if ( s == null ) { return - 1 ; } int remaining = _readBuffer . remaining ( ) ; if ( remaining <= 0 ) { _readBuffer . clear ( ) ; if ( s . read ( _readBuffer ) < 0 ) { _readBuffer . flip ( ) ; return - 1 ; } _readBuffer . flip ( ) ; remaining = _readBuffer . remaining ( ) ; } int sublen = Math . min ( remaining , length ) ; _readBuffer . get ( buf , offset , sublen ) ; int readLength = sublen ; if ( readLength >= 0 ) { _totalReadBytes += readLength ; } return readLength ; } catch ( InterruptedIOException e ) { if ( _throwReadInterrupts ) throw e ; log . log ( Level . FINEST , e . toString ( ) , e ) ; } catch ( IOException e ) { if ( _throwReadInterrupts ) { throw e ; } if ( log . isLoggable ( Level . FINEST ) ) { log . log ( Level . FINEST , e . toString ( ) , e ) ; } else { log . finer ( e . toString ( ) ) ; } // server / 0611
/* try {
close ( ) ;
} catch ( IOException e1 ) { */
} return - 1 ; |
public class AuthenticatedClientUser { /** * Gets the connection user name from the { @ link ThreadLocal } variable .
* @ param conf Alluxio configuration
* @ return the client user in string , null if the user is not present
* @ throws AccessControlException if the authentication is not enabled */
public static String getConnectionUser ( AlluxioConfiguration conf ) throws AccessControlException { } } | if ( ! SecurityUtils . isAuthenticationEnabled ( conf ) ) { throw new AccessControlException ( ExceptionMessage . AUTHENTICATION_IS_NOT_ENABLED . getMessage ( ) ) ; } User user = sConnectionUserThreadLocal . get ( ) ; if ( user == null ) { return null ; } return user . getName ( ) ; |
public class CompilerExecutor { /** * Compiles java classes from ' src / main / java ' .
* @ param mojo the mojo
* @ throws MojoExecutionException if the compilation fails . */
public void execute ( AbstractWisdomMojo mojo ) throws MojoExecutionException { } } | String version = PluginExtractor . getBuildPluginVersion ( mojo , MAVEN_COMPILER_PLUGIN ) ; if ( version == null ) { version = DEFAULT_VERSION ; } final Plugin plugin = plugin ( GROUP_ID , MAVEN_COMPILER_PLUGIN , version ) ; Xpp3Dom configuration = PluginExtractor . getBuildPluginConfiguration ( mojo , MAVEN_COMPILER_PLUGIN , COMPILE_GOAL ) ; if ( configuration == null ) { Properties properties = mojo . project . getProperties ( ) ; String source = properties . getProperty ( "maven.compiler.source" , "1.7" ) ; String target = properties . getProperty ( "maven.compiler.target" , "1.7" ) ; configuration = configuration ( element ( "compileSourceRoots" , "${project.compileSourceRoots}" ) , element ( "classpathElements" , "${project.compileClasspathElements}" ) , element ( "outputDirectory" , "${project.build.outputDirectory}" ) , element ( "projectArtifact" , "${project.artifact}" ) , element ( "generatedSourcesDirectory" , "${project.build.directory}/generated-sources/annotations" ) , element ( "target" , source ) , element ( "source" , target ) ) ; } else { mojo . getLog ( ) . debug ( "Loading maven-compiler-plugin configuration:" ) ; PluginExtractor . extractEligibleConfigurationForGoal ( mojo , plugin , COMPILE_GOAL , configuration ) ; mojo . getLog ( ) . debug ( configuration . toString ( ) ) ; } // Compile sources
executeMojo ( plugin , goal ( COMPILE_GOAL ) , configuration , executionEnvironment ( mojo . project , mojo . session , mojo . pluginManager ) ) ; |
public class QuartzPlugin { /** * for Service */
@ Override public Collection < Class < ? extends Module > > modules ( ) { } } | final Collection < Class < ? extends Module > > modules = Lists . newArrayList ( ) ; modules . add ( ScheduleModule . class ) ; return modules ; |
public class CheckMissingGetCssName { /** * Returns whether the node is an argument of a function that returns
* a unique id ( the last part of the qualified name matches
* GET _ UNIQUE _ ID _ FUNCTION ) . */
private static boolean insideGetUniqueIdCall ( Node n ) { } } | Node parent = n . getParent ( ) ; String name = parent . isCall ( ) ? parent . getFirstChild ( ) . getQualifiedName ( ) : null ; return name != null && name . endsWith ( GET_UNIQUE_ID_FUNCTION ) ; |
public class TransformerImpl { /** * Set a parameter for the templates .
* @ param name The name of the parameter .
* @ param namespace The namespace of the parameter .
* @ param value The value object . This can be any valid Java object
* - - it ' s up to the processor to provide the proper
* coersion to the object , or simply pass it on for use
* in extensions . */
public void setParameter ( String name , String namespace , Object value ) { } } | VariableStack varstack = getXPathContext ( ) . getVarStack ( ) ; QName qname = new QName ( namespace , name ) ; XObject xobject = XObject . create ( value , getXPathContext ( ) ) ; StylesheetRoot sroot = m_stylesheetRoot ; Vector vars = sroot . getVariablesAndParamsComposed ( ) ; int i = vars . size ( ) ; while ( -- i >= 0 ) { ElemVariable variable = ( ElemVariable ) vars . elementAt ( i ) ; if ( variable . getXSLToken ( ) == Constants . ELEMNAME_PARAMVARIABLE && variable . getName ( ) . equals ( qname ) ) { varstack . setGlobalVariable ( i , xobject ) ; } } |
public class TimeoutProvider { /** * Returns the timeout in { @ link TimeUnit # NANOSECONDS } for { @ link RedisCommand } .
* @ param command the command .
* @ return timeout in { @ link TimeUnit # NANOSECONDS } . */
public long getTimeoutNs ( RedisCommand < ? , ? , ? > command ) { } } | long timeoutNs = - 1 ; State state = this . state ; if ( state == null ) { state = this . state = new State ( timeoutOptionsSupplier . get ( ) ) ; } if ( ! state . applyDefaultTimeout ) { timeoutNs = state . timeoutSource . getTimeUnit ( ) . toNanos ( state . timeoutSource . getTimeout ( command ) ) ; } return timeoutNs > 0 ? timeoutNs : defaultTimeoutSupplier . getAsLong ( ) ; |
public class Actions { /** * Converts an { @ link Action8 } to a function that calls the action and returns a specified value .
* @ param action the { @ link Action8 } to convert
* @ param result the value to return from the function call
* @ return a { @ link Func8 } that calls { @ code action } and returns { @ code result } */
public static < T1 , T2 , T3 , T4 , T5 , T6 , T7 , T8 , R > Func8 < T1 , T2 , T3 , T4 , T5 , T6 , T7 , T8 , R > toFunc ( final Action8 < T1 , T2 , T3 , T4 , T5 , T6 , T7 , T8 > action , final R result ) { } } | return new Func8 < T1 , T2 , T3 , T4 , T5 , T6 , T7 , T8 , R > ( ) { @ Override public R call ( T1 t1 , T2 t2 , T3 t3 , T4 t4 , T5 t5 , T6 t6 , T7 t7 , T8 t8 ) { action . call ( t1 , t2 , t3 , t4 , t5 , t6 , t7 , t8 ) ; return result ; } } ; |
public class ReportUtil { /** * Test if sql from a report object and sql from all parameter sources ( if any ) are valid
* @ param con database connection
* @ param report report object
* @ return return message error if sql is not valid , null otherwise */
public static String isValidSqlWithMessage ( Connection con , Report report ) { } } | String sql = getSql ( report ) ; List < QueryParameter > parameters = report . getParameters ( ) ; String message = isValidSqlWithMessage ( con , sql , parameters ) ; if ( message == null ) { for ( QueryParameter qp : parameters ) { if ( qp . isManualSource ( ) ) { String parMessage = isValidSqlWithMessage ( con , qp . getSource ( ) , parameters ) ; if ( parMessage != null ) { parMessage = "Parameter '" + qp . getName ( ) + "'\n" + parMessage ; return parMessage ; } } } } return message ; |
public class JodaBeanSimpleMapWriter { /** * write counted set */
private Object writeCounted ( final SerIterator itemIterator ) { } } | List < Object > result = new ArrayList < > ( ) ; while ( itemIterator . hasNext ( ) ) { itemIterator . next ( ) ; Object outputValue = writeObject ( itemIterator . valueType ( ) , itemIterator . value ( ) , itemIterator ) ; int outputCount = itemIterator . count ( ) ; result . add ( Arrays . asList ( outputValue , outputCount ) ) ; } return result ; |
public class VirtualMediaPanel { /** * Implements the standard pathable tracking support . Derived classes may wish to override
* this if they desire custom tracking functionality . */
protected void trackPathable ( ) { } } | // if we ' re tracking a pathable , adjust our view coordinates
if ( _fpath == null ) { return ; } int width = getWidth ( ) , height = getHeight ( ) ; int nx = _vbounds . x , ny = _vbounds . y ; // figure out where to move
switch ( _fmode ) { case TRACK_PATHABLE : nx = _fpath . getX ( ) ; ny = _fpath . getY ( ) ; break ; case CENTER_ON_PATHABLE : nx = _fpath . getX ( ) - width / 2 ; ny = _fpath . getY ( ) - height / 2 ; break ; case ENCLOSE_PATHABLE : Rectangle bounds = _fpath . getBounds ( ) ; if ( nx > bounds . x ) { nx = bounds . x ; } else if ( nx + width < bounds . x + bounds . width ) { nx = bounds . x + bounds . width - width ; } if ( ny > bounds . y ) { ny = bounds . y ; } else if ( ny + height < bounds . y + bounds . height ) { ny = bounds . y + bounds . height - height ; } break ; default : log . warning ( "Eh? Set to invalid pathable mode" , "mode" , _fmode ) ; break ; } // Log . info ( " Tracking pathable [ mode = " + _ fmode +
// " , pable = " + _ fpath + " , nx = " + nx + " , ny = " + ny + " ] . " ) ;
setViewLocation ( nx , ny ) ; |
public class Descriptor { /** * indexed getter for categories - gets an indexed value - List of Wikipedia categories associated with a Wikipedia page .
* @ generated
* @ param i index in the array to get
* @ return value of the element at index i */
public Title getCategories ( int i ) { } } | if ( Descriptor_Type . featOkTst && ( ( Descriptor_Type ) jcasType ) . casFeat_categories == null ) jcasType . jcas . throwFeatMissing ( "categories" , "de.julielab.jules.types.wikipedia.Descriptor" ) ; jcasType . jcas . checkArrayBounds ( jcasType . ll_cas . ll_getRefValue ( addr , ( ( Descriptor_Type ) jcasType ) . casFeatCode_categories ) , i ) ; return ( Title ) ( jcasType . ll_cas . ll_getFSForRef ( jcasType . ll_cas . ll_getRefArrayValue ( jcasType . ll_cas . ll_getRefValue ( addr , ( ( Descriptor_Type ) jcasType ) . casFeatCode_categories ) , i ) ) ) ; |
public class TargetHttpProxyClient { /** * Creates a TargetHttpProxy resource in the specified project using the data included in the
* request .
* < p > Sample code :
* < pre > < code >
* try ( TargetHttpProxyClient targetHttpProxyClient = TargetHttpProxyClient . create ( ) ) {
* ProjectName project = ProjectName . of ( " [ PROJECT ] " ) ;
* TargetHttpProxy targetHttpProxyResource = TargetHttpProxy . newBuilder ( ) . build ( ) ;
* Operation response = targetHttpProxyClient . insertTargetHttpProxy ( project . toString ( ) , targetHttpProxyResource ) ;
* < / code > < / pre >
* @ param project Project ID for this request .
* @ param targetHttpProxyResource A TargetHttpProxy resource . This resource defines an HTTP proxy .
* ( = = resource _ for beta . targetHttpProxies = = ) ( = = resource _ for v1 . targetHttpProxies = = )
* @ throws com . google . api . gax . rpc . ApiException if the remote call fails */
@ BetaApi public final Operation insertTargetHttpProxy ( String project , TargetHttpProxy targetHttpProxyResource ) { } } | InsertTargetHttpProxyHttpRequest request = InsertTargetHttpProxyHttpRequest . newBuilder ( ) . setProject ( project ) . setTargetHttpProxyResource ( targetHttpProxyResource ) . build ( ) ; return insertTargetHttpProxy ( request ) ; |
public class UniformMutation { /** * Perform the operation
* @ param probability Mutation setProbability
* @ param solution The solution to mutate */
public void doMutation ( double probability , DoubleSolution solution ) { } } | for ( int i = 0 ; i < solution . getNumberOfVariables ( ) ; i ++ ) { if ( randomGenenerator . getRandomValue ( ) < probability ) { double rand = randomGenenerator . getRandomValue ( ) ; double tmp = ( rand - 0.5 ) * perturbation ; tmp += solution . getVariableValue ( i ) ; if ( tmp < solution . getLowerBound ( i ) ) { tmp = solution . getLowerBound ( i ) ; } else if ( tmp > solution . getUpperBound ( i ) ) { tmp = solution . getUpperBound ( i ) ; } solution . setVariableValue ( i , tmp ) ; } } |
public class ApiOvhPrice { /** * Get the price for extra sql perso option
* REST : GET / price / hosting / web / extraSqlPerso / { extraSqlPersoName }
* @ param extraSqlPersoName [ required ] ExtraSqlPerso */
public OvhPrice hosting_web_extraSqlPerso_extraSqlPersoName_GET ( net . minidev . ovh . api . price . hosting . web . OvhExtraSqlPersoEnum extraSqlPersoName ) throws IOException { } } | String qPath = "/price/hosting/web/extraSqlPerso/{extraSqlPersoName}" ; StringBuilder sb = path ( qPath , extraSqlPersoName ) ; String resp = exec ( qPath , "GET" , sb . toString ( ) , null ) ; return convertTo ( resp , OvhPrice . class ) ; |
public class UriEscape { /** * Perform am URI path < strong > unescape < / strong > operation
* on a < tt > char [ ] < / tt > input .
* This method will unescape every percent - encoded ( < tt > % HH < / tt > ) sequences present in input ,
* even for those characters that do not need to be percent - encoded in this context ( unreserved characters
* can be percent - encoded even if / when this is not required , though it is not generally considered a
* good practice ) .
* This method will use specified < tt > encoding < / tt > in order to determine the characters specified in the
* percent - encoded byte sequences .
* This method is < strong > thread - safe < / strong > .
* @ param text the < tt > char [ ] < / tt > to be unescaped .
* @ param offset the position in < tt > text < / tt > at which the escape operation should start .
* @ param len the number of characters in < tt > text < / tt > that should be escaped .
* @ param writer the < tt > java . io . Writer < / tt > to which the unescaped result will be written . Nothing will
* be written at all to this writer if input is < tt > null < / tt > .
* @ param encoding the encoding to be used for unescaping .
* @ throws IOException if an input / output exception occurs */
public static void unescapeUriPath ( final char [ ] text , final int offset , final int len , final Writer writer , final String encoding ) throws IOException { } } | if ( writer == null ) { throw new IllegalArgumentException ( "Argument 'writer' cannot be null" ) ; } if ( encoding == null ) { throw new IllegalArgumentException ( "Argument 'encoding' cannot be null" ) ; } final int textLen = ( text == null ? 0 : text . length ) ; if ( offset < 0 || offset > textLen ) { throw new IllegalArgumentException ( "Invalid (offset, len). offset=" + offset + ", len=" + len + ", text.length=" + textLen ) ; } if ( len < 0 || ( offset + len ) > textLen ) { throw new IllegalArgumentException ( "Invalid (offset, len). offset=" + offset + ", len=" + len + ", text.length=" + textLen ) ; } UriEscapeUtil . unescape ( text , offset , len , writer , UriEscapeUtil . UriEscapeType . PATH , encoding ) ; |
public class OutputChannels { /** * Creates an < code > OutputChannels < / code > instance
* corresponding to the given channel code .
* @ paramcode one of the OutputChannels channel code constants .
* @ throwsIllegalArgumentException if code is not a valid
* channel code . */
static public OutputChannels fromInt ( int code ) { } } | switch ( code ) { case LEFT_CHANNEL : return LEFT ; case RIGHT_CHANNEL : return RIGHT ; case BOTH_CHANNELS : return BOTH ; case DOWNMIX_CHANNELS : return DOWNMIX ; default : throw new IllegalArgumentException ( "Invalid channel code: " + code ) ; } |
public class HungarianAlgorithm { /** * Find a valid matching by greedily selecting among zero - cost matchings .
* This is a heuristic to jump - start the augmentation algorithm . */
protected void greedyMatch ( ) { } } | for ( int w = 0 ; w < dim ; w ++ ) { for ( int j = 0 ; j < dim ; j ++ ) { if ( matchJobByWorker [ w ] == - 1 && matchWorkerByJob [ j ] == - 1 && costMatrix [ w ] [ j ] - labelByWorker [ w ] - labelByJob [ j ] == 0 ) { match ( w , j ) ; } } } |
public class NioGroovyMethods { /** * Write the text to the Path . If the default charset is
* " UTF - 16BE " or " UTF - 16LE " ( or an equivalent alias ) and
* < code > writeBom < / code > is < code > true < / code > , the requisite byte order
* mark is written to the file before the text .
* @ param self a Path
* @ param text the text to write to the Path
* @ param writeBom whether to write the BOM
* @ throws java . io . IOException if an IOException occurs .
* @ since 2.5.0 */
public static void write ( Path self , String text , boolean writeBom ) throws IOException { } } | write ( self , text , Charset . defaultCharset ( ) . name ( ) , writeBom ) ; |
public class ImageLayer { /** * Sets the texture rendered by this layer to the texture provided by { @ code source } . If { @ code
* source } is not yet ready , the texture will be set when it becomes ready . Until then any
* previous texture will continue to be displayed . */
public ImageLayer setSource ( TileSource source ) { } } | if ( source . isLoaded ( ) ) setTile ( source . tile ( ) ) ; else source . tileAsync ( ) . onSuccess ( new Slot < Tile > ( ) { public void onEmit ( Tile tile ) { setTile ( tile ) ; } } ) ; return this ; |
public class Rational { /** * Compares the value of this with another constant .
* @ param val the other constant to compare with
* @ return - 1 , 0 or 1 if this number is numerically less than , equal to ,
* or greater than val . */
public int compareTo ( final Rational val ) { } } | /* Since we have always kept the denominators positive ,
* simple cross - multiplying works without changing the sign . */
final BigInteger left = a . multiply ( val . b ) ; final BigInteger right = val . a . multiply ( b ) ; return left . compareTo ( right ) ; |
public class Matrix4f { /** * Apply an oblique projection transformation to this matrix with the given values for < code > a < / code > and
* < code > b < / code > and store the result in < code > dest < / code > .
* If < code > M < / code > is < code > this < / code > matrix and < code > O < / code > the oblique transformation matrix ,
* then the new matrix will be < code > M * O < / code > . So when transforming a
* vector < code > v < / code > with the new matrix by using < code > M * O * v < / code > , the
* oblique transformation will be applied first !
* The oblique transformation is defined as :
* < pre >
* x ' = x + a * z
* y ' = y + a * z
* z ' = z
* < / pre >
* or in matrix form :
* < pre >
* 1 0 a 0
* 0 1 b 0
* 0 0 1 0
* 0 0 0 1
* < / pre >
* @ param a
* the value for the z factor that applies to x
* @ param b
* the value for the z factor that applies to y
* @ param dest
* will hold the result
* @ return dest */
public Matrix4f obliqueZ ( float a , float b , Matrix4f dest ) { } } | dest . m00 = m00 ; dest . m01 = m01 ; dest . m02 = m02 ; dest . m03 = m03 ; dest . m10 = m10 ; dest . m11 = m11 ; dest . m12 = m12 ; dest . m13 = m13 ; dest . m20 = m00 * a + m10 * b + m20 ; dest . m21 = m01 * a + m11 * b + m21 ; dest . m22 = m02 * a + m12 * b + m22 ; dest . m23 = m23 ; dest . m30 = m30 ; dest . m31 = m31 ; dest . m32 = m32 ; dest . m33 = m33 ; dest . _properties ( this . properties & PROPERTY_AFFINE ) ; return dest ; |
public class RequestUtil { /** * Convenience method to get the application ' s URL based on request
* variables . */
public static String getAppURL ( HttpServletRequest request ) { } } | StringBuffer url = new StringBuffer ( ) ; int port = request . getServerPort ( ) ; if ( port < 0 ) { port = 80 ; // Work around java . net . URL bug
} String scheme = request . getScheme ( ) ; url . append ( scheme ) ; url . append ( "://" ) ; url . append ( request . getServerName ( ) ) ; if ( ( scheme . equals ( "http" ) && ( port != 80 ) ) || ( scheme . equals ( "https" ) && ( port != 443 ) ) ) { url . append ( ':' ) ; url . append ( port ) ; } return url . toString ( ) ; |
public class FeatureWebSecurityConfigImpl { /** * { @ inheritDoc } */
@ Override public boolean getHttpOnlyCookies ( ) { } } | WebAppSecurityConfig globalConfig = WebAppSecurityCollaboratorImpl . getGlobalWebAppSecurityConfig ( ) ; if ( globalConfig != null ) return WebAppSecurityCollaboratorImpl . getGlobalWebAppSecurityConfig ( ) . getHttpOnlyCookies ( ) ; else return httpOnlyCookies ; |
public class LObjByteFunctionBuilder { /** * One of ways of creating builder . This might be the only way ( considering all _ functional _ builders ) that might be utilize to specify generic params only once . */
@ Nonnull public static < T , R > LObjByteFunctionBuilder < T , R > objByteFunction ( Consumer < LObjByteFunction < T , R > > consumer ) { } } | return new LObjByteFunctionBuilder ( consumer ) ; |
public class CreateServicePage { /** * Ensures that both text fields are set . */
private void dialogChanged ( ) { } } | // Validate MEP
int mepIdx = mepCombo . getSelectionIndex ( ) ; if ( mepIdx == - 1 ) { updateStatus ( "Select a message exchange pattern" ) ; return ; } if ( mepIdx == 2 ) { updateStatus ( "Message exchange pattern One-Way-Robust is deprecated." ) ; return ; } // Do not allow pub / sub mep right now . . .
if ( mepIdx == 3 ) { updateStatus ( "Message exchange pattern Publish/Subscribe not yet supported" ) ; return ; } // Validate Inbound Transport
int ibtIdx = inboundTransportCombo . getSelectionIndex ( ) ; if ( ibtIdx == - 1 ) { updateStatus ( "Select an inbound transport" ) ; return ; } // Validate Outbound Transport
int obtIdx = outboundTransportCombo . getSelectionIndex ( ) ; if ( obtIdx == - 1 ) { updateStatus ( "Select an outbound transport" ) ; return ; } // Validate Transformer - type , do no allow smooks for oneway ' s right now .
int ttIdx = transformerType . value ; if ( mepIdx == 1 && ttIdx == 1 ) { updateStatus ( "Smooks based transformers are currently not supported for oneway message exchange patterns" ) ; return ; } String containerName = getContainerName ( ) . trim ( ) ; IResource container = ResourcesPlugin . getWorkspace ( ) . getRoot ( ) . findMember ( new Path ( containerName ) ) ; String serviceName = getServiceName ( ) ; if ( containerName . length ( ) == 0 ) { updateStatus ( "Select a project" ) ; return ; } if ( container == null || ( container . getType ( ) & ( IResource . PROJECT | IResource . FOLDER ) ) == 0 ) { updateStatus ( "Selected project must exist" ) ; return ; } if ( ! container . isAccessible ( ) ) { updateStatus ( "Selected project must be writable" ) ; return ; } if ( serviceName . length ( ) == 0 ) { updateStatus ( "Name of the service must be specified" ) ; return ; } if ( ! isJavaIdentifier ( ModelUtil . makeJavaName ( serviceName ) ) ) { updateStatus ( "Service name must be a valid Java identifier" ) ; return ; } updateStatus ( null ) ; |
public class UfsJournalCheckpointThread { /** * Initiates the shutdown of this checkpointer thread , and also waits for it to finish .
* @ param waitQuietPeriod whether to wait for a quiet period to pass before terminating the thread */
public void awaitTermination ( boolean waitQuietPeriod ) { } } | LOG . info ( "{}: Journal checkpointer shutdown has been initiated." , mMaster . getName ( ) ) ; mWaitQuietPeriod = waitQuietPeriod ; mShutdownInitiated = true ; // Actively interrupt to cancel slow checkpoints .
synchronized ( mCheckpointingLock ) { if ( mCheckpointing ) { interrupt ( ) ; } } try { // Wait for the thread to finish .
join ( ) ; LOG . info ( "{}: Journal shutdown complete" , mMaster . getName ( ) ) ; } catch ( InterruptedException e ) { LOG . error ( "{}: journal checkpointer shutdown is interrupted." , mMaster . getName ( ) , e ) ; // Kills the master . This can happen in the following two scenarios :
// 1 . The user Ctrl - C the server .
// 2 . Zookeeper selects this master as standby before the master finishes the previous
// standby - > leader transition . It is safer to crash the server because the behavior is
// undefined to have two journal checkpointer running concurrently .
throw new RuntimeException ( e ) ; } mStopped = true ; |
public class SightResourcesImpl {
  /**
   * Sets the publish status of a Sight and returns the new status, including the URLs of any
   * enabled publishing.
   *
   * It mirrors the following Smartsheet REST API method: POST /sights/{sightId}/publish
   *
   * @param sightId the Id of the Sight
   * @param sightPublish the SightPublish object containing the publish status
   * @return the Sight's publish status
   * @throws IllegalArgumentException if any argument is null or an empty string
   * @throws InvalidRequestException if there is any problem with the REST API request
   * @throws AuthorizationException if there is any problem with the REST API authorization (access token)
   * @throws ResourceNotFoundException if the resource cannot be found
   * @throws ServiceUnavailableException if the REST API service is not available (possibly due to rate limiting)
   * @throws SmartsheetException if there is any other error during the operation
   */
  public SightPublish setPublishStatus(long sightId, SightPublish sightPublish) throws SmartsheetException {
    // Fail fast on a null body; the REST layer performs the remaining validation.
    Util.throwIfNull(sightPublish);
    return this.updateResource("sights/" + sightId + "/publish", SightPublish.class, sightPublish);
  }
}
public class DBFKRelationPropertySheet {
  /**
   * Handles the action event of the Java field name text field (typically fired on ENTER).
   * GUI-builder generated handler; the GEN markers below must be preserved.
   */
  // GEN-LAST:event_tfJavaFieldNameFocusLost
  private void tfJavaFieldNameActionPerformed(java.awt.event.ActionEvent evt) // GEN-FIRST:event_tfJavaFieldNameActionPerformed
  {
    // GEN-HEADEREND:event_tfJavaFieldNameActionPerformed
    // Commit value if ENTER is typed
    aRelation.setFieldName(tfJavaFieldName.getText());
  }
}
public class SQLiteModelMethod {
  /**
   * Looks for a method parameter which is annotated with an annotationClazz annotation.
   * When it is found, a client action is invoked through the listener.
   *
   * @param <A> the annotation type being searched for
   * @param parent the parent DAO definition
   * @param annotationClazz the parameter annotation class to look for
   * @param unsupportedQueryType query-level annotations that may not be combined with the
   *          parameter annotation
   * @param listener callback invoked with the simple name of each annotated parameter
   */
  private <A extends Annotation> void findStringDynamicStatement(SQLiteDaoDefinition parent, Class<A> annotationClazz, List<Class<? extends Annotation>> unsupportedQueryType, OnFoundDynamicParameter listener) {
    int counter = 0;
    for (VariableElement p : element.getParameters()) {
      A annotation = p.getAnnotation(annotationClazz);
      if (annotation != null) {
        // Dynamic queries can not be used in INSERT SQL (or any other unsupported query type).
        for (Class<? extends Annotation> item : unsupportedQueryType) {
          AssertKripton.assertTrueOrInvalidMethodSignException(element.getAnnotation(item) == null, this, "in this method is not allowed to mark parameters with @%s annotation.", annotationClazz.getSimpleName());
        }
        // Only String parameters can carry the dynamic-statement annotation.
        AssertKripton.assertTrueOrInvalidMethodSignException(TypeUtility.isString(TypeUtility.typeName(p)), this, "only String parameters can be marked with @%s annotation.", annotationClazz.getSimpleName());
        listener.onFoundParameter(p.getSimpleName().toString());
        counter++;
      }
    }
    // At most one parameter may be marked with the annotation.
    AssertKripton.assertTrueOrInvalidMethodSignException(counter < 2, this, "there are %s parameters marked with @%s. Only one is allowed.", counter, annotationClazz.getSimpleName());
  }
}
public class LocaleFormatter { /** * Format the given value as percentage . The " % " sign is automatically
* appended according to the requested locale . The number of fractional digits
* depend on the locale .
* @ param dValue
* The value to be used . E . g . " 0.125 " will result in something like
* " 12.5 % "
* @ param aDisplayLocale
* The locale to use .
* @ return The non - < code > null < / code > formatted string . */
@ Nonnull public static String getFormattedPercent ( final double dValue , @ Nonnull final Locale aDisplayLocale ) { } } | ValueEnforcer . notNull ( aDisplayLocale , "DisplayLocale" ) ; return NumberFormat . getPercentInstance ( aDisplayLocale ) . format ( dValue ) ; |
public class CmsPersistentLoginTokenHandler {
  /**
   * Invalidates all tokens for the given user.<p>
   *
   * @param user the user whose token should be invalidated
   * @param token the token string identifying which additional-info entry to remove
   * @throws CmsException if something goes wrong while writing the user
   */
  public void invalidateToken(CmsUser user, String token) throws CmsException {
    Token tokenObj = new Token(token);
    if (tokenObj.isValid()) {
      String addInfoKey = tokenObj.getAdditionalInfoKey();
      // Only write the user back if an entry was actually removed.
      if (null != user.getAdditionalInfo().remove(addInfoKey)) {
        m_adminCms.writeUser(user);
      }
    }
  }
}
public class FractionNumber { /** * 分子の指定した桁の値を取得する 。
* @ param digit 1から始まる
* @ return 存在しない桁の場合は空文字を返す 。 */
public String getNumeratorPart ( final int digit ) { } } | final int length = numeratorPart . length ( ) ; if ( length < digit || digit <= 0 ) { return "" ; } return String . valueOf ( numeratorPart . charAt ( length - digit ) ) ; |
public class TraversalPlanner {
  /**
   * If an in-sub fragment starts from an indexed supertype, update the fragment cost of in-isa
   * starting from the subtypes. Runs recursively so the fixed cost propagates down the whole
   * sub hierarchy, one level per call.
   *
   * @param allNodes all nodes of the traversal plan, keyed by node id
   * @param nodesWithFixedCost nodes with a known fixed cost; updated in place
   * @param fragments the candidate fragments to inspect
   */
  private static void updateFixedCostSubsReachableByIndex(ImmutableMap<NodeId, Node> allNodes, Map<Node, Double> nodesWithFixedCost, Set<Fragment> fragments) {
    // Keep only in-sub fragments whose supertype already has a positive fixed cost and whose
    // subtype does not have one yet (those are the ones we can newly assign a cost to).
    Set<Fragment> validSubFragments = fragments.stream().filter(fragment -> {
      if (fragment instanceof InSubFragment) {
        Node superType = allNodes.get(NodeId.of(NodeId.NodeType.VAR, fragment.start()));
        if (nodesWithFixedCost.containsKey(superType) && nodesWithFixedCost.get(superType) > 0D) {
          Node subType = allNodes.get(NodeId.of(NodeId.NodeType.VAR, fragment.end()));
          return !nodesWithFixedCost.containsKey(subType);
        }
      }
      return false;
    }).collect(Collectors.toSet());
    if (!validSubFragments.isEmpty()) {
      validSubFragments.forEach(fragment -> {
        // TODO: should decrease the weight of sub type after each level
        nodesWithFixedCost.put(allNodes.get(NodeId.of(NodeId.NodeType.VAR, fragment.end())), nodesWithFixedCost.get(allNodes.get(NodeId.of(NodeId.NodeType.VAR, fragment.start()))));
      });
      // Recursively process the next level of sub fragments; terminates because each pass only
      // assigns costs to nodes that did not have one before.
      updateFixedCostSubsReachableByIndex(allNodes, nodesWithFixedCost, fragments);
    }
  }
}
public class LinearClassifier {
  /**
   * Returns a counter for the log probability of each of the classes.
   * Looking at the sum of e^v for each count v, the total should be 1.
   *
   * @param example the RVF datum to score
   * @return a counter mapping each label to its log probability
   */
  @Deprecated
  public Counter<L> logProbabilityOf(RVFDatum<L, F> example) {
    // NB: this duplicate method is needed so it calls the scoresOf method
    // with an RVFDatum signature!! Don't remove it!
    // JLS: type resolution of method parameters is static
    Counter<L> scores = scoresOf(example);
    // Normalize in log space so exponentiated values sum to 1.
    Counters.logNormalizeInPlace(scores);
    return scores;
  }
}
public class DataTypeParser { /** * Method which performs the actual parsing of the data type name and applicable values ( i . e . VARCHAR ( 20 ) ) if data type is
* found .
* @ param tokens
* @ return the { @ link DataType }
* @ throws ParsingException */
public DataType parse ( DdlTokenStream tokens ) throws ParsingException { } } | DataType result = null ; if ( isDatatype ( tokens , DataTypes . DTYPE_CODE_CHAR_STRING ) ) { result = parseCharStringType ( tokens ) ; } else if ( isDatatype ( tokens , DataTypes . DTYPE_CODE_NCHAR_STRING ) ) { result = parseNationalCharStringType ( tokens ) ; } else if ( isDatatype ( tokens , DataTypes . DTYPE_CODE_BIT_STRING ) ) { result = parseBitStringType ( tokens ) ; } else if ( isDatatype ( tokens , DataTypes . DTYPE_CODE_EXACT_NUMERIC ) ) { result = parseExactNumericType ( tokens ) ; } else if ( isDatatype ( tokens , DataTypes . DTYPE_CODE_APROX_NUMERIC ) ) { result = parseApproxNumericType ( tokens ) ; } else if ( isDatatype ( tokens , DataTypes . DTYPE_CODE_DATE_TIME ) ) { result = parseDateTimeType ( tokens ) ; } else if ( isDatatype ( tokens , DataTypes . DTYPE_CODE_MISC ) ) { result = parseMiscellaneousType ( tokens ) ; } else { result = parseCustomType ( tokens ) ; } /* * ( FROM http : / / www . postgresql . org / docs / 8.4 / static / arrays . html )
8.14.1 . Declaration of Array Types
To illustrate the use of array types , we create this table :
CREATE TABLE sal _ emp (
name text ,
pay _ by _ quarter integer [ ] ,
schedule text [ ] [ ]
As shown , an array data type is named by appending square brackets ( [ ] ) to the data type name of the array elements .
The above command will create a table named sal _ emp with a column of type text ( name ) , a one - dimensional array of type
integer ( pay _ by _ quarter ) , which represents the employee ' s salary by quarter , and a two - dimensional array of text ( schedule ) ,
which represents the employee ' s weekly schedule .
The syntax for CREATE TABLE allows the exact size of arrays to be specified , for example :
CREATE TABLE tictactoe (
squares integer [ 3 ] [ 3]
However , the current implementation ignores any supplied array size limits , i . e . , the behavior is the same as for
arrays of unspecified length .
The current implementation does not enforce the declared number of dimensions either . Arrays of a particular element
type are all considered to be of the same type , regardless of size or number of dimensions . So , declaring the array size
or number of dimensions in CREATE TABLE is simply documentation ; it does not affect run - time behavior .
An alternative syntax , which conforms to the SQL standard by using the keyword ARRAY , can be used for one - dimensional
arrays . pay _ by _ quarter could have been defined as :
pay _ by _ quarter integer ARRAY [ 4 ] ,
Or , if no array size is to be specified :
pay _ by _ quarter integer ARRAY , */
if ( tokens . canConsume ( '[' ) ) { if ( ! tokens . canConsume ( ']' ) ) { // assume integer value
tokens . consume ( ) ; tokens . consume ( ']' ) ; } if ( tokens . canConsume ( '[' ) ) { if ( ! tokens . canConsume ( ']' ) ) { // assume integer value
tokens . consume ( ) ; tokens . consume ( ']' ) ; } } } return result ; |
public class JCalendarPopup {
  /**
   * Convert this date to a string (using the supplied format).
   *
   * @param dateTarget The date to convert to a string.
   * @param iDateFormat The field constant identifying which portion of the formatted date to
   *        extract (passed to {@link java.text.FieldPosition}).
   * @return The requested field of the formatted date as a string.
   */
  public String getDateString(Date dateTarget, int iDateFormat) {
    // NOTE(review): reuses the shared instance fields stringBuffer/dateFormat, so this method
    // is presumably not thread-safe — confirm callers stay on the EDT.
    stringBuffer.setLength(0);
    FieldPosition fieldPosition = new FieldPosition(iDateFormat);
    String string = null;
    string = dateFormat.format(dateTarget, stringBuffer, fieldPosition).toString();
    // Extract only the field identified by iDateFormat from the full formatted string.
    int iBegin = fieldPosition.getBeginIndex();
    int iEnd = fieldPosition.getEndIndex();
    string = string.substring(iBegin, iEnd);
    return string;
  }
}
public class OWLEntityCollectionContainerCollector {
  /**
   * Resets the collector to write into the given set. XXX not in the interface.
   *
   * @param toReturn the set that will receive collected entities from now on
   */
  public void reset(Set<OWLEntity> toReturn) {
    objects = toReturn;
    // NOTE(review): the explicit null check makes the inner verifyNotNull redundant; kept as-is
    // since verifyNotNull simply guards and returns its argument.
    if (anonymousIndividuals != null) {
      verifyNotNull(anonymousIndividuals).clear();
    }
  }
}
public class TwoDimTableV3 {
  /**
   * Turn a description such as "Avg. Training MSE" into a JSON-usable field name
   * "avg_training_mse".
   *
   * @param n the column description to convert
   * @return the pythonified field name, or {@code n} unchanged when it is null or the table
   *         name contains "confusion"
   */
  private String pythonify(String n) {
    // NOTE(review): this checks the field `name` (the table name), not the argument `n` —
    // presumably to leave confusion-matrix column labels untouched; confirm it is intentional.
    if (n == null || name.toLowerCase().contains("confusion")) return n;
    StringBuilder sb = new StringBuilder();
    // Split on whitespace and underscores, then rejoin with single underscores.
    String[] modified = n.split("[\\s_]+");
    for (int i = 0; i < modified.length; ++i) {
      if (i != 0) sb.append("_");
      String s = modified[i];
      // if (!s.matches("^[A-Z]{2,3}$")) {
      sb.append(s.toLowerCase()); // everything goes lowercase
      // } else {
      //   sb.append(s);
    }
    // Strip any remaining non-word characters (punctuation such as ".").
    String newString = sb.toString().replaceAll("[^\\w]", "");
    // if (!newString.equals(name)) {
    //   Log.warn("Turning column description into field name: " + name + " --> " + newString);
    return newString;
  }
}
public class PersistenceUnitMetadata {
  /**
   * Returns the list of managed URLs: the configured jar-file URLs, plus the persistence unit
   * root URL when unlisted classes are not excluded.
   *
   * @return the list of managed URLs; never null
   */
  public List<URL> getManagedURLs() {
    // should we cache it?
    // NOTE(review): when getJarFileUrls() returns a live list this appends the root URL to it
    // on every call — confirm it returns a fresh/defensive copy.
    List<URL> managedURL = getJarFileUrls();
    if (managedURL == null) {
      managedURL = new ArrayList<URL>(1);
    }
    if (!getExcludeUnlistedClasses()) {
      managedURL.add(getPersistenceUnitRootUrl());
    }
    return managedURL;
  }
}
public class GetCostForecastResult { /** * The forecasts for your query , in order . For < code > DAILY < / code > forecasts , this is a list of days . For
* < code > MONTHLY < / code > forecasts , this is a list of months .
* < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use
* { @ link # setForecastResultsByTime ( java . util . Collection ) } or
* { @ link # withForecastResultsByTime ( java . util . Collection ) } if you want to override the existing values .
* @ param forecastResultsByTime
* The forecasts for your query , in order . For < code > DAILY < / code > forecasts , this is a list of days . For
* < code > MONTHLY < / code > forecasts , this is a list of months .
* @ return Returns a reference to this object so that method calls can be chained together . */
public GetCostForecastResult withForecastResultsByTime ( ForecastResult ... forecastResultsByTime ) { } } | if ( this . forecastResultsByTime == null ) { setForecastResultsByTime ( new java . util . ArrayList < ForecastResult > ( forecastResultsByTime . length ) ) ; } for ( ForecastResult ele : forecastResultsByTime ) { this . forecastResultsByTime . add ( ele ) ; } return this ; |
public class AssetsInner {
  /**
   * Create or update an Asset.
   * Creates or updates an Asset in the Media Services account.
   *
   * @param resourceGroupName The name of the resource group within the Azure subscription.
   * @param accountName The Media Services account name.
   * @param assetName The Asset name.
   * @param parameters The request parameters.
   * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
   * @throws IllegalArgumentException thrown if parameters fail the validation
   * @return the {@link ServiceFuture} object
   */
  public ServiceFuture<AssetInner> createOrUpdateAsync(String resourceGroupName, String accountName, String assetName, AssetInner parameters, final ServiceCallback<AssetInner> serviceCallback) {
    // Delegates to the observable-based overload and adapts it to a ServiceFuture/callback.
    return ServiceFuture.fromResponse(createOrUpdateWithServiceResponseAsync(resourceGroupName, accountName, assetName, parameters), serviceCallback);
  }
}
public class UpdateRelationalDatabaseRequestMarshaller {
  /**
   * Marshall the given parameter object.
   * Generated marshaller boilerplate: writes each request field through its protocol binding,
   * wrapping any failure in an SdkClientException (cause preserved).
   *
   * @param updateRelationalDatabaseRequest the request to marshall; must not be null
   * @param protocolMarshaller the protocol marshaller to write to
   */
  public void marshall(UpdateRelationalDatabaseRequest updateRelationalDatabaseRequest, ProtocolMarshaller protocolMarshaller) {
    if (updateRelationalDatabaseRequest == null) {
      throw new SdkClientException("Invalid argument passed to marshall(...)");
    }
    try {
      protocolMarshaller.marshall(updateRelationalDatabaseRequest.getRelationalDatabaseName(), RELATIONALDATABASENAME_BINDING);
      protocolMarshaller.marshall(updateRelationalDatabaseRequest.getMasterUserPassword(), MASTERUSERPASSWORD_BINDING);
      protocolMarshaller.marshall(updateRelationalDatabaseRequest.getRotateMasterUserPassword(), ROTATEMASTERUSERPASSWORD_BINDING);
      protocolMarshaller.marshall(updateRelationalDatabaseRequest.getPreferredBackupWindow(), PREFERREDBACKUPWINDOW_BINDING);
      protocolMarshaller.marshall(updateRelationalDatabaseRequest.getPreferredMaintenanceWindow(), PREFERREDMAINTENANCEWINDOW_BINDING);
      protocolMarshaller.marshall(updateRelationalDatabaseRequest.getEnableBackupRetention(), ENABLEBACKUPRETENTION_BINDING);
      protocolMarshaller.marshall(updateRelationalDatabaseRequest.getDisableBackupRetention(), DISABLEBACKUPRETENTION_BINDING);
      protocolMarshaller.marshall(updateRelationalDatabaseRequest.getPubliclyAccessible(), PUBLICLYACCESSIBLE_BINDING);
      protocolMarshaller.marshall(updateRelationalDatabaseRequest.getApplyImmediately(), APPLYIMMEDIATELY_BINDING);
    } catch (Exception e) {
      // Broad catch is intentional in this generated code: any marshalling failure is surfaced
      // uniformly as an SdkClientException.
      throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
    }
  }
}
public class BaseServerResource { /** * Adds params found in the URL that are not in the { @ link DescribeService }
* document of the service . Having this , we can add in the URL to POIProxy
* params from the original service . This should not be a good option when
* we want to have a single interface , but allows anyone to access the
* original API adding the original parameters to the POIProxy request
* @ param params
* @ param extractedParams */
protected void addAdditionalParams ( HashMap < String , String > params , ArrayList < Param > extractedParams ) { } } | for ( String key : params . keySet ( ) ) { if ( ! ParamEnum . from ( key ) ) { extractedParams . add ( new Param ( key , params . get ( key ) ) ) ; } } |
public class CleanupQueue { /** * Force to clean the all path , it should be called when task tracker is shut down
* Now we only called it in MiniCoronaCluster to make sure the unit test run in a clean
* fixture */
public void forceClean ( ) { } } | while ( true ) { PathDeletionContext context = null ; try { context = cleanupThread . queue . poll ( 50L , TimeUnit . MILLISECONDS ) ; if ( context == null ) { return ; } if ( ! deletePath ( context ) ) { LOG . warn ( "forceClean:Unable to delete path " + context . fullPath ) ; } else { LOG . info ( "foceClean DELETED " + context . fullPath ) ; } } catch ( InterruptedException e ) { return ; } catch ( Exception e ) { LOG . warn ( "Error deleting path " + context . fullPath + ": " + e ) ; } } |
public class B2BAccountUrl {
  /**
   * Get the resource URL for RemoveUserRoleAsync.
   *
   * @param accountId Unique identifier of the customer account.
   * @param roleId identifier of the role to remove from the user
   * @param userId Unique identifier of the user whose tenant scopes you want to retrieve.
   * @return String Resource Url
   */
  public static MozuUrl removeUserRoleAsyncUrl(Integer accountId, Integer roleId, String userId) {
    // Substitute each path placeholder in the template with its value.
    UrlFormatter formatter = new UrlFormatter("/api/commerce/customer/b2baccounts/{accountId}/user/{userId}/roles/{roleId}");
    formatter.formatUrl("accountId", accountId);
    formatter.formatUrl("roleId", roleId);
    formatter.formatUrl("userId", userId);
    return new MozuUrl(formatter.getResourceUrl(), MozuUrl.UrlLocation.TENANT_POD);
  }
}
public class MpMessages {
  /**
   * Broadcasts a video message to the given users (by open id).
   *
   * @param openIds the open ids of the target users
   * @param mediaId the media id of the uploaded video
   * @param title the video title
   * @param desc the video description
   * @return the result of the broadcast (see the delegated overload)
   */
  public long video(List<String> openIds, String mediaId, String title, String desc) {
    // Delegates with a default non-filtering Filter (send to the explicit open id list).
    return video(new Filter(false, null), openIds, mediaId, title, desc);
  }
}
public class AbstractProperties { public static void validateStringValue ( String value ) throws ParameterException { } } | Validate . notNull ( value ) ; Validate . notEmpty ( value ) ; |
public class Mutator { /** * Creates a superset of the input Schema , taking all the Fields in the input schema
* and adding some new ones . The new fields are fully specified in a Field class .
* The name of the schema is also specified as a parameter . */
public static Schema superSetOf ( String newName , Schema schema , Field ... newFields ) { } } | List < Field > newSchema = new ArrayList < Field > ( ) ; newSchema . addAll ( schema . getFields ( ) ) ; for ( Field newField : newFields ) { newSchema . add ( newField ) ; } return new Schema ( newName , newSchema ) ; |
public class StringUtil { /** * Cuts a string between two words , before a sepcified length , if the
* string is longer than the maxium lenght . The string is optionally padded
* with the pad argument . The method assumes words to be separated by the
* space character ( " " ) .
* Note that the maximum length argument is absolute , and will also include
* the length of the padding .
* @ param pString The string to cut
* @ param pMaxLen The maximum length before cutting
* @ param pPad The string to append at the end , aftrer cutting
* @ return The cutted string with padding , or the original string , if it
* was shorter than the max length .
* @ see # pad ( String , int , String , boolean ) */
public static String cut ( String pString , int pMaxLen , String pPad ) { } } | if ( pString == null ) { return null ; } if ( pPad == null ) { pPad = "" ; } int len = pString . length ( ) ; if ( len > pMaxLen ) { len = pString . lastIndexOf ( ' ' , pMaxLen - pPad . length ( ) ) ; } else { return pString ; } return pString . substring ( 0 , len ) + pPad ; |
public class CalculateDateExtensions { /** * Substract years to the given Date object and returns it .
* @ param date
* The Date object to substract the years .
* @ param substractYears
* The years to substract .
* @ return The resulted Date object . */
public static Date substractYearsFromDate ( final Date date , final int substractYears ) { } } | final Calendar dateOnCalendar = Calendar . getInstance ( ) ; dateOnCalendar . setTime ( date ) ; dateOnCalendar . add ( Calendar . YEAR , substractYears * - 1 ) ; return dateOnCalendar . getTime ( ) ; |
public class IdemixCredRequest {
  /**
   * Converts the enrollment request to a JSON object. Each big-integer component is serialized
   * as Base64-encoded bytes; fields that are null are simply omitted from the result.
   *
   * @return the JSON representation of this credential request
   */
  public JsonObject toJsonObject() {
    JsonObjectBuilder factory = Json.createObjectBuilder();
    if (nym != null) {
      // The pseudonym is an EC point, serialized as its x and y coordinates.
      JsonObjectBuilder factory2 = Json.createObjectBuilder();
      factory2.add("x", Base64.getEncoder().encodeToString(IdemixUtils.bigToBytes(nym.getX())));
      factory2.add("y", Base64.getEncoder().encodeToString(IdemixUtils.bigToBytes(nym.getY())));
      factory.add("nym", factory2.build());
    }
    if (issuerNonce != null) {
      String b64encoded = Base64.getEncoder().encodeToString(IdemixUtils.bigToBytes(issuerNonce));
      factory.add("issuer_nonce", b64encoded);
    }
    if (proofC != null) {
      factory.add("proof_c", Base64.getEncoder().encodeToString(IdemixUtils.bigToBytes(proofC)));
    }
    if (proofS != null) {
      factory.add("proof_s", Base64.getEncoder().encodeToString(IdemixUtils.bigToBytes(proofS)));
    }
    return factory.build();
  }
}
public class ApiConnection {
  /**
   * Extracts warnings that are returned in an API response.
   *
   * @param root root node of the JSON result
   * @return a list of human-readable warning strings, each prefixed with the module name;
   *         empty if the response carries no "warnings" member
   */
  List<String> getWarnings(JsonNode root) {
    ArrayList<String> warnings = new ArrayList<>();
    if (root.has("warnings")) {
      JsonNode warningNode = root.path("warnings");
      // The warnings object maps module names to either a textual message or an array of
      // structured message objects; handle both shapes.
      Iterator<Map.Entry<String, JsonNode>> moduleIterator = warningNode.fields();
      while (moduleIterator.hasNext()) {
        Map.Entry<String, JsonNode> moduleNode = moduleIterator.next();
        Iterator<JsonNode> moduleOutputIterator = moduleNode.getValue().elements();
        while (moduleOutputIterator.hasNext()) {
          JsonNode moduleOutputNode = moduleOutputIterator.next();
          if (moduleOutputNode.isTextual()) {
            warnings.add("[" + moduleNode.getKey() + "]: " + moduleOutputNode.textValue());
          } else if (moduleOutputNode.isArray()) {
            Iterator<JsonNode> messageIterator = moduleOutputNode.elements();
            while (messageIterator.hasNext()) {
              JsonNode messageNode = messageIterator.next();
              // Prefer the rendered HTML message ("html"/"*"); fall back to the raw JSON.
              warnings.add("[" + moduleNode.getKey() + "]: " + messageNode.path("html").path("*").asText(messageNode.toString()));
            }
          } else {
            // Unknown warning shape: record it verbatim so it is not silently lost.
            warnings.add("[" + moduleNode.getKey() + "]: " + "Warning was not understood. Please report this to Wikidata Toolkit. JSON source: " + moduleOutputNode.toString());
          }
        }
      }
    }
    return warnings;
  }
}
public class AnimationSequencer {
  /**
   * Adds the supplied animation to the sequence with the given parameters. Note that care should
   * be taken if this is called after the animation sequence has begun firing animations. Do not
   * add new animations after the final animation in the sequence has been started or you run the
   * risk of attempting to add an animation to the sequence after it thinks that it has finished
   * (in which case this method will fail).
   *
   * @param anim the animation to be sequenced, or null if the completion action should be run
   *        immediately when this "animation" is ready to be fired.
   * @param delta the number of milliseconds following the <em>start</em> of the previous
   *        animation in the queue that this animation should be started; 0 if it should be
   *        started simultaneously with its predecessor in the queue; -1 if it should be started
   *        when its predecessor has completed.
   * @param completionAction a runnable to be executed when this animation completes.
   */
  public void addAnimation(Animation anim, long delta, Runnable completionAction) {
    // sanity check
    if (_finished) {
      throw new IllegalStateException("Animation added to finished sequencer");
    }
    // if this guy is triggering on a previous animation, grab that good fellow here
    AnimRecord trigger = null;
    if (delta == -1) {
      if (_queued.size() > 0) {
        // if there are queued animations we want the most recently queued animation
        trigger = _queued.get(_queued.size() - 1);
      } else if (_running.size() > 0) {
        // otherwise, if there are running animations, we want the last one in that list
        trigger = _running.get(_running.size() - 1);
      }
      // otherwise we have no trigger, we'll just start ASAP
    }
    AnimRecord arec = new AnimRecord(anim, delta, trigger, completionAction);
    // Log.info("Queued " + arec + ".");
    _queued.add(arec);
  }
}
public class LineDocInputFormat {
  /**
   * (non-Javadoc)
   * Creates a record reader for the given file split, reporting the split being processed.
   *
   * @see org.apache.hadoop.mapred.FileInputFormat#getRecordReader(org.apache.hadoop.mapred.InputSplit, org.apache.hadoop.mapred.JobConf, org.apache.hadoop.mapred.Reporter)
   */
  public RecordReader<DocumentID, LineDocTextAndOp> getRecordReader(InputSplit split, JobConf job, Reporter reporter) throws IOException {
    // Surface which split is being read in the task status.
    reporter.setStatus(split.toString());
    return new LineDocRecordReader(job, (FileSplit) split);
  }
}
public class Multisets { /** * Returns { @ code true } if { @ code subMultiset . count ( o ) < =
* superMultiset . count ( o ) } for all { @ code o } .
* @ since 10.0 */
public static boolean containsOccurrences ( Multiset < ? > superMultiset , Multiset < ? > subMultiset ) { } } | checkNotNull ( superMultiset ) ; checkNotNull ( subMultiset ) ; for ( Entry < ? > entry : subMultiset . entrySet ( ) ) { int superCount = superMultiset . count ( entry . getElement ( ) ) ; if ( superCount < entry . getCount ( ) ) { return false ; } } return true ; |
public class SnapshotDaemon {
  /**
   * Process a response to a request to delete snapshots.
   * Always transitions to the waiting state even if the delete fails. This ensures the system
   * will continue to snapshot until the disk is full in the event that there is an
   * administration error or a bug.
   *
   * @param response the client response for the delete request
   */
  private void processDeleteResponse(ClientResponse response) {
    // Continue snapshotting even if a delete fails: transition first, then inspect the result.
    setState(State.WAITING);
    if (response.getStatus() != ClientResponse.SUCCESS) {
      logFailureResponse("Delete of snapshots failed", response);
      return;
    }
    final VoltTable results[] = response.getResults();
    final String err = SnapshotUtil.didSnapshotRequestFailWithErr(results);
    if (err != null) {
      SNAP_LOG.warn("Snapshot delete failed with failure response: " + err);
      return;
    }
  }
}
public class UiCompat {
  /**
   * Sets the custom outline provider on API >= 21 (Lollipop). Does nothing on API < 21.
   *
   * @param view the view to attach the outline provider to
   * @param balloonMarkerDrawable the drawable providing the outline
   */
  public static void setOutlineProvider(View view, final BalloonMarkerDrawable balloonMarkerDrawable) {
    // Delegate through a separate class so pre-Lollipop devices never load the API-21 code path.
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
      UiCompatNotCrash.setOutlineProvider(view, balloonMarkerDrawable);
    }
  }
}
public class MailFromAttributesMarshaller {
  /**
   * Marshall the given parameter object.
   * Generated marshaller boilerplate: writes each attribute through its protocol binding,
   * wrapping any failure in an SdkClientException (cause preserved).
   *
   * @param mailFromAttributes the attributes to marshall; must not be null
   * @param protocolMarshaller the protocol marshaller to write to
   */
  public void marshall(MailFromAttributes mailFromAttributes, ProtocolMarshaller protocolMarshaller) {
    if (mailFromAttributes == null) {
      throw new SdkClientException("Invalid argument passed to marshall(...)");
    }
    try {
      protocolMarshaller.marshall(mailFromAttributes.getMailFromDomain(), MAILFROMDOMAIN_BINDING);
      protocolMarshaller.marshall(mailFromAttributes.getMailFromDomainStatus(), MAILFROMDOMAINSTATUS_BINDING);
      protocolMarshaller.marshall(mailFromAttributes.getBehaviorOnMxFailure(), BEHAVIORONMXFAILURE_BINDING);
    } catch (Exception e) {
      // Broad catch is intentional in this generated code.
      throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
    }
  }
}
public class DefaultIncrementalAttributesMapper {
  /**
   * Lookup all values for the specified attribute, looping through the results incrementally if
   * necessary.
   *
   * @param ldapOperations The instance to use for performing the actual lookup.
   * @param dn The distinguished name of the object to find.
   * @param attribute name of the attribute to request.
   * @return a list with all attribute values found for the requested attribute. Never
   *         <code>null</code>; an empty list indicates that the attribute was not set or empty.
   */
  public static List<Object> lookupAttributeValues(LdapOperations ldapOperations, String dn, String attribute) {
    // Convert the String DN to an LdapName and delegate to the typed overload.
    return lookupAttributeValues(ldapOperations, LdapUtils.newLdapName(dn), attribute);
  }
}
public class PathBuilder {
  /**
   * Create a PathBuilder instance for the given property.
   *
   * @param property property name
   * @return property path (cached; the same instance is returned for repeated calls with the
   *         same property)
   */
  @SuppressWarnings("unchecked")
  public PathBuilder<Object> get(String property) {
    SimpleEntry<String, Class<?>> entry = new SimpleEntry<String, Class<?>>(property, Object.class);
    PathBuilder<Object> path = (PathBuilder<Object>) properties.get(entry);
    PathBuilder<?> existingPath = null;
    if (path == null) {
      Class<?> vtype = validate(property, Object.class);
      path = new PathBuilder<Object>(vtype, forProperty(property), validator);
      // putIfAbsent handles the race where another thread created the path concurrently:
      // a non-null return means the other thread won and its instance must be used.
      existingPath = properties.putIfAbsent(entry, path);
    }
    return existingPath == null ? path : (PathBuilder<Object>) existingPath;
  }
}
public class PrintWriter {
  /**
   * Writes a single character.
   *
   * @param c int specifying a character to be written.
   */
  public void write(int c) {
    try {
      synchronized (lock) {
        ensureOpen();
        out.write(c);
      }
    } catch (InterruptedIOException x) {
      // Preserve the interrupt status instead of reporting an error.
      Thread.currentThread().interrupt();
    } catch (IOException x) {
      // PrintWriter never throws IOException; failures are recorded in the trouble flag
      // and exposed via checkError().
      trouble = true;
    }
  }
}
public class CSVDataWriterImpl {
  /**
   * Exports a row from a sas7bdat file (stored as an object of the {@link SasFileReaderImpl}
   * class) using {@link CSVDataWriterImpl#writer}.
   *
   * @param columns the {@link Column} class variables list that stores column descriptions from
   *        the sas7bdat file.
   * @param row the Objects array that stores data from the sas7bdat file.
   * @throws java.io.IOException appears if the output into the writer is impossible.
   */
  @Override
  public void writeRow(List<Column> columns, Object[] row) throws IOException {
    if (row == null) {
      return;
    }
    Writer writer = getWriter();
    // Render each cell to its textual form first, then emit delimiter-separated values.
    List<String> valuesToPrint = DataWriterUtil.getRowValues(columns, row, getLocale(), columnFormatters);
    for (int currentColumnIndex = 0; currentColumnIndex < columns.size(); currentColumnIndex++) {
      // Quote the value if it contains the delimiter (or otherwise needs quoting).
      writer.write(checkSurroundByQuotes(getDelimiter(), valuesToPrint.get(currentColumnIndex)));
      if (currentColumnIndex != columns.size() - 1) {
        writer.write(getDelimiter());
      }
    }
    writer.write(getEndline());
    writer.flush();
  }
}
public class TrieMap { /** * Sets the given value as the new value on the given node , increases size
* and modCount . */
void modifyData ( final TrieNode < V > node , final V value ) { } } | node . value = value ; ++ modCount ; ++ size ; |
public class URLTemplates { /** * Add a template reference group from url - template - config by name .
* @ param refGroupName the name of the template reference group .
* @ param templateRefGroup the template reference group . */
public void addTemplateRefGroup ( String refGroupName , Map /* < String , String > */
templateRefGroup ) { } } | if ( refGroupName == null || refGroupName . length ( ) == 0 ) { throw new IllegalArgumentException ( "Template Reference Group name cannot be null or empty." ) ; } if ( templateRefGroup == null || templateRefGroup . size ( ) == 0 ) { throw new IllegalArgumentException ( "Template Reference Group cannot be null or empty." ) ; } _templateRefGroups . put ( refGroupName , templateRefGroup ) ; |
public class JsonPropertiesSource { /** * Generates the properties from a JSON node
* @ param node
* the JSON node
* @ param props
* the properties to populate */
private void generateProperties ( JsonNode node , String parentPrefix , Properties props ) { } } | Iterator < Entry < String , JsonNode > > fields = node . fields ( ) ; while ( fields . hasNext ( ) ) { Entry < String , JsonNode > entry = fields . next ( ) ; String fieldName = entry . getKey ( ) ; JsonNode jsonNode = entry . getValue ( ) ; String nodePrefix = parentPrefix == null ? fieldName : parentPrefix + FIELD_NAME_SEPARATOR + fieldName ; if ( jsonNode . isTextual ( ) || jsonNode . isBoolean ( ) ) { props . put ( nodePrefix , jsonNode . asText ( ) ) ; } else if ( jsonNode . isNumber ( ) ) { props . put ( nodePrefix , Integer . toString ( jsonNode . asInt ( ) ) ) ; } else if ( jsonNode . isArray ( ) ) { String arrayValue = convertToString ( jsonNode ) ; props . put ( nodePrefix , arrayValue ) ; } generateProperties ( jsonNode , nodePrefix , props ) ; } |
public class CmsLinkProcessor {
    /**
     * Processes an {@code <object>} tag: rewrites its link-carrying attributes
     * and the {@code value} attributes of its {@code <param>} children.
     *
     * @param tag the tag to process
     */
    protected void processObjectTag(ObjectTag tag) {
        // Relation type is derived from the tag name itself.
        CmsRelationType type = CmsRelationType.valueOf(tag.getTagName());
        for (int i = 0; i < OBJECT_TAG_LINKED_ATTRIBS.length; i++) {
            String attr = OBJECT_TAG_LINKED_ATTRIBS[i];
            processLink(tag, attr, type);
            if ((i == 0) && (tag.getAttribute(attr) != null)) {
                // if code base is available, the other attributes are relative
                // to it, so do not process them
                break;
            }
        }
        // Also rewrite the value attribute of any direct <param> child tags.
        SimpleNodeIterator itChildren = tag.children();
        while (itChildren.hasMoreNodes()) {
            Node node = itChildren.nextNode();
            if (node instanceof Tag) {
                Tag childTag = (Tag) node;
                if (TAG_PARAM.equals(childTag.getTagName())) {
                    processLink(childTag, ATTRIBUTE_VALUE, type);
                }
            }
        }
    }
}
public class MLEDependencyGrammar {
    /**
     * Tune the smoothing and interpolation parameters of the dependency
     * grammar based on a tuning treebank, via grid search maximising the
     * log-likelihood of the extracted dependencies.
     *
     * @param trees A Collection of Trees for setting parameters
     */
    @Override
    public void tune(Collection<Tree> trees) {
        // Flatten the tuning treebank into one list of dependencies up front.
        List<IntDependency> deps = new ArrayList<IntDependency>();
        for (Tree tree : trees) {
            deps.addAll(treeToDependencyList(tree, wordIndex, tagIndex));
        }
        double bestScore = Double.NEGATIVE_INFINITY;
        double bestSmooth_stop = 0.0;
        double bestSmooth_aTW_hTWd = 0.0;
        double bestSmooth_aT_hTWd = 0.0;
        double bestInterp = 0.0;
        // Phase 1: tune smooth_stop alone over a geometric grid (factor 1.25).
        System.err.println("Tuning smooth_stop...");
        for (smooth_stop = 1.0 / 100.0; smooth_stop < 100.0; smooth_stop *= 1.25) {
            double totalScore = 0.0;
            for (IntDependency dep : deps) {
                if (!rootTW(dep.head)) {
                    double stopProb = getStopProb(dep);
                    if (!dep.arg.equals(stopTW)) {
                        // Non-STOP argument: score the probability of continuing.
                        stopProb = 1.0 - stopProb;
                    }
                    if (stopProb > 0.0) {
                        totalScore += Math.log(stopProb);
                    }
                }
            }
            if (totalScore > bestScore) {
                bestScore = totalScore;
                bestSmooth_stop = smooth_stop;
            }
        }
        smooth_stop = bestSmooth_stop;
        System.err.println("Tuning selected smooth_stop: " + smooth_stop);
        // STOP dependencies only influence smooth_stop; drop them before
        // tuning the remaining parameters.
        for (Iterator<IntDependency> iter = deps.iterator(); iter.hasNext(); ) {
            IntDependency dep = iter.next();
            if (dep.arg.equals(stopTW)) {
                iter.remove();
            }
        }
        System.err.println("Tuning other parameters...");
        if (!useSmoothTagProjection) {
            // Phase 2a: 3-D grid search over smooth_aTW_hTWd, smooth_aT_hTWd, interp.
            bestScore = Double.NEGATIVE_INFINITY;
            for (smooth_aTW_hTWd = 0.5; smooth_aTW_hTWd < 100.0; smooth_aTW_hTWd *= 1.25) {
                System.err.print(".");
                for (smooth_aT_hTWd = 0.5; smooth_aT_hTWd < 100.0; smooth_aT_hTWd *= 1.25) {
                    for (interp = 0.02; interp < 1.0; interp += 0.02) {
                        double totalScore = 0.0;
                        for (IntDependency dep : deps) {
                            double score = score(dep);
                            if (score > Double.NEGATIVE_INFINITY) {
                                totalScore += score;
                            }
                        }
                        if (totalScore > bestScore) {
                            bestScore = totalScore;
                            bestInterp = interp;
                            bestSmooth_aTW_hTWd = smooth_aTW_hTWd;
                            bestSmooth_aT_hTWd = smooth_aT_hTWd;
                            System.err.println("Current best interp: " + interp + " with score " + totalScore);
                        }
                    }
                }
            }
            smooth_aTW_hTWd = bestSmooth_aTW_hTWd;
            smooth_aT_hTWd = bestSmooth_aT_hTWd;
            interp = bestInterp;
        } else { // for useSmoothTagProjection
            // Phase 2b: 6-D grid search including tag-projection smoothing weights.
            double bestSmooth_aTW_aT = 0.0;
            double bestSmooth_aTW_hTd = 0.0;
            double bestSmooth_aT_hTd = 0.0;
            bestScore = Double.NEGATIVE_INFINITY;
            for (smooth_aTW_hTWd = 1.125; smooth_aTW_hTWd < 100.0; smooth_aTW_hTWd *= 1.5) {
                System.err.print("#");
                for (smooth_aT_hTWd = 1.125; smooth_aT_hTWd < 100.0; smooth_aT_hTWd *= 1.5) {
                    System.err.print(":");
                    for (smooth_aTW_aT = 1.125; smooth_aTW_aT < 200.0; smooth_aTW_aT *= 1.5) {
                        System.err.print(".");
                        for (smooth_aTW_hTd = 1.125; smooth_aTW_hTd < 100.0; smooth_aTW_hTd *= 1.5) {
                            for (smooth_aT_hTd = 1.125; smooth_aT_hTd < 100.0; smooth_aT_hTd *= 1.5) {
                                for (interp = 0.2; interp <= 0.8; interp += 0.02) {
                                    double totalScore = 0.0;
                                    for (IntDependency dep : deps) {
                                        double score = score(dep);
                                        if (score > Double.NEGATIVE_INFINITY) {
                                            totalScore += score;
                                        }
                                    }
                                    if (totalScore > bestScore) {
                                        bestScore = totalScore;
                                        bestInterp = interp;
                                        bestSmooth_aTW_hTWd = smooth_aTW_hTWd;
                                        bestSmooth_aT_hTWd = smooth_aT_hTWd;
                                        bestSmooth_aTW_aT = smooth_aTW_aT;
                                        bestSmooth_aTW_hTd = smooth_aTW_hTd;
                                        bestSmooth_aT_hTd = smooth_aT_hTd;
                                        System.err.println("Current best interp: " + interp + " with score " + totalScore);
                                    }
                                }
                            }
                        }
                    }
                }
                System.err.println();
            }
            smooth_aTW_hTWd = bestSmooth_aTW_hTWd;
            smooth_aT_hTWd = bestSmooth_aT_hTWd;
            smooth_aTW_aT = bestSmooth_aTW_aT;
            smooth_aTW_hTd = bestSmooth_aTW_hTd;
            smooth_aT_hTd = bestSmooth_aT_hTd;
            interp = bestInterp;
        }
        System.err.println("\nTuning selected smooth_aTW_hTWd: " + smooth_aTW_hTWd + " smooth_aT_hTWd: " + smooth_aT_hTWd + " interp: " + interp + " smooth_aTW_aT: " + smooth_aTW_aT + " smooth_aTW_hTd: " + smooth_aTW_hTd + " smooth_aT_hTd: " + smooth_aT_hTd);
    }
}
public class Annotation { /** * Determines if this annotation ' s tag is an instance of the given tag ( String form ) . The string form the given tag
* will be decoded into the correct tag type .
* @ param tag the string form of the tag to check
* @ return True if this annotation ' s tag is an instance of the given tag . */
public boolean isInstanceOfTag ( String tag ) { } } | return ! StringUtils . isNullOrBlank ( tag ) && isInstanceOfTag ( Cast . < Tag > as ( getType ( ) . getTagAttribute ( ) . getValueType ( ) . decode ( tag ) ) ) ; |
public class Timestamp { /** * Creates an instance representing the value of { @ code microseconds } .
* @ throws IllegalArgumentException if the timestamp is outside the representable range */
public static Timestamp ofTimeMicroseconds ( long microseconds ) { } } | long seconds = microseconds / 1_000_000 ; int nanos = ( int ) ( microseconds % 1_000_000 * 1000 ) ; if ( nanos < 0 ) { seconds -- ; nanos += 1_000_000_000 ; } checkArgument ( Timestamps . isValid ( seconds , nanos ) , "timestamp out of range: %s, %s" , seconds , nanos ) ; return new Timestamp ( seconds , nanos ) ; |
public class MetaTagHandler { /** * Invoking / extending this method will cause the results of the created MetaRuleset to auto - wire state to
* the passed instance .
* @ param ctx
* @ param instance */
protected void setAttributes ( FaceletContext ctx , Object instance ) { } } | if ( instance != null ) { Class < ? > type = instance . getClass ( ) ; if ( _mapper == null || ! _lastType . equals ( type ) ) { _lastType = type ; _mapper = createMetaRuleset ( type ) . finish ( ) ; } _mapper . applyMetadata ( ctx , instance ) ; } |
public class CPDefinitionOptionValueRelPersistenceImpl { /** * Removes the cp definition option value rel where uuid = & # 63 ; and groupId = & # 63 ; from the database .
* @ param uuid the uuid
* @ param groupId the group ID
* @ return the cp definition option value rel that was removed */
@ Override public CPDefinitionOptionValueRel removeByUUID_G ( String uuid , long groupId ) throws NoSuchCPDefinitionOptionValueRelException { } } | CPDefinitionOptionValueRel cpDefinitionOptionValueRel = findByUUID_G ( uuid , groupId ) ; return remove ( cpDefinitionOptionValueRel ) ; |
public class BaseController { /** * 错误 : 参数错误
* @ param field
* @ param message
* @ return */
protected JsonObjectBase buildParamError ( String field , String message , ErrorCode errorCode ) { } } | Map < String , String > map = new HashMap < String , String > ( ) ; map . put ( field , message ) ; if ( errorCode == null ) { return JsonObjectUtils . buildFieldError ( map , ErrorCode . FIELD_ERROR ) ; } return JsonObjectUtils . buildFieldError ( map , errorCode ) ; |
public class PeerAwareInstanceRegistryImpl { /** * Checks to see if the registry access is allowed or the server is in a
* situation where it does not all getting registry information . The server
* does not return registry information for a period specified in
* { @ link EurekaServerConfig # getWaitTimeInMsWhenSyncEmpty ( ) } , if it cannot
* get the registry information from the peer eureka nodes at start up .
* @ return false - if the instances count from a replica transfer returned
* zero and if the wait time has not elapsed , otherwise returns true */
@ Override public boolean shouldAllowAccess ( boolean remoteRegionRequired ) { } } | if ( this . peerInstancesTransferEmptyOnStartup ) { if ( ! ( System . currentTimeMillis ( ) > this . startupTime + serverConfig . getWaitTimeInMsWhenSyncEmpty ( ) ) ) { return false ; } } if ( remoteRegionRequired ) { for ( RemoteRegionRegistry remoteRegionRegistry : this . regionNameVSRemoteRegistry . values ( ) ) { if ( ! remoteRegionRegistry . isReadyForServingData ( ) ) { return false ; } } } return true ; |
public class SarlBatchCompiler {
    /**
     * Change the loggers that are internally used by Xtext, redirecting them
     * to this compiler's own logger by reflectively overwriting the private
     * static logger fields of the affected Xtext classes.
     */
    protected void overrideXtextInternalLoggers() {
        final Logger logger = getLogger();
        // Factory that creates log4j loggers delegating to our own logger.
        final org.apache.log4j.spi.LoggerFactory factory = new InternalXtextLoggerFactory(logger);
        final org.apache.log4j.Logger internalLogger = org.apache.log4j.Logger.getLogger(
                MessageFormat.format(Messages.SarlBatchCompiler_40, logger.getName()), factory);
        // Field names below must match the private static logger fields
        // declared by each Xtext class.
        setStaticField(BatchLinkableResourceStorageWritable.class, "LOG", internalLogger); // $NON-NLS-1$
        setStaticField(BatchLinkableResource.class, "log", internalLogger); // $NON-NLS-1$
        setStaticField(ProcessorInstanceForJvmTypeProvider.class, "logger", internalLogger); // $NON-NLS-1$
    }
}
public class FCAlignHelper {
    /**
     * Local (Smith-Waterman-style) alignment with affine gaps, computed with
     * two rolling rows (Gotoh's linear-space recurrence). Side effects: sets
     * (B1,B2)-(E1,E2) via the traceback, and alignScore. Relies on embedded
     * assignments inside comparisons — do not reorder.
     */
    private void doAlign() {
        int i, j;
        double s, e, c, d, wa;
        double[] CC = new double[N + 1]; // note N + 1
        double[] DD = new double[N + 1];
        double maxs = -100;
        char trace_e, trace_d;
        // forward - phase
        CC[0] = 0;
        for (j = 1; j <= N; j++) { CC[j] = 0; DD[j] = -g; } // local-alignment, no terminal penalty
        for (i = 1; i <= M; i++) {
            CC[0] = c = s = 0;
            e = -g;
            for (j = 1; j <= N; j++) {
                // 'E' means the gap is extended, 'e' means it is opened here.
                trace_e = 'e';
                if ((c = c - m) > (e = e - h)) { e = c; trace_e = 'E'; } // insertion
                trace_d = 'd';
                if ((c = CC[j] - m) > (d = DD[j] - h)) { d = c; trace_d = 'D'; } // deletion
                // ie CC[j] == CC[i-1][j]  DD[j] == DD[i-1][j]
                wa = sij[i - 1][j - 1]; // note i - 1, j - 1
                c = s + wa; // s == CC[i-1][j-1]
                // Pick the best of substitution / insertion / deletion.
                trace[i][j] = 's';
                if (e > c) { c = e; trace[i][j] = trace_e; }
                if (d > c) { c = d; trace[i][j] = trace_d; }
                etrace[i][j] = trace_e;
                dtrace[i][j] = trace_d;
                s = CC[j]; // important for next replace
                CC[j] = c; // CC[i][j]
                DD[j] = d; // DD[i][j]
                // Local alignment: negative scores are clamped to zero (restart).
                if (c < 0) { CC[j] = 0; DD[j] = -g; c = 0; e = -g; trace[i][j] = '0'; } // local-N
                if (c > maxs) { E1 = i; E2 = j; maxs = c; } // local-C
            }
        }
        alignScore = maxs; // printf("alignment score %f\n", alignScore);
        // trace - back
        if (trace[E1][E2] != 's') {
            throw new RuntimeException("FCAlignHelper encoutered Exception: Not ending with substitution");
        } // Trace(maxs, E1, E2);
        trace('s', E1, E2); // printf("B1 %d B2 %d, E1 %d E2 %d\n", B1, B2, E1, E2);
        // check - alignment
        checkAlign();
    }
}
public class JAXBMarshallerHelper { /** * Set the Sun specific property for the indent string .
* @ param aMarshaller
* The marshaller to set the property . May not be < code > null < / code > .
* @ param sIndentString
* the value to be set */
public static void setSunIndentString ( @ Nonnull final Marshaller aMarshaller , @ Nullable final String sIndentString ) { } } | final String sPropertyName = SUN_INDENT_STRING ; _setProperty ( aMarshaller , sPropertyName , sIndentString ) ; |
public class ParseException {
    /**
     * Creates a new parse exception for situations in which both the start and
     * end positions of the error are known.
     *
     * @param errorMessage the user error message.
     * @param logMessage   the original log message.
     * @param start        the index of the first character in the invalid section of the log message.
     * @param end          the index after the last character in the invalid section of the log message.
     * @return the parser exception.
     */
    public static ParseException withBounds(String errorMessage, String logMessage, int start, int end) {
        // msg(...) formats the user message together with the offending span.
        return new ParseException(msg(errorMessage, logMessage, start, end), logMessage);
    }
}
public class CPDefinitionInventoryUtil {
    /**
     * Returns the first cp definition inventory in the ordered set where
     * uuid = &#63; and companyId = &#63;.
     *
     * @param uuid              the uuid
     * @param companyId         the company ID
     * @param orderByComparator the comparator to order the set by (optionally <code>null</code>)
     * @return the first matching cp definition inventory, or <code>null</code>
     *         if a matching cp definition inventory could not be found
     */
    public static CPDefinitionInventory fetchByUuid_C_First(String uuid, long companyId,
            OrderByComparator<CPDefinitionInventory> orderByComparator) {
        // Thin static facade over the persistence singleton.
        return getPersistence().fetchByUuid_C_First(uuid, companyId, orderByComparator);
    }
}
public class CmsSelectWidget { /** * Helper class for parsing the configuration of the select - box . < p >
* @ param config the configuration string */
private void parseConfiguration ( String config ) { } } | CmsSelectConfigurationParser parser = new CmsSelectConfigurationParser ( config ) ; // set the help info first ! !
for ( Entry < String , String > helpEntry : parser . getHelpTexts ( ) . entrySet ( ) ) { m_selectBox . setTitle ( helpEntry . getKey ( ) , helpEntry . getValue ( ) ) ; } // set value and option to the combo box .
m_selectBox . setItems ( parser . getOptions ( ) ) ; // if one entrance is declared for default .
if ( parser . getDefaultValue ( ) != null ) { // set the declared value selected .
m_selectBox . selectValue ( parser . getDefaultValue ( ) ) ; m_defaultValue = parser . getDefaultValue ( ) ; } fireChangeEvent ( ) ; |
public class PNMImageReader { /** * TODO : Candidate util method */
private static void readFully ( final DataInput input , final float [ ] floats ) throws IOException { } } | if ( input instanceof ImageInputStream ) { // Optimization for ImageInputStreams , read all in one go
( ( ImageInputStream ) input ) . readFully ( floats , 0 , floats . length ) ; } else { for ( int i = 0 ; i < floats . length ; i ++ ) { floats [ i ] = input . readFloat ( ) ; } } |
public class TikaTextExtractor {
    /**
     * Creates a new Tika metadata object used by the parser. This will contain
     * the mime-type of the content being parsed, if this is available from the
     * binary or, failing that, from the context's auto-detection mechanism.
     *
     * @param binary  a <code>org.modeshape.jcr.api.Binary</code> instance of the content being parsed
     * @param context the extraction context; may not be null
     * @return a <code>Metadata</code> instance.
     * @throws java.io.IOException if auto-detecting the mime-type via Tika fails
     * @throws RepositoryException if error obtaining MIME-type of the binary parameter
     */
    protected final Metadata prepareMetadata(final Binary binary, final Context context)
            throws IOException, RepositoryException {
        Metadata metadata = new Metadata();
        String mimeType = binary.getMimeType();
        if (StringUtil.isBlank(mimeType)) {
            // Call the detector (we don't know the name)...
            mimeType = context.mimeTypeOf(null, binary);
        }
        // Detection may still come up empty; only set CONTENT_TYPE when known.
        if (!StringUtil.isBlank(mimeType)) {
            metadata.set(Metadata.CONTENT_TYPE, mimeType);
        }
        return metadata;
    }
}
public class ServiceDiscoveryManager { /** * Find all services under the users service that provide a given feature .
* @ param feature the feature to search for
* @ param stopOnFirst if true , stop searching after the first service was found
* @ param useCache if true , query a cache first to avoid network I / O
* @ return a possible empty list of services providing the given feature
* @ throws NoResponseException
* @ throws XMPPErrorException
* @ throws NotConnectedException
* @ throws InterruptedException */
public List < DomainBareJid > findServices ( String feature , boolean stopOnFirst , boolean useCache ) throws NoResponseException , XMPPErrorException , NotConnectedException , InterruptedException { } } | List < DiscoverInfo > services = findServicesDiscoverInfo ( feature , stopOnFirst , useCache ) ; List < DomainBareJid > res = new ArrayList < > ( services . size ( ) ) ; for ( DiscoverInfo info : services ) { res . add ( info . getFrom ( ) . asDomainBareJid ( ) ) ; } return res ; |
public class TableNCALTAB { /** * { @ inheritDoc } */
@ Override protected void readRow ( int uniqueID , byte [ ] data ) { } } | if ( data [ 0 ] != ( byte ) 0xFF ) { Map < String , Object > map = new HashMap < String , Object > ( ) ; map . put ( "UNIQUE_ID" , Integer . valueOf ( uniqueID ) ) ; map . put ( "NAME" , PEPUtility . getString ( data , 1 , 8 ) ) ; map . put ( "START" , PEPUtility . getStartDate ( data , 9 ) ) ; map . put ( "BASE_CALENDAR_ID" , Integer . valueOf ( PEPUtility . getShort ( data , 11 ) ) ) ; map . put ( "FIRST_CALENDAR_EXCEPTION_ID" , Integer . valueOf ( PEPUtility . getShort ( data , 13 ) ) ) ; map . put ( "SUNDAY" , Boolean . valueOf ( ( data [ 17 ] & 0x40 ) == 0 ) ) ; map . put ( "MONDAY" , Boolean . valueOf ( ( data [ 17 ] & 0x02 ) == 0 ) ) ; map . put ( "TUESDAY" , Boolean . valueOf ( ( data [ 17 ] & 0x04 ) == 0 ) ) ; map . put ( "WEDNESDAY" , Boolean . valueOf ( ( data [ 17 ] & 0x08 ) == 0 ) ) ; map . put ( "THURSDAY" , Boolean . valueOf ( ( data [ 17 ] & 0x10 ) == 0 ) ) ; map . put ( "FRIDAY" , Boolean . valueOf ( ( data [ 17 ] & 0x20 ) == 0 ) ) ; map . put ( "SATURDAY" , Boolean . valueOf ( ( data [ 17 ] & 0x01 ) == 0 ) ) ; addRow ( uniqueID , map ) ; } |
public class BaseXMLBuilder { /** * Look up the namespace matching the current builder node ' s qualified
* name prefix ( if any ) or the document ' s default namespace .
* @ param name
* the name of the XML element .
* @ return
* The namespace URI , or null if none applies . */
protected String lookupNamespaceURIImpl ( String name ) { } } | String prefix = getPrefixFromQualifiedName ( name ) ; String namespaceURI = this . xmlNode . lookupNamespaceURI ( prefix ) ; return namespaceURI ; |
public class HBaseTokenUtils { /** * Gets a HBase delegation token and stores it in the given Credentials .
* @ return the same Credentials instance as the one given in parameter . */
public static Credentials obtainToken ( Configuration hConf , Credentials credentials ) { } } | if ( ! User . isHBaseSecurityEnabled ( hConf ) ) { return credentials ; } try { Class c = Class . forName ( "org.apache.hadoop.hbase.security.token.TokenUtil" ) ; Method method = c . getMethod ( "obtainToken" , Configuration . class ) ; Token < ? extends TokenIdentifier > token = castToken ( method . invoke ( null , hConf ) ) ; credentials . addToken ( token . getService ( ) , token ) ; return credentials ; } catch ( Exception e ) { LOG . error ( "Failed to get secure token for HBase." , e ) ; throw Throwables . propagate ( e ) ; } |
public class Pathname { /** * Returns true if field corresponding to fieldID is set ( has been assigned a value ) and false otherwise */
public boolean isSet ( _Fields field ) { } } | if ( field == null ) { throw new IllegalArgumentException ( ) ; } switch ( field ) { case PATHNAME : return isSetPathname ( ) ; } throw new IllegalStateException ( ) ; |
public class Page {
    /**
     * Gets which element IDs were generated.
     * Thread-safe: reads are guarded by {@code lock}. Once the page is frozen
     * the internal set is returned directly (it can no longer change);
     * otherwise a defensive unmodifiable copy is returned.
     */
    public Set<String> getGeneratedIds() {
        synchronized (lock) {
            if (generatedIds == null) return Collections.emptySet();
            if (frozen) return generatedIds;
            // Still mutable: hand out a snapshot, not the live set.
            return AoCollections.unmodifiableCopySet(generatedIds);
        }
    }
}
public class MoleculePropertyCalculator { /** * method to get for every atom the number of occurences
* @ param molecule input Molecule
* @ param mapAtoms Map of atoms with the number its occurences
* @ return Map of atoms with the number of its occurences
* @ throws BuilderMoleculeException if the Rgroups of the molecule can not be
* merged into it
* @ throws CTKException
* @ throws ChemistryException if the Chemistry Engine can not be initialized */
private static Map < String , Integer > generateAtomNumberMap ( AbstractMolecule molecule , Map < String , Integer > mapAtoms ) throws BuilderMoleculeException , CTKException , ChemistryException { } } | molecule = BuilderMolecule . mergeRgroups ( molecule ) ; LOG . info ( "Merge group is finished" ) ; String formula = Chemistry . getInstance ( ) . getManipulator ( ) . getMoleculeInfo ( molecule ) . getMolecularFormula ( ) ; String atom = "" ; String number = "" ; for ( int i = 0 ; i < formula . length ( ) ; i ++ ) { String oneChar = String . valueOf ( formula . charAt ( i ) ) ; if ( oneChar . matches ( "[A-Z]" ) ) { if ( atom . length ( ) == 0 ) { atom = oneChar ; } else { if ( number == "" ) { number = "1" ; } if ( mapAtoms . get ( atom ) != null ) { mapAtoms . put ( atom , mapAtoms . get ( atom ) + Integer . valueOf ( number ) ) ; } else { mapAtoms . put ( atom , Integer . valueOf ( number ) ) ; } atom = oneChar ; number = "" ; } } else if ( oneChar . matches ( "[a-z]" ) ) { if ( atom . length ( ) > 0 ) { atom = atom + oneChar ; } } else { if ( number . length ( ) == 0 ) { number = oneChar ; } else { number = number + oneChar ; } } } if ( number == "" ) { number = "1" ; } if ( mapAtoms . get ( atom ) != null ) { mapAtoms . put ( atom , mapAtoms . get ( atom ) + Integer . valueOf ( number ) ) ; } else { mapAtoms . put ( atom , Integer . valueOf ( number ) ) ; } return mapAtoms ; |
public class AutoFringer {
    /**
     * Compose a FringeTile out of the various fringe images needed, reusing a
     * previously composed tile from the weak-reference cache when an identical
     * one already exists.
     *
     * @param fringers  the fringe records to draw, sorted by priority below.
     * @param fringes   cache of already-composed fringe tiles keyed by identity.
     * @param hashValue selects which fringe tileset variant to use.
     * @param passable  passability flag for the resulting tile.
     * @param masks     cache of mask images used during composition.
     */
    protected FringeTile composeFringeTile(FringerRec[] fringers,
            Map<FringeTile, WeakReference<FringeTile>> fringes, int hashValue, boolean passable,
            Map<Long, BufferedImage> masks) {
        // sort the array so that higher priority fringers get drawn first
        QuickSort.sort(fringers);
        // Generate an identifier for the fringe tile being created as an array of the keys of its
        // component tiles in the order they'll be drawn in the fringe tile.
        List<Long> keys = Lists.newArrayList();
        for (FringerRec fringer : fringers) {
            int[] indexes = getFringeIndexes(fringer.bits);
            FringeConfiguration.FringeTileSetRecord tsr = _fringeconf.getFringe(fringer.baseset, hashValue);
            int fringeset = tsr.fringe_tsid;
            for (int index : indexes) {
                // Add a key for this tile as a long containing its base tile, the fringe set it's
                // working with and the index used in that set.
                // NOTE(review): `fringeset << 16` is an int shift — assumes fringeset
                // and index each fit in 16 bits; TODO confirm.
                keys.add((((long) fringer.baseset) << 32) + (fringeset << 16) + index);
            }
        }
        long[] fringeId = new long[keys.size()];
        for (int ii = 0; ii < fringeId.length; ii++) {
            fringeId[ii] = keys.get(ii);
        }
        FringeTile frTile = new FringeTile(fringeId, passable);
        // If the fringes map contains something with the same fringe identifier, this will pull
        // it out and we can use it instead.
        WeakReference<FringeTile> result = fringes.get(frTile);
        if (result != null) {
            FringeTile fringe = result.get();
            if (fringe != null) {
                return fringe;
            }
        }
        // There's no fringe with the same identifier, so we need to create the tile
        // by layering each component image in priority order.
        BufferedImage img = null;
        for (FringerRec fringer : fringers) {
            int[] indexes = getFringeIndexes(fringer.bits);
            FringeConfiguration.FringeTileSetRecord tsr = _fringeconf.getFringe(fringer.baseset, hashValue);
            for (int index : indexes) {
                try {
                    img = getTileImage(img, tsr, fringer.baseset, index, hashValue, masks);
                } catch (NoSuchTileSetException nstse) {
                    // Best-effort: skip missing tilesets rather than failing composition.
                    log.warning("Autofringer couldn't find a needed tileset", nstse);
                }
            }
        }
        frTile.setImage(new BufferedMirage(img));
        fringes.put(frTile, new WeakReference<FringeTile>(frTile));
        return frTile;
    }
}
public class AbstractBeanDefinition {
    /**
     * Obtains all bean definitions for a constructor argument at the given index.
     * Warning: this method is used by internal generated code and should not be
     * called by user code.
     *
     * @param resolutionContext         The resolution context
     * @param context                   The context
     * @param constructorInjectionPoint The constructor injection point
     * @param argument                  The argument
     * @return The resolved bean
     */
    @SuppressWarnings("WeakerAccess")
    @Internal
    protected final Collection getBeansOfTypeForConstructorArgument(BeanResolutionContext resolutionContext,
            BeanContext context,
            @SuppressWarnings("unused") ConstructorInjectionPoint<T> constructorInjectionPoint,
            Argument argument) {
        return resolveBeanWithGenericsFromConstructorArgument(resolutionContext, argument, (beanType, qualifier) -> {
            // Without array/generic type information the argument denotes a
            // single bean; otherwise resolve the full collection of candidates.
            boolean hasNoGenerics = !argument.getType().isArray() && argument.getTypeVariables().isEmpty();
            if (hasNoGenerics) {
                return ((DefaultBeanContext) context).getBean(resolutionContext, beanType, qualifier);
            } else {
                return ((DefaultBeanContext) context).getBeansOfType(resolutionContext, beanType, qualifier);
            }
        });
    }
}
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.