signature stringlengths 43 39.1k | implementation stringlengths 0 450k |
|---|---|
public class ApiOvhMe { /** * Get this object properties
* REST : GET / me / fidelityAccount */
public OvhFidelityAccount fidelityAccount_GET ( ) throws IOException { } } | String qPath = "/me/fidelityAccount" ; StringBuilder sb = path ( qPath ) ; String resp = exec ( qPath , "GET" , sb . toString ( ) , null ) ; return convertTo ( resp , OvhFidelityAccount . class ) ; |
public class JcrQuery { /** * { @ inheritDoc }
* @ see javax . jcr . query . Query # execute ( ) */
@ SuppressWarnings ( "deprecation" ) @ Override public org . modeshape . jcr . api . query . QueryResult execute ( ) throws RepositoryException { } } | context . checkValid ( ) ; final long start = System . nanoTime ( ) ; // Create an executable query and set it on this object . . .
CancellableQuery newExecutable = context . createExecutableQuery ( query , hints , variables ) ; CancellableQuery executable = executingQuery . getAndSet ( newExecutable ) ; if ( executable == null ) { // We are the first to call ' execute ( ) ' , so use our newly - created one . . .
executable = newExecutable ; } // otherwise , some other thread called execute , so we can use it and just wait for the results . . .
final QueryResults result = executable . execute ( ) ; // may be cancelled
// And reset the reference to null ( if not already set to something else ) . . .
executingQuery . compareAndSet ( executable , null ) ; checkForProblems ( result . getProblems ( ) ) ; context . recordDuration ( Math . abs ( System . nanoTime ( ) - start ) , TimeUnit . NANOSECONDS , statement , language ) ; if ( Query . XPATH . equals ( language ) ) { return new XPathQueryResult ( context , statement , result , hints . restartable , hints . rowsKeptInMemory ) ; } else if ( Query . SQL . equals ( language ) ) { return new JcrSqlQueryResult ( context , statement , result , hints . restartable , hints . rowsKeptInMemory ) ; } return new JcrQueryResult ( context , statement , result , hints . restartable , hints . rowsKeptInMemory ) ; |
public class CheckpointTupleForwarder { /** * Forwards the checkpoint tuple downstream . Sub - classes can override
* with the logic for handling checkpoint tuple .
* @ param checkpointTuple the checkpoint tuple
* @ param action the action ( prepare , commit , rollback or initstate )
* @ param txid the transaction id . */
protected void handleCheckpoint ( Tuple checkpointTuple , Action action , long txid ) { } } | collector . emit ( CheckpointSpout . CHECKPOINT_STREAM_ID , checkpointTuple , new Values ( txid , action ) ) ; collector . ack ( checkpointTuple ) ; |
public class TypeLexer { /** * $ ANTLR start " EXT " */
public final void mEXT ( ) throws RecognitionException { } } | try { int _type = EXT ; int _channel = DEFAULT_TOKEN_CHANNEL ; // org / javaruntype / type / parser / Type . g : 37:5 : ( ' EXT ' )
// org / javaruntype / type / parser / Type . g : 37:7 : ' EXT '
{ match ( "EXT" ) ; } state . type = _type ; state . channel = _channel ; } finally { } |
public class A_CmsPublishGroupHelper { /** * Given a list of resources , this method returns a list of their modification dates . < p >
* @ param resources a list of resources
* @ return the modification dates of the resources , in the same order as the resources */
public List < Long > getModificationDates ( List < RESOURCE > resources ) { } } | List < Long > result = new ArrayList < Long > ( ) ; for ( RESOURCE res : resources ) { result . add ( new Long ( getDateLastModified ( res ) ) ) ; } return result ; |
public class Passwords { /** * Validates a password using a hash .
* @ param password
* the password to check
* @ param goodHash
* the hash of the valid password
* @ return true if the password is correct , false if not */
public static boolean validatePassword ( String password , String goodHash ) throws NoSuchAlgorithmException , InvalidKeySpecException { } } | return validatePassword ( password . toCharArray ( ) , goodHash ) ; |
public class BackupImpl { /** * Must be called by the cache when the line is synchronized , and under a read - lock ( i . e . between startBackup and endBackup )
* @ param id
* @ param version */
@ Override public void backup ( long id , long version ) { } } | if ( LOG . isDebugEnabled ( ) ) LOG . debug ( "Backup: {} ver: {} {}" , new Object [ ] { hex ( id ) , version , copyImmediately ? "(COPY)" : "" } ) ; if ( copyImmediately ) { currentBackups . put ( id , makeBackup ( cache . getLine ( id ) , version ) ) ; oldMap ( ) . remove ( id ) ; } else map . put ( id , new BackupEntry ( id , version ) ) ; |
public class TimerServiceRamp { /** * Implements { @ link Timers # schedule ( Runnable , TimerScheduler ) } */
@ Override public void schedule ( @ Pin Consumer < ? super Cancel > task , LongUnaryOperator scheduler , Result < ? super Cancel > result ) { } } | Objects . requireNonNull ( task ) ; Objects . requireNonNull ( scheduler ) ; // cancel ( task ) ;
long now = CurrentTime . currentTime ( ) ; long nextTime = scheduler . applyAsLong ( now ) ; TimerListener listener = new TimerListener ( task , scheduler ) ; result . ok ( listener ) ; if ( now <= nextTime ) { listener . queueAt ( nextTime ) ; } /* return listener ; */ |
public class ReceiptTemplate { /** * Set Address
* @ param street _ 1 the street address 1
* @ param street _ 2 the street address 2
* @ param city the city
* @ param postal _ code the postal code
* @ param state the state
* @ param country the country */
public void setAddress ( String street_1 , String street_2 , String city , String postal_code , String state , String country ) { } } | this . address . put ( "street_1" , street_1 ) ; this . address . put ( "street_2" , street_2 ) ; this . address . put ( "city" , city ) ; this . address . put ( "postal_code" , postal_code ) ; this . address . put ( "state" , state ) ; this . address . put ( "country" , country ) ; |
public class Path { /** * Returns a qualified path object . */
public Path makeQualified ( FileSystem fs ) { } } | Path path = this ; if ( ! isAbsolute ( ) ) { FileSystem . LogForCollect . info ( "make Qualify non absolute path: " + this . toString ( ) + " working directory: " + fs . getWorkingDirectory ( ) ) ; path = new Path ( fs . getWorkingDirectory ( ) , this ) ; } URI pathUri = path . toUri ( ) ; URI fsUri = fs . getUri ( ) ; String scheme = pathUri . getScheme ( ) ; String authority = pathUri . getAuthority ( ) ; if ( scheme != null && ( authority != null || fsUri . getAuthority ( ) == null ) ) return path ; if ( scheme == null ) { scheme = fsUri . getScheme ( ) ; } if ( authority == null ) { authority = fsUri . getAuthority ( ) ; if ( authority == null ) { authority = "" ; } } return new Path ( scheme + ":" + "//" + authority + pathUri . getPath ( ) ) ; |
public class SarlcConfigModule { /** * Replies the instance of the sarl configuration .
* @ param configFactory accessor to the bootique factory .
* @ param injector the current injector .
* @ return the path configuration accessor . */
@ SuppressWarnings ( "static-method" ) @ Provides @ Singleton public SarlConfig getSarlcConfig ( ConfigurationFactory configFactory , Injector injector ) { } } | final SarlConfig config = SarlConfig . getConfiguration ( configFactory ) ; injector . injectMembers ( config ) ; return config ; |
public class HBaseUtils { /** * From bytes .
* @ param b
* the b
* @ param clazz
* the clazz
* @ return the object */
public static Object fromBytes ( byte [ ] b , Class < ? > clazz ) { } } | if ( clazz . isAssignableFrom ( String . class ) ) { return Bytes . toString ( b ) ; } else if ( clazz . equals ( int . class ) || clazz . isAssignableFrom ( Integer . class ) ) { return Bytes . toInt ( b ) ; } else if ( clazz . equals ( long . class ) || clazz . isAssignableFrom ( Long . class ) ) { return Bytes . toLong ( b ) ; } else if ( clazz . equals ( boolean . class ) || clazz . isAssignableFrom ( Boolean . class ) ) { return Bytes . toBoolean ( b ) ; } else if ( clazz . equals ( double . class ) || clazz . isAssignableFrom ( Double . class ) ) { return Bytes . toDouble ( b ) ; } else if ( clazz . equals ( float . class ) || clazz . isAssignableFrom ( Float . class ) ) { return Bytes . toFloat ( b ) ; } else if ( clazz . equals ( short . class ) || clazz . isAssignableFrom ( Short . class ) ) { return Bytes . toShort ( b ) ; } else if ( clazz . equals ( BigDecimal . class ) ) { return Bytes . toBigDecimal ( b ) ; } else { return PropertyAccessorFactory . getPropertyAccessor ( clazz ) . fromBytes ( clazz , b ) ; } |
public class IVFPQ { /** * Computes the residual vector .
* @ param vector
* The original vector
* @ param centroidIndex
* The centroid of the coarse quantizer from which the original vector is subtracted
* @ return The residual vector */
private double [ ] computeResidualVector ( double [ ] vector , int centroidIndex ) { } } | double [ ] residualVector = new double [ vectorLength ] ; for ( int i = 0 ; i < vectorLength ; i ++ ) { residualVector [ i ] = coarseQuantizer [ centroidIndex ] [ i ] - vector [ i ] ; } return residualVector ; |
public class MatrixExtensions { /** * Replies the division of this matrix by the given scalar : { @ code left / right } .
* < p > This function is an implementation of the operator for
* the languages that defined or based on the
* < a href = " https : / / www . eclipse . org / Xtext / " > Xtext framework < / a > .
* < p > The operation { @ code right / left } is supported by { @ link Matrix3d # operator _ divide ( double ) } .
* @ param < M > the type of the matrix .
* @ param left the scalar .
* @ param right the matrix .
* @ return the division of the matrix by the scalar .
* @ see Matrix3d # mul ( double )
* @ see Matrix3d # operator _ divide ( double ) */
@ Pure @ XtextOperator ( "/" ) @ SuppressWarnings ( "unchecked" ) public static < M extends Matrix3d > M operator_divide ( double left , M right ) { } } | assert right != null : AssertMessages . notNullParameter ( 1 ) ; final M result = ( M ) right . clone ( ) ; result . set ( left / right . getM00 ( ) , left / right . getM01 ( ) , left / right . getM02 ( ) , left / right . getM10 ( ) , left / right . getM11 ( ) , left / right . getM12 ( ) , left / right . getM20 ( ) , left / right . getM21 ( ) , left / right . getM22 ( ) ) ; return result ; |
public class SpiceManager { /** * Put some new data in cache using cache key < i > requestCacheKey < / i > . This
* method doesn ' t perform any network processing , it just data in cache ,
* erasing any previsouly saved date in cache using the same class and key .
* Don ' t call this method in the main thread because you could block it .
* Instead , use the asynchronous version of this method :
* { @ link # putInCache ( Class , Object , Object ) } .
* @ param cacheKey
* the key used to store and retrieve the result of the request
* in the cache
* @ param data
* the data to be saved in cache .
* @ return the data has it has been saved by an ObjectPersister in cache .
* @ throws CacheLoadingException
* Exception thrown when a problem occurs while loading data
* from cache . */
public < T > Future < T > putDataInCache ( final Object cacheKey , final T data ) throws CacheSavingException , CacheCreationException { } } | return executeCommand ( new PutDataInCacheCommand < T > ( this , data , cacheKey ) ) ; |
public class ServletContextResource { /** * This implementation delegates to < code > ServletContext . getResourceAsStream < / code > ,
* but throws a FileNotFoundException if no resource found .
* @ see javax . servlet . ServletContext # getResourceAsStream ( String ) */
public InputStream getInputStream ( ) throws IOException { } } | InputStream is = this . servletContext . getResourceAsStream ( this . path ) ; if ( is == null ) { throw new FileNotFoundException ( "Could not open " + getDescription ( ) ) ; } return is ; |
public class TreeInfo { /** * Return the statement referenced by a label .
* If the label refers to a loop or switch , return that switch
* otherwise return the labelled statement itself */
public static JCTree referencedStatement ( JCLabeledStatement tree ) { } } | JCTree t = tree ; do t = ( ( JCLabeledStatement ) t ) . body ; while ( t . hasTag ( LABELLED ) ) ; switch ( t . getTag ( ) ) { case DOLOOP : case WHILELOOP : case FORLOOP : case FOREACHLOOP : case SWITCH : return t ; default : return tree ; } |
public class StringGroovyMethods { /** * Support the subscript operator for CharSequence .
* @ param text a CharSequence
* @ param index the index of the Character to get
* @ return the Character at the given index
* @ since 1.0 */
public static CharSequence getAt ( CharSequence text , int index ) { } } | index = normaliseIndex ( index , text . length ( ) ) ; return text . subSequence ( index , index + 1 ) ; |
public class UpdateClusterConfigRequestMarshaller { /** * Marshall the given parameter object . */
public void marshall ( UpdateClusterConfigRequest updateClusterConfigRequest , ProtocolMarshaller protocolMarshaller ) { } } | if ( updateClusterConfigRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( updateClusterConfigRequest . getName ( ) , NAME_BINDING ) ; protocolMarshaller . marshall ( updateClusterConfigRequest . getResourcesVpcConfig ( ) , RESOURCESVPCCONFIG_BINDING ) ; protocolMarshaller . marshall ( updateClusterConfigRequest . getLogging ( ) , LOGGING_BINDING ) ; protocolMarshaller . marshall ( updateClusterConfigRequest . getClientRequestToken ( ) , CLIENTREQUESTTOKEN_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; } |
public class JavaParser { /** * src / main / resources / org / drools / compiler / semantics / java / parser / Java . g : 565:1 : formalParameters : ' ( ' ( formalParameterDecls ) ? ' ) ' ; */
public final void formalParameters ( ) throws RecognitionException { } } | int formalParameters_StartIndex = input . index ( ) ; try { if ( state . backtracking > 0 && alreadyParsedRule ( input , 56 ) ) { return ; } // src / main / resources / org / drools / compiler / semantics / java / parser / Java . g : 566:5 : ( ' ( ' ( formalParameterDecls ) ? ' ) ' )
// src / main / resources / org / drools / compiler / semantics / java / parser / Java . g : 566:7 : ' ( ' ( formalParameterDecls ) ? ' ) '
{ match ( input , 36 , FOLLOW_36_in_formalParameters2025 ) ; if ( state . failed ) return ; // src / main / resources / org / drools / compiler / semantics / java / parser / Java . g : 566:11 : ( formalParameterDecls ) ?
int alt78 = 2 ; int LA78_0 = input . LA ( 1 ) ; if ( ( LA78_0 == Identifier || LA78_0 == 58 || LA78_0 == 65 || LA78_0 == 67 || LA78_0 == 71 || LA78_0 == 77 || LA78_0 == 83 || LA78_0 == 85 || LA78_0 == 92 || LA78_0 == 94 || LA78_0 == 105 ) ) { alt78 = 1 ; } switch ( alt78 ) { case 1 : // src / main / resources / org / drools / compiler / semantics / java / parser / Java . g : 566:11 : formalParameterDecls
{ pushFollow ( FOLLOW_formalParameterDecls_in_formalParameters2027 ) ; formalParameterDecls ( ) ; state . _fsp -- ; if ( state . failed ) return ; } break ; } match ( input , 37 , FOLLOW_37_in_formalParameters2030 ) ; if ( state . failed ) return ; } } catch ( RecognitionException re ) { reportError ( re ) ; recover ( input , re ) ; } finally { // do for sure before leaving
if ( state . backtracking > 0 ) { memoize ( input , 56 , formalParameters_StartIndex ) ; } } |
public class OptionsConnectionPanel { /** * This method initializes panelProxyChain
* @ return javax . swing . JPanel */
private JPanel getPanelProxyChain ( ) { } } | if ( panelProxyChain == null ) { panelProxyChain = new JPanel ( ) ; java . awt . GridBagConstraints gridBagConstraints92 = new GridBagConstraints ( ) ; javax . swing . JLabel jLabel8 = new JLabel ( ) ; java . awt . GridBagConstraints gridBagConstraints102 = new GridBagConstraints ( ) ; panelProxyChain . setLayout ( new GridBagLayout ( ) ) ; gridBagConstraints92 . gridx = 0 ; gridBagConstraints92 . gridy = 0 ; gridBagConstraints92 . insets = new java . awt . Insets ( 2 , 2 , 2 , 2 ) ; gridBagConstraints92 . anchor = java . awt . GridBagConstraints . NORTHWEST ; gridBagConstraints92 . fill = java . awt . GridBagConstraints . HORIZONTAL ; panelProxyChain . setName ( "Proxy Chain" ) ; jLabel8 . setText ( "" ) ; gridBagConstraints102 . anchor = java . awt . GridBagConstraints . NORTHWEST ; gridBagConstraints102 . fill = java . awt . GridBagConstraints . BOTH ; gridBagConstraints102 . gridx = 0 ; gridBagConstraints102 . gridy = 1 ; gridBagConstraints102 . weightx = 1.0D ; gridBagConstraints102 . weighty = 1.0D ; panelProxyChain . add ( getPanelProxyAuth ( ) , gridBagConstraints92 ) ; panelProxyChain . add ( jLabel8 , gridBagConstraints102 ) ; } return panelProxyChain ; |
public class CmsGwtService { /** * Ensures that the user session is still valid . < p >
* @ throws CmsException if the current user is the guest user */
protected void ensureSession ( ) throws CmsException { } } | CmsUser user = getCmsObject ( ) . getRequestContext ( ) . getCurrentUser ( ) ; if ( user . isGuestUser ( ) ) { throw new CmsException ( Messages . get ( ) . container ( Messages . ERR_SESSION_EXPIRED_0 ) ) ; } |
public class SNAXParser { /** * Parse a data stream , represented by a Reader , to completion . This will process the entire
* document and trigger any ElementHandler calls that result from applying the selectors defined
* in the NodeModel . The data parameter will be passed back as an argument to all ElementHandler
* calls .
* @ param reader XML content to process
* @ param data optional , user - defined object to be passed as an argument to ElementHandlers
* @ throws XMLStreamException if there is an error with the underlying XML
* @ throws SNAXUserException if there is an error in an attached < code > ElementHandler < / code > */
public void parse ( Reader reader , T data ) throws XMLStreamException , SNAXUserException { } } | // TODO : this needs to catch concurrent parse attempts
init ( reader , data ) ; for ( XMLEvent event = xmlReader . nextEvent ( ) ; xmlReader . hasNext ( ) ; event = xmlReader . nextEvent ( ) ) { processEvent ( event ) ; } |
public class FieldType { /** * Note the start of the default value in the parsed thrift file , this can be used
* for making more accurate exception / parse data from the const parser .
* @ return Optional of the < code > value _ start _ pos < / code > field value . */
@ javax . annotation . Nonnull public java . util . Optional < net . morimekta . providence . model . FilePos > optionalValueStartPos ( ) { } } | return java . util . Optional . ofNullable ( mValueStartPos ) ; |
public class AbstractSegment3F { /** * Compute and replies the perpendicular squared distance from a point to a segment .
* @ param sx1
* is the X coord of the first point of the segment
* @ param sy1
* is the Y coord of the first point of the segment
* @ param sz1
* is the Z coord of the first point of the segment
* @ param sx2
* is the X coord of the second point of the segment
* @ param sy2
* is the Y coord of the second point of the segment
* @ param sz2
* is the Z coord of the second point of the segment
* @ param px
* is the X coord of the point
* @ param py
* is the Y coord of the point
* @ param pz
* is the Z coord of the point
* @ return the distance */
@ Pure public static double distanceSquaredSegmentPoint ( double sx1 , double sy1 , double sz1 , double sx2 , double sy2 , double sz2 , double px , double py , double pz ) { } } | double ratio = getPointProjectionFactorOnSegmentLine ( px , py , pz , sx1 , sy1 , sz1 , sx2 , sy2 , sz2 ) ; if ( ratio <= 0. ) return FunctionalPoint3D . distanceSquaredPointPoint ( px , py , pz , sx1 , sy1 , sz1 ) ; if ( ratio >= 1. ) return FunctionalPoint3D . distanceSquaredPointPoint ( px , py , pz , sx2 , sy2 , sz2 ) ; return FunctionalPoint3D . distanceSquaredPointPoint ( px , py , pz , ( 1. - ratio ) * sx1 + ratio * sx2 , ( 1. - ratio ) * sy1 + ratio * sy2 , ( 1. - ratio ) * sz1 + ratio * sz2 ) ; |
public class FontSize { /** * Returns the font size expressed in points .
* @ return Points . */
public final float toPoint ( ) { } } | if ( unit == FontSizeUnit . PIXEL ) { return ( size / 4 * 3 ) ; } if ( unit == FontSizeUnit . EM ) { return ( size * 12 ) ; } if ( unit == FontSizeUnit . PERCENT ) { return ( size / 100 * 12 ) ; } if ( unit == FontSizeUnit . POINT ) { return size ; } throw new IllegalStateException ( "Unknown unit: " + unit ) ; |
public class OAuthConsumerContextFilter { /** * Common logic for OAuth failed . ( Note that the default logic doesn ' t pass the failure through so as to not mess
* with the current authentication . )
* @ param request The request .
* @ param response The response .
* @ param failure The failure .
* @ throws ServletException in the case of an underlying Servlet API exception
* @ throws IOException in the case of general IO exceptions */
protected void fail ( HttpServletRequest request , HttpServletResponse response , OAuthRequestFailedException failure ) throws IOException , ServletException { } } | try { // attempt to set the last exception .
request . getSession ( ) . setAttribute ( OAUTH_FAILURE_KEY , failure ) ; } catch ( Exception e ) { // fall through . . . .
} if ( LOG . isDebugEnabled ( ) ) { LOG . debug ( failure ) ; } if ( getOAuthFailureHandler ( ) != null ) { getOAuthFailureHandler ( ) . handle ( request , response , failure ) ; } else { throw failure ; } |
public class HttpBase { /** * 设置请求头 < br >
* 不覆盖原有请求头
* @ param headers 请求头
* @ param isOverride 是否覆盖已有头信息
* @ return this
* @ since 4.0.8 */
public T header ( Map < String , List < String > > headers , boolean isOverride ) { } } | if ( CollectionUtil . isEmpty ( headers ) ) { return ( T ) this ; } String name ; for ( Entry < String , List < String > > entry : headers . entrySet ( ) ) { name = entry . getKey ( ) ; for ( String value : entry . getValue ( ) ) { this . header ( name , StrUtil . nullToEmpty ( value ) , isOverride ) ; } } return ( T ) this ; |
public class Utils {
    /**
     * Compares two objects for equality, tolerating nulls: two nulls compare
     * equal, a null and a non-null do not. Equivalent to
     * {@code Objects.equals(Object, Object)} (Java 7+); both arguments may be
     * null.
     *
     * @param x the first object, may be null
     * @param y the second object, may be null
     * @return true if both are null or {@code x.equals(y)}
     */
    public static boolean equalsObjects(Object x, Object y) {
        // Delegate to the standard library instead of hand-rolling null checks.
        // (The javax.annotation.@Nullable annotations were dropped: that package
        // is not part of the JDK since Java 11; nullability is documented above.)
        return Objects.equals(x, y);
    }
}
public class HttpMethodDirector { /** * Process the redirect response .
* @ return < code > true < / code > if the redirect was successful */
private boolean processRedirectResponse ( final HttpMethod method ) throws RedirectException { } } | // get the location header to find out where to redirect to
Header locationHeader = method . getResponseHeader ( "location" ) ; if ( locationHeader == null ) { // got a redirect response , but no location header
LOG . error ( "Received redirect response " + method . getStatusCode ( ) + " but no location header" ) ; return false ; } String location = locationHeader . getValue ( ) ; if ( LOG . isDebugEnabled ( ) ) { LOG . debug ( "Redirect requested to location '" + location + "'" ) ; } // rfc2616 demands the location value be a complete URI
// Location = " Location " " : " absoluteURI
URI redirectUri = null ; URI currentUri = null ; try { currentUri = new URI ( this . conn . getProtocol ( ) . getScheme ( ) , null , this . conn . getHost ( ) , this . conn . getPort ( ) , method . getPath ( ) ) ; String charset = method . getParams ( ) . getUriCharset ( ) ; redirectUri = new URI ( location , true , charset ) ; if ( redirectUri . isRelativeURI ( ) ) { if ( this . params . isParameterTrue ( HttpClientParams . REJECT_RELATIVE_REDIRECT ) ) { LOG . warn ( "Relative redirect location '" + location + "' not allowed" ) ; return false ; } else { // location is incomplete , use current values for defaults
LOG . debug ( "Redirect URI is not absolute - parsing as relative" ) ; redirectUri = new URI ( currentUri , redirectUri ) ; } } else { // Reset the default params
method . getParams ( ) . setDefaults ( this . params ) ; } method . setURI ( redirectUri ) ; hostConfiguration . setHost ( redirectUri ) ; } catch ( URIException ex ) { throw new InvalidRedirectLocationException ( "Invalid redirect location: " + location , location , ex ) ; } if ( this . params . isParameterFalse ( HttpClientParams . ALLOW_CIRCULAR_REDIRECTS ) ) { if ( this . redirectLocations == null ) { this . redirectLocations = new HashSet < URI > ( ) ; } this . redirectLocations . add ( currentUri ) ; try { if ( redirectUri . hasQuery ( ) ) { redirectUri . setQuery ( null ) ; } } catch ( URIException e ) { // Should never happen
return false ; } if ( this . redirectLocations . contains ( redirectUri ) ) { throw new CircularRedirectException ( "Circular redirect to '" + redirectUri + "'" ) ; } } if ( LOG . isDebugEnabled ( ) ) { LOG . debug ( "Redirecting from '" + currentUri . getEscapedURI ( ) + "' to '" + redirectUri . getEscapedURI ( ) ) ; } // And finally invalidate the actual authentication scheme
method . getHostAuthState ( ) . invalidate ( ) ; method . getProxyAuthState ( ) . invalidate ( ) ; return true ; |
public class AbstractWatchService { /** * Registers the given watchable with this service , returning a new watch key for it . This
* implementation just checks that the service is open and creates a key ; subclasses may override
* it to do other things as well . */
public Key register ( Watchable watchable , Iterable < ? extends WatchEvent . Kind < ? > > eventTypes ) throws IOException { } } | checkOpen ( ) ; return new Key ( this , watchable , eventTypes ) ; |
public class HessianSchurComplement_DSCC { /** * Compuets the Hessian in block form
* @ param jacLeft ( Input ) Left side of Jacobian
* @ param jacRight ( Input ) Right side of Jacobian */
@ Override public void computeHessian ( DMatrixSparseCSC jacLeft , DMatrixSparseCSC jacRight ) { } } | A . reshape ( jacLeft . numCols , jacLeft . numCols , 1 ) ; B . reshape ( jacLeft . numCols , jacRight . numCols , 1 ) ; D . reshape ( jacRight . numCols , jacRight . numCols , 1 ) ; // take advantage of the inner product ' s symmetry when possible to reduce
// the number of calculations
CommonOps_DSCC . innerProductLower ( jacLeft , tmp0 , gw , gx ) ; CommonOps_DSCC . symmLowerToFull ( tmp0 , A , gw ) ; CommonOps_DSCC . multTransA ( jacLeft , jacRight , B , gw , gx ) ; CommonOps_DSCC . innerProductLower ( jacRight , tmp0 , gw , gx ) ; CommonOps_DSCC . symmLowerToFull ( tmp0 , D , gw ) ; |
public class FluentLogger { /** * Returns a new logger instance which parses log messages using printf format for the enclosing
* class using the system default logging backend . */
public static FluentLogger forEnclosingClass ( ) { } } | // NOTE : It is _ vital _ that the call to " caller finder " is made directly inside the static
// factory method . See getCallerFinder ( ) for more information .
String loggingClass = Platform . getCallerFinder ( ) . findLoggingClass ( FluentLogger . class ) ; return new FluentLogger ( Platform . getBackend ( loggingClass ) ) ; |
public class GenericAnalyticsRequest { /** * Create a { @ link GenericAnalyticsRequest } and mark it as containing a full Analytics query in Json form
* ( including additional query parameters ) .
* The simplest form of such a query is a single statement encapsulated in a json query object :
* < pre > { " statement " : " SELECT * FROM default " } < / pre > .
* @ param jsonQuery the Analytics query in json form .
* @ param bucket the bucket on which to perform the query .
* @ param password the password for the target bucket .
* @ return a { @ link GenericAnalyticsRequest } for this full query . */
public static GenericAnalyticsRequest jsonQuery ( String jsonQuery , String bucket , String username , String password , int priority ) { } } | return new GenericAnalyticsRequest ( jsonQuery , true , bucket , username , password , null , priority ) ; |
public class CPMeasurementUnitPersistenceImpl { /** * Returns the number of cp measurement units where groupId = & # 63 ; and key = & # 63 ; and type = & # 63 ; .
* @ param groupId the group ID
* @ param key the key
* @ param type the type
* @ return the number of matching cp measurement units */
@ Override public int countByG_K_T ( long groupId , String key , int type ) { } } | FinderPath finderPath = FINDER_PATH_COUNT_BY_G_K_T ; Object [ ] finderArgs = new Object [ ] { groupId , key , type } ; Long count = ( Long ) finderCache . getResult ( finderPath , finderArgs , this ) ; if ( count == null ) { StringBundler query = new StringBundler ( 4 ) ; query . append ( _SQL_COUNT_CPMEASUREMENTUNIT_WHERE ) ; query . append ( _FINDER_COLUMN_G_K_T_GROUPID_2 ) ; boolean bindKey = false ; if ( key == null ) { query . append ( _FINDER_COLUMN_G_K_T_KEY_1 ) ; } else if ( key . equals ( "" ) ) { query . append ( _FINDER_COLUMN_G_K_T_KEY_3 ) ; } else { bindKey = true ; query . append ( _FINDER_COLUMN_G_K_T_KEY_2 ) ; } query . append ( _FINDER_COLUMN_G_K_T_TYPE_2 ) ; String sql = query . toString ( ) ; Session session = null ; try { session = openSession ( ) ; Query q = session . createQuery ( sql ) ; QueryPos qPos = QueryPos . getInstance ( q ) ; qPos . add ( groupId ) ; if ( bindKey ) { qPos . add ( StringUtil . toLowerCase ( key ) ) ; } qPos . add ( type ) ; count = ( Long ) q . uniqueResult ( ) ; finderCache . putResult ( finderPath , finderArgs , count ) ; } catch ( Exception e ) { finderCache . removeResult ( finderPath , finderArgs ) ; throw processException ( e ) ; } finally { closeSession ( session ) ; } } return count . intValue ( ) ; |
public class RTreeIndexCoreExtension { /** * Get or create the extension
* @ param featureTable
* feature table
* @ return extension */
public Extensions getOrCreate ( FeatureTable featureTable ) { } } | return getOrCreate ( featureTable . getTableName ( ) , featureTable . getGeometryColumn ( ) . getName ( ) ) ; |
public class Node { /** * Unregister a listener for publication events .
* @ param listener The handler to unregister */
public void removeItemEventListener ( @ SuppressWarnings ( "rawtypes" ) ItemEventListener listener ) { } } | StanzaListener conListener = itemEventToListenerMap . remove ( listener ) ; if ( conListener != null ) pubSubManager . getConnection ( ) . removeSyncStanzaListener ( conListener ) ; |
public class streamselector { /** * Use this API to fetch all the streamselector resources that are configured on netscaler . */
public static streamselector [ ] get ( nitro_service service ) throws Exception { } } | streamselector obj = new streamselector ( ) ; streamselector [ ] response = ( streamselector [ ] ) obj . get_resources ( service ) ; return response ; |
public class Node { /** * Remove an attribute from this element .
* @ param attributeKey The attribute to remove .
* @ return this ( for chaining ) */
public Node removeAttr ( String attributeKey ) { } } | Validate . notNull ( attributeKey ) ; attributes ( ) . removeIgnoreCase ( attributeKey ) ; return this ; |
public class Stream { /** * Sample code :
* < pre >
* < code >
* final BlockingQueue < String > queue = new ArrayBlockingQueue < > ( 32 ) ;
* Stream . observe ( queue , Duration . ofMillis ( 100 ) ) . filter ( s - > s . startsWith ( " a " ) ) . asyncRun ( s - > s . forEach ( Fn . println ( ) ) ) ;
* N . asList ( " a " , " b " , " ab " , " bc " , " 1 " , " a " ) . forEach ( queue : : add ) ;
* N . sleep ( 10 ) ;
* N . println ( " = = = = = " ) ;
* N . sleep ( 100 ) ;
* N . println ( " = = = = = " ) ;
* N . sleep ( 10 ) ;
* < / code >
* < / pre >
* @ param queue
* @ param duration
* @ param onComplete
* @ return */
public static < T > Stream < T > observe ( final BlockingQueue < T > queue , final Duration duration , final Runnable onComplete ) { } } | N . checkArgNotNull ( queue , "queue" ) ; N . checkArgNotNull ( duration , "duration" ) ; N . checkArgNotNull ( onComplete , "onComplete" ) ; final long now = System . currentTimeMillis ( ) ; final long endTime = duration . toMillis ( ) >= Long . MAX_VALUE - now ? Long . MAX_VALUE : now + duration . toMillis ( ) ; final Iterator < T > iter = new ObjIterator < T > ( ) { private T next = null ; @ Override public boolean hasNext ( ) { if ( next == null ) { final long curTime = System . currentTimeMillis ( ) ; if ( curTime <= endTime ) { try { next = queue . poll ( endTime - curTime , TimeUnit . MILLISECONDS ) ; } catch ( InterruptedException e ) { throw new RuntimeException ( e ) ; } } } return next != null ; } @ Override public T next ( ) { if ( hasNext ( ) == false ) { throw new NoSuchElementException ( ) ; } final T res = next ; next = null ; return res ; } } ; return of ( iter ) . onClose ( onComplete ) ; |
public class BigtableTableAdminClient { /** * Asynchronously creates , updates and drops ColumnFamilies as per the request .
* < p > Sample code :
* < pre > { @ code
* ApiFuture < Table > modifiedTableFuture = client . modifyFamiliesAsync (
* ModifyColumnFamiliesRequest . of ( tableId )
* . addFamily ( " cf1 " )
* . addFamily ( " cf2 " , GCRULES . maxAge ( Duration . ofSeconds ( 1000 , 20000 ) ) )
* . updateFamily (
* " cf3 " ,
* GCRULES . union ( )
* . rule ( GCRULES . maxAge ( Duration . ofSeconds ( 100 ) ) )
* . rule ( GCRULES . maxVersions ( 1 ) )
* . addFamily (
* " cf4 " ,
* GCRULES . intersection ( )
* . rule ( GCRULES . maxAge ( Duration . ofSeconds ( 2000 ) ) )
* . rule ( GCRULES . maxVersions ( 10 ) )
* . dropFamily ( " cf5 " )
* ApiFutures . addCallback (
* modifiedTableFuture ,
* new ApiFutureCallback < Table > ( ) {
* public void onSuccess ( Table table ) {
* System . out . println ( " Modified table : " + table . getTableName ( ) ) ;
* System . out . println ( " Resulting families : " ) ;
* for ( ColumnFamily cf : modifiedTable . getColumnFamilies ( ) ) {
* System . out . println ( cf . getId ( ) ) ;
* public void onFailure ( Throwable t ) {
* t . printStackTrace ( ) ;
* MoreExecutors . directExecutor ( )
* } < / pre >
* @ see ModifyColumnFamiliesRequest for available options . */
@ SuppressWarnings ( "WeakerAccess" ) public ApiFuture < Table > modifyFamiliesAsync ( ModifyColumnFamiliesRequest request ) { } } | return transformToTableResponse ( this . stub . modifyColumnFamiliesCallable ( ) . futureCall ( request . toProto ( projectId , instanceId ) ) ) ; |
public class DigestAuthenticator { public String newNonce ( HttpRequest request ) { } } | long ts = request . getTimeStamp ( ) ; long sk = nonceSecret ; byte [ ] nounce = new byte [ 24 ] ; for ( int i = 0 ; i < 8 ; i ++ ) { nounce [ i ] = ( byte ) ( ts & 0xff ) ; ts = ts >> 8 ; nounce [ 8 + i ] = ( byte ) ( sk & 0xff ) ; sk = sk >> 8 ; } byte [ ] hash = null ; try { MessageDigest md = MessageDigest . getInstance ( "MD5" ) ; md . reset ( ) ; md . update ( nounce , 0 , 16 ) ; hash = md . digest ( ) ; } catch ( Exception e ) { log . fatal ( this , e ) ; } for ( int i = 0 ; i < hash . length ; i ++ ) { nounce [ 8 + i ] = hash [ i ] ; if ( i == 23 ) break ; } return new String ( B64Code . encode ( nounce ) ) ; |
public class CommerceUserSegmentCriterionLocalServiceBaseImpl { /** * Updates the commerce user segment criterion in the database or adds it if it does not yet exist . Also notifies the appropriate model listeners .
* @ param commerceUserSegmentCriterion the commerce user segment criterion
* @ return the commerce user segment criterion that was updated */
@ Indexable ( type = IndexableType . REINDEX ) @ Override public CommerceUserSegmentCriterion updateCommerceUserSegmentCriterion ( CommerceUserSegmentCriterion commerceUserSegmentCriterion ) { } } | return commerceUserSegmentCriterionPersistence . update ( commerceUserSegmentCriterion ) ; |
public class SARLProposalProvider {
    /**
     * Complete the "extends" if the proposals are enabled.
     *
     * Dispatches on the concrete SARL element being edited: agents, behaviors,
     * capacities, skills and events propose matching SARL supertypes, while
     * classes and interfaces propose plain Java types filtered by kind.
     *
     * @param model the model.
     * @param context the context.
     * @param acceptor the proposal acceptor.
     * @see #isSarlProposalEnabled()
     */
    protected void completeExtends(EObject model, ContentAssistContext context, ICompletionProposalAcceptor acceptor) {
        if (isSarlProposalEnabled()) {
            if (model instanceof SarlAgent) {
                completeSarlAgents(false, true, context, acceptor);
            } else if (model instanceof SarlBehavior) {
                completeSarlBehaviors(false, true, context, acceptor);
            } else if (model instanceof SarlCapacity) {
                completeSarlCapacities(false, true, context, acceptor);
            } else if (model instanceof SarlSkill) {
                completeSarlSkills(false, true, context, acceptor);
            } else if (model instanceof SarlEvent) {
                completeSarlEvents(false, true, context, acceptor);
            } else if (model instanceof SarlClass) {
                // Classes extend a single Java class.
                completeJavaTypes(context, createExtensionFilter(context, IJavaSearchConstants.CLASS), acceptor);
            } else if (model instanceof SarlInterface) {
                // Interfaces extend other interfaces.
                completeJavaTypes(context, createExtensionFilter(context, IJavaSearchConstants.INTERFACE), acceptor);
            }
        }
    }
}
public class StencilInterpreter { /** * Switch to a new environment
* @ param env Environment to switch to
* @ returns Previous environment */
private Environment switchEnvironment ( Environment env ) { } } | Environment prev = currentEnvironment ; currentEnvironment = env ; return prev ; |
public class SimpleBase { /** * Extracts a row or column from this matrix . The returned vector will either be a row
* or column vector depending on the input type .
* @ param extractRow If true a row will be extracted .
* @ param element The row or column the vector is contained in .
* @ return Extracted vector . */
public T extractVector ( boolean extractRow , int element ) { } } | if ( extractRow ) { return extractMatrix ( element , element + 1 , 0 , SimpleMatrix . END ) ; } else { return extractMatrix ( 0 , SimpleMatrix . END , element , element + 1 ) ; } |
public class AzureBatchEvaluatorShimManager {
    /**
     * Builds the evaluator shim JAR file from the contents of the REEF global folder
     * and uploads it to Azure Storage.
     *
     * @return SAS URI to where the evaluator shim JAR was uploaded.
     * @throws RuntimeException wrapping any {@link IOException} raised while building the JAR
     */
    public URI generateShimJarFile() {
        try {
            Set<FileResource> globalFiles = new HashSet<>();
            final File globalFolder = new File(this.reefFileNames.getGlobalFolderPath());
            final File[] filesInGlobalFolder = globalFolder.listFiles();
            // listFiles() returns null when the folder does not exist or is unreadable;
            // treat that case as an empty folder.
            for (final File fileEntry : filesInGlobalFolder != null ? filesInGlobalFolder : new File[]{}) {
                globalFiles.add(getFileResourceFromFile(fileEntry, FileType.LIB));
            }
            File jarFile = this.jobJarMaker.newBuilder().addGlobalFileSet(globalFiles).build();
            return uploadFile(jarFile);
        } catch (IOException ex) {
            LOG.log(Level.SEVERE, "Failed to build JAR file", ex);
            throw new RuntimeException(ex);
        }
    }
}
public class TSCopy {
    /**
     * Copies a {@link UniversalAutomaton} with possibly heterogeneous input alphabets,
     * but compatible properties.
     *
     * @param method the traversal method to use
     * @param in the input transition system
     * @param limit the traversal limit, a value less than 0 means no limit
     * @param inputs the inputs to consider
     * @param out the output automaton
     * @param inputsMapping the transformation for input symbols
     * @param stateFilter the filter predicate for states
     * @param transFilter the filter predicate for transitions
     * @return a mapping from old to new states
     */
    public static <S1, I1, T1, SP, TP, S2, I2, T2> Mapping<S1, S2> copy(TSTraversalMethod method,
            UniversalTransitionSystem<S1, ? super I1, T1, ? extends SP, ? extends TP> in, int limit,
            Collection<? extends I1> inputs, MutableAutomaton<S2, I2, T2, ? super SP, ? super TP> out,
            Function<? super I1, ? extends I2> inputsMapping, Predicate<? super S1> stateFilter,
            TransitionPredicate<? super S1, ? super I1, ? super T1> transFilter) {
        // Delegate to the fully-general overload, using identity transformations for
        // the state and transition properties (they are assumed compatible).
        return copy(method, in, limit, inputs, out, inputsMapping, Function.identity(), Function.identity(),
                stateFilter, transFilter);
    }
}
public class Cell { /** * Sets the prefWidth and prefHeight to the specified values . */
public Cell < C , T > prefSize ( Value < C , T > width , Value < C , T > height ) { } } | prefWidth = width ; prefHeight = height ; return this ; |
public class GetKeyRotationStatusRequestMarshaller { /** * Marshall the given parameter object . */
public void marshall ( GetKeyRotationStatusRequest getKeyRotationStatusRequest , ProtocolMarshaller protocolMarshaller ) { } } | if ( getKeyRotationStatusRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( getKeyRotationStatusRequest . getKeyId ( ) , KEYID_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; } |
public class ComputeExecutor {
    /**
     * Runs the Graql compute mean query.
     *
     * The mean is derived from the SUM and COUNT aggregates produced by the
     * underlying statistics computation.
     *
     * @param query the compute-mean query to execute
     * @return a stream with a single Numeric answer, or an empty stream if the
     *         statistics computation produced no result
     */
    private Stream<Numeric> runComputeMean(GraqlCompute.Statistics.Value query) {
        Map<String, Double> meanPair = runComputeStatistics(query);
        // No data matched the query scope; there is no mean to report.
        if (meanPair == null) return Stream.empty();

        Double mean = meanPair.get(MeanMapReduce.SUM) / meanPair.get(MeanMapReduce.COUNT);
        return Stream.of(new Numeric(mean));
    }
}
public class Utils {
    /**
     * Left-pads a string with the given character up to the desired length.
     *
     * The input is trimmed first; if the trimmed string is already longer than
     * {@code len} it is returned unchanged (and unpadded).
     *
     * @param s string to pad
     * @param len desired length
     * @param c padding character
     * @return the padded (trimmed) string
     */
    public static String padleft(String s, int len, char c) {
        final String trimmed = s.trim();
        if (trimmed.length() > len) {
            return trimmed;
        }
        final StringBuilder out = new StringBuilder(len);
        for (int remaining = len - trimmed.length(); remaining > 0; remaining--) {
            out.append(c);
        }
        return out.append(trimmed).toString();
    }
}
public class UserTaglet {
    /**
     * {@inheritDoc}
     *
     * Collects all block tags matching this taglet's name on the given element and
     * delegates their rendering to the user-supplied taglet, appending the raw HTML
     * it produces to the writer's output.
     */
    public Content getTagletOutput(Element holder, TagletWriter writer) {
        Content output = writer.getOutputInstance();
        Utils utils = writer.configuration().utils;
        List<? extends DocTree> tags = utils.getBlockTags(holder, getName());
        if (!tags.isEmpty()) {
            // The user taglet formats the whole tag list in one call.
            String tagString = userTaglet.toString(tags, holder);
            if (tagString != null) {
                output.addContent(new RawHtml(tagString));
            }
        }
        return output;
    }
}
public class PoolsImpl { /** * Enables automatic scaling for a pool .
* You cannot enable automatic scaling on a pool if a resize operation is in progress on the pool . If automatic scaling of the pool is currently disabled , you must specify a valid autoscale formula as part of the request . If automatic scaling of the pool is already enabled , you may specify a new autoscale formula and / or a new evaluation interval . You cannot call this API for the same pool more than once every 30 seconds .
* @ param poolId The ID of the pool on which to enable automatic scaling .
* @ param poolEnableAutoScaleParameter The parameters for the request .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the { @ link ServiceResponseWithHeaders } object if successful . */
public Observable < Void > enableAutoScaleAsync ( String poolId , PoolEnableAutoScaleParameter poolEnableAutoScaleParameter ) { } } | return enableAutoScaleWithServiceResponseAsync ( poolId , poolEnableAutoScaleParameter ) . map ( new Func1 < ServiceResponseWithHeaders < Void , PoolEnableAutoScaleHeaders > , Void > ( ) { @ Override public Void call ( ServiceResponseWithHeaders < Void , PoolEnableAutoScaleHeaders > response ) { return response . body ( ) ; } } ) ; |
public class DefaultBlockMaster {
    /**
     * Generates block info, including worker locations, for a block id.
     *
     * The block metadata and raw locations are read under the per-block lock, then the
     * lock is released before worker metadata is consulted (see the ordering note below).
     *
     * @param blockId a block id
     * @return optional block info, empty if the block does not exist
     * @throws UnavailableException if the master is in safe mode
     */
    @GuardedBy("masterBlockInfo")
    private Optional<BlockInfo> generateBlockInfo(long blockId) throws UnavailableException {
        if (mSafeModeManager.isInSafeMode()) {
            throw new UnavailableException(ExceptionMessage.MASTER_IN_SAFEMODE.getMessage());
        }
        BlockMeta block;
        List<BlockLocation> blockLocations;
        // Snapshot the block metadata and its locations while holding the block lock.
        try (LockResource lr = lockBlock(blockId)) {
            Optional<BlockMeta> blockOpt = mBlockStore.getBlock(blockId);
            if (!blockOpt.isPresent()) {
                return Optional.empty();
            }
            block = blockOpt.get();
            blockLocations = new ArrayList<>(mBlockStore.getLocations(blockId));
        }
        // Sort the block locations by their alias ordinal in the master storage tier mapping
        Collections.sort(blockLocations,
                Comparator.comparingInt(o -> mGlobalStorageTierAssoc.getOrdinal(o.getTier())));
        List<alluxio.wire.BlockLocation> locations = new ArrayList<>();
        for (BlockLocation location : blockLocations) {
            MasterWorkerInfo workerInfo = mWorkers.getFirstByField(ID_INDEX, location.getWorkerId());
            // A null workerInfo means the worker is no longer registered; skip it.
            if (workerInfo != null) {
                // worker metadata is intentionally not locked here because:
                // - it would be an incorrect order (correct order is lock worker first, then block)
                // - only uses getters of final variables
                locations.add(new alluxio.wire.BlockLocation().setWorkerId(location.getWorkerId())
                        .setWorkerAddress(workerInfo.getWorkerAddress()).setTierAlias(location.getTier()));
            }
        }
        return Optional.of(new BlockInfo().setBlockId(blockId).setLength(block.getLength())
                .setLocations(locations));
    }
}
public class BulletGraphView {
    /**
     * Builds the Protovis bullet-graph visualization on the given widget.
     *
     * Each datum (a {@code Bullet}) supplies its own ranges, measures and markers;
     * bullets are stacked vertically, 60px apart. Labels for title, subtitle and
     * scale are anchored to the left/right of each bullet.
     *
     * @param graphWidget widget hosting the Protovis panel
     * @return the configured panel
     */
    private PVPanel createVisualization(final ProtovisWidget graphWidget) {
        final PVPanel vis = graphWidget.getPVPanel().width(400).height(30).margin(20).left(100) // translate(_, y)
                .top(new JsDoubleFunction() {
                    public double f(JsArgs args) {
                        PVMark _this = args.getThis();
                        // Stack each bullet 60px below the previous one.
                        return 10 + _this.index() * 60; // translate(x, _)
                    }
                });
        // Bind the bullet layout to the per-datum ranges/measures/markers arrays.
        PVBulletLayout bullet = vis.add(PV.Layout.Bullet()).orient(LEFT)
                .ranges(new JsFunction<JsArrayNumber>() {
                    public JsArrayNumber f(JsArgs args) {
                        Bullet d = args.getObject();
                        return JsUtils.toJsArrayNumber(d.ranges);
                    }
                }).measures(new JsFunction<JsArrayNumber>() {
                    public JsArrayNumber f(JsArgs args) {
                        Bullet d = args.getObject();
                        return JsUtils.toJsArrayNumber(d.measures);
                    }
                }).markers(new JsFunction<JsArrayNumber>() {
                    public JsArrayNumber f(JsArgs args) {
                        Bullet d = args.getObject();
                        return JsUtils.toJsArrayNumber(d.markers);
                    }
                });
        // workaround for right hand side labels
        graphWidget.addAttachHandler(new AttachEvent.Handler() {
            @Override
            public void onAttachOrDetach(AttachEvent event) {
                if (event.isAttached()) {
                    // Defer until the SVG element exists, then let labels overflow the viewport.
                    Scheduler.get().scheduleDeferred(new Scheduler.ScheduledCommand() {
                        @Override
                        public void execute() {
                            Element svg = graphWidget.getElement().getFirstChildElement();
                            if (svg != null) {
                                svg.setAttribute("overflow", "visible");
                            }
                        }
                    });
                }
            }
        });
        bullet.strokeStyle("#CFCFCF");
        bullet.lineWidth(0.9);
        bullet.range().add(PV.Bar).fillStyle("#ffffff");
        bullet.measure().add(PV.Bar).fillStyle("#666666");
        bullet.marker().add(PV.Dot).shape(PVShape.TRIANGLE).fillStyle("white");
        bullet.tick().add(PV.Rule).strokeStyle("#CFCFCF").anchor(BOTTOM).add(PV.Label)
                .text(bullet.x().tickFormat());
        // title
        bullet.anchor(LEFT).add(PV.Label).font("12px sans-serif").textAlign(RIGHT).textBaseline(BOTTOM)
                .text(new JsStringFunction() {
                    public String f(JsArgs args) {
                        Bullet d = args.getObject(0);
                        return d.title;
                    }
                });
        // subtitle
        bullet.anchor(LEFT).add(PV.Label).textStyle("#616161").textAlign(RIGHT).textBaseline(TOP)
                .text(new JsStringFunction() {
                    public String f(JsArgs args) {
                        Bullet d = args.getObject(0);
                        return d.subtitle;
                    }
                });
        // scale
        bullet.anchor(RIGHT).add(PV.Label).textStyle("#616161").textAlign(LEFT).textBaseline(MIDDLE)
                .text(new JsStringFunction() {
                    public String f(JsArgs args) {
                        Bullet d = args.getObject(0);
                        double measures = d.measures[0];
                        // Only show the scale label when there is a non-zero measure.
                        return measures > 0.00 ? String.valueOf(Double.valueOf(d.ranges[0]).longValue()) : "";
                    }
                });
        return vis;
    }
}
public class ReflectionUtil {
    /**
     * Creates a new instance of the given class. The class is loaded using the current
     * thread's context class loader and instantiated using its public no-arg constructor.
     *
     * @param s fully qualified name of the class to instantiate.
     * @return the new instance or <code>null</code> on failure.
     */
    public static Object newInstance(String s) {
        final ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
        try {
            Class<?> clazz = classLoader.loadClass(s);
            Constructor<?> constructor = clazz.getConstructor();
            return constructor.newInstance();
        } catch (ClassNotFoundException | IllegalAccessException | InstantiationException
                | NoSuchMethodException | InvocationTargetException e) {
            // All failure modes (missing class, inaccessible/missing default constructor,
            // abstract class, constructor threw) map to null by contract.
            return null;
        }
    }
}
public class TableBuilder { /** * add new tr having specified attributes . */
public tr addTr ( Map < String , Object > attrMap ) { } } | tr tr = new tr ( ) ; tr . setAttr ( attrMap ) ; trList . add ( tr ) ; return tr ; |
public class QrCodePositionPatternDetector {
    /**
     * Takes the detected squares and turns it into a list of {@link PositionPatternNode}.
     *
     * Candidate squares whose appearance does not match a finder pattern (checked
     * against a gray threshold halfway between the inside and outside edge
     * intensities) are discarded; survivors are edge-refined and added to the graph.
     */
    private void squaresToPositionList() {
        this.positionPatterns.reset();
        List<DetectPolygonFromContour.Info> infoList = squareDetector.getPolygonInfo();
        for (int i = 0; i < infoList.size(); i++) {
            DetectPolygonFromContour.Info info = infoList.get(i);

            // The test below has been commented out because the new external only contour
            // detector discards all information related to internal contours
            // squares with no internal contour cannot possibly be a finder pattern
            // if( !info.hasInternal() )
            //     continue;

            // See if the appearance matches a finder pattern
            double grayThreshold = (info.edgeInside + info.edgeOutside) / 2;
            if (!checkPositionPatternAppearance(info.polygon, (float) grayThreshold))
                continue;

            // refine the edge estimate
            squareDetector.refine(info);

            PositionPatternNode pp = this.positionPatterns.grow();
            pp.reset();
            pp.square = info.polygon;
            pp.grayThreshold = grayThreshold;

            graph.computeNodeInfo(pp);
        }
    }
}
public class CmsColor {
    /**
     * Sets the hexadecimal representation of Red, Green and Blue.<p>
     *
     * @param hex The hexadecimal string notation. It must be 6 or 3 letters long and
     *        consist of the characters 0-9 and A-F
     * @throws java.lang.Exception if something goes wrong
     */
    public void setHex(String hex) throws Exception {
        if (hex.length() == 6) {
            // Standard RRGGBB form: two hex digits per channel.
            setRGB(
                Integer.parseInt(hex.substring(0, 2), 16),
                Integer.parseInt(hex.substring(2, 4), 16),
                Integer.parseInt(hex.substring(4, 6), 16));
        } else if (hex.length() == 3) {
            // Short RGB form: each channel is parsed as a single digit (0-15).
            // NOTE(review): this does NOT expand "F" to 255 as CSS shorthand would —
            // confirm whether callers expect the CSS-style expansion.
            setRGB(
                Integer.parseInt(hex.substring(0, 1), 16),
                Integer.parseInt(hex.substring(1, 2), 16),
                Integer.parseInt(hex.substring(2, 3), 16));
        } else {
            // Any other length silently falls back to white.
            setRGB(255, 255, 255);
        }
    }
}
public class PasswordEditText { /** * Removes all helper texts , which are contained by a specific collection .
* @ param helperTexts
* A collection , which contains the helper texts , which should be removed , as an
* instance of the type { @ link Collection } or an empty collection , if no helper texts
* should be removed */
public final void removeAllHelperTexts ( @ NonNull final Collection < CharSequence > helperTexts ) { } } | Condition . INSTANCE . ensureNotNull ( helperTexts , "The collection may not be null" ) ; for ( CharSequence helperText : helperTexts ) { removeHelperText ( helperText ) ; } |
public class BundleUtils {
    /**
     * Returns any bundle with the given symbolic name, or null if no such bundle exists.
     * If there are multiple bundles with the same symbolic name and different version,
     * this method returns the first bundle found.
     *
     * @param bc bundle context
     * @param symbolicName bundle symbolic name
     * @return matching bundle, or null
     */
    public static Bundle getBundle(BundleContext bc, String symbolicName) {
        // Delegate to the version-aware overload with no version constraint.
        return getBundle(bc, symbolicName, null);
    }
}
public class CmsJspNavBuilder { /** * Initializes this bean . < p >
* @ param cms the current cms context
* @ param locale the locale for which properties should be read
* @ param requestUri the request URI */
public void init ( CmsObject cms , Locale locale , String requestUri ) { } } | m_cms = cms ; m_locale = locale ; m_requestUri = requestUri ; m_requestUriFolder = CmsResource . getFolderPath ( m_requestUri ) ; |
public class DOMConfigurator {
    /**
     * Used internally to parse an appender element.
     *
     * Instantiates the appender class named by the element's class attribute, then
     * walks the child elements to apply parameters, layout, filters, error handler
     * and appender references. Unrecognized children are passed to
     * {@code parseUnrecognizedElement}.
     *
     * @param appenderElement the DOM element describing the appender
     * @return the configured appender, or null if it could not be created
     */
    protected Appender parseAppender(Element appenderElement) {
        String className = subst(appenderElement.getAttribute(CLASS_ATTR));
        LogLog.debug("Class name: [" + className + ']');
        try {
            Object instance = Loader.loadClass(className).newInstance();
            Appender appender = (Appender) instance;
            PropertySetter propSetter = new PropertySetter(appender);

            appender.setName(subst(appenderElement.getAttribute(NAME_ATTR)));

            NodeList children = appenderElement.getChildNodes();
            final int length = children.getLength();

            for (int loop = 0; loop < length; loop++) {
                Node currentNode = children.item(loop);

                /* We're only interested in Elements */
                if (currentNode.getNodeType() == Node.ELEMENT_NODE) {
                    Element currentElement = (Element) currentNode;

                    // Parse appender parameters
                    if (currentElement.getTagName().equals(PARAM_TAG)) {
                        setParameter(currentElement, propSetter);
                    }
                    // Set appender layout
                    else if (currentElement.getTagName().equals(LAYOUT_TAG)) {
                        appender.setLayout(parseLayout(currentElement));
                    }
                    // Add filters
                    else if (currentElement.getTagName().equals(FILTER_TAG)) {
                        parseFilters(currentElement, appender);
                    } else if (currentElement.getTagName().equals(ERROR_HANDLER_TAG)) {
                        parseErrorHandler(currentElement, appender);
                    } else if (currentElement.getTagName().equals(APPENDER_REF_TAG)) {
                        String refName = subst(currentElement.getAttribute(REF_ATTR));
                        // Appender references are only valid on attachable appenders.
                        if (appender instanceof AppenderAttachable) {
                            AppenderAttachable aa = (AppenderAttachable) appender;
                            LogLog.debug("Attaching appender named [" + refName
                                    + "] to appender named [" + appender.getName() + "].");
                            aa.addAppender(findAppenderByReference(currentElement));
                        } else {
                            LogLog.error("Requesting attachment of appender named [" + refName
                                    + "] to appender named [" + appender.getName()
                                    + "] which does not implement org.apache.log4j.spi.AppenderAttachable.");
                        }
                    } else {
                        parseUnrecognizedElement(instance, currentElement, props);
                    }
                }
            }
            // Apply accumulated properties once all children are processed.
            propSetter.activate();
            return appender;
        }
        /* Yes, it's ugly. But all of these exceptions point to the same
           problem: we can't create an Appender */
        catch (Exception oops) {
            LogLog.error("Could not create an Appender. Reported error follows.", oops);
            return null;
        }
    }
}
public class UrlEncoded {
    /**
     * Decoded parameters to Map.
     *
     * Scans the {@code x-www-form-urlencoded} content character by character,
     * splitting key/value pairs on '=' and '&'. Substrings containing '+' or '%'
     * are routed through {@code decodeString}; plain substrings are taken verbatim.
     *
     * @param content the string containing the encoded parameters
     * @param map the target multi-map (mutated under its own monitor)
     * @param charset charset used for percent-decoding; defaults to ISO-8859-1 when null
     */
    public static void decodeTo(String content, MultiMap map, String charset) {
        if (charset == null) charset = StringUtil.__ISO_8859_1;

        synchronized (map) {
            String key = null;
            String value = null;
            int mark = -1;          // index of the last delimiter seen ('&' or '=')
            boolean encoded = false; // whether the current token needs percent/plus decoding

            for (int i = 0; i < content.length(); i++) {
                char c = content.charAt(i);
                switch (c) {
                    case '&':
                        // End of a pair: decode the pending value and store the pair.
                        value = encoded
                                ? decodeString(content, mark + 1, i - mark - 1, charset)
                                : content.substring(mark + 1, i);
                        mark = i;
                        encoded = false;
                        if (key != null) {
                            map.add(key, value);
                            key = null;
                        }
                        break;
                    case '=':
                        // Only the first '=' of a pair separates key from value.
                        if (key != null) break;
                        key = encoded
                                ? decodeString(content, mark + 1, i - mark - 1, charset)
                                : content.substring(mark + 1, i);
                        mark = i;
                        encoded = false;
                        break;
                    case '+':
                        encoded = true;
                        break;
                    case '%':
                        encoded = true;
                        break;
                }
            }

            // Flush the trailing token: either the value of the last pair,
            // or a bare key with an empty value.
            if (key != null) {
                value = encoded
                        ? decodeString(content, mark + 1, content.length() - mark - 1, charset)
                        : content.substring(mark + 1);
                map.add(key, value);
            } else if (mark < content.length()) {
                key = encoded
                        ? decodeString(content, mark + 1, content.length() - mark - 1, charset)
                        : content.substring(mark + 1);
                map.add(key, "");
            }
        }
    }
}
public class Transmitter {
    /**
     * Immediately closes the socket connection if it's currently held. Use this to
     * interrupt an in-flight request from any thread. It's the caller's responsibility
     * to close the request body and response body streams; otherwise resources may be
     * leaked.
     *
     * <p>This method is safe to be called concurrently, but provides limited guarantees.
     * If a transport layer connection has been established (such as a HTTP/2 stream)
     * that is terminated. Otherwise if a socket connection is being established, that is
     * terminated.
     */
    public void cancel() {
        Exchange exchangeToCancel;
        RealConnection connectionToCancel;
        // Snapshot the cancellation targets under the pool lock; perform the actual
        // cancellation outside the lock to avoid calling alien code while holding it.
        synchronized (connectionPool) {
            canceled = true;
            exchangeToCancel = exchange;
            connectionToCancel = exchangeFinder != null && exchangeFinder.connectingConnection() != null
                    ? exchangeFinder.connectingConnection()
                    : connection;
        }
        if (exchangeToCancel != null) {
            // An active exchange exists: cancel it directly.
            exchangeToCancel.cancel();
        } else if (connectionToCancel != null) {
            // No exchange yet: tear down the (possibly still connecting) connection.
            connectionToCancel.cancel();
        }
    }
}
public class GISCoordinates {
    /**
     * This function convert extended France Lambert II coordinate to France Lambert III
     * coordinate.
     *
     * The conversion goes through the NTF geographic (lambda/phi) intermediate:
     * first inverse-project from extended Lambert II, then forward-project into
     * Lambert III using that zone's constants.
     *
     * @param x is the coordinate in extended France Lambert II
     * @param y is the coordinate in extended France Lambert II
     * @return the France Lambert III coordinate.
     */
    @Pure
    public static Point2d EL2_L3(double x, double y) {
        final Point2d ntfLambdaPhi = NTFLambert_NTFLambdaPhi(x, y,
                LAMBERT_2E_N, LAMBERT_2E_C, LAMBERT_2E_XS, LAMBERT_2E_YS);
        return NTFLambdaPhi_NTFLambert(ntfLambdaPhi.getX(), ntfLambdaPhi.getY(),
                LAMBERT_3_N, LAMBERT_3_C, LAMBERT_3_XS, LAMBERT_3_YS);
    }
}
public class ClassGraph { /** * Start the first inner table of a class . */
private void firstInnerTableStart ( Options opt ) { } } | w . print ( linePrefix + linePrefix + "<tr>" + opt . shape . extraColumn ( ) + "<td><table border=\"0\" cellspacing=\"0\" " + "cellpadding=\"1\">" + linePostfix ) ; |
public class JobRequest {
    /* package */
    /**
     * Computes the backoff delay (ms) for the next retry, based on the failure count
     * and the configured backoff policy.
     *
     * Periodic jobs never back off. When computing the end of the execution window
     * for a non-exact job, the offset is inflated by 20% (note: the long offset is
     * multiplied by a float, so precision is limited for very large offsets). The
     * result is capped at 5 hours.
     *
     * @param endTime true when computing the end of the execution window
     * @return the backoff offset in milliseconds, capped at 5 hours
     */
    long getBackoffOffset(boolean endTime) {
        if (isPeriodic()) {
            return 0L;
        }
        long offset;
        switch (getBackoffPolicy()) {
            case LINEAR:
                offset = mFailureCount * getBackoffMs();
                break;
            case EXPONENTIAL:
                if (mFailureCount == 0) {
                    offset = 0L;
                } else {
                    // backoff * 2^(failures - 1)
                    offset = (long) (getBackoffMs() * Math.pow(2, mFailureCount - 1));
                }
                break;
            default:
                throw new IllegalStateException("not implemented");
        }
        if (endTime && !isExact()) {
            offset *= 1.2f;
        }
        return Math.min(offset, TimeUnit.HOURS.toMillis(5)); // use max of 5 hours like JobScheduler
    }
}
public class Query {
    /**
     * Encodes a command into {@code x-www-form-urlencoded} format using UTF-8.
     *
     * @param command the command to be encoded.
     * @return the encoded command.
     */
    public static String encode(final String command) {
        final String utf8 = StandardCharsets.UTF_8.name();
        try {
            return URLEncoder.encode(command, utf8);
        } catch (UnsupportedEncodingException e) {
            // UTF-8 support is mandated by the Java platform, so this is unreachable.
            throw new IllegalStateException("Every JRE must support UTF-8", e);
        }
    }
}
public class JSONMapping {
    /**
     * Append the external representation for this JSON object to a given {@link Appendable}.
     *
     * Keys are escaped and quoted; values are serialized via {@code JSON.appendJSON}.
     * Entries are emitted in list order, comma-separated, wrapped in braces.
     *
     * @param a the {@link Appendable}
     * @throws IOException if thrown by the {@link Appendable}
     * @see JSONValue#appendJSON(Appendable)
     */
    @Override
    public void appendJSON(Appendable a) throws IOException {
        a.append('{');
        int n = list.size();
        if (n > 0) {
            int i = 0;
            // Emit "key":value pairs; the comma is appended between entries only.
            for (;;) {
                Entry<String, V> entry = list.get(i++);
                a.append('"');
                Strings.appendEscaped(a, entry.getKey(), JSON.charMapper);
                a.append('"').append(':');
                JSON.appendJSON(a, entry.getValue());
                if (i >= n) break;
                a.append(',');
            }
        }
        a.append('}');
    }
}
public class ActiveSyncManager { /** * Stops the sync manager and any outstanding threads , does not change the sync points .
* This stops four things in the following order .
* 1 . Stop any outstanding initial sync futures for the sync points . ( syncFuture . cancel )
* 2 . Stop the heartbeat thread that periodically wakes up to process events that have been
* recorded for the past heartbeat interval .
* 3 . Tell the polling thread to stop monitoring the path for events
* 4 . Stop the thread that is polling HDFS for events */
public void stop ( ) { } } | for ( AlluxioURI syncPoint : mSyncPathList ) { MountTable . Resolution resolution = null ; try { resolution = mMountTable . resolve ( syncPoint ) ; } catch ( InvalidPathException e ) { LOG . warn ( "stop: InvalidPathException resolving syncPoint {}, exception {}" , syncPoint , e ) ; } long mountId = resolution . getMountId ( ) ; // Remove initial sync thread
Future < ? > syncFuture = mSyncPathStatus . remove ( syncPoint ) ; if ( syncFuture != null ) { syncFuture . cancel ( true ) ; } Future < ? > future = mPollerMap . remove ( mountId ) ; if ( future != null ) { future . cancel ( true ) ; } // Tell UFS to stop monitoring the path
try ( CloseableResource < UnderFileSystem > ufs = resolution . acquireUfsResource ( ) ) { ufs . get ( ) . stopSync ( resolution . getUri ( ) ) ; } catch ( IOException e ) { LOG . warn ( "Ufs IOException for uri {}, exception is {}" , syncPoint , e ) ; } try ( CloseableResource < UnderFileSystem > ufs = resolution . acquireUfsResource ( ) ) { ufs . get ( ) . stopActiveSyncPolling ( ) ; } catch ( IOException e ) { LOG . warn ( "Encountered IOException when trying to stop polling thread {}" , e ) ; } } |
public class Maxent {
    /**
     * Returns the dot product between the weight vector and x (augmented with 1).
     *
     * x holds the indices of active (binary) features; the final weight entry is
     * the bias term contributed by the implicit constant feature.
     */
    private static double dot(int[] x, double[] w) {
        // Start from the bias weight, then accumulate the active feature weights.
        double sum = w[w.length - 1];
        for (int j = 0; j < x.length; j++) {
            sum += w[x[j]];
        }
        return sum;
    }
}
public class CmsUpdateDBProjectId {
    /**
     * Creates the CMS_HISTORY_PROJECTS table if it does not exist yet.<p>
     *
     * When the table is freshly created, the existing project data is migrated
     * into it via {@code transferDataToHistoryTable}.
     *
     * @param dbCon the db connection interface
     * @throws SQLException if something goes wrong
     */
    protected void createHistProjectsTable(CmsSetupDb dbCon) throws SQLException {
        // Trace the current method for the setup log.
        System.out.println(new Exception().getStackTrace()[0].toString());
        if (!dbCon.hasTableOrColumn(HISTORY_PROJECTS_TABLE, null)) {
            String createStatement = readQuery(QUERY_CREATE_HISTORY_PROJECTS_TABLE);
            dbCon.updateSqlStatement(createStatement, null, null);
            transferDataToHistoryTable(dbCon);
        } else {
            System.out.println("table " + HISTORY_PROJECTS_TABLE + " already exists");
        }
    }
}
public class Graphics {
    /**
     * Get the colour of a single pixel in this graphics context.
     *
     * The y coordinate is flipped ({@code screenHeight - y}) because OpenGL's
     * glReadPixels origin is the bottom-left corner while this API uses top-left.
     *
     * @param x The x coordinate of the pixel to read
     * @param y The y coordinate of the pixel to read
     * @return The colour of the pixel at the specified location
     */
    public Color getPixel(int x, int y) {
        predraw();
        // Read a single RGBA pixel into the shared read buffer.
        GL.glReadPixels(x, screenHeight - y, 1, 1, SGL.GL_RGBA, SGL.GL_UNSIGNED_BYTE, readBuffer);
        postdraw();

        return new Color(
            translate(readBuffer.get(0)),
            translate(readBuffer.get(1)),
            translate(readBuffer.get(2)),
            translate(readBuffer.get(3)));
    }
}
public class OgnlSetter {
    /**
     * {@inheritDoc}
     *
     * Validates the argument count, then writes the single argument through the
     * configured OGNL expression. Always returns null (setters have no result).
     */
    @Override
    public Object send(String... args) throws Exception {
        assertArgumentsCount(args);
        ognlExpression.insertValue(args[0]);
        return null;
    }
}
public class PublicSuffixes {
    /**
     * Builds a regular expression from prefix-tree {@code alt} into buffer {@code sb}.
     *
     * '!' opens a lookahead group (closed after the branches), '*' matches one domain
     * label, and all other characters are emitted literally. Multiple branches are
     * joined with '|' inside a non-capturing group.
     *
     * @param alt prefix tree root.
     * @param sb StringBuilder to store regular expression.
     */
    protected static void buildRegex(Node alt, StringBuilder sb) {
        String close = null;
        if (alt.cs != null) {
            // actually '!' always be the first character, because it is
            // always used along with '*'.
            for (int i = 0; i < alt.cs.length(); i++) {
                char c = alt.cs.charAt(i);
                if (c == '!') {
                    if (close != null)
                        throw new RuntimeException("more than one '!'");
                    // Open a lookahead; remember to close it after the branches.
                    sb.append("(?=");
                    close = ")";
                } else if (c == '*') {
                    // '*' matches a single domain label.
                    sb.append("[-\\w]+");
                } else {
                    sb.append(c);
                }
            }
        }
        if (alt.branches != null) {
            // alt.branches.size() should always be > 1
            if (alt.branches.size() > 1) {
                sb.append("(?:");
            }
            String sep = "";
            for (Node alt1 : alt.branches) {
                sb.append(sep);
                sep = "|";
                buildRegex(alt1, sb);
            }
            if (alt.branches.size() > 1) {
                sb.append(")");
            }
        }
        if (close != null)
            sb.append(close);
    }
}
public class CommerceOrderPersistenceImpl { /** * Returns the last commerce order in the ordered set where groupId = & # 63 ; and userId = & # 63 ; and orderStatus = & # 63 ; .
* @ param groupId the group ID
* @ param userId the user ID
* @ param orderStatus the order status
* @ param orderByComparator the comparator to order the set by ( optionally < code > null < / code > )
* @ return the last matching commerce order , or < code > null < / code > if a matching commerce order could not be found */
@ Override public CommerceOrder fetchByG_U_O_Last ( long groupId , long userId , int orderStatus , OrderByComparator < CommerceOrder > orderByComparator ) { } } | int count = countByG_U_O ( groupId , userId , orderStatus ) ; if ( count == 0 ) { return null ; } List < CommerceOrder > list = findByG_U_O ( groupId , userId , orderStatus , count - 1 , count , orderByComparator ) ; if ( ! list . isEmpty ( ) ) { return list . get ( 0 ) ; } return null ; |
public class MinusOperator {

    /**
     * Evaluates the unary minus of the wrapped numeric operand against the
     * given message.
     *
     * @see net.timewalker.ffmq4.common.message.selector.expression.SelectorNode#evaluate(javax.jms.Message)
     */
    @Override
    public Object evaluate(Message message) throws JMSException {
        Number value = operand.evaluateNumeric(message);
        if (value == null)
            return null; // null operand propagates as null (unknown)
        return ArithmeticUtils.minus(value);
    }
}
public class CommerceDiscountUsageEntryLocalServiceBaseImpl {

    /**
     * Performs a dynamic query on the database and returns the matching rows.
     *
     * @param dynamicQuery the dynamic query
     * @return the matching rows
     */
    @Override
    public <T> List<T> dynamicQuery(DynamicQuery dynamicQuery) {
        // Pure delegation to the persistence layer.
        return commerceDiscountUsageEntryPersistence.findWithDynamicQuery(dynamicQuery);
    }
}
public class LdapUtils {

    /**
     * Find the Rdn with the requested key in the supplied Name.
     *
     * @param name the Name in which to search for the key.
     * @param key the attribute key to search for (matched case-insensitively).
     * @return the rdn corresponding to the <b>first</b> occurrence of the requested key.
     * @throws NoSuchElementException if no corresponding entry is found.
     * @since 2.0
     */
    public static Rdn getRdn(Name name, String key) {
        Assert.notNull(name, "name must not be null");
        Assert.hasText(key, "key must not be blank");
        LdapName ldapName = returnOrConstructLdapNameFromName(name);
        List<Rdn> rdns = ldapName.getRdns();
        for (Rdn rdn : rdns) {
            // A multi-valued Rdn may carry several attribute ids; check each one.
            // hasMoreElements()/nextElement() are the non-throwing JNDI variants.
            NamingEnumeration<String> ids = rdn.toAttributes().getIDs();
            while (ids.hasMoreElements()) {
                String id = ids.nextElement();
                if (key.equalsIgnoreCase(id)) {
                    return rdn;
                }
            }
        }
        throw new NoSuchElementException("No Rdn with the requested key: '" + key + "'");
    }
}
public class CatalogUtil {

    /**
     * Parse the deployment.xml file and add its data into the catalog.
     *
     * @param catalog Catalog to be updated.
     * @param deployment Parsed representation of the deployment.xml file.
     * @param isPlaceHolderCatalog if the catalog is a placeholder and we are verifying only deployment xml.
     * @return String containing any errors parsing/validating the deployment. NULL on success.
     */
    public static String compileDeployment(Catalog catalog, DeploymentType deployment, boolean isPlaceHolderCatalog) {
        String errmsg = null;
        try {
            validateDeployment(catalog, deployment);
            // Add our hacky Deployment to the catalog.
            if (catalog.getClusters().get("cluster").getDeployment().get("deployment") == null) {
                catalog.getClusters().get("cluster").getDeployment().add("deployment");
            }
            // Set the cluster info.
            setClusterInfo(catalog, deployment);
            // Set the snapshot schedule.
            setSnapshotInfo(catalog, deployment.getSnapshot());
            // Set enable security.
            setSecurityEnabled(catalog, deployment.getSecurity());
            // Set the users info.
            // We'll skip this when building the dummy catalog on startup
            // so that we don't spew misleading user/role warnings.
            if (!isPlaceHolderCatalog) {
                setUsersInfo(catalog, deployment.getUsers());
            }
            // Set the HTTPD info.
            setHTTPDInfo(catalog, deployment.getHttpd(), deployment.getSsl());
            setDrInfo(catalog, deployment.getDr(), deployment.getCluster(), isPlaceHolderCatalog);
            // Export, import and SNMP config are likewise skipped for the placeholder catalog.
            if (!isPlaceHolderCatalog) {
                setExportInfo(catalog, deployment.getExport());
                setImportInfo(catalog, deployment.getImport());
                setSnmpInfo(deployment.getSnmp());
            }
            setCommandLogInfo(catalog, deployment.getCommandlog());
            // This is here so we can update our local list of paths.
            // I would not have needed this if validateResourceMonitorInfo didn't exist here.
            VoltDB.instance().loadLegacyPathProperties(deployment);
            setupPaths(deployment.getPaths());
            validateResourceMonitorInfo(deployment);
        } catch (Exception e) {
            // Anything that goes wrong anywhere in trying to handle the deployment file
            // should return an error, and let the caller decide what to do (crash or not,
            // for example).
            errmsg = "Error validating deployment configuration: " + e.getMessage();
            hostLog.error(errmsg);
            return errmsg;
        }
        return null;
    }
}
public class WTextAreaRenderer {

    /**
     * Paints the given WTextArea as a {@code ui:textarea} XML element.
     *
     * @param component the WTextArea to paint.
     * @param renderContext the RenderContext to paint to.
     */
    @Override
    public void doRender(final WComponent component, final WebXmlRenderContext renderContext) {
        WTextArea textArea = (WTextArea) component;
        XmlStringBuilder xml = renderContext.getWriter();
        boolean readOnly = textArea.isReadOnly();
        xml.appendTagOpen("ui:textarea");
        xml.appendAttribute("id", component.getId());
        xml.appendOptionalAttribute("class", component.getHtmlClass());
        xml.appendOptionalAttribute("track", component.isTracking(), "true");
        xml.appendOptionalAttribute("hidden", textArea.isHidden(), "true");
        if (readOnly) {
            xml.appendAttribute("readOnly", "true");
        } else {
            // Editable fields render the full set of input attributes.
            int cols = textArea.getColumns();
            int rows = textArea.getRows();
            int minLength = textArea.getMinLength();
            int maxLength = textArea.getMaxLength();
            WComponent submitControl = textArea.getDefaultSubmitButton();
            String submitControlId = submitControl == null ? null : submitControl.getId();
            xml.appendOptionalAttribute("disabled", textArea.isDisabled(), "true");
            xml.appendOptionalAttribute("required", textArea.isMandatory(), "true");
            // Length/size attributes are only emitted when positive.
            xml.appendOptionalAttribute("minLength", minLength > 0, minLength);
            xml.appendOptionalAttribute("maxLength", maxLength > 0, maxLength);
            xml.appendOptionalAttribute("toolTip", textArea.getToolTip());
            xml.appendOptionalAttribute("accessibleText", textArea.getAccessibleText());
            xml.appendOptionalAttribute("rows", rows > 0, rows);
            xml.appendOptionalAttribute("cols", cols > 0, cols);
            xml.appendOptionalAttribute("buttonId", submitControlId);
            String placeholder = textArea.getPlaceholder();
            xml.appendOptionalAttribute("placeholder", !Util.empty(placeholder), placeholder);
            String autocomplete = textArea.getAutocomplete();
            xml.appendOptionalAttribute("autocomplete", !Util.empty(autocomplete), autocomplete);
        }
        xml.appendClose();
        if (textArea.isRichTextArea()) {
            /*
             * This is a nested element instead of an attribute to cater for future enhancements
             * such as turning rich text features on or off, or specifying JSON config either as
             * a URL attribute or a nested CDATA section.
             */
            xml.append("<ui:rtf />");
        }
        String textString = textArea.getText();
        if (textString != null) {
            if (textArea.isReadOnly() && textArea.isRichTextArea()) {
                // Read only we want to output unescaped, but it must still be XML valid.
                xml.write(HtmlToXMLUtil.unescapeToXML(textString));
            } else {
                xml.appendEscaped(textString);
            }
        }
        if (!readOnly) {
            DiagnosticRenderUtil.renderDiagnostics(textArea, renderContext);
        }
        xml.appendEndTag("ui:textarea");
    }
}
public class ChannelPool { /** * { @ inheritDoc } */
@ Override public boolean awaitTermination ( long timeout , TimeUnit unit ) throws InterruptedException { } } | long endTimeNanos = System . nanoTime ( ) + unit . toNanos ( timeout ) ; for ( ManagedChannel channel : channels ) { if ( channel . isTerminated ( ) ) { continue ; } long awaitTimeNanos = endTimeNanos - System . nanoTime ( ) ; if ( awaitTimeNanos <= 0 ) { break ; } channel . awaitTermination ( awaitTimeNanos , TimeUnit . NANOSECONDS ) ; } return isTerminated ( ) ; |
public class ServiceHandler {

    /**
     * Rebalance a topology.
     *
     * @param topologyName topology name
     * @param options RebalanceOptions (optional wait time, reassign flag and new JSON conf)
     * @throws TException on any failure other than the topology not being alive
     */
    @Override
    public void rebalance(String topologyName, RebalanceOptions options) throws TException {
        try {
            checkTopologyActive(data, topologyName, true);
            // Unset option fields stay at their defaults (null wait/conf, reassign=false).
            Integer wait_amt = null;
            String jsonConf = null;
            Boolean reassign = false;
            if (options != null) {
                if (options.is_set_wait_secs())
                    wait_amt = options.get_wait_secs();
                if (options.is_set_reassign())
                    reassign = options.is_reassign();
                if (options.is_set_conf())
                    jsonConf = options.get_conf();
            }
            LOG.info("Begin to rebalance " + topologyName + "wait_time:" + wait_amt + ", reassign: " + reassign + ", new worker/bolt configuration:" + jsonConf);
            Map<Object, Object> conf = (Map<Object, Object>) JStormUtils.from_json(jsonConf);
            NimbusUtils.transitionName(data, topologyName, true, StatusType.rebalance, wait_amt, reassign, conf);
            notifyTopologyActionListener(topologyName, "rebalance");
        } catch (NotAliveException e) {
            // Re-thrown as a fresh Thrift exception with a descriptive message.
            String errMsg = "Rebalance error, topology " + topologyName + " is not alive!";
            LOG.error(errMsg, e);
            throw new NotAliveException(errMsg);
        } catch (Exception e) {
            String errMsg = "Failed to rebalance topology " + topologyName;
            LOG.error(errMsg, e);
            throw new TException(errMsg);
        }
    }
}
public class BundlesHandlerFactory {

    /**
     * Builds a single bundle containing all the paths specified. Useful to make
     * a single bundle out of every resource that is orphan after processing
     * config definitions.
     *
     * @param bundleId the bundle Id
     * @param orphanPaths the orphan paths
     * @return a single bundle containing all the paths specified
     */
    private JoinableResourceBundle buildOrphansResourceBundle(String bundleId, List<String> orphanPaths) {
        // NOTE(review): the third constructor argument is null here — presumably an
        // optional bundle prefix/variant; confirm against JoinableResourceBundleImpl.
        JoinableResourceBundle newBundle = new JoinableResourceBundleImpl(bundleId,
                generateBundleNameFromBundleId(bundleId), null, fileExtension,
                new InclusionPattern(), orphanPaths, resourceReaderHandler,
                jawrConfig.getGeneratorRegistry());
        return newBundle;
    }
}
public class AmazonAlexaForBusinessClient {

    /**
     * Forgets smart home appliances associated to a room.
     *
     * @param request the ForgetSmartHomeAppliances request
     * @return Result of the ForgetSmartHomeAppliances operation returned by the service.
     * @throws NotFoundException The resource is not found.
     * @sample AmazonAlexaForBusiness.ForgetSmartHomeAppliances
     * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/alexaforbusiness-2017-11-09/ForgetSmartHomeAppliances"
     *      target="_top">AWS API Documentation</a>
     */
    @Override
    public ForgetSmartHomeAppliancesResult forgetSmartHomeAppliances(ForgetSmartHomeAppliancesRequest request) {
        // Apply the client's standard pre-execution hooks, then run the generated executor.
        request = beforeClientExecution(request);
        return executeForgetSmartHomeAppliances(request);
    }
}
public class SuiteDeployer {

    /**
     * Method ignoring GenerateDeployment events if deployment is already done.
     *
     * @param eventContext Event to check
     */
    public void blockGenerateDeploymentWhenNeeded(@Observes EventContext<GenerateDeployment> eventContext) {
        if (!extensionEnabled()) {
            // Extension is off: behave as if this observer were not present.
            eventContext.proceed();
        } else if (suiteDeploymentGenerated) {
            // Deployment has already been generated for the suite — swallow the event.
            debug("Blocking GenerateDeployment event {0}", eventContext.getEvent().toString());
        } else {
            // First GenerateDeployment of the suite: remember it and let it through.
            // NOTE(review): check-then-set on suiteDeploymentGenerated is not atomic —
            // presumably event dispatch is single-threaded here; confirm.
            suiteDeploymentGenerated = true;
            debug("NOT Blocking GenerateDeployment event {0}", eventContext.getEvent().toString());
            eventContext.proceed();
        }
    }
}
public class GenericUtils {

    /**
     * Format the next 16 bytes of the input data starting from the input offset
     * as ASCII characters. Bytes that are not letters or digits are printed as
     * a period symbol; positions past the end of the data are padded.
     *
     * @param buffer destination the characters are appended to
     * @param data raw bytes to render
     * @param inOffset index of the first byte to render
     * @return the same StringBuilder, for call chaining
     */
    static private StringBuilder formatTextData(StringBuilder buffer, byte[] data, int inOffset) {
        for (int pos = inOffset; pos < inOffset + 16; pos++) {
            if (pos >= data.length) {
                // Past the end of the data: pad so columns stay aligned.
                buffer.append(" ");
            } else if (Character.isLetterOrDigit(data[pos])) {
                buffer.append((char) data[pos]);
            } else {
                buffer.append('.');
            }
        }
        return buffer;
    }
}
public class StdEnumDateElement { /** * < p > Obtains the effective calendar type . < / p >
* @ param attributes format attributes
* @ return effective calendar type */
protected String getCalendarType ( AttributeQuery attributes ) { } } | if ( this . isMonthElement ( ) || this . isEraElement ( ) ) { return attributes . get ( Attributes . CALENDAR_TYPE , this . defaultCalendarType ) ; } else if ( this . isWeekdayElement ( ) ) { return CalendarText . ISO_CALENDAR_TYPE ; } else { return this . defaultCalendarType ; } |
public class ClusterManagerClient {

    /**
     * Deletes the cluster, including the Kubernetes endpoint and all worker nodes.
     *
     * <p>Firewalls and routes that were configured during cluster creation are also deleted.
     *
     * <p>Other Google Compute Engine resources that might be in use by the cluster (e.g. load
     * balancer resources) will not be deleted if they weren't present at the initial create time.
     *
     * <p>Sample code:
     * <pre><code>
     * try (ClusterManagerClient clusterManagerClient = ClusterManagerClient.create()) {
     *   String projectId = "";
     *   String zone = "";
     *   String clusterId = "";
     *   Operation response = clusterManagerClient.deleteCluster(projectId, zone, clusterId);
     * }
     * </code></pre>
     *
     * @param projectId Deprecated. The Google Developers Console
     *     [project ID or project number](https://support.google.com/cloud/answer/6158840).
     *     This field has been deprecated and replaced by the name field.
     * @param zone Deprecated. The name of the Google Compute Engine
     *     [zone](/compute/docs/zones#available) in which the cluster resides. This field has
     *     been deprecated and replaced by the name field.
     * @param clusterId Deprecated. The name of the cluster to delete. This field has been
     *     deprecated and replaced by the name field.
     * @throws com.google.api.gax.rpc.ApiException if the remote call fails
     */
    public final Operation deleteCluster(String projectId, String zone, String clusterId) {
        // Convenience overload: wrap the three legacy identifiers in a request
        // object and delegate to the request-based variant.
        DeleteClusterRequest request =
                DeleteClusterRequest.newBuilder()
                        .setProjectId(projectId)
                        .setZone(zone)
                        .setClusterId(clusterId)
                        .build();
        return deleteCluster(request);
    }
}
public class InternalXbaseParser {

    /**
     * InternalXbase.g:6061:1: entryRuleJvmLowerBoundAnded returns [EObject current=null] : iv_ruleJvmLowerBoundAnded= ruleJvmLowerBoundAnded EOF ;
     *
     * <p>NOTE: ANTLR-generated parser entry rule — do not hand-edit; regenerate
     * from the grammar instead.
     */
    public final EObject entryRuleJvmLowerBoundAnded() throws RecognitionException {
        EObject current = null;
        EObject iv_ruleJvmLowerBoundAnded = null;
        try {
            // InternalXbase.g:6061:59: ( iv_ruleJvmLowerBoundAnded= ruleJvmLowerBoundAnded EOF )
            // InternalXbase.g:6062:2: iv_ruleJvmLowerBoundAnded= ruleJvmLowerBoundAnded EOF
            {
                if (state.backtracking == 0) {
                    newCompositeNode(grammarAccess.getJvmLowerBoundAndedRule());
                }
                pushFollow(FOLLOW_1);
                iv_ruleJvmLowerBoundAnded = ruleJvmLowerBoundAnded();
                state._fsp--;
                if (state.failed) return current;
                if (state.backtracking == 0) {
                    current = iv_ruleJvmLowerBoundAnded;
                }
                match(input, EOF, FOLLOW_2);
                if (state.failed) return current;
            }
        } catch (RecognitionException re) {
            recover(input, re);
            appendSkippedTokens();
        } finally {
        }
        return current;
    }
}
public class CritBit64 {

    /**
     * Get the value for a given key.
     *
     * @param key The key
     * @return the values associated with {@code key} or {@code null} if the key does not exist.
     */
    public V get(long key) {
        if (size == 0) {
            return null;
        }
        if (size == 1) {
            // Single-entry tree: compare directly against the root key.
            // NOTE(review): compare(...) == -1 appears to signal "keys equal"; confirm.
            int posDiff = compare(key, rootKey);
            if (posDiff == -1) {
                return rootVal;
            }
            return null;
        }
        // Walk the crit-bit tree from the root, following the bit at each
        // node's differing position.
        Node<V> n = root;
        while (true) {
            if (!doesInfixMatch(n, key)) {
                return null;
            }
            // Infix matches, so now we check sub-nodes and postfixes.
            if (getBit(key, n.posDiff)) {
                if (n.hi != null) {
                    n = n.hi;
                    continue;
                }
                // Leaf on the hi side: the stored postfix must match exactly.
                if (compare(key, n.hiPost) == -1) {
                    return n.hiVal;
                }
            } else {
                if (n.lo != null) {
                    n = n.lo;
                    continue;
                }
                if (compare(key, n.loPost) == -1) {
                    return n.loVal;
                }
            }
            return null;
        }
    }
}
public class FailoverGroupsInner {

    /**
     * Updates a failover group.
     *
     * @param resourceGroupName The name of the resource group that contains the resource. You can obtain this value from the Azure Resource Manager API or the portal.
     * @param serverName The name of the server containing the failover group.
     * @param failoverGroupName The name of the failover group.
     * @param parameters The failover group parameters.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the FailoverGroupInner object if successful.
     */
    public FailoverGroupInner beginUpdate(String resourceGroupName, String serverName, String failoverGroupName, FailoverGroupUpdate parameters) {
        // Blocking wrapper over the async variant: wait for the single response
        // and unwrap its body.
        return beginUpdateWithServiceResponseAsync(resourceGroupName, serverName, failoverGroupName, parameters)
                .toBlocking().single().body();
    }
}
public class StructuredDataId { /** * Creates an id using another id to supply default values .
* @ param id The original StructuredDataId .
* @ return the new StructuredDataId . */
public StructuredDataId makeId ( final StructuredDataId id ) { } } | if ( id == null ) { return this ; } return makeId ( id . getName ( ) , id . getEnterpriseNumber ( ) ) ; |
public class Controller { /** * Convenience method , calls { @ link # view ( String , Object ) } internally .
* The keys in the map are converted to String values .
* @ param values map with values to pass to view . */
protected void view ( Map < String , Object > values ) { } } | for ( String key : values . keySet ( ) ) { view ( key , values . get ( key ) ) ; } |
public class FileBlobStoreImpl {

    /**
     * Get an object tied to writing the data.
     *
     * @param key the key of the part to write to.
     * @param create presumably whether the blob must be newly created rather
     *     than already existing — TODO confirm against LocalFsBlobStoreFile.
     * @return an object that can be used to both write to, but also commit/cancel the operation.
     * @throws IOException on any error
     */
    public LocalFsBlobStoreFile write(String key, boolean create) throws IOException {
        // Second argument 'true' marks the returned handle as a write handle.
        return new LocalFsBlobStoreFile(getKeyDir(key), true, create);
    }
}
public class IntArrayList { /** * Searches an element .
* @ param ele element to look for
* @ return index of the first element found ; - 1 if nothing was found */
public int indexOf ( int ele ) { } } | int i ; for ( i = 0 ; i < size ; i ++ ) { if ( data [ i ] == ele ) { return i ; } } return - 1 ; |
public class FileStorage {

    /**
     * Returns a {@link com.frostwire.jlibtorrent.PeerRequest} representing the
     * piece index, byte offset and size the specified file range overlaps.
     * This is the inverse mapping of {@link #mapBlock(int, long, int)}.
     * Note that the {@link PeerRequest} return type is meant to hold bittorrent
     * block requests, which may not be larger than 16 kiB. Mapping a range
     * larger than that may return an overflown integer.
     *
     * @param file file index within the torrent
     * @param offset byte offset within that file
     * @param size length of the range in bytes
     * @return the overlapping piece request
     */
    public PeerRequest mapFile(int file, long offset, int size) {
        // Thin wrapper over the native libtorrent mapping call.
        return new PeerRequest(fs.map_file(file, offset, size));
    }
}
public class SyncPageWorker { /** * Get saved property .
* @ param strKey
* @ return */
public Object get ( String strKey ) { } } | if ( m_map != null ) return m_map . get ( strKey ) ; return null ; |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.