signature stringlengths 43 39.1k | implementation stringlengths 0 450k |
|---|---|
public class StorageSize { /** * Converts the size of this StorageSize to another unit , rounding the result to an integer < br / >
* This method may result in the loss of information ( due to rounding ) . If precision is required then getDecimalAmount ( unit )
* should be used < br / >
* @ param unit
* the unit to use ( instead of the default StorageSize unit )
* @ return the size ( as an integer ) in the provided < code > unit < / code > . */
public BigInteger getAmount ( final StorageUnit unit ) { } } | final BigDecimal amount = getDecimalAmount ( unit ) ; return amount . toBigInteger ( ) ; |
public class WSRdbManagedConnectionImpl { /** * Process request for a CONNECTION _ ERROR _ OCCURRED event .
* @ param event the Connection handle requesting to send the event .
* @ param ex the exception which indicates the connection error , or null if no exception .
* @ param logEvent fire a logging or non - logging event to be interpreted by the connection manager . */
public void processConnectionErrorOccurredEvent ( Object handle , Exception ex , boolean logEvent ) { } } | // Method is not synchronized because of the contract that add / remove event
// listeners will only be used on ManagedConnection create / destroy , when the
// ManagedConnection is not used by any other threads .
// Some object using the physical jdbc connection has received a SQLException that
// when translated to a ResourceException is determined to be a connection event error .
// The SQLException is mapped to a StaleConnectionException in the
// helper . SCE ' s will ( almost ) always be connection errors .
// Track whether a fatal Connection error was detected .
// Technically , the Connection Manager is required to be able to handle duplicate
// events , but since we already have a flag for the occasion , we ' ll be nice and skip
// the unnecessary event when convenient .
final boolean isTraceOn = TraceComponent . isAnyTracingEnabled ( ) ; if ( inCleanup ) { if ( isTraceOn && tc . isDebugEnabled ( ) ) Tr . debug ( this , tc , "An error occured during connection cleanup. Since the container drives " + "the cleanup op, it will directly receive the exception." ) ; return ; } if ( connectionErrorDetected ) { if ( isTraceOn && tc . isEventEnabled ( ) ) Tr . event ( this , tc , "CONNECTION_ERROR_OCCURRED event already fired" ) ; return ; } if ( ex instanceof SQLException && mcf . helper . isAnAuthorizationException ( ( SQLException ) ex ) ) { if ( isTraceOn && tc . isDebugEnabled ( ) ) Tr . debug ( this , tc , "CONNECTION_ERROR_OCCURRED will fire an event to only purge and destroy this connection" ) ; connectionErrorDetected = true ; closeHandles ( ) ; // Create a Connection Error Event with the given SQLException .
// Reuse a single ConnectionEvent instance .
// - Modified to use J2C defined event .
connEvent . recycle ( WSConnectionEvent . SINGLE_CONNECTION_ERROR_OCCURRED , ex , handle ) ; if ( isTraceOn && tc . isEventEnabled ( ) ) Tr . event ( this , tc , "Firing Single CONNECTION_ERROR_OCCURRED" , handle ) ; // loop through the listeners
for ( int i = 0 ; i < numListeners ; i ++ ) { // send Connection Error Occurred event to the current listener
ivEventListeners [ i ] . connectionErrorOccurred ( connEvent ) ; } return ; } mcf . fatalErrorCount . incrementAndGet ( ) ; if ( mcf . oracleRACXARetryDelay > 0l ) mcf . oracleRACLastStale . set ( System . currentTimeMillis ( ) ) ; connectionErrorDetected = true ; // The connectionErrorDetected indicator is no longer required for ManagedConnection
// cleanup since we are now required to invalidate all handles at that point
// regardless of whether a connection error has occurred .
// Close all active handles for this ManagedConnection , since we cannot rely on the
// ConnectionManager to request cleanup / destroy immediately . The ConnectionManager is
// required to wait until the transaction has ended .
closeHandles ( ) ; // Create a Connection Error Event with the given SQLException .
// Reuse a single ConnectionEvent instance .
// - Fire the normal logging event if logEvent = = true . Otherwise , fire the non - logging connection error event .
connEvent . recycle ( ( logEvent ? ConnectionEvent . CONNECTION_ERROR_OCCURRED : WSConnectionEvent . CONNECTION_ERROR_OCCURRED_NO_EVENT ) , ex , handle ) ; if ( isTraceOn && tc . isEventEnabled ( ) ) Tr . event ( this , tc , "Firing " + ( logEvent ? "CONNECTION_ERROR_OCCURRED" : "CONNECTION_ERROR_OCCURRED_NO_EVENT" ) , handle ) ; // loop through the listeners
for ( int i = 0 ; i < numListeners ; i ++ ) { // send Connection Error Occurred event to the current listener
ivEventListeners [ i ] . connectionErrorOccurred ( connEvent ) ; } // Replace ConnectionEvent caching with a single reusable instance per
// ManagedConnection . |
public class DBInstance { /** * Provides the list of DB parameter groups applied to this DB instance .
* @ param dBParameterGroups
* Provides the list of DB parameter groups applied to this DB instance . */
public void setDBParameterGroups ( java . util . Collection < DBParameterGroupStatus > dBParameterGroups ) { } } | if ( dBParameterGroups == null ) { this . dBParameterGroups = null ; return ; } this . dBParameterGroups = new java . util . ArrayList < DBParameterGroupStatus > ( dBParameterGroups ) ; |
public class SVDModel { /** * Write out K / V pairs */
@ Override protected AutoBuffer writeAll_impl ( AutoBuffer ab ) { } } | ab . putKey ( _output . _u_key ) ; ab . putKey ( _output . _v_key ) ; return super . writeAll_impl ( ab ) ; |
public class DeviceImpl { void set_non_auto_polled_cmd ( final String [ ] s ) { } } | for ( final String value : s ) { ext . non_auto_polled_cmd . add ( value ) ; } |
public class BatchStatementBuilder { /** * Sets the CQL keyspace to execute this batch in . Shortcut for { @ link # setKeyspace ( CqlIdentifier )
* setKeyspace ( CqlIdentifier . fromCql ( keyspaceName ) ) } .
* @ return this builder ; never { @ code null } . */
@ NonNull public BatchStatementBuilder setKeyspace ( @ NonNull String keyspaceName ) { } } | return setKeyspace ( CqlIdentifier . fromCql ( keyspaceName ) ) ; |
public class MtasPennTreebankParser { /** * Creates the node mappings .
* @ param mtasTokenIdFactory
* the mtas token id factory
* @ param level
* the level
* @ param parentLevel
* the parent level */
private void createNodeMappings ( MtasTokenIdFactory mtasTokenIdFactory , Level level , Level parentLevel ) { } } | MtasToken nodeToken ; if ( level . node != null && level . positionStart != null && level . positionEnd != null ) { nodeToken = new MtasTokenString ( mtasTokenIdFactory . createTokenId ( ) , level . node , "" ) ; nodeToken . setOffset ( level . offsetStart , level . offsetEnd ) ; nodeToken . setRealOffset ( level . realOffsetStart , level . realOffsetEnd ) ; nodeToken . addPositionRange ( level . positionStart , level . positionEnd ) ; tokenCollection . add ( nodeToken ) ; if ( parentLevel != null ) { parentLevel . tokens . add ( nodeToken ) ; } // only for first mapping ( ? )
for ( MtasToken token : level . tokens ) { token . setParentId ( nodeToken . getId ( ) ) ; } } |
public class ComputeTypeMarshaller { /** * Marshall the given parameter object . */
public void marshall ( ComputeType computeType , ProtocolMarshaller protocolMarshaller ) { } } | if ( computeType == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( computeType . getName ( ) , NAME_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; } |
public class MetaPropertyVersion { /** * Compares two string ordered lists containing numbers .
* @ return - 1 when first group is higher , 0 if equals , 1 when second group is higher */
private static int compareTo ( Deque < String > first , Deque < String > second ) { } } | if ( 0 == first . size ( ) && 0 == second . size ( ) ) { return 0 ; } if ( 0 == first . size ( ) ) { return 1 ; } if ( 0 == second . size ( ) ) { return - 1 ; } int headsComparation = ( Integer . valueOf ( first . remove ( ) ) ) . compareTo ( Integer . parseInt ( second . remove ( ) ) ) ; if ( 0 == headsComparation ) { return compareTo ( first , second ) ; } else { return headsComparation ; } |
public class MinioClient { /** * Get bucket life cycle configuration .
* @ param bucketName Bucket name .
* < / p > < b > Example : < / b > < br >
* < pre > { @ code String bucketLifeCycle = minioClient . getBucketLifecycle ( " my - bucketname " ) ;
* } < / pre >
* @ throws InvalidBucketNameException upon invalid bucket name is given
* @ throws NoSuchAlgorithmException upon requested algorithm was not found during
* signature calculation
* @ throws InsufficientDataException upon getting EOFException while reading given
* InputStream even before reading given length
* @ throws IOException upon connection error
* @ throws InvalidKeyException upon an invalid access key or secret key
* @ throws NoResponseException upon no response from server
* @ throws XmlPullParserException upon parsing response xml
* @ throws ErrorResponseException upon unsuccessful execution
* @ throws InternalException upon internal library error */
public String getBucketLifeCycle ( String bucketName ) throws InvalidBucketNameException , NoSuchAlgorithmException , InsufficientDataException , IOException , InvalidKeyException , NoResponseException , XmlPullParserException , ErrorResponseException , InternalException { } } | Map < String , String > queryParamMap = new HashMap < > ( ) ; queryParamMap . put ( "lifecycle" , "" ) ; HttpResponse response = null ; String bodyContent = "" ; Scanner scanner = null ; try { response = executeGet ( bucketName , "" , null , queryParamMap ) ; scanner = new Scanner ( response . body ( ) . charStream ( ) ) ; // read entire body stream to string .
scanner . useDelimiter ( "\\A" ) ; if ( scanner . hasNext ( ) ) { bodyContent = scanner . next ( ) ; } } catch ( ErrorResponseException e ) { if ( e . errorResponse ( ) . errorCode ( ) != ErrorCode . NO_SUCH_LIFECYCLE_CONFIGURATION ) { throw e ; } } finally { if ( response != null && response . body ( ) != null ) { response . body ( ) . close ( ) ; } if ( scanner != null ) { scanner . close ( ) ; } } return bodyContent ; |
public class LCTManager { /** * Extracts the constant fields from the specified class using the ClassInspector . */
public void register ( Class < ? > aClass ) { } } | List < ConstantField > tmp = inspector . getConstants ( aClass ) ; constantList . addAll ( tmp ) ; for ( ConstantField constant : tmp ) constants . put ( constant . name , constant ) ; |
public class Checks { /** * Performs check with the regular expression pattern .
* @ param reference reference to check
* @ param pattern the regular expression pattern
* @ param errorMessage the exception message to use if the check fails ; will
* be converted to a string using { @ link String # valueOf ( Object ) }
* @ throws IllegalArgumentException if the { @ code reference } doesn ' t match provided regular expression
* @ see Checks # checkMatches ( String , java . util . regex . Pattern , String , Object . . . ) */
@ Beta public static void checkMatches ( String reference , Pattern pattern , @ Nullable Object errorMessage ) { } } | checkMatches ( reference , pattern , String . valueOf ( errorMessage ) , EMPTY_ERROR_MESSAGE_ARGS ) ; |
public class TypeUtils { /** * Format a { @ link TypeVariable } including its { @ link GenericDeclaration } .
* @ param var the type variable to create a String representation for , not { @ code null }
* @ return String
* @ since 3.2 */
public static String toLongString ( final TypeVariable < ? > var ) { } } | Validate . notNull ( var , "var is null" ) ; final StringBuilder buf = new StringBuilder ( ) ; final GenericDeclaration d = var . getGenericDeclaration ( ) ; if ( d instanceof Class < ? > ) { Class < ? > c = ( Class < ? > ) d ; while ( true ) { if ( c . getEnclosingClass ( ) == null ) { buf . insert ( 0 , c . getName ( ) ) ; break ; } buf . insert ( 0 , c . getSimpleName ( ) ) . insert ( 0 , '.' ) ; c = c . getEnclosingClass ( ) ; } } else if ( d instanceof Type ) { // not possible as of now
buf . append ( toString ( ( Type ) d ) ) ; } else { buf . append ( d ) ; } return buf . append ( ':' ) . append ( typeVariableToString ( var ) ) . toString ( ) ; |
public class ClientDObjectMgr { /** * Called periodically to flush any objects that have been lingering due to a previously
* enacted flush delay . */
protected void flushObjects ( ) { } } | long now = System . currentTimeMillis ( ) ; for ( Iterator < IntMap . IntEntry < FlushRecord > > iter = _flushes . intEntrySet ( ) . iterator ( ) ; iter . hasNext ( ) ; ) { IntMap . IntEntry < FlushRecord > entry = iter . next ( ) ; // int oid = entry . getIntKey ( ) ;
FlushRecord rec = entry . getValue ( ) ; if ( rec . expire <= now ) { iter . remove ( ) ; flushObject ( rec . object ) ; // Log . info ( " Flushed object " + oid + " . " ) ;
} } |
public class PaigeTarjanInitializers { /** * Initializes the partition refinement data structure from a given abstracted deterministic automaton , using a
* predefined initial partitioning mode .
* @ param pt
* the partition refinement data structure
* @ param absAutomaton
* the abstraction of the input automaton
* @ param ip
* the initial partitioning mode
* @ param pruneUnreachable
* whether or not to prune unreachable states during initialization */
public static void initCompleteDeterministic ( PaigeTarjan pt , UniversalDeterministicAutomaton . FullIntAbstraction < ? , ? , ? > absAutomaton , AutomatonInitialPartitioning ip , boolean pruneUnreachable ) { } } | initCompleteDeterministic ( pt , absAutomaton , ip . initialClassifier ( absAutomaton ) , pruneUnreachable ) ; |
public class DomainTopicsInner { /** * List domain topics .
* List all the topics in a domain .
* @ param resourceGroupName The name of the resource group within the user ' s subscription .
* @ param domainName Domain name .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the observable to the List & lt ; DomainTopicInner & gt ; object */
public Observable < List < DomainTopicInner > > listByDomainAsync ( String resourceGroupName , String domainName ) { } } | return listByDomainWithServiceResponseAsync ( resourceGroupName , domainName ) . map ( new Func1 < ServiceResponse < List < DomainTopicInner > > , List < DomainTopicInner > > ( ) { @ Override public List < DomainTopicInner > call ( ServiceResponse < List < DomainTopicInner > > response ) { return response . body ( ) ; } } ) ; |
public class FpUtils { /** * Returns a floating - point power of two in the normal range . */
static double powerOfTwoD ( int n ) { } } | assert ( n >= DoubleConsts . MIN_EXPONENT && n <= DoubleConsts . MAX_EXPONENT ) ; return Double . longBitsToDouble ( ( ( ( long ) n + ( long ) DoubleConsts . EXP_BIAS ) << ( DoubleConsts . SIGNIFICAND_WIDTH - 1 ) ) & DoubleConsts . EXP_BIT_MASK ) ; |
public class ManyToOneAttribute { /** * Sets the specified long attribute to the specified value .
* @ param name name of the attribute
* @ param value value of the attribute
* @ since 1.9.0 */
public void setLongAttribute ( String name , Long value ) { } } | ensureValue ( ) ; Attribute attribute = new LongAttribute ( value ) ; attribute . setEditable ( isEditable ( name ) ) ; getValue ( ) . getAllAttributes ( ) . put ( name , attribute ) ; |
public class HttpServer { /** * Start the server . Does not wait for the server to start . */
public void start ( ) throws IOException { } } | try { int port = 0 ; int oriPort = listener . getPort ( ) ; // The original requested port
while ( true ) { try { port = webServer . getConnectors ( ) [ 0 ] . getLocalPort ( ) ; LOG . info ( "Port returned by webServer.getConnectors()[0]." + "getLocalPort() before open() is " + port + ". Opening the listener on " + oriPort ) ; listener . open ( ) ; port = listener . getLocalPort ( ) ; LOG . info ( "listener.getLocalPort() returned " + listener . getLocalPort ( ) + " webServer.getConnectors()[0].getLocalPort() returned " + webServer . getConnectors ( ) [ 0 ] . getLocalPort ( ) ) ; // Workaround to handle the problem reported in HADOOP - 4744
if ( port < 0 ) { Thread . sleep ( 100 ) ; int numRetries = 1 ; while ( port < 0 ) { LOG . warn ( "listener.getLocalPort returned " + port ) ; if ( numRetries ++ > MAX_RETRIES ) { throw new Exception ( " listener.getLocalPort is returning " + "less than 0 even after " + numRetries + " resets" ) ; } for ( int i = 0 ; i < 2 ; i ++ ) { LOG . info ( "Retrying listener.getLocalPort()" ) ; port = listener . getLocalPort ( ) ; if ( port > 0 ) { break ; } Thread . sleep ( 200 ) ; } if ( port > 0 ) { break ; } LOG . info ( "Bouncing the listener" ) ; listener . close ( ) ; Thread . sleep ( 1000 ) ; listener . setPort ( oriPort == 0 ? 0 : ( oriPort += 1 ) ) ; listener . open ( ) ; Thread . sleep ( 100 ) ; port = listener . getLocalPort ( ) ; } } // Workaround end
LOG . info ( "Jetty bound to port " + port ) ; webServer . start ( ) ; // Workaround for HADOOP - 6386
port = listener . getLocalPort ( ) ; if ( port < 0 ) { LOG . warn ( "Bounds port is " + port + " after webserver start" ) ; for ( int i = 0 ; i < MAX_RETRIES / 2 ; i ++ ) { try { webServer . stop ( ) ; } catch ( Exception e ) { LOG . warn ( "Can't stop web-server" , e ) ; } Thread . sleep ( 1000 ) ; listener . setPort ( oriPort == 0 ? 0 : ( oriPort += 1 ) ) ; listener . open ( ) ; Thread . sleep ( 100 ) ; webServer . start ( ) ; LOG . info ( i + "attempts to restart webserver" ) ; port = listener . getLocalPort ( ) ; if ( port > 0 ) break ; } if ( port < 0 ) throw new Exception ( "listener.getLocalPort() is returning " + "less than 0 even after " + MAX_RETRIES + " resets" ) ; } // End of HADOOP - 6386 workaround
break ; } catch ( IOException ex ) { // if this is a bind exception ,
// then try the next port number .
if ( ex instanceof BindException ) { if ( ! findPort ) { throw ( BindException ) ex ; } } else { LOG . info ( "HttpServer.start() threw a non Bind IOException" ) ; throw ex ; } } catch ( MultiException ex ) { LOG . info ( "HttpServer.start() threw a MultiException" ) ; throw ex ; } listener . setPort ( ( oriPort += 1 ) ) ; } } catch ( IOException e ) { throw e ; } catch ( Exception e ) { throw new IOException ( "Problem starting http server" , e ) ; } |
public class ClinAsserTraitSetType { /** * Gets the value of the attributeSet property .
* This accessor method returns a reference to the live list ,
* not a snapshot . Therefore any modification you make to the
* returned list will be present inside the JAXB object .
* This is why there is not a < CODE > set < / CODE > method for the attributeSet property .
* For example , to add a new item , do as follows :
* < pre >
* getAttributeSet ( ) . add ( newItem ) ;
* < / pre >
* Objects of the following type ( s ) are allowed in the list
* { @ link ClinAsserTraitSetType . AttributeSet } */
public List < ClinAsserTraitSetType . AttributeSet > getAttributeSet ( ) { } } | if ( attributeSet == null ) { attributeSet = new ArrayList < ClinAsserTraitSetType . AttributeSet > ( ) ; } return this . attributeSet ; |
public class RemoteMessageRequest { /** * / * ( non - Javadoc )
* @ see com . ibm . ws . sib . processor . runtime . SIMPRemoteMessageRequestControllable # getRequestMessageInfo ( ) */
public SIMPRequestMessageInfo getRequestMessageInfo ( ) throws SIMPRuntimeOperationFailedException { } } | if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "getRequestMessageInfo" ) ; SIMPRequestMessageInfo requestMessageInfo = null ; try { if ( State . REQUEST . toString ( ) . equals ( getState ( ) ) ) { // This RemoteMessageRequest is in state request so lets get the info
TickRange tickRange = _aiStream . getTickRange ( _tick ) ; requestMessageInfo = new RequestMessageInfo ( ( AIRequestedTick ) tickRange . value ) ; } } catch ( SIMPException e ) { // FFDC
FFDCFilter . processException ( e , "com.ibm.ws.sib.processor.runtime.RemoteMessageRequest.getRequestMessageInfo" , "1:407:1.34" , this ) ; SIMPRuntimeOperationFailedException e1 = new SIMPRuntimeOperationFailedException ( nls . getFormattedMessage ( "INTERNAL_MESSAGING_ERROR_CWSIP0003" , new Object [ ] { "RemoteMessageRequest.getRequestMessageInfo" , "1:415:1.34" , e , _aiStream . getStreamId ( ) } , null ) , e ) ; SibTr . exception ( tc , e1 ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "getRequestMessageInfo" , e1 ) ; throw e1 ; } if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "getRequestMessageInfo" , requestMessageInfo ) ; return requestMessageInfo ; |
public class BrokerSession { /** * Performs a remote procedure call .
* @ param name Name of the remote procedure . This has the format :
* < pre >
* & lt ; remote procedure name & gt ; [ : & lt ; remote procedure version & gt ; ] [ : & lt ; calling context & gt ; ]
* < / pre >
* where only the remote procedure name is required . If the server supports multiple
* versions of a remote procedure , an explicit version specifier may be added . If a
* different calling context is desired , this may be specified to override the
* default . For example :
* < pre >
* GET LAB RESULTS : 2.4 : LR CONTEXT
* < / pre >
* @ param async If true , the remote procedure call will be executed asynchronously . In this
* case , the value returned by the method will be the unique handle for the
* asynchronous request .
* @ param timeout The timeout , in milliseconds , to wait for remote procedure completion .
* @ param params Parameters to be passed to the remote procedure . This may be null .
* @ return The data returned by the remote procedure called if called synchronously , or the
* unique handle of the request , if call asynchronously . */
public String callRPC ( String name , boolean async , int timeout , RPCParameters params ) { } } | ensureConnection ( ) ; String version = "" ; String context = connectionParams . getAppid ( ) ; if ( name . contains ( ":" ) ) { String pcs [ ] = StrUtil . split ( name , ":" , 3 , true ) ; name = pcs [ 0 ] ; version = pcs [ 1 ] ; context = pcs [ 2 ] . isEmpty ( ) ? context : pcs [ 2 ] ; } Request request = new Request ( Action . RPC ) ; request . addParameter ( "UID" , id ) ; request . addParameter ( "CTX" , context ) ; request . addParameter ( "VER" , version ) ; request . addParameter ( "RPC" , name ) ; request . addParameter ( "ASY" , async ) ; if ( params != null ) { request . addParameters ( params ) ; } Response response = netCall ( request , timeout ) ; return response . getData ( ) ; |
public class ScriptPluginProviderLoader { /** * Load plugin metadata for a file and zip inputstream
* @ param jar the file
* @ param zipinput zip input stream
* @ return loaded metadata , or null if it is invalid or not found */
static PluginMeta loadMeta ( final File jar , final ZipInputStream zipinput ) throws IOException { } } | final String basename = basename ( jar ) ; PluginMeta metadata = null ; boolean topfound = false ; boolean found = false ; boolean dirfound = false ; boolean resfound = false ; ZipEntry nextEntry = zipinput . getNextEntry ( ) ; Set < String > paths = new HashSet < > ( ) ; while ( null != nextEntry ) { paths . add ( nextEntry . getName ( ) ) ; if ( ! found && ! nextEntry . isDirectory ( ) && nextEntry . getName ( ) . equals ( basename + "/plugin.yaml" ) ) { // debug ( " Found metadata : " + nextEntry . getName ( ) ) ;
try { metadata = loadMetadataYaml ( zipinput ) ; found = true ; } catch ( Throwable e ) { log . error ( "Error parsing metadata file plugin.yaml: " + e . getMessage ( ) , e ) ; } } nextEntry = zipinput . getNextEntry ( ) ; } if ( ! found || metadata == null ) { log . error ( "Plugin not loaded: Found no " + basename + "/plugin.yaml within: " + jar . getAbsolutePath ( ) ) ; } String resdir = null != metadata ? getResourcesBasePath ( metadata ) : null ; for ( String path : paths ) { if ( ! topfound && path . startsWith ( basename + "/" ) ) { topfound = true ; } if ( ! dirfound && ( path . startsWith ( basename + "/contents/" ) || path . equals ( basename + "/contents" ) ) ) { dirfound = true ; } if ( ! resfound && resdir != null && ( path . startsWith ( basename + "/" + resdir + "/" ) || path . equals ( basename + "/" + resdir ) ) ) { resfound = true ; } } if ( ! topfound ) { log . error ( "Plugin not loaded: Found no " + basename + "/ dir within file: " + jar . getAbsolutePath ( ) ) ; } if ( ! dirfound && ! resfound ) { log . error ( "Plugin not loaded: Found no " + basename + "/contents or " + basename + "/" + resdir + " dir within: " + jar . getAbsolutePath ( ) ) ; } if ( found && ( dirfound || resfound ) ) { return metadata ; } return null ; |
public class OperationFuture { /** * Whether or not the Operation is done and result can be retrieved with
* get ( ) .
* The most common way to wait for this OperationFuture is to use the get ( )
* method which will block . This method allows one to check if it ' s complete
* without blocking .
* @ return true if the Operation is done */
public boolean isDone ( ) { } } | assert op != null : "No operation" ; return latch . getCount ( ) == 0 || op . isCancelled ( ) || op . getState ( ) == OperationState . COMPLETE ; |
public class PaxWicketApplicationFactory { /** * < p > createPaxWicketApplicationFactory . < / p >
* @ param bundleContext a { @ link org . osgi . framework . BundleContext } object .
* @ param webApplicationFactory a { @ link org . ops4j . pax . wicket . api . WebApplicationFactory } object .
* @ param reference a { @ link org . osgi . framework . ServiceReference } object .
* @ return a { @ link org . ops4j . pax . wicket . internal . PaxWicketApplicationFactory } object . */
@ SuppressWarnings ( "unchecked" ) public static PaxWicketApplicationFactory createPaxWicketApplicationFactory ( BundleContext bundleContext , WebApplicationFactory < ? > webApplicationFactory , ServiceReference < WebApplicationFactory < ? > > reference ) { } } | File tmpDir = retrieveTmpFile ( bundleContext ) ; tmpDir . mkdirs ( ) ; String mountPoint = ( String ) reference . getProperty ( Constants . MOUNTPOINT ) ; String applicationName = ( String ) reference . getProperty ( Constants . APPLICATION_NAME ) ; Map < String , String > contextParams = ( Map < String , String > ) reference . getProperty ( Constants . CONTEXT_PARAMS ) ; if ( contextParams == null ) { contextParams = new HashMap < String , String > ( ) ; } if ( ! contextParams . containsKey ( WicketFilter . FILTER_MAPPING_PARAM ) ) { contextParams . put ( WicketFilter . FILTER_MAPPING_PARAM , "/" + mountPoint + "/*" ) ; } FilterDelegator filterDelegator = new FilterDelegator ( reference . getBundle ( ) . getBundleContext ( ) , applicationName ) ; PaxWicketApplicationFactory factory = new PaxWicketApplicationFactory ( bundleContext , webApplicationFactory , applicationName , mountPoint , contextParams , tmpDir , filterDelegator ) ; return factory ; |
public class BigramCollocationFinder { /** * Help function for calculating likelihood ratio statistic . */
private double logL ( int k , long n , double x ) { } } | if ( x == 0.0 ) x = 0.01 ; if ( x == 1.0 ) x = 0.99 ; return k * Math . log ( x ) + ( n - k ) * Math . log ( 1 - x ) ; |
public class FTPClient { /** * Reads a GFD . 47 compliant 127 reply and extracts the port
* information from it . */
protected HostPort get127Reply ( ) throws ServerException , IOException , FTPReplyParseException { } } | Reply reply = controlChannel . read ( ) ; if ( Reply . isTransientNegativeCompletion ( reply ) || Reply . isPermanentNegativeCompletion ( reply ) ) { throw ServerException . embedUnexpectedReplyCodeException ( new UnexpectedReplyCodeException ( reply ) , reply . getMessage ( ) ) ; } if ( reply . getCode ( ) != 127 ) { throw new ServerException ( ServerException . WRONG_PROTOCOL , reply . getMessage ( ) ) ; } Matcher matcher = portPattern . matcher ( reply . getMessage ( ) ) ; if ( ! matcher . find ( ) ) { throw new ServerException ( ServerException . WRONG_PROTOCOL , "Cannot parse 127 reply: " + reply . getMessage ( ) ) ; } return new HostPort ( matcher . group ( ) ) ; |
public class TextUtil { /** * Format the text to be sure that each line is not
* more longer than the specified critera .
* @ param text is the string to cut
* @ param critera is the critera to respect .
* @ param output is the given { @ code text } splitted in lines separated by < code > \ n < / code > .
* @ since 4.0 */
public static void cutStringAsArray ( String text , CutStringCritera critera , List < String > output ) { } } | cutStringAlgo ( text , critera , new CutStringToArray ( output ) ) ; |
public class QuatSymmetryResults { /** * Determine if this symmetry result is a subset of the other Symmetry result .
* Checks the following conditions :
* - ' Other ' includes all subunits of ' this ' .
* - ' Other ' has the same or higher order than ' this ' .
* Special treatment for the helical symmetry :
* - ' Other ' includes all subunits of ' this ' .
* - ' this ' may be Cn , as well as H
* Note that isSupersededBy establishes a partial order , i . e . for some
* symmetries A and B , neither A . isSupersededBy ( B ) nor B . isSupersededBy ( A )
* may be true .
* @ param other
* QuatSymmetryResults
* @ return true if other supersedes this , false otherwise */
public boolean isSupersededBy ( QuatSymmetryResults other ) { } } | if ( other . getSymmetry ( ) . startsWith ( "H" ) ) { if ( this . getSymmetry ( ) . startsWith ( "C" ) || this . getSymmetry ( ) . startsWith ( "H" ) ) { if ( other . subunits . containsAll ( this . subunits ) ) { return true ; } } return false ; } if ( this . getSymmetry ( ) . startsWith ( "H" ) ) { return false ; } if ( this . rotationGroup . getOrder ( ) <= other . rotationGroup . getOrder ( ) && other . subunits . containsAll ( this . subunits ) ) { return true ; } return false ; |
public class GeoJsonReaderDriver { /** * Parses a GeoJSON coordinate array and check if it ' s wellformed . The first
* token corresponds to the first X value . The last token correponds to the
* end of the coordinate array " ] " .
* Parsed syntax :
* 100.0 , 0.0]
* @ param jp
* @ throws IOException
* @ return Coordinate */
private void parseCoordinateMetadata ( JsonParser jp ) throws IOException { } } | jp . nextToken ( ) ; jp . nextToken ( ) ; // second value
// We look for a z value
jp . nextToken ( ) ; if ( jp . getCurrentToken ( ) != JsonToken . END_ARRAY ) { jp . nextToken ( ) ; // exit array
} jp . nextToken ( ) ; |
public class CmsSearchDialog { /** * Returns a list of < code > { @ link CmsSelectWidgetOption } < / code > objects for field list selection . < p >
* @ return a list of < code > { @ link CmsSelectWidgetOption } < / code > objects */
private List < CmsSelectWidgetOption > getFieldList ( ) { } } | List < CmsSelectWidgetOption > retVal = new ArrayList < CmsSelectWidgetOption > ( ) ; try { Iterator < CmsLuceneField > i = getFields ( ) . iterator ( ) ; while ( i . hasNext ( ) ) { CmsLuceneField field = i . next ( ) ; if ( isInitialCall ( ) ) { // search form is in the initial state
retVal . add ( new CmsSelectWidgetOption ( field . getName ( ) , true , getMacroResolver ( ) . resolveMacros ( field . getDisplayName ( ) ) ) ) ; } else { // search form is not in the initial state
retVal . add ( new CmsSelectWidgetOption ( field . getName ( ) , false , getMacroResolver ( ) . resolveMacros ( field . getDisplayName ( ) ) ) ) ; } } } catch ( Exception e ) { // noop
} return retVal ; |
public class PropertyFilterList { /** * Returns the amount of previous remaining list nodes . */
public int getPreviousRemaining ( ) { } } | int remaining = mPrevRemaining ; if ( remaining < 0 ) { mPrevRemaining = remaining = ( ( mPrev == null ) ? 0 : ( mPrev . getPreviousRemaining ( ) + 1 ) ) ; } return remaining ; |
public class DsParser {
    /**
     * Store a driver
     * @param drv The driver
     * @param writer The writer
     * @exception Exception Thrown if an error occurs
     */
    protected void storeDriver(Driver drv, XMLStreamWriter writer) throws Exception {
        writer.writeStartElement(XML.ELEMENT_DRIVER);
        // Optional attributes: each is emitted only when present on the model object.
        if (drv.getName() != null)
            writer.writeAttribute(XML.ATTRIBUTE_NAME, drv.getValue(XML.ATTRIBUTE_NAME, drv.getName()));
        if (drv.getModule() != null)
            writer.writeAttribute(XML.ATTRIBUTE_MODULE, drv.getValue(XML.ATTRIBUTE_MODULE, drv.getModule()));
        if (drv.getMajorVersion() != null)
            writer.writeAttribute(XML.ATTRIBUTE_MAJOR_VERSION,
                                  drv.getValue(XML.ATTRIBUTE_MAJOR_VERSION, drv.getMajorVersion().toString()));
        if (drv.getMinorVersion() != null)
            writer.writeAttribute(XML.ATTRIBUTE_MINOR_VERSION,
                                  drv.getValue(XML.ATTRIBUTE_MINOR_VERSION, drv.getMinorVersion().toString()));
        // Optional child elements, each wrapping a single text value.
        if (drv.getDriverClass() != null) {
            writer.writeStartElement(XML.ELEMENT_DRIVER_CLASS);
            writer.writeCharacters(drv.getValue(XML.ELEMENT_DRIVER_CLASS, drv.getDriverClass()));
            writer.writeEndElement();
        }
        if (drv.getDataSourceClass() != null) {
            writer.writeStartElement(XML.ELEMENT_DATASOURCE_CLASS);
            writer.writeCharacters(drv.getValue(XML.ELEMENT_DATASOURCE_CLASS, drv.getDataSourceClass()));
            writer.writeEndElement();
        }
        if (drv.getXaDataSourceClass() != null) {
            writer.writeStartElement(XML.ELEMENT_XA_DATASOURCE_CLASS);
            writer.writeCharacters(drv.getValue(XML.ELEMENT_XA_DATASOURCE_CLASS, drv.getXaDataSourceClass()));
            writer.writeEndElement();
        }
        writer.writeEndElement();
    }
}
public class RedisStrSortSet { /** * 删除有序集合中的一个成员
* @ param member
* @ return */
public boolean remove ( String mem ) { } } | try { return getJedisCommands ( groupName ) . zrem ( key , mem ) >= 1 ; } finally { getJedisProvider ( groupName ) . release ( ) ; } |
public class ExportGeneration {
    /**
     * Create export ack mailbox during generation initialization, do nothing if generation has
     * already initialized.
     *
     * @param localPartitions locally covered partitions
     */
    private void createAckMailboxesIfNeeded(final Set<Integer> localPartitions) {
        m_mailboxesZKPath = VoltZK.exportGenerations + "/" + "mailboxes";
        // Create the mailbox once per generation; deliver() dispatches every export control message.
        if (m_mbox == null) {
            m_mbox = new LocalMailbox(m_messenger) {
                @Override
                public void deliver(VoltMessage message) {
                    if (message instanceof BinaryPayloadMessage) {
                        // Common wire header: message type, partition id, then UTF-8 table name.
                        BinaryPayloadMessage bpm = (BinaryPayloadMessage) message;
                        ByteBuffer buf = ByteBuffer.wrap(bpm.m_payload);
                        final byte msgType = buf.get();
                        final int partition = buf.getInt();
                        final Map<String, ExportDataSource> partitionSources = m_dataSourcesByPartition.get(partition);
                        final int length = buf.getInt();
                        byte stringBytes[] = new byte[length];
                        buf.get(stringBytes);
                        String tableName = new String(stringBytes, Constants.UTF8ENCODING);
                        if (partitionSources == null) {
                            // Unknown partition: only an error when it is not being removed.
                            if (!m_removingPartitions.contains(partition)) {
                                exportLog.error("Received an export message " + msgType + " for partition " + partition
                                        + " which does not exist on this node");
                            }
                            return;
                        }
                        final ExportDataSource eds = partitionSources.get(tableName);
                        if (eds == null) {
                            // For dangling buffers
                            if (msgType == ExportManager.TAKE_MASTERSHIP) {
                                final long requestId = buf.getLong();
                                if (exportLog.isDebugEnabled()) {
                                    exportLog.debug("Received TAKE_MASTERSHIP message(" + requestId
                                            + ") for a stream that no longer exists from "
                                            + CoreUtils.hsIdToString(message.m_sourceHSId)
                                            + " to " + CoreUtils.hsIdToString(m_mbox.getHSId()));
                                }
                                sendDummyTakeMastershipResponse(message.m_sourceHSId, requestId, partition, stringBytes);
                            } else {
                                exportLog.warn("Received export message " + msgType + " for partition " + partition
                                        + " source " + tableName + " which does not exist on this node, sources = "
                                        + partitionSources);
                            }
                            return;
                        }
                        if (msgType == ExportManager.RELEASE_BUFFER) {
                            // Remote consumer acknowledged data up to seqNo.
                            final long seqNo = buf.getLong();
                            final long catalogVersion = buf.getInt();
                            try {
                                if (exportLog.isDebugEnabled()) {
                                    exportLog.debug("Received RELEASE_BUFFER message for " + eds.toString()
                                            + " , sequence number: " + seqNo + ", catalogVersion: " + catalogVersion
                                            + " from " + CoreUtils.hsIdToString(message.m_sourceHSId)
                                            + " to " + CoreUtils.hsIdToString(m_mbox.getHSId()));
                                }
                                eds.remoteAck(seqNo);
                            } catch (RejectedExecutionException ignoreIt) {
                                // ignore it: as it is already shutdown
                            }
                        } else if (msgType == ExportManager.GIVE_MASTERSHIP) {
                            // Mastership hand-off: ack the final sequence number, then take over.
                            final long ackSeqNo = buf.getLong();
                            try {
                                if (exportLog.isDebugEnabled()) {
                                    exportLog.debug("Received GIVE_MASTERSHIP message for " + eds.toString()
                                            + " with sequence number:" + ackSeqNo
                                            + " from " + CoreUtils.hsIdToString(message.m_sourceHSId)
                                            + " to " + CoreUtils.hsIdToString(m_mbox.getHSId()));
                                }
                                eds.remoteAck(ackSeqNo);
                            } catch (RejectedExecutionException ignoreIt) {
                                // ignore it: as it is already shutdown
                            }
                            eds.acceptMastership();
                        } else if (msgType == ExportManager.GAP_QUERY) {
                            final long requestId = buf.getLong();
                            long gapStart = buf.getLong();
                            if (exportLog.isDebugEnabled()) {
                                exportLog.debug("Received GAP_QUERY message(" + requestId + ") for " + eds.toString()
                                        + " from " + CoreUtils.hsIdToString(message.m_sourceHSId)
                                        + " to " + CoreUtils.hsIdToString(m_mbox.getHSId()));
                            }
                            eds.handleQueryMessage(message.m_sourceHSId, requestId, gapStart);
                        } else if (msgType == ExportManager.QUERY_RESPONSE) {
                            final long requestId = buf.getLong();
                            final long lastSeq = buf.getLong();
                            if (exportLog.isDebugEnabled()) {
                                exportLog.debug("Received QUERY_RESPONSE message(" + requestId + "," + lastSeq
                                        + ") for " + eds.toString()
                                        + " from " + CoreUtils.hsIdToString(message.m_sourceHSId)
                                        + " to " + CoreUtils.hsIdToString(m_mbox.getHSId()));
                            }
                            eds.handleQueryResponse(message.m_sourceHSId, requestId, lastSeq);
                        } else if (msgType == ExportManager.TAKE_MASTERSHIP) {
                            final long requestId = buf.getLong();
                            if (exportLog.isDebugEnabled()) {
                                exportLog.debug("Received TAKE_MASTERSHIP message(" + requestId + ") for "
                                        + eds.toString()
                                        + " from " + CoreUtils.hsIdToString(message.m_sourceHSId)
                                        + " to " + CoreUtils.hsIdToString(m_mbox.getHSId()));
                            }
                            eds.handleTakeMastershipMessage(message.m_sourceHSId, requestId);
                        } else if (msgType == ExportManager.TAKE_MASTERSHIP_RESPONSE) {
                            final long requestId = buf.getLong();
                            if (exportLog.isDebugEnabled()) {
                                exportLog.debug("Received TAKE_MASTERSHIP_RESPONSE message(" + requestId + ") for "
                                        + eds.toString()
                                        + " from " + CoreUtils.hsIdToString(message.m_sourceHSId)
                                        + " to " + CoreUtils.hsIdToString(m_mbox.getHSId()));
                            }
                            eds.handleTakeMastershipResponse(message.m_sourceHSId, requestId);
                        } else {
                            exportLog.error("Receive unsupported message type " + message + " in export subsystem");
                        }
                    } else {
                        exportLog.error("Receive unexpected message " + message + " in export subsystem");
                    }
                }
            };
            m_messenger.createMailbox(null, m_mbox);
        }
        // Rejoining node may receives gap query message before childUpdating thread gets back result,
        // in case it couldn't find local mailbox to send back response, add local mailbox to the list first.
        for (Integer partition : localPartitions) {
            updateAckMailboxes(partition, null);
        }
        // Update latest replica list to each data source.
        updateReplicaList(localPartitions);
    }
}
public class RandomCollectionUtils {
    /**
     * Returns a list filled randomly from the given elements.
     *
     * @param elements elements to randomly fill list from
     * @param size range that the size of the list will be randomly chosen from
     * @param <T> the type of elements in the given iterable
     * @return list filled randomly from the given elements
     * @throws IllegalArgumentException if the elements to fill list from is empty or if the size
     *         range contains negative integers
     */
    public static <T> List<T> randomListFrom(Iterable<T> elements, Range<Integer> size) {
        // Fail fast on an empty source instead of failing later inside the supplier.
        checkArgument(!isEmpty(elements), "Elements to populate from must not be empty");
        // Delegate to the supplier-based overload; each element is drawn independently.
        return randomListFrom(() -> IterableUtils.randomFrom(elements), size);
    }
}
public class BackupConsole {
    /**
     * Check is "/repo" or "/repo/ws" parameter.
     *
     * @param parameter the parameter string; backslashes are normalized to forward slashes
     * @return true if it is a "/repo" or "/repo/ws" parameter
     */
    private static boolean isRepoWS(String parameter) {
        // Normalize Windows-style separators before matching.
        String repWS = parameter.replaceAll("\\\\", "/");
        // Either exactly one path segment ("/repo") or exactly two ("/repo/ws").
        // (The original wrapped this in a redundant negated if/else.)
        return repWS.matches("[/][^/]+") || repWS.matches("[/][^/]+[/][^/]+");
    }
}
public class Agent { /** * / * Methods required to extend JGroupsJobManager . */
@ Override public BubingJob fromString ( final String s ) { } } | final URI url = BURL . parse ( s ) ; if ( url != null && url . isAbsolute ( ) ) return new BubingJob ( ByteArrayList . wrap ( BURL . toByteArray ( url ) ) ) ; throw new IllegalArgumentException ( ) ; |
public class TableManipulationConfigurationBuilder {
    /**
     * The name of the database column used to store the entries.
     *
     * @param dataColumnName the column name to use
     * @return this builder, for chaining
     */
    public S dataColumnName(String dataColumnName) {
        attributes.attribute(DATA_COLUMN_NAME).set(dataColumnName);
        return self();
    }
}
public class Configuration { /** * Enforce the fact that a parameter is mandatory
* @ param name The name of the parameter
* @ return The value found
* @ throws RuntimeException When a mandatory parameter is missing */
private String getMandatory ( String name ) { } } | if ( ! config . containsKey ( name ) ) { throw new RuntimeException ( name + " parameter is missing." ) ; } else { return config . getString ( name ) ; } |
public class DiscoveryClientConfiguration {
    /**
     * Sets the Discovery servers to use for the default zone.
     *
     * @param defaultZone The default zone
     */
    public void setDefaultZone(List<URL> defaultZone) {
        // Map each URL to a URI, then wrap it in a ServiceInstance tagged with this
        // client's service id; the resulting list replaces the stored default zone.
        this.defaultZone = defaultZone.stream().map(uriMapper()).map(uri -> ServiceInstance.builder(getServiceID(), uri).build()).collect(Collectors.toList());
    }
}
public class RoleResource1 {
    /**
     * Returns all roles for which the caller has read access. Since the number of roles is typically low
     * this call does not support "from" or "limit" parameters similar to the system or record.
     *
     * @param subject the authenticated caller; read-access filtering happens inside the UAC service
     * @return an iterator over the visible roles
     */
    @GET
    public Iterator<EmoRole> getAllRoles(final @Authenticated Subject subject) {
        // Pure delegation to the user-access-control service.
        return _uac.getAllRoles(subject);
    }
}
public class CmsEditSearchIndexDialog { /** * Returns the rebuild mode widget configuration . < p >
* @ return the rebuild mode widget configuration */
private List < CmsSelectWidgetOption > getRebuildModeWidgetConfiguration ( ) { } } | List < CmsSelectWidgetOption > result = new ArrayList < CmsSelectWidgetOption > ( ) ; String rebuildMode = getSearchIndexIndex ( ) . getRebuildMode ( ) ; result . add ( new CmsSelectWidgetOption ( "auto" , "auto" . equals ( rebuildMode ) ) ) ; result . add ( new CmsSelectWidgetOption ( "manual" , "manual" . equals ( rebuildMode ) ) ) ; result . add ( new CmsSelectWidgetOption ( "offline" , "offline" . equals ( rebuildMode ) ) ) ; return result ; |
public class JSMessageImpl {
    /**
     * Invalidates the cached schemata for this message tree.
     * Locking: The caller is expected to hold the lock.
     */
    void invalidateSchemaCache() {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) JmfTr.entry(this, tc, "invalidateSchemaCache");
        // If this is the master message, clear the cache....
        if (isMaster()) {
            schemata = null;
        }
        // ... otherwise call on up the tree.
        else {
            getParent().invalidateSchemaCache();
        }
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) JmfTr.exit(this, tc, "invalidateSchemaCache");
    }
}
public class JavaBackedType { /** * If method m is annotated with FEELProperty , will return FEELProperty . value , otherwise empty . */
private static Optional < String > methodToCustomProperty ( Method m ) { } } | return Optional . ofNullable ( m . getAnnotation ( FEELProperty . class ) ) . map ( a -> a . value ( ) ) ; |
public class StackBenchmark { /** * Bench for pushing the data to the { @ link FastIntStack } . */
@ Bench ( runs = RUNS ) public void benchFastIntPush ( ) { } } | fastInt = new FastIntStack ( ) ; for ( final int i : intData ) { fastInt . push ( i ) ; } |
public class SchemaRepositoryParser { /** * Decides whether the given element is a xml schema repository .
* Note :
* If the " type " attribute has not been set , the repository is interpreted as a xml repository by definition .
* This is important to guarantee downwards compatibility .
* @ param element The element to be checked
* @ return Whether the given element is a xml schema repository */
private boolean isXmlSchemaRepository ( Element element ) { } } | String schemaRepositoryType = element . getAttribute ( "type" ) ; return StringUtils . isEmpty ( schemaRepositoryType ) || "xml" . equals ( schemaRepositoryType ) ; |
public class DateBackwardHandler {
    /**
     * If the current date of the given calculator is a non-working day, it will
     * be moved according to the algorithm implemented.
     *
     * @param calculator the calculator
     * @return the date which may have moved.
     */
    @Override
    public Date moveCurrentDate(final BaseCalculator<Date> calculator) {
        // Backward handler: adjust by -1 day (presumably repeatedly until a working
        // day is reached — confirm in adjustDate).
        return adjustDate(calculator.getCurrentBusinessDate(), -1, calculator);
    }
}
public class DSResultIterator { /** * ( non - Javadoc )
* @ see com . impetus . client . cassandra . query . ResultIterator # next ( ) */
@ Override public E next ( ) { } } | if ( results != null && ! results . isEmpty ( ) && count < results . size ( ) ) { current = results . get ( count ++ ) ; return current ; } else { throw new NoSuchElementException ( "No object found in the iterator... Use hasNext() to check for valid next()" ) ; } |
public class GrailsWebRequest { /** * Looks up the current Grails WebRequest instance
* @ return The GrailsWebRequest instance */
public static @ Nullable GrailsWebRequest lookup ( ) { } } | GrailsWebRequest webRequest = null ; RequestAttributes requestAttributes = RequestContextHolder . getRequestAttributes ( ) ; if ( requestAttributes instanceof GrailsWebRequest ) { webRequest = ( GrailsWebRequest ) requestAttributes ; } return webRequest ; |
public class DateUtils {
    /**
     * Get how many seconds between two date.
     *
     * @param date1 date to be tested.
     * @param date2 date to be tested.
     * @return how many seconds between two date.
     */
    public static long subSeconds(final Date date1, final Date date2) {
        // Delegate to the generic difference helper with second granularity.
        return subTime(date1, date2, DatePeriod.SECOND);
    }
}
public class ConfigurationsInner {
    /**
     * Configures the HTTP settings on the specified cluster. This API is deprecated, please use
     * UpdateGatewaySettings in cluster endpoint instead.
     *
     * @param resourceGroupName The name of the resource group.
     * @param clusterName The name of the cluster.
     * @param configurationName The name of the cluster configuration.
     * @param parameters The cluster configurations.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable for the request
     */
    public Observable<ServiceResponse<Void>> updateWithServiceResponseAsync(String resourceGroupName, String clusterName, String configurationName, Map<String, String> parameters) {
        // Validate every required argument up front (generated-client convention) so the
        // service call is never started with bad input.
        if (this.client.subscriptionId() == null) {
            throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
        }
        if (resourceGroupName == null) {
            throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
        }
        if (clusterName == null) {
            throw new IllegalArgumentException("Parameter clusterName is required and cannot be null.");
        }
        if (configurationName == null) {
            throw new IllegalArgumentException("Parameter configurationName is required and cannot be null.");
        }
        if (this.client.apiVersion() == null) {
            throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null.");
        }
        if (parameters == null) {
            throw new IllegalArgumentException("Parameter parameters is required and cannot be null.");
        }
        Validator.validate(parameters);
        // Issue the update, then adapt the long-running-operation result to a Void response.
        Observable<Response<ResponseBody>> observable = service.update(this.client.subscriptionId(), resourceGroupName, clusterName, configurationName, this.client.apiVersion(), parameters, this.client.acceptLanguage(), this.client.userAgent());
        return client.getAzureClient().getPostOrDeleteResultAsync(observable, new TypeToken<Void>() { }.getType());
    }
}
public class ModelSqlUtils { /** * use getter to guess column name , if there is annotation then use annotation value , if not then guess from field name
* @ param getter
* @ param f
* @ return
* @ throws NoColumnAnnotationFoundException */
private static String getColumnNameFromGetter ( Method getter , Field f ) { } } | String columnName = "" ; Column columnAnno = getter . getAnnotation ( Column . class ) ; if ( columnAnno != null ) { // 如果是列注解就读取name属性
columnName = columnAnno . name ( ) ; } if ( columnName == null || "" . equals ( columnName ) ) { // 如果没有列注解就用命名方式去猜
columnName = IdUtils . toUnderscore ( f . getName ( ) ) ; } return columnName ; |
public class CRDTMigrationTask {
    /**
     * Performs migration of a {@link CRDTReplicationAwareService} to the given target.
     *
     * @param service the service to migrate
     * @param target the target to migrate to
     * @param maxConfiguredReplicaCount the maximum configured replica count
     *        for the CRDTs to be migrated (excluding)
     * @return true when everything was migrated (or nothing needed migrating); false when
     *         the migration must be rescheduled
     * @see CRDTReplicationAwareService
     */
    private boolean migrate(CRDTReplicationAwareService service, Member target, int maxConfiguredReplicaCount) {
        // Bail out promptly if this migration thread was interrupted.
        if (Thread.currentThread().isInterrupted()) {
            return false;
        }
        final OperationService operationService = nodeEngine.getOperationService();
        final CRDTReplicationContainer migrationOperation = service.prepareMigrationOperation(maxConfiguredReplicaCount);
        // Nothing to migrate counts as success.
        if (migrationOperation == null) {
            logger.finest("Skipping migration of " + service.getName() + " for target " + target);
            return true;
        }
        try {
            logger.finest("Migrating " + service.getName() + " to " + target);
            // Synchronously push the captured CRDT state to the target member.
            operationService.invokeOnTarget(null, migrationOperation.getOperation(), target.getAddress()).join();
            // Clear only the state captured by the vector clocks; concurrent mutations survive
            // and cause the migration to be rescheduled.
            final boolean allMigrated = service.clearCRDTState(migrationOperation.getVectorClocks());
            if (!allMigrated) {
                logger.fine(service.getName() + " CRDTs have been mutated since migrated to target " + target
                        + ". Rescheduling migration in " + MIGRATION_RETRY_DELAY_SECONDS + " second(s).");
            }
            return allMigrated;
        } catch (Exception e) {
            // Log at the appropriate level (stack trace only when fine logging is on) and
            // report failure so the caller reschedules.
            if (logger.isFineEnabled()) {
                logger.fine("Failed migration of " + service.getName() + " for target " + target
                        + ". Rescheduling migration in " + MIGRATION_RETRY_DELAY_SECONDS + " second(s).", e);
            } else {
                logger.info("Failed migration of " + service.getName() + " for target " + target
                        + ". Rescheduling migration in " + MIGRATION_RETRY_DELAY_SECONDS + " second(s).");
            }
            return false;
        }
    }
}
public class GroveQTouch_Example {
    /**
     * Prints out the pressed button numbers (0 through 6) encoded in the given bitmask,
     * or "None" when no bit is set.
     *
     * @param buttonNumber bitmask with one bit per button
     */
    public static void printButtons(int buttonNumber) {
        System.out.print("Button Pressed: ");
        boolean anyPressed = false;
        for (int bit = 0; bit < 7; bit++) {
            boolean isSet = (buttonNumber & (1 << bit)) != 0;
            if (isSet) {
                System.out.println(bit + " ");
                anyPressed = true;
            }
        }
        if (!anyPressed) {
            System.out.println("None ");
        }
    }
}
public class SpdyStreamStatus {
    /**
     * Returns the {@link SpdyStreamStatus} represented by the specified code.
     * If the specified code is a defined SPDY status code, a cached instance
     * will be returned. Otherwise, a new instance will be returned.
     */
    public static SpdyStreamStatus valueOf(int code) {
        // 0 is reserved: a RST_STREAM must carry a non-zero status code.
        if (code == 0) {
            throw new IllegalArgumentException("0 is not a valid status code for a RST_STREAM");
        }
        // Defined SPDY status codes map to the shared, cached instances.
        switch (code) {
        case 1:
            return PROTOCOL_ERROR;
        case 2:
            return INVALID_STREAM;
        case 3:
            return REFUSED_STREAM;
        case 4:
            return UNSUPPORTED_VERSION;
        case 5:
            return CANCEL;
        case 6:
            return INTERNAL_ERROR;
        case 7:
            return FLOW_CONTROL_ERROR;
        case 8:
            return STREAM_IN_USE;
        case 9:
            return STREAM_ALREADY_CLOSED;
        case 10:
            return INVALID_CREDENTIALS;
        case 11:
            return FRAME_TOO_LARGE;
        }
        // Unknown codes get a freshly allocated instance.
        return new SpdyStreamStatus(code, "UNKNOWN (" + code + ')');
    }
}
public class Trace { /** * Put spans with null endpoints first , so that their data can be attached to the first span with
* the same ID and endpoint . It is possible that a server can get the same request on a different
* port . Not addressing this . */
static int compareEndpoint ( Endpoint left , Endpoint right ) { } } | if ( left == null ) { // nulls first
return ( right == null ) ? 0 : - 1 ; } else if ( right == null ) { return 1 ; } int byService = nullSafeCompareTo ( left . serviceName ( ) , right . serviceName ( ) , false ) ; if ( byService != 0 ) return byService ; int byIpV4 = nullSafeCompareTo ( left . ipv4 ( ) , right . ipv4 ( ) , false ) ; if ( byIpV4 != 0 ) return byIpV4 ; return nullSafeCompareTo ( left . ipv6 ( ) , right . ipv6 ( ) , false ) ; |
public class MessageSetImpl {
    /**
     * Gets messages in the given channel before a given message in any channel while they meet the
     * given condition. If the first message does not match the condition, an empty set is returned.
     *
     * @param channel The channel of the messages.
     * @param condition The condition that has to be met.
     * @param before Get messages before the message with this id.
     * @return The messages.
     * @see #getMessagesBeforeAsStream(TextChannel, long)
     */
    public static CompletableFuture<MessageSet> getMessagesBeforeWhile(TextChannel channel, Predicate<Message> condition, long before) {
        // Delegate with an unbounded limit (-1): keep fetching while the condition holds.
        return getMessagesWhile(channel, condition, before, -1);
    }
}
public class SystemInputs {
    /**
     * Maps every property in the given FunctionInputDef to the conditional elements that reference it.
     *
     * @param function the function input definition to scan
     * @return for each property name, the set of conditionals whose condition references it
     */
    public static Map<String, Collection<IConditional>> getPropertyReferences(FunctionInputDef function) {
        // Invert the conditional -> properties relation into property -> conditionals,
        // using a set-valued multimap to deduplicate.
        SetValuedMap<String, IConditional> refs = MultiMapUtils.newSetValuedHashMap();
        conditionals(function).forEach(conditional -> propertiesReferenced(conditional.getCondition()).forEach(p -> refs.put(p, conditional)));
        return refs.asMap();
    }
}
public class Main {
    /**
     * Parse command-line arguments and start server.
     * Expects exactly three arguments: listen port, Dropbox app-info JSON file, and
     * the database file path. Exits with status 1 on any argument or startup error.
     */
    public static void main(String[] args) throws IOException {
        // Exactly three positional arguments are required; otherwise print usage and exit.
        if (args.length != 3) {
            System.out.println("");
            System.out.println("Usage: COMMAND <http-listen-port> <app-info-file> <database-file>");
            System.out.println("");
            System.out.println(" <http-listen-port>: The port to run the HTTP server on. For example,");
            System.out.println(" \"8080\").");
            System.out.println("");
            System.out.println(" <app-info-file>: A JSON file containing your Dropbox API app key, secret");
            System.out.println(" and access type. For example, \"my-app.app\" with:");
            System.out.println("");
            System.out.println(" {");
            System.out.println(" \"key\": \"Your Dropbox app key...\",");
            System.out.println(" \"secret\": \"Your Dropbox app secret...\"");
            System.out.println(" }");
            System.out.println("");
            System.out.println(" <database-file>: Where you want this program to store its database. For");
            System.out.println(" example, \"web-file-browser.db\".");
            System.out.println("");
            System.exit(1);
            return;
        }
        String argPort = args[0];
        String argAppInfo = args[1];
        String argDatabase = args[2];
        // Figure out what port to listen on.
        int port;
        try {
            port = Integer.parseInt(argPort);
            if (port < 1 || port > 65535) {
                System.err.println("Expecting <http-listen-port> to be a number from 1 to 65535. Got: " + port + ".");
                System.exit(1);
                return;
            }
        } catch (NumberFormatException ex) {
            System.err.println("Expecting <http-listen-port> to be a number from 1 to 65535. Got: " + jq(argPort) + ".");
            System.exit(1);
            return;
        }
        // Read app info file (contains app key and app secret)
        DbxAppInfo dbxAppInfo;
        try {
            dbxAppInfo = DbxAppInfo.Reader.readFromFile(argAppInfo);
        } catch (JsonReader.FileLoadException ex) {
            System.err.println("Error loading <app-info-file>: " + ex.getMessage());
            System.exit(1);
            return;
        }
        System.out.println("Loaded app info from " + jq(argAppInfo));
        File dbFile = new File(argDatabase);
        // Run server
        try {
            Main main = new Main(new PrintWriter(System.out, true), dbxAppInfo, dbFile);
            Server server = new Server(port);
            // Wrap the handler in a session handler so per-user state survives requests.
            SessionHandler sessionHandler = new SessionHandler();
            sessionHandler.setServer(server);
            sessionHandler.setHandler(main);
            server.setHandler(sessionHandler);
            server.start();
            System.out.println("Server running: http://localhost:" + port + "/");
            server.join();
        } catch (Exception ex) {
            System.err.println("Error running server: " + ex.getMessage());
            System.exit(1);
        }
    }
}
public class JacksonJsonNode { /** * Maps the json represented by this object to a java object of the given type .
* @ throws SpinJsonException if the json representation cannot be mapped to the specified type */
public < C > C mapTo ( Class < C > type ) { } } | DataFormatMapper mapper = dataFormat . getMapper ( ) ; return mapper . mapInternalToJava ( jsonNode , type ) ; |
public class SetupProjectProcess { /** * PopulateSourceDir Method . */
public boolean populateSourceDir ( String templateDir ) { } } | URL fromDirUrl = this . getTask ( ) . getApplication ( ) . getResourceURL ( templateDir , null ) ; if ( "jar" . equalsIgnoreCase ( fromDirUrl . getProtocol ( ) ) ) { // Copy jar files
try { String fileName = fromDirUrl . getFile ( ) ; if ( fileName . lastIndexOf ( ':' ) != - 1 ) fileName = fileName . substring ( fileName . lastIndexOf ( ':' ) + 1 ) ; if ( fileName . lastIndexOf ( '!' ) != - 1 ) fileName = fileName . substring ( 0 , fileName . lastIndexOf ( '!' ) ) ; InputStream inStream = new FileInputStream ( fileName ) ; JarInputStream file = new JarInputStream ( inStream ) ; ZipEntry entry = null ; while ( ( entry = file . getNextEntry ( ) ) != null ) { String path = entry . getName ( ) ; if ( path . contains ( "META-INF" ) ) continue ; if ( ! path . endsWith ( "pom.xml" ) ) continue ; if ( path . startsWith ( templateDir ) ) path = path . substring ( templateDir . length ( ) ) ; int lStreamLength = ( int ) entry . getSize ( ) ; String xml = this . transferStream ( file , lStreamLength ) ; this . convertAndWriteXML ( xml , path ) ; } } catch ( IOException ex ) { ex . printStackTrace ( ) ; } } return true ; |
public class PathMessageBodyWriter {
    /**
     * {@inheritDoc}
     *
     * Streams the file at {@code path} into the response entity stream. The input
     * stream is closed by try-with-resources; the entity stream is deliberately left
     * open — the container owns it.
     */
    @Override
    public void writeTo(Path path, Class<?> type, Type genericType, Annotation[] annotations, MediaType mediaType, MultivaluedMap<String, Object> httpHeaders, OutputStream entityStream) throws IOException, WebApplicationException {
        try (InputStream in = Files.newInputStream(path)) {
            ReaderWriter.writeTo(in, entityStream);
        }
    }
}
public class AbstractReadableProperty {
    /**
     * Notifies the listeners that the property value has changed, if the property is not inhibited.
     *
     * @param oldValue Previous value.
     * @param newValue New value.
     * @see #maybeNotifyListeners(Object, Object)
     * @see #doNotifyListeners(Object, Object)
     */
    private void notifyListenersIfUninhibited(R oldValue, R newValue) {
        if (inhibited) {
            // While inhibited: count the swallowed notification and remember the latest value.
            inhibitCount++;
            lastInhibitedValue = newValue;
        } else {
            lastInhibitedValue = newValue; // Just in case, even though not really necessary
            lastNonInhibitedValue = newValue;
            doNotifyListeners(oldValue, newValue);
        }
    }
}
public class OpenSslSessionStats { /** * Returns the number of sessions proposed by clients that were not found in the internal session cache
* in server mode . */
public long misses ( ) { } } | Lock readerLock = context . ctxLock . readLock ( ) ; readerLock . lock ( ) ; try { return SSLContext . sessionMisses ( context . ctx ) ; } finally { readerLock . unlock ( ) ; } |
public class TrainingSpecification { /** * A list of the instance types that this algorithm can use for training .
* < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use
* { @ link # setSupportedTrainingInstanceTypes ( java . util . Collection ) } or
* { @ link # withSupportedTrainingInstanceTypes ( java . util . Collection ) } if you want to override the existing values .
* @ param supportedTrainingInstanceTypes
* A list of the instance types that this algorithm can use for training .
* @ return Returns a reference to this object so that method calls can be chained together .
* @ see TrainingInstanceType */
public TrainingSpecification withSupportedTrainingInstanceTypes ( String ... supportedTrainingInstanceTypes ) { } } | if ( this . supportedTrainingInstanceTypes == null ) { setSupportedTrainingInstanceTypes ( new java . util . ArrayList < String > ( supportedTrainingInstanceTypes . length ) ) ; } for ( String ele : supportedTrainingInstanceTypes ) { this . supportedTrainingInstanceTypes . add ( ele ) ; } return this ; |
public class ProcyonDecompiler {
    /**
     * Default settings: sets the type loader to ClasspathTypeLoader if not set before.
     *
     * @param outputDir directory the decompiled sources are written to
     * @return the configured settings, also registered on {@code procyonConf}
     */
    private DecompilerSettings getDefaultSettings(File outputDir) {
        DecompilerSettings settings = new DecompilerSettings();
        // Register the settings on the shared configuration before tweaking them.
        procyonConf.setDecompilerSettings(settings);
        settings.setOutputDirectory(outputDir.getPath());
        settings.setShowSyntheticMembers(false);
        settings.setForceExplicitImports(true);
        // Fall back to a classpath-based type loader when none was configured.
        if (settings.getTypeLoader() == null)
            settings.setTypeLoader(new ClasspathTypeLoader());
        return settings;
    }
}
public class CmsMacroFormatterResolver { /** * Resolves the macro . < p >
* @ throws IOException in case writing to the page context output stream fails
* @ throws CmsException in case reading the macro settings fails */
public void resolve ( ) throws IOException , CmsException { } } | initMacroContent ( ) ; String input = getMacroInput ( ) ; if ( input == null ) { return ; } if ( input . length ( ) < 3 ) { // macro must have at last 3 chars " $ { } " or " % ( ) "
m_context . getOut ( ) . print ( input ) ; return ; } int newDelimPos = input . indexOf ( I_CmsMacroResolver . MACRO_DELIMITER ) ; int oldDelomPos = input . indexOf ( I_CmsMacroResolver . MACRO_DELIMITER_OLD ) ; if ( ( oldDelomPos == - 1 ) && ( newDelimPos == - 1 ) ) { // no macro delimiter found in input
m_context . getOut ( ) . print ( input ) ; return ; } int len = input . length ( ) ; int nextDelimPos , delimPos1 , delimPos2 , endPos ; String macro ; char startChar , endChar ; int delimPos ; if ( ( oldDelomPos == - 1 ) || ( ( newDelimPos > - 1 ) && ( newDelimPos < oldDelomPos ) ) ) { delimPos = newDelimPos ; startChar = I_CmsMacroResolver . MACRO_START ; endChar = I_CmsMacroResolver . MACRO_END ; } else { delimPos = oldDelomPos ; startChar = I_CmsMacroResolver . MACRO_START_OLD ; endChar = I_CmsMacroResolver . MACRO_END_OLD ; } // append chars before the first delimiter found
m_context . getOut ( ) . print ( input . substring ( 0 , delimPos ) ) ; do { delimPos1 = delimPos + 1 ; delimPos2 = delimPos1 + 1 ; if ( delimPos2 >= len ) { // remaining chars can ' t be a macro ( minimum size is 3)
m_context . getOut ( ) . print ( input . substring ( delimPos , len ) ) ; break ; } // get the next macro delimiter
if ( ( newDelimPos > - 1 ) && ( newDelimPos < delimPos1 ) ) { newDelimPos = input . indexOf ( I_CmsMacroResolver . MACRO_DELIMITER , delimPos1 ) ; } if ( ( oldDelomPos > - 1 ) && ( oldDelomPos < delimPos1 ) ) { oldDelomPos = input . indexOf ( I_CmsMacroResolver . MACRO_DELIMITER_OLD , delimPos1 ) ; } if ( ( oldDelomPos == - 1 ) && ( newDelimPos == - 1 ) ) { // none found , make sure remaining chars in this segment are appended
nextDelimPos = len ; } else { // check if the next delimiter is old or new style
if ( ( oldDelomPos == - 1 ) || ( ( newDelimPos > - 1 ) && ( newDelimPos < oldDelomPos ) ) ) { nextDelimPos = newDelimPos ; } else { nextDelimPos = oldDelomPos ; } } // check if the next char is a " macro start "
char start = input . charAt ( delimPos1 ) ; if ( start == startChar ) { // we have a starting macro sequence " $ { " or " % ( " , now check if this segment contains a " } " or " ) "
endPos = input . indexOf ( endChar , delimPos ) ; if ( ( endPos > 0 ) && ( endPos < nextDelimPos ) ) { // this segment contains a closing macro delimiter " } " or " ] " , so we may have found a macro
macro = input . substring ( delimPos2 , endPos ) ; // resolve macro
try { printMacroValue ( macro ) ; } catch ( Exception ex ) { LOG . error ( "Writing value for macro '" + macro + "' failed." , ex ) ; } endPos ++ ; } else { // no complete macro " $ { . . . } " or " % ( . . . ) " in this segment
endPos = delimPos ; } } else { // no macro start char after the " $ " or " % "
endPos = delimPos ; } // set macro style for next delimiter found
if ( nextDelimPos == newDelimPos ) { startChar = I_CmsMacroResolver . MACRO_START ; endChar = I_CmsMacroResolver . MACRO_END ; } else { startChar = I_CmsMacroResolver . MACRO_START_OLD ; endChar = I_CmsMacroResolver . MACRO_END_OLD ; } // append the remaining chars after the macro to the start of the next macro
m_context . getOut ( ) . print ( input . substring ( endPos , nextDelimPos ) ) ; delimPos = nextDelimPos ; } while ( delimPos < len ) ; |
public class MethodCompiler { /** * Add new local variable
* @ param name
* @ param type */
public void addVariable ( String name , TypeMirror type ) { } } | localVariables . add ( new LocalVariable ( executableElement , type , name ) ) ; |
public class NorwegianDateUtil { /** * Check if the given date represents the given date and month .
* @ param cal
* The Calendar object representing date to check .
* @ param date
* The date .
* @ param month
* The month .
* @ return true if they match , false otherwise . */
private static boolean checkDate ( Calendar cal , int date , int month ) { } } | return cal . get ( Calendar . DATE ) == date && cal . get ( Calendar . MONTH ) == month ; |
public class SpecificationAction { /** * < p > getRenderedResults . < / p >
* @ return a { @ link java . lang . String } object . */
public String getRenderedResults ( ) { } } | String results = execution . getResults ( ) ; if ( results != null ) { results = results . replaceAll ( "greenpepper-manage-not-rendered" , "greenpepper-manage" ) ; results = results . replaceAll ( "greenpepper-hierarchy-not-rendered" , "greenpepper-hierarchy" ) ; results = results . replaceAll ( "greenpepper-children-not-rendered" , "greenpepper-children" ) ; results = results . replaceAll ( "greenpepper-labels-not-rendered" , "greenpepper-labels" ) ; results = results . replaceAll ( "greenpepper-group-not-rendered" , "greenpepper-group" ) ; results = results . replaceAll ( "Unknown macro:" , "" ) ; return HtmlUtil . cleanUpResults ( results ) ; } return null ; |
public class SetUtils { /** * Return is s1 \ s2 */
public static < T > Set < T > difference ( Collection < ? extends T > s1 , Collection < ? extends T > s2 ) { } } | Set < T > s3 = new HashSet < > ( s1 ) ; s3 . removeAll ( s2 ) ; return s3 ; |
public class A_CmsPublishGroupHelper { /** * Gets the difference in days between to dates given as longs . < p >
* The first date must be later than the second date .
* @ param first the first date
* @ param second the second date
* @ return the difference between the two dates in days */
public int getDayDifference ( long first , long second ) { } } | Calendar firstDay = getStartOfDay ( first ) ; Calendar secondDay = getStartOfDay ( second ) ; int result = 0 ; if ( first >= second ) { while ( firstDay . after ( secondDay ) ) { firstDay . add ( Calendar . DAY_OF_MONTH , - 1 ) ; result += 1 ; } } else { while ( secondDay . after ( firstDay ) ) { secondDay . add ( Calendar . DAY_OF_MONTH , - 1 ) ; result -= 1 ; } } return result ; |
public class StaticFilesConfiguration { /** * Clears all static file configuration */
public void clear ( ) { } } | if ( staticResourceHandlers != null ) { staticResourceHandlers . clear ( ) ; staticResourceHandlers = null ; } staticResourcesSet = false ; externalStaticResourcesSet = false ; |
public class ProxyHandler { /** * Is scheme , host & port Forbidden .
* @ param scheme A scheme that mast be in the proxySchemes StringMap .
* @ param host A host that must pass the white and black lists
* @ param port A port that must in the allowedConnectPorts Set
* @ param openNonPrivPorts If true ports greater than 1024 are allowed .
* @ return True if the request to the scheme , host and port is not forbidden . */
protected boolean isForbidden ( String scheme , String host , int port , boolean openNonPrivPorts ) { } } | // Check port
Integer p = new Integer ( port ) ; if ( port > 0 && ! _allowedConnectPorts . contains ( p ) ) { if ( ! openNonPrivPorts || port <= 1024 ) return true ; } // Must be a scheme that can be proxied .
if ( scheme == null || ! _ProxySchemes . containsKey ( scheme ) ) return true ; // Must be in any defined white list
if ( _proxyHostsWhiteList != null && ! _proxyHostsWhiteList . contains ( host ) ) return true ; // Must not be in any defined black list
if ( _proxyHostsBlackList != null && _proxyHostsBlackList . contains ( host ) ) return true ; return false ; |
public class MaskImpl { /** * Method insert { @ code input } to the buffer . Only validated characters would be inserted .
* Hardcoded slots are omitted . Method returns new cursor position that is affected by input
* and
* { @ code cursorAfterTrailingHardcoded } flag . In most cases if input string is followed by a
* sequence of hardcoded characters we should place cursor after them . But this behaviour can
* be
* modified by { @ code cursorAfterTrailingHardcoded } flag .
* @ param position from which position to begin input
* @ param input string to insert
* @ param cursorAfterTrailingHardcoded when input is followed by a hardcoded characters
* sequence
* then this flag defines whether new cursor position
* should
* be after or before them
* @ return cursor position after insert */
@ Override public int insertAt ( final int position , @ Nullable final CharSequence input , boolean cursorAfterTrailingHardcoded ) { } } | if ( slots . isEmpty ( ) || ! slots . checkIsIndex ( position ) || input == null || input . length ( ) == 0 ) { return position ; } showHardcodedTail = true ; int cursorPosition = position ; Slot slotCandidate = slots . getSlot ( position ) ; if ( forbidInputWhenFilled && filledFrom ( slotCandidate ) ) { return position ; } Deque < Character > inStack = dequeFrom ( input ) ; while ( ! inStack . isEmpty ( ) ) { char newValue = inStack . pop ( ) ; // find index offset to the next slot we can input current character to
final SlotIndexOffset slotForInputIndex = validSlotIndexOffset ( slotCandidate , newValue ) ; // if there were any non - hardcoded slots skipped while looking for next slot offset
// and we don ' t allow ' spots ' in the input - we should stop inserting right now
if ( ! showingEmptySlots && slotForInputIndex . nonHarcodedSlotSkipped ) { break ; } cursorPosition += slotForInputIndex . indexOffset ; final Slot slotForInput = slots . getSlot ( cursorPosition ) ; if ( slotForInput != null ) { slotCandidate = slotForInput ; final int insertOffset = slotCandidate . setValue ( newValue , slotForInputIndex . indexOffset > 0 ) ; cursorPosition += insertOffset ; slotCandidate = slots . getSlot ( cursorPosition ) ; if ( ! terminated && emptySlotsOnTail ( ) < 1 ) { extendTail ( 1 ) ; } } } if ( cursorAfterTrailingHardcoded ) { int hardcodedTailLength = 0 ; if ( slotCandidate != null ) { hardcodedTailLength = slotCandidate . hardcodedSequenceEndIndex ( ) ; } if ( hardcodedTailLength > 0 ) { cursorPosition += hardcodedTailLength ; } } // allow hardcoded tail be visible only if we ' ve inserted at the end of the input
final Slot nextSlot = slots . getSlot ( cursorPosition ) ; showHardcodedTail = nextSlot == null || ! nextSlot . anyInputToTheRight ( ) ; return cursorPosition ; |
public class VodClient { /** * Load a media resource from URL to VOD .
* @ param sourceUrl The source url of the media resource
* @ param title The title string of the media resource
* @ param description The description string of the media resource
* @ param transcodingPresetGroupName set transcoding presetgroup name , if NULL , use default
* @ param priority set transcoding priority [ 0,9 ] , lowest priority is 0 . Only effect your own jobs
* @ param mode the mode of the media resource
* @ return A PutObjectResponse object containing the information returned by Bos for the newly created object . */
public CreateMediaResourceResponse createMediaResource ( String sourceUrl , String title , String description , String transcodingPresetGroupName , int priority , String mode ) { } } | checkStringNotEmpty ( sourceUrl , "sourceUrl should not be null or empty!" ) ; // generate media Id
GenerateMediaIdResponse generateMediaIdresponse ; if ( mode == null ) { generateMediaIdresponse = applyMedia ( ) ; } else { generateMediaIdresponse = applyMedia ( mode ) ; } String mediaId = generateMediaIdresponse . getMediaId ( ) ; String targetBucket = generateMediaIdresponse . getSourceBucket ( ) ; String targetKey = generateMediaIdresponse . getSourceKey ( ) ; // fetch to temp bucket
bosClient . fetchObject ( targetBucket , targetKey , sourceUrl ) ; // create mediaId
InternalCreateMediaRequest request = new InternalCreateMediaRequest ( ) . withMediaId ( mediaId ) . withTitle ( title ) . withDescription ( description ) . withTranscodingPresetGroupName ( transcodingPresetGroupName ) . withPriority ( priority ) ; InternalCreateMediaResponse internalResponse = processMedia ( request ) ; CreateMediaResourceResponse response = new CreateMediaResourceResponse ( ) ; response . setMediaId ( internalResponse . getMediaId ( ) ) ; return response ; |
public class FormLayout { /** * Sets the ColumnSpec at the specified column index .
* @ param columnIndex the index of the column to be changed
* @ param columnSpec the ColumnSpec to be set
* @ throws NullPointerException if { @ code columnSpec } is { @ code null }
* @ throws IndexOutOfBoundsException if the column index is out of range */
public void setColumnSpec ( int columnIndex , ColumnSpec columnSpec ) { } } | checkNotNull ( columnSpec , "The column spec must not be null." ) ; colSpecs . set ( columnIndex - 1 , columnSpec ) ; |
public class EventHelper { /** * prints debug info when property debug is true , calls renderHeader and renderFooter and
* { @ link Advanced # draw ( com . itextpdf . text . Rectangle , java . lang . String ) } with { @ link Document # getPageSize ( ) }
* and null for { @ link DefaultStylerFactory # PAGESTYLERS } .
* @ param writer
* @ param document */
@ Override public final void onEndPage ( PdfWriter writer , Document document ) { } } | super . onEndPage ( writer , document ) ; sanitize ( writer ) ; try { if ( failuresHereAfter || debugHereAfter ) { PdfContentByte bg = writer . getDirectContentUnder ( ) ; Rectangle rect = writer . getPageSize ( ) ; rect . setBackgroundColor ( itextHelper . fromColor ( getSettings ( ) . getColorProperty ( new Color ( 240 , 240 , 240 ) , "legendbackground" ) ) ) ; bg . rectangle ( rect ) ; bg . closePathFillStroke ( ) ; } else { for ( Advanced a : doForAllPages ) { try { if ( a . shouldDraw ( null ) ) { a . draw ( document . getPageSize ( ) , null ) ; } } catch ( VectorPrintException ex ) { throw new VectorPrintRuntimeException ( ex ) ; } } } if ( ! debugHereAfter && getSettings ( ) . getBooleanProperty ( false , DEBUG ) ) { PdfContentByte canvas = writer . getDirectContent ( ) ; Rectangle rect = new Rectangle ( document . leftMargin ( ) , document . bottomMargin ( ) , document . right ( ) - document . rightMargin ( ) , document . top ( ) - document . topMargin ( ) ) ; DebugHelper . debugRect ( canvas , rect , new float [ ] { 10 , 2 } , 0.3f , getSettings ( ) , stylerFactory . getLayerManager ( ) ) ; } renderHeader ( writer , document ) ; maxTagForGenericTagOnPage = ( ( DefaultElementProducer ) elementProducer ) . getAdvancedTag ( ) ; if ( getSettings ( ) . getBooleanProperty ( Boolean . FALSE , ReportConstants . PRINTFOOTER ) ) { renderFooter ( writer , document ) ; } else { log . warning ( "not printing footer, if you want page footers set " + ReportConstants . PRINTFOOTER + " to true" ) ; } maxTagForGenericTagOnPage = Integer . MAX_VALUE ; } catch ( VectorPrintException | DocumentException | InstantiationException | IllegalAccessException e ) { throw new VectorPrintRuntimeException ( "failed to create the report header or footer: " , e ) ; } |
public class JBBPTextWriter { /** * Print string values .
* @ param str array of string values , must not be null but may contain nulls
* @ return the context
* @ throws IOException it will be thrown for error */
public JBBPTextWriter Str ( final String ... str ) throws IOException { } } | JBBPUtils . assertNotNull ( str , "String must not be null" ) ; final String oldPrefix = this . prefixValue ; final String oldPostfix = this . postfixValue ; this . prefixValue = "" ; this . postfixValue = "" ; for ( final String s : str ) { ensureValueMode ( ) ; printValueString ( s == null ? "<NULL>" : s ) ; } this . prefixValue = oldPrefix ; this . postfixValue = oldPostfix ; return this ; |
public class ObjectFactory { /** * Create an instance of { @ link JAXBElement } { @ code < } { @ link CurveArrayPropertyType } { @ code > }
* @ param value
* Java instance representing xml element ' s value .
* @ return
* the new instance of { @ link JAXBElement } { @ code < } { @ link CurveArrayPropertyType } { @ code > } */
@ XmlElementDecl ( namespace = "http://www.opengis.net/gml" , name = "curveArrayProperty" ) public JAXBElement < CurveArrayPropertyType > createCurveArrayProperty ( CurveArrayPropertyType value ) { } } | return new JAXBElement < CurveArrayPropertyType > ( _CurveArrayProperty_QNAME , CurveArrayPropertyType . class , null , value ) ; |
public class InputSampler { /** * Driver for InputSampler from the command line .
* Configures a JobConf instance and calls { @ link # writePartitionFile } . */
public int run ( String [ ] args ) throws Exception { } } | JobConf job = ( JobConf ) getConf ( ) ; ArrayList < String > otherArgs = new ArrayList < String > ( ) ; Sampler < K , V > sampler = null ; for ( int i = 0 ; i < args . length ; ++ i ) { try { if ( "-r" . equals ( args [ i ] ) ) { job . setNumReduceTasks ( Integer . parseInt ( args [ ++ i ] ) ) ; } else if ( "-inFormat" . equals ( args [ i ] ) ) { job . setInputFormat ( Class . forName ( args [ ++ i ] ) . asSubclass ( InputFormat . class ) ) ; } else if ( "-keyClass" . equals ( args [ i ] ) ) { job . setMapOutputKeyClass ( Class . forName ( args [ ++ i ] ) . asSubclass ( WritableComparable . class ) ) ; } else if ( "-splitSample" . equals ( args [ i ] ) ) { int numSamples = Integer . parseInt ( args [ ++ i ] ) ; int maxSplits = Integer . parseInt ( args [ ++ i ] ) ; if ( 0 >= maxSplits ) maxSplits = Integer . MAX_VALUE ; sampler = new SplitSampler < K , V > ( numSamples , maxSplits ) ; } else if ( "-splitRandom" . equals ( args [ i ] ) ) { double pcnt = Double . parseDouble ( args [ ++ i ] ) ; int numSamples = Integer . parseInt ( args [ ++ i ] ) ; int maxSplits = Integer . parseInt ( args [ ++ i ] ) ; if ( 0 >= maxSplits ) maxSplits = Integer . MAX_VALUE ; sampler = new RandomSampler < K , V > ( pcnt , numSamples , maxSplits ) ; } else if ( "-splitInterval" . equals ( args [ i ] ) ) { double pcnt = Double . parseDouble ( args [ ++ i ] ) ; int maxSplits = Integer . parseInt ( args [ ++ i ] ) ; if ( 0 >= maxSplits ) maxSplits = Integer . MAX_VALUE ; sampler = new IntervalSampler < K , V > ( pcnt , maxSplits ) ; } else { otherArgs . add ( args [ i ] ) ; } } catch ( NumberFormatException except ) { System . out . println ( "ERROR: Integer expected instead of " + args [ i ] ) ; return printUsage ( ) ; } catch ( ArrayIndexOutOfBoundsException except ) { System . out . println ( "ERROR: Required parameter missing from " + args [ i - 1 ] ) ; return printUsage ( ) ; } } if ( job . 
getNumReduceTasks ( ) <= 1 ) { System . err . println ( "Sampler requires more than one reducer" ) ; return printUsage ( ) ; } if ( otherArgs . size ( ) < 2 ) { System . out . println ( "ERROR: Wrong number of parameters: " ) ; return printUsage ( ) ; } if ( null == sampler ) { sampler = new RandomSampler < K , V > ( 0.1 , 10000 , 10 ) ; } Path outf = new Path ( otherArgs . remove ( otherArgs . size ( ) - 1 ) ) ; TotalOrderPartitioner . setPartitionFile ( job , outf ) ; for ( String s : otherArgs ) { FileInputFormat . addInputPath ( job , new Path ( s ) ) ; } InputSampler . < K , V > writePartitionFile ( job , sampler ) ; return 0 ; |
public class DescribeSuggestersRequest { /** * The suggesters you want to describe .
* @ param suggesterNames
* The suggesters you want to describe . */
public void setSuggesterNames ( java . util . Collection < String > suggesterNames ) { } } | if ( suggesterNames == null ) { this . suggesterNames = null ; return ; } this . suggesterNames = new com . amazonaws . internal . SdkInternalList < String > ( suggesterNames ) ; |
public class AbstractMenu { /** * The method takes values of the { @ link # commands } and returnes them as
* { @ link java . util . ArrayList } .
* @ return the values of the { @ link # commands } map instance as array list
* @ see # commands
* @ see # add ( Command )
* @ see # add ( Menu ) */
public List < AbstractCommand > getCommands ( ) { } } | return commands . values ( ) . stream ( ) . map ( id -> AbstractCommand . search ( id ) ) . collect ( Collectors . toList ( ) ) ; |
public class DateUtil { /** * 计算两个日期相差年数 < br >
* 在非重置情况下 , 如果起始日期的月小于结束日期的月 , 年数要少算1 ( 不足1年 )
* @ param beginDate 起始日期
* @ param endDate 结束日期
* @ param isReset 是否重置时间为起始时间 ( 重置月天时分秒 )
* @ return 相差年数
* @ since 3.0.8 */
public static long betweenYear ( Date beginDate , Date endDate , boolean isReset ) { } } | return new DateBetween ( beginDate , endDate ) . betweenYear ( isReset ) ; |
public class JavaParser { /** * This will take a RecognitionException , and create a sensible error message out of it */
public String createErrorMessage ( RecognitionException e ) { } } | StringBuilder message = new StringBuilder ( ) ; message . append ( source + ":" + e . line + ":" + e . charPositionInLine + " " ) ; if ( e instanceof MismatchedTokenException ) { MismatchedTokenException mte = ( MismatchedTokenException ) e ; message . append ( "mismatched token: " + e . token + "; expecting type " + tokenNames [ mte . expecting ] ) ; } else if ( e instanceof MismatchedTreeNodeException ) { MismatchedTreeNodeException mtne = ( MismatchedTreeNodeException ) e ; message . append ( "mismatched tree node: " + mtne . node + "; expecting type " + tokenNames [ mtne . expecting ] ) ; } else if ( e instanceof NoViableAltException ) { NoViableAltException nvae = ( NoViableAltException ) e ; message . append ( "Unexpected token '" + e . token . getText ( ) + "'" ) ; /* message . append ( " decision = < < " + nvae . grammarDecisionDescription + " > > " +
" state " + nvae . stateNumber +
" ( decision = " + nvae . decisionNumber +
" ) no viable alt ; token = " +
e . token ) ; */
} else if ( e instanceof EarlyExitException ) { EarlyExitException eee = ( EarlyExitException ) e ; message . append ( "required (...)+ loop (decision=" + eee . decisionNumber + ") did not match anything; token=" + e . token ) ; } else if ( e instanceof MismatchedSetException ) { MismatchedSetException mse = ( MismatchedSetException ) e ; message . append ( "mismatched token '" + e . token + "' expecting set " + mse . expecting ) ; } else if ( e instanceof MismatchedNotSetException ) { MismatchedNotSetException mse = ( MismatchedNotSetException ) e ; message . append ( "mismatched token '" + e . token + "' expecting set " + mse . expecting ) ; } else if ( e instanceof FailedPredicateException ) { FailedPredicateException fpe = ( FailedPredicateException ) e ; message . append ( "rule " + fpe . ruleName + " failed predicate: {" + fpe . predicateText + "}?" ) ; } return message . toString ( ) ; |
public class TempCharReader { /** * Reads the next character . */
public int read ( ) { } } | if ( _length <= _offset ) { if ( _head == null ) return - 1 ; TempCharBuffer next = _head . getNext ( ) ; if ( _isFree ) TempCharBuffer . free ( _head ) ; _head = next ; if ( _head == null ) return - 1 ; _buffer = _head . buffer ( ) ; _length = _head . getLength ( ) ; _offset = 0 ; } return _buffer [ _offset ++ ] ; |
public class ContentMappings { /** * Looks up or creates a new ContentMapping for the given original string and a ContentMapping generator . */
public @ Nonnull ContentMapping getMappingOrCreate ( @ Nonnull String original , @ Nonnull Function < String , ContentMapping > generator ) { } } | boolean isNew = ! mappings . containsKey ( original ) ; ContentMapping mapping = mappings . computeIfAbsent ( original , generator ) ; try { if ( isNew ) { save ( ) ; } } catch ( IOException e ) { LOGGER . log ( Level . WARNING , "Could not save mappings file" , e ) ; } return mapping ; |
public class AbstractHttp2ClientTransport { /** * 调用前设置一些属性
* @ param context RPC上下文
* @ param request 请求对象 */
protected void beforeSend ( RpcInternalContext context , SofaRequest request ) { } } | currentRequests . incrementAndGet ( ) ; context . getStopWatch ( ) . tick ( ) . read ( ) ; context . setLocalAddress ( localAddress ( ) ) ; if ( EventBus . isEnable ( ClientBeforeSendEvent . class ) ) { EventBus . post ( new ClientBeforeSendEvent ( request ) ) ; } |
public class QueryRunner { /** * Execute an SQL SELECT query with replacement parameters . The
* caller is responsible for closing the connection .
* @ param conn The connection to execute the query in .
* @ param sql The query to execute .
* @ param params The replacement parameters .
* @ param rsh The handler that converts the results into an object .
* @ return The object returned by the handler .
* @ throws java . sql . SQLException if a database access error occurs */
public < T > T query ( Connection conn , String sql , Object [ ] params , ResultSetHandler < T > rsh ) throws SQLException { } } | PreparedStatement stmt = null ; ResultSet rs = null ; T result = null ; try { stmt = this . prepareStatement ( conn , sql ) ; this . fillStatement ( stmt , params ) ; rs = this . wrap ( stmt . executeQuery ( ) ) ; result = rsh . handle ( rs ) ; } catch ( SQLException e ) { this . rethrow ( e , sql , params ) ; } finally { try { close ( rs ) ; } finally { close ( stmt ) ; } } return result ; |
public class DIRImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
public void setRWIDTHFRACTION ( Integer newRWIDTHFRACTION ) { } } | Integer oldRWIDTHFRACTION = rwidthfraction ; rwidthfraction = newRWIDTHFRACTION ; if ( eNotificationRequired ( ) ) eNotify ( new ENotificationImpl ( this , Notification . SET , AfplibPackage . DIR__RWIDTHFRACTION , oldRWIDTHFRACTION , rwidthfraction ) ) ; |
public class CurrentGpsInfo { /** * Method to add a new { @ link GSASentence } .
* @ param gsa the sentence to add . */
public void addGSA ( GSASentence gsa ) { } } | try { if ( gsa . isValid ( ) ) { gpsFixStatus = gsa . getFixStatus ( ) ; horizontalPrecision = gsa . getHorizontalDOP ( ) ; verticalPrecision = gsa . getVerticalDOP ( ) ; positionPrecision = gsa . getPositionDOP ( ) ; satelliteIds = gsa . getSatelliteIds ( ) ; } } catch ( Exception e ) { // ignore it , this should be handled in the isValid ,
// if an exception is thrown , we can ' t deal with it here .
} |
public class RestApiClient { /** * Delete chat room .
* @ param roomName
* the room name
* @ return the response */
public Response deleteChatRoom ( String roomName ) { } } | return restClient . delete ( "chatrooms/" + roomName , new HashMap < String , String > ( ) ) ; |
public class DefaultPageHeader { /** * Writes this header to the specified file . Writes the { @ link # FILE _ VERSION
* version } of this header and the integer value of { @ link # pageSize } to the
* file . */
@ Override public void writeHeader ( RandomAccessFile file ) throws IOException { } } | file . seek ( 0 ) ; file . writeInt ( FILE_VERSION ) ; file . writeInt ( this . pageSize ) ; |
public class ListGrantsRequestMarshaller { /** * Marshall the given parameter object . */
public void marshall ( ListGrantsRequest listGrantsRequest , ProtocolMarshaller protocolMarshaller ) { } } | if ( listGrantsRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( listGrantsRequest . getLimit ( ) , LIMIT_BINDING ) ; protocolMarshaller . marshall ( listGrantsRequest . getMarker ( ) , MARKER_BINDING ) ; protocolMarshaller . marshall ( listGrantsRequest . getKeyId ( ) , KEYID_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; } |
public class CalendarQuarter { /** * / * [ deutsch ]
* < p > Addiert die angegebenen Jahre zu diesem Kalenderquartal . < / p >
* @ param years the count of years to be added
* @ return result of addition */
public CalendarQuarter plus ( Years < CalendarUnit > years ) { } } | if ( years . isEmpty ( ) ) { return this ; } return CalendarQuarter . of ( MathUtils . safeAdd ( this . year , years . getAmount ( ) ) , this . quarter ) ; |
public class CmsCmisUtil { /** * Converts milliseconds into a calendar object .
* @ param millis a time given in milliseconds after epoch
* @ return the calendar object for the given time */
public static GregorianCalendar millisToCalendar ( long millis ) { } } | GregorianCalendar result = new GregorianCalendar ( ) ; result . setTimeZone ( TimeZone . getTimeZone ( "GMT" ) ) ; result . setTimeInMillis ( ( long ) ( Math . ceil ( millis / 1000 ) * 1000 ) ) ; return result ; |
public class CacheStore { /** * Then remove the session from cache */
@ Override public void remoteInvalidate ( String sessionId , boolean backendUpdate ) { } } | super . remoteInvalidate ( sessionId , backendUpdate ) ; if ( backendUpdate ) { ( ( CacheHashMap ) _sessions ) . setMaxInactToZero ( sessionId , getId ( ) ) ; } // now clean this session out of cache - - we do this even if not doing db inval
Enumeration < String > e = Collections . enumeration ( Collections . singleton ( sessionId ) ) ; ( ( BackedHashMap ) _sessions ) . handleDiscardedCacheItems ( e ) ; |
public class AbstractEJBRuntime { /** * Creates a non - persistent calendar based EJB timer .
* @ param beanId the bean Id for which the timer is being created
* @ param parsedExpr the parsed values of the schedule for a calendar - based timer
* @ param info application information to be delivered to the timeout method , or null */
protected Timer createNonPersistentCalendarTimer ( BeanO beanO , ParsedScheduleExpression parsedExpr , Serializable info ) { } } | final boolean isTraceOn = TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ; if ( isTraceOn && tc . isDebugEnabled ( ) ) Tr . entry ( tc , "createNonPersistentCalendarTimer : " + beanO ) ; // create the non - persistent Timer
TimerNpImpl timer = new TimerNpImpl ( beanO . getId ( ) , parsedExpr , info ) ; // queue timer to start ( or start immediately if not in a global tran )
queueOrStartNpTimer ( beanO , timer ) ; if ( isTraceOn && tc . isEntryEnabled ( ) ) Tr . exit ( tc , "createNonPersistentCalendarTimer : " + timer ) ; return timer ; |
public class AgentsClient { /** * Restores the specified agent from a ZIP file .
* < p > Replaces the current agent version with a new one . All the intents and entity types in the
* older version are deleted .
* < p > Operation & lt ; response : [ google . protobuf . Empty ] [ google . protobuf . Empty ] & gt ;
* < p > Sample code :
* < pre > < code >
* try ( AgentsClient agentsClient = AgentsClient . create ( ) ) {
* ProjectName parent = ProjectName . of ( " [ PROJECT ] " ) ;
* RestoreAgentRequest request = RestoreAgentRequest . newBuilder ( )
* . setParent ( parent . toString ( ) )
* . build ( ) ;
* agentsClient . restoreAgentAsync ( request ) . get ( ) ;
* < / code > < / pre >
* @ param request The request object containing all of the parameters for the API call .
* @ throws com . google . api . gax . rpc . ApiException if the remote call fails */
@ BetaApi ( "The surface for long-running operations is not stable yet and may change in the future." ) public final OperationFuture < Empty , Struct > restoreAgentAsync ( RestoreAgentRequest request ) { } } | return restoreAgentOperationCallable ( ) . futureCall ( request ) ; |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.