signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class UsableURI { /** * The two String fields cachedHost and cachedAuthorityMinusUserInfo are * usually identical ; if so , coalesce into a single instance . */ protected void coalesceHostAuthorityStrings ( ) { } }
if ( this . cachedAuthorityMinusUserinfo != null && this . cachedHost != null && this . cachedHost . length ( ) == this . cachedAuthorityMinusUserinfo . length ( ) ) { // lengths can only be identical if contents // are identical ; use only one instance this . cachedAuthorityMinusUserinfo = this . cachedHost ; }
public class DatatypeConverter { /** * Parse a percent complete value . * @ param value sting representation of a percent complete value . * @ return Double instance */ public static final Double parsePercent ( String value ) { } }
return value == null ? null : Double . valueOf ( Double . parseDouble ( value ) * 100.0 ) ;
public class Resulting { /** * Override this to handle a request failed in your own way . */ public void requestFailed ( Exception cause ) { } }
if ( _chain != null ) { _chain . requestFailed ( cause ) ; } else if ( _invChain != null ) { _invChain . requestFailed ( ( cause instanceof InvocationException ) ? cause . getMessage ( ) : InvocationCodes . INTERNAL_ERROR ) ; } else { Object [ ] logArgs = MoreObjects . firstNonNull ( _logArgs , ArrayUtil . EMPTY_OBJECT ) ; Object [ ] args ; if ( cause instanceof InvocationException ) { args = new Object [ logArgs . length + 4 ] ; args [ args . length - 2 ] = "error" ; args [ args . length - 1 ] = cause . getMessage ( ) ; } else { args = new Object [ logArgs . length + 3 ] ; args [ args . length - 1 ] = cause ; } args [ 0 ] = "Resulting" ; args [ 1 ] = this ; System . arraycopy ( logArgs , 0 , args , 2 , logArgs . length ) ; MoreObjects . firstNonNull ( _log , log ) . warning ( "Request failed" , args ) ; }
public class SqlParserImpl { /** * コメント解析 */ protected void parseComment ( ) { } }
String comment = tokenizer . getToken ( ) ; if ( isTargetComment ( comment ) ) { if ( isIfComment ( comment ) ) { parseIf ( ) ; } else if ( isElIfComment ( comment ) ) { parseElIf ( ) ; } else if ( isElseComment ( comment ) ) { parseElse ( comment . length ( ) + 4 ) ; } else if ( isBeginComment ( comment ) ) { parseBegin ( ) ; } else if ( isEndComment ( comment ) ) { return ; } else { parseCommentBindVariable ( ) ; } } else { parseNormalComment ( ) ; }
public class RegionCommitmentClient { /** * Retrieves an aggregated list of commitments . * < p > Sample code : * < pre > < code > * try ( RegionCommitmentClient regionCommitmentClient = RegionCommitmentClient . create ( ) ) { * ProjectName project = ProjectName . of ( " [ PROJECT ] " ) ; * for ( CommitmentsScopedList element : regionCommitmentClient . aggregatedListRegionCommitments ( project . toString ( ) ) . iterateAll ( ) ) { * / / doThingsWith ( element ) ; * < / code > < / pre > * @ param project Project ID for this request . * @ throws com . google . api . gax . rpc . ApiException if the remote call fails */ @ BetaApi public final AggregatedListRegionCommitmentsPagedResponse aggregatedListRegionCommitments ( String project ) { } }
AggregatedListRegionCommitmentsHttpRequest request = AggregatedListRegionCommitmentsHttpRequest . newBuilder ( ) . setProject ( project ) . build ( ) ; return aggregatedListRegionCommitments ( request ) ;
public class CryptonitAdapters { /** * There is no method to discern market versus limit order type - so this returns a generic * CryptonitGenericOrder as a status */ public static CryptonitGenericOrder adaptOrder ( String orderId , CryptonitOrderStatusResponse cryptonitOrderStatusResponse ) { } }
CryptonitOrderTransaction [ ] cryptonitTransactions = cryptonitOrderStatusResponse . getTransactions ( ) ; CurrencyPair currencyPair = null ; Date date = null ; BigDecimal averagePrice = null ; BigDecimal cumulativeAmount = null ; BigDecimal totalFee = null ; // Use only the first transaction , because we assume that for a single order id all transactions // will // be of the same currency pair if ( cryptonitTransactions . length > 0 ) { currencyPair = adaptCurrencyPair ( cryptonitTransactions [ 0 ] ) ; date = cryptonitTransactions [ 0 ] . getDatetime ( ) ; averagePrice = Arrays . stream ( cryptonitTransactions ) . map ( t -> t . getPrice ( ) ) . reduce ( ( x , y ) -> x . add ( y ) ) . get ( ) . divide ( BigDecimal . valueOf ( cryptonitTransactions . length ) , 2 ) ; cumulativeAmount = Arrays . stream ( cryptonitTransactions ) . map ( t -> getBaseCurrencyAmountFromCryptonitTransaction ( t ) ) . reduce ( ( x , y ) -> x . add ( y ) ) . get ( ) ; totalFee = Arrays . stream ( cryptonitTransactions ) . map ( t -> t . getFee ( ) ) . reduce ( ( x , y ) -> x . add ( y ) ) . get ( ) ; } OrderStatus orderStatus = adaptOrderStatus ( cryptonitOrderStatusResponse . getStatus ( ) , cryptonitTransactions . length ) ; CryptonitGenericOrder cryptonitGenericOrder = new CryptonitGenericOrder ( null , // not discernable from response data null , // not discernable from the data currencyPair , orderId , date , averagePrice , cumulativeAmount , totalFee , orderStatus ) ; return cryptonitGenericOrder ;
public class ExpressRouteCircuitsInner { /** * Gets all the stats from an express route circuit in a resource group . * @ param resourceGroupName The name of the resource group . * @ param circuitName The name of the express route circuit . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ throws CloudException thrown if the request is rejected by server * @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent * @ return the ExpressRouteCircuitStatsInner object if successful . */ public ExpressRouteCircuitStatsInner getStats ( String resourceGroupName , String circuitName ) { } }
return getStatsWithServiceResponseAsync ( resourceGroupName , circuitName ) . toBlocking ( ) . single ( ) . body ( ) ;
public class LogRecordBrowser { /** * Returns records belonging to a process and satisfying the filter . * @ param after the location of a record we need to restart iteration after . * @ param max the upper time limit ( the youngest log record ) . * @ param filter criteria to filter logs records on . * @ return the iterable list of records . */ public OnePidRecordListImpl recordsInProcess ( RepositoryPointer after , long max , final LogRecordFilter filter ) { } }
return restartRecordsInProcess ( after , max , filter == null ? new AllAcceptVerifier ( ) : new FullFilterVerifier ( filter ) ) ;
public class EntityImpl { /** * Returns all < code > named - entity - graph < / code > elements * @ return list of < code > named - entity - graph < / code > */ public List < NamedEntityGraph < Entity < T > > > getAllNamedEntityGraph ( ) { } }
List < NamedEntityGraph < Entity < T > > > list = new ArrayList < NamedEntityGraph < Entity < T > > > ( ) ; List < Node > nodeList = childNode . get ( "named-entity-graph" ) ; for ( Node node : nodeList ) { NamedEntityGraph < Entity < T > > type = new NamedEntityGraphImpl < Entity < T > > ( this , "named-entity-graph" , childNode , node ) ; list . add ( type ) ; } return list ;
public class PGXAConnection { /** * < p > Prepares transaction . Preconditions : < / p > * < ol > * < li > xid ! = null < / li > * < li > xid is in ended state < / li > * < / ol > * < p > Implementation deficiency preconditions : < / p > * < ol > * < li > xid was associated with this connection < / li > * < / ol > * < p > Postconditions : < / p > * < ol > * < li > Transaction is prepared < / li > * < / ol > */ @ Override public int prepare ( Xid xid ) throws XAException { } }
if ( LOGGER . isLoggable ( Level . FINEST ) ) { debug ( "preparing transaction xid = " + xid ) ; } // Check preconditions if ( currentXid == null && preparedXid != null ) { if ( LOGGER . isLoggable ( Level . FINEST ) ) { debug ( "Prepare xid " + xid + " but current connection is not attached to a transaction" + " while it was prepared in past with prepared xid " + preparedXid ) ; } throw new PGXAException ( GT . tr ( "Preparing already prepared transaction, the prepared xid {0}, prepare xid={1}" , preparedXid , xid ) , XAException . XAER_PROTO ) ; } else if ( currentXid == null ) { throw new PGXAException ( GT . tr ( "Current connection does not have an associated xid. prepare xid={0}" , xid ) , XAException . XAER_NOTA ) ; } if ( ! currentXid . equals ( xid ) ) { if ( LOGGER . isLoggable ( Level . FINEST ) ) { debug ( "Error to prepare xid " + xid + ", the current connection already bound with xid " + currentXid ) ; } throw new PGXAException ( GT . tr ( "Not implemented: Prepare must be issued using the same connection that started the transaction. currentXid={0}, prepare xid={1}" , currentXid , xid ) , XAException . XAER_RMERR ) ; } if ( state != State . ENDED ) { throw new PGXAException ( GT . tr ( "Prepare called before end. prepare xid={0}, state={1}" , xid ) , XAException . XAER_INVAL ) ; } state = State . IDLE ; preparedXid = currentXid ; currentXid = null ; try { String s = RecoveredXid . xidToString ( xid ) ; Statement stmt = conn . createStatement ( ) ; try { stmt . executeUpdate ( "PREPARE TRANSACTION '" + s + "'" ) ; } finally { stmt . close ( ) ; } conn . setAutoCommit ( localAutoCommitMode ) ; return XA_OK ; } catch ( SQLException ex ) { throw new PGXAException ( GT . tr ( "Error preparing transaction. prepare xid={0}" , xid ) , ex , mapSQLStateToXAErrorCode ( ex ) ) ; }
public class JournalNodeJspHelper { /** * Generate health report for journal nodes */ public static String getNodeReport ( QJMStatus status ) { } }
StringBuilder sb = new StringBuilder ( ) ; sb . append ( "<table border=1 cellpadding=1 cellspacing=0 title=\"Journals\">" ) ; sb . append ( "<thead><tr><td><b>Journal node</b></td><td><b>Alive</b></td></tr></thead>" ) ; for ( Entry < String , Boolean > e : status . getAliveMap ( ) . entrySet ( ) ) { if ( e . getValue ( ) ) { sb . append ( "<tr><td>" + e . getKey ( ) + "</td><td><font color=green>Active</font></td></tr>" ) ; } else { sb . append ( "<tr><td>" + e . getKey ( ) + "</td><td><font color=red>Failed</font></td></tr>" ) ; } } sb . append ( "</table>" ) ; return sb . toString ( ) ;
public class AppsImpl { /** * Gets the endpoint URLs for the prebuilt Cortana applications . * @ param serviceCallback the async ServiceCallback to handle successful and failed responses . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the { @ link ServiceFuture } object */ public ServiceFuture < PersonalAssistantsResponse > listCortanaEndpointsAsync ( final ServiceCallback < PersonalAssistantsResponse > serviceCallback ) { } }
return ServiceFuture . fromResponse ( listCortanaEndpointsWithServiceResponseAsync ( ) , serviceCallback ) ;
public class StringUtils { /** * < p > Replaces a String with another String inside a larger String , * for the first { @ code max } values of the search String . < / p > * < p > A { @ code null } reference passed to this method is a no - op . < / p > * < pre > * StringUtils . replace ( null , * , * , * ) = null * StringUtils . replace ( " " , * , * , * ) = " " * StringUtils . replace ( " any " , null , * , * ) = " any " * StringUtils . replace ( " any " , * , null , * ) = " any " * StringUtils . replace ( " any " , " " , * , * ) = " any " * StringUtils . replace ( " any " , * , * , 0 ) = " any " * StringUtils . replace ( " abaa " , " a " , null , - 1 ) = " abaa " * StringUtils . replace ( " abaa " , " a " , " " , - 1 ) = " b " * StringUtils . replace ( " abaa " , " a " , " z " , 0 ) = " abaa " * StringUtils . replace ( " abaa " , " a " , " z " , 1 ) = " zbaa " * StringUtils . replace ( " abaa " , " a " , " z " , 2 ) = " zbza " * StringUtils . replace ( " abaa " , " a " , " z " , - 1 ) = " zbzz " * < / pre > * @ param text text to search and replace in , may be null * @ param searchString the String to search for , may be null * @ param replacement the String to replace it with , may be null * @ param max maximum number of values to replace , or { @ code - 1 } if no maximum * @ return the text with any replacements processed , * { @ code null } if null String input */ public static String replace ( final String text , final String searchString , final String replacement , final int max ) { } }
return replace ( text , searchString , replacement , max , false ) ;
public class ArchivedDialogsActor { /** * Messages */ @ Override public void onReceive ( Object message ) { } }
if ( message instanceof LoadMore ) { onLoadMore ( ( ( LoadMore ) message ) . isInit ( ) , ( ( LoadMore ) message ) . getCallback ( ) ) ; } else { super . onReceive ( message ) ; }
public class UrlReverseOption { public UrlReverseOption filterActionName ( Function < String , String > filter ) { } }
if ( filter == null ) { throw new IllegalArgumentException ( "The argument 'filter' should not be null." ) ; } actionNameFilter = filter ; return this ;
public class UserResource { /** * Delete a user . * @ param id unique user id * @ return http 204 is successful */ @ DELETE @ Path ( "{id}" ) @ RolesAllowed ( { } }
"ROLE_ADMIN" } ) public Response delete ( @ PathParam ( "id" ) Long id ) { checkNotNull ( id ) ; userService . deleteById ( id ) ; return Response . noContent ( ) . build ( ) ;
public class CentralDogmaEndpointGroup { /** * Creates a new { @ link CentralDogmaEndpointGroup } . * @ param watcher a { @ link Watcher } * @ param endpointListDecoder an { @ link EndpointListDecoder } */ public static < T > CentralDogmaEndpointGroup < T > ofWatcher ( Watcher < T > watcher , EndpointListDecoder < T > endpointListDecoder ) { } }
return new CentralDogmaEndpointGroup < > ( watcher , endpointListDecoder ) ;
public class CmsJspTagResourceLoad { /** * Load the next resource from the initialized list of resources . < p > */ protected void doLoadNextResource ( ) { } }
// get the next resource from the collector CmsResource resource = getNextResource ( ) ; if ( resource == null ) { m_resourceName = null ; m_resource = null ; return ; } // set the resource name m_resourceName = m_cms . getSitePath ( resource ) ; // set the resource m_resource = resource ;
public class SynchroData { /** * Read the table headers . This allows us to break the file into chunks * representing the individual tables . * @ param is input stream * @ return list of tables in the file */ private List < SynchroTable > readTableHeaders ( InputStream is ) throws IOException { } }
// Read the headers List < SynchroTable > tables = new ArrayList < SynchroTable > ( ) ; byte [ ] header = new byte [ 48 ] ; while ( true ) { is . read ( header ) ; m_offset += 48 ; SynchroTable table = readTableHeader ( header ) ; if ( table == null ) { break ; } tables . add ( table ) ; } // Ensure sorted by offset Collections . sort ( tables , new Comparator < SynchroTable > ( ) { @ Override public int compare ( SynchroTable o1 , SynchroTable o2 ) { return o1 . getOffset ( ) - o2 . getOffset ( ) ; } } ) ; // Calculate lengths SynchroTable previousTable = null ; for ( SynchroTable table : tables ) { if ( previousTable != null ) { previousTable . setLength ( table . getOffset ( ) - previousTable . getOffset ( ) ) ; } previousTable = table ; } for ( SynchroTable table : tables ) { SynchroLogger . log ( "TABLE" , table ) ; } return tables ;
public class X509Certificate { /** * Creates a new { @ code X509Certificate } and initializes it from the * specified byte array . * @ param certData * byte array containing data to initialize the certificate . * @ return the certificate initialized from the specified byte array * @ throws CertificateException * if the certificate cannot be created or initialized . */ public static final X509Certificate getInstance ( byte [ ] certData ) throws CertificateException { } }
if ( certData == null ) { throw new CertificateException ( "certData == null" ) ; } ByteArrayInputStream bais = new ByteArrayInputStream ( certData ) ; return getInstance ( bais ) ;
public class Input { /** * Add a controller listener to be notified of controller input events * @ param listener The listener to be notified */ public void addControllerListener ( ControllerListener listener ) { } }
if ( controllerListeners . contains ( listener ) ) { return ; } controllerListeners . add ( listener ) ; allListeners . add ( listener ) ;
public class BatterySkin { /** * * * * * * Methods * * * * * */ @ Override protected void handleEvents ( final String EVENT_TYPE ) { } }
super . handleEvents ( EVENT_TYPE ) ; if ( "RECALC" . equals ( EVENT_TYPE ) ) { setBar ( gauge . getCurrentValue ( ) ) ; resize ( ) ; redraw ( ) ; } else if ( "SECTION" . equals ( EVENT_TYPE ) ) { sections = gauge . getSections ( ) ; resize ( ) ; redraw ( ) ; } else if ( "VISIBILITY" . equals ( EVENT_TYPE ) ) { valueText . setVisible ( gauge . isValueVisible ( ) ) ; valueText . setManaged ( gauge . isValueVisible ( ) ) ; redraw ( ) ; }
public class ImportXMLScanListener { /** * RestoreOldProperties Method . */ public void restoreOldProperties ( Map < String , String > oldProperties , Record record ) { } }
this . restoreOldProperty ( oldProperties , record . getDatabaseName ( ) + BaseDatabase . DBSHARED_PARAM_SUFFIX ) ; this . restoreOldProperty ( oldProperties , record . getDatabaseName ( ) + BaseDatabase . DBUSER_PARAM_SUFFIX ) ; this . restoreOldProperty ( oldProperties , DBConstants . DB_USER_PREFIX ) ; this . restoreOldProperty ( oldProperties , DBConstants . SYSTEM_NAME ) ; this . restoreOldProperty ( oldProperties , DBParams . LANGUAGE ) ;
public class QuestConfigPanel { /** * GEN - LAST : event _ chkAnnotationsActionPerformed */ private void chkSameAsActionPerformed ( java . awt . event . ActionEvent evt ) { } }
// GEN - FIRST : event _ chkSameAsActionPerformed preference . put ( OntopOBDASettings . SAME_AS , String . valueOf ( chkSameAs . isSelected ( ) ) ) ;
public class FtpClient { /** * Check file path type directory or file . * @ param remoteFilePath * @ return * @ throws IOException */ protected boolean isDirectory ( String remoteFilePath ) throws IOException { } }
if ( ! ftpClient . changeWorkingDirectory ( remoteFilePath ) ) { // not a directory or not accessible switch ( ftpClient . listFiles ( remoteFilePath ) . length ) { case 0 : throw new CitrusRuntimeException ( "Remote file path does not exist or is not accessible: " + remoteFilePath ) ; case 1 : return false ; default : throw new CitrusRuntimeException ( "Unexpected file type result for file path: " + remoteFilePath ) ; } } else { return true ; }
public class InstantiationUtil { /** * Checks , whether the class is an inner class that is not statically accessible . That is especially true for * anonymous inner classes . * @ param clazz The class to check . * @ return True , if the class is a non - statically accessible inner class . */ public static boolean isNonStaticInnerClass ( Class < ? > clazz ) { } }
if ( clazz . getEnclosingClass ( ) == null ) { // no inner class return false ; } else { // inner class if ( clazz . getDeclaringClass ( ) != null ) { // named inner class return ! Modifier . isStatic ( clazz . getModifiers ( ) ) ; } else { // anonymous inner class return true ; } }
public class AbstractTextExtractor { /** * Generates a new AbstractTokenizer object by using the provided tokenizer class . * @ return */ protected AbstractTokenizer generateTokenizer ( ) { } }
Class < ? extends AbstractTokenizer > tokenizer = parameters . getTokenizer ( ) ; if ( tokenizer == null ) { return null ; } try { return tokenizer . newInstance ( ) ; } catch ( InstantiationException | IllegalAccessException ex ) { throw new RuntimeException ( ex ) ; }
public class RecordAPI { /** * Removing features */ @ Trace ( dispatcher = true ) public void removeRecord ( Long recordId ) throws GloboDnsException { } }
NewRelic . setTransactionName ( null , "/globodns/removeRecord" ) ; if ( recordId == null ) { throw new GloboDnsException ( "Record id cannot be null" ) ; } GloboDnsRoot < Record > globoDnsRoot = this . delete ( "/records/" + recordId + ".json" , false ) ; if ( globoDnsRoot == null ) { throw new GloboDnsException ( "Invalid response" ) ; } return ;
public class HtmlElementUtils { /** * Parses locator string to identify the proper By subclass before calling Selenium * { @ link WebElement # findElement ( By ) } to locate the web element nested within the parent web element . * @ param locator * String that represents the means to locate this element ( could be id / name / xpath / css locator ) . * @ param parent * { @ link ParentTraits } object that represents the parent element for this element . * @ return { @ link RemoteWebElement } that represents the html element that was located using the locator provided . */ public static RemoteWebElement locateElement ( String locator , ParentTraits parent ) { } }
logger . entering ( new Object [ ] { locator , parent } ) ; Preconditions . checkArgument ( StringUtils . isNotBlank ( locator ) , INVALID_LOCATOR_ERR_MSG ) ; Preconditions . checkArgument ( parent != null , INVALID_PARENT_ERR_MSG ) ; RemoteWebElement element = parent . locateChildElement ( locator ) ; logger . exiting ( element ) ; return element ;
public class DocumentRootImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ @ Override public Object eGet ( int featureID , boolean resolve , boolean coreType ) { } }
switch ( featureID ) { case ColorPackage . DOCUMENT_ROOT__MIXED : if ( coreType ) return getMixed ( ) ; return ( ( FeatureMap . Internal ) getMixed ( ) ) . getWrapper ( ) ; case ColorPackage . DOCUMENT_ROOT__XMLNS_PREFIX_MAP : if ( coreType ) return getXMLNSPrefixMap ( ) ; else return getXMLNSPrefixMap ( ) . map ( ) ; case ColorPackage . DOCUMENT_ROOT__XSI_SCHEMA_LOCATION : if ( coreType ) return getXSISchemaLocation ( ) ; else return getXSISchemaLocation ( ) . map ( ) ; case ColorPackage . DOCUMENT_ROOT__BACKGROUND_COLOR : return getBackgroundColor ( ) ; case ColorPackage . DOCUMENT_ROOT__BORDER_COLOR : return getBorderColor ( ) ; case ColorPackage . DOCUMENT_ROOT__COLOR : return getColor ( ) ; } return super . eGet ( featureID , resolve , coreType ) ;
public class JedisSortedSet { /** * Get the cardinality of the set ( i . e . the number of elements ) . < br > * This is similar to { @ link # size ( ) } but is preferred since redis ' capacity supports long typed values * @ return the number of elements in the set */ public long cardinality ( ) { } }
return doWithJedis ( new JedisCallable < Long > ( ) { @ Override public Long call ( Jedis jedis ) { return jedis . zcard ( getKey ( ) ) ; } } ) ;
public class Navis { /** * Return bearing from wp1 to wp2 in degrees * @ param wp1 * @ param wp2 * @ return Degrees */ public static final double bearing ( WayPoint wp1 , WayPoint wp2 ) { } }
double lat1 = wp1 . getLatitude ( ) ; double lat2 = wp2 . getLatitude ( ) ; double lon1 = wp1 . getLongitude ( ) ; double lon2 = wp2 . getLongitude ( ) ; return bearing ( lat1 , lon1 , lat2 , lon2 ) ;
public class Right { /** * Supports column level checks */ static boolean containsAllColumns ( OrderedHashSet columnSet , Table table , boolean [ ] columnCheckList ) { } }
for ( int i = 0 ; i < columnCheckList . length ; i ++ ) { if ( columnCheckList [ i ] ) { if ( columnSet == null ) { return false ; } if ( columnSet . contains ( table . getColumn ( i ) . getName ( ) ) ) { continue ; } return false ; } } return true ;
public class CmsTinyMCEHelper { /** * Creates the TinyMCE toolbar config string from a Javascript config object . < p > * @ param jso a Javascript array of toolbar items * @ return the TinyMCE toolbar config string */ protected static String createContextMenu ( JavaScriptObject jso ) { } }
JsArray < ? > jsItemArray = jso . < JsArray < ? > > cast ( ) ; List < String > jsItemList = new ArrayList < String > ( ) ; for ( int i = 0 ; i < jsItemArray . length ( ) ; i ++ ) { jsItemList . add ( jsItemArray . get ( i ) . toString ( ) ) ; } return CmsTinyMceToolbarHelper . getContextMenuEntries ( jsItemList ) ;
public class VaultNotificationConfig { /** * A list of one or more events for which Amazon Glacier will send a notification to the specified Amazon SNS topic . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setEvents ( java . util . Collection ) } or { @ link # withEvents ( java . util . Collection ) } if you want to override the * existing values . * @ param events * A list of one or more events for which Amazon Glacier will send a notification to the specified Amazon SNS * topic . * @ return Returns a reference to this object so that method calls can be chained together . */ public VaultNotificationConfig withEvents ( String ... events ) { } }
if ( this . events == null ) { setEvents ( new java . util . ArrayList < String > ( events . length ) ) ; } for ( String ele : events ) { this . events . add ( ele ) ; } return this ;
public class Package { /** * Filters a dot at the end of the passed package name if present . * @ param pkgName * a package name * @ return a filtered package name */ private static String removeLastDot ( final String pkgName ) { } }
return pkgName . charAt ( pkgName . length ( ) - 1 ) == Characters . DOT ? pkgName . substring ( 0 , pkgName . length ( ) - 1 ) : pkgName ;
public class DefaultImageFormats { /** * Get all default formats supported by Fresco . * Does not include { @ link ImageFormat # UNKNOWN } . * @ return all supported default formats */ public static List < ImageFormat > getDefaultFormats ( ) { } }
if ( sAllDefaultFormats == null ) { List < ImageFormat > mDefaultFormats = new ArrayList < > ( 9 ) ; mDefaultFormats . add ( JPEG ) ; mDefaultFormats . add ( PNG ) ; mDefaultFormats . add ( GIF ) ; mDefaultFormats . add ( BMP ) ; mDefaultFormats . add ( ICO ) ; mDefaultFormats . add ( WEBP_SIMPLE ) ; mDefaultFormats . add ( WEBP_LOSSLESS ) ; mDefaultFormats . add ( WEBP_EXTENDED ) ; mDefaultFormats . add ( WEBP_EXTENDED_WITH_ALPHA ) ; mDefaultFormats . add ( WEBP_ANIMATED ) ; mDefaultFormats . add ( HEIF ) ; sAllDefaultFormats = ImmutableList . copyOf ( mDefaultFormats ) ; } return sAllDefaultFormats ;
public class RtfHeaderFooterGroup { /** * Set that this RtfHeaderFooterGroup should have a title page . If only * a header / footer for all pages exists , then it will be copied to the * first page as well . */ public void setHasTitlePage ( ) { } }
if ( this . mode == MODE_SINGLE ) { this . mode = MODE_MULTIPLE ; headerFirst = new RtfHeaderFooter ( this . document , headerAll , RtfHeaderFooter . DISPLAY_FIRST_PAGE ) ; headerFirst . setType ( this . type ) ; }
public class Utility { /** * Copy DOM tree to a SOAP tree . * @ param tree * @ param node * @ return The parent of the new child node . */ public static Node copyTreeToNode ( Node tree , Node node ) { } }
DOMResult result = new DOMResult ( node ) ; if ( Utility . copyTreeToResult ( tree , result ) ) return node . getLastChild ( ) ; else return null ; // Failure
public class KinesisStreamSourceConfigurationMarshaller { /** * Marshall the given parameter object . */ public void marshall ( KinesisStreamSourceConfiguration kinesisStreamSourceConfiguration , ProtocolMarshaller protocolMarshaller ) { } }
if ( kinesisStreamSourceConfiguration == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( kinesisStreamSourceConfiguration . getKinesisStreamARN ( ) , KINESISSTREAMARN_BINDING ) ; protocolMarshaller . marshall ( kinesisStreamSourceConfiguration . getRoleARN ( ) , ROLEARN_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class InputsInner { /** * Updates an existing input under an existing streaming job . This can be used to partially update ( ie . update one or two properties ) an input without affecting the rest the job or input definition . * @ param resourceGroupName The name of the resource group that contains the resource . You can obtain this value from the Azure Resource Manager API or the portal . * @ param jobName The name of the streaming job . * @ param inputName The name of the input . * @ param input An Input object . The properties specified here will overwrite the corresponding properties in the existing input ( ie . Those properties will be updated ) . Any properties that are set to null here will mean that the corresponding property in the existing input will remain the same and not change as a result of this PATCH operation . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the InputInner object */ public Observable < ServiceResponseWithHeaders < InputInner , InputsUpdateHeaders > > updateWithServiceResponseAsync ( String resourceGroupName , String jobName , String inputName , InputInner input ) { } }
if ( this . client . subscriptionId ( ) == null ) { throw new IllegalArgumentException ( "Parameter this.client.subscriptionId() is required and cannot be null." ) ; } if ( resourceGroupName == null ) { throw new IllegalArgumentException ( "Parameter resourceGroupName is required and cannot be null." ) ; } if ( jobName == null ) { throw new IllegalArgumentException ( "Parameter jobName is required and cannot be null." ) ; } if ( inputName == null ) { throw new IllegalArgumentException ( "Parameter inputName is required and cannot be null." ) ; } if ( input == null ) { throw new IllegalArgumentException ( "Parameter input is required and cannot be null." ) ; } if ( this . client . apiVersion ( ) == null ) { throw new IllegalArgumentException ( "Parameter this.client.apiVersion() is required and cannot be null." ) ; } Validator . validate ( input ) ; final String ifMatch = null ; return service . update ( this . client . subscriptionId ( ) , resourceGroupName , jobName , inputName , input , ifMatch , this . client . apiVersion ( ) , this . client . acceptLanguage ( ) , this . client . userAgent ( ) ) . flatMap ( new Func1 < Response < ResponseBody > , Observable < ServiceResponseWithHeaders < InputInner , InputsUpdateHeaders > > > ( ) { @ Override public Observable < ServiceResponseWithHeaders < InputInner , InputsUpdateHeaders > > call ( Response < ResponseBody > response ) { try { ServiceResponseWithHeaders < InputInner , InputsUpdateHeaders > clientResponse = updateDelegate ( response ) ; return Observable . just ( clientResponse ) ; } catch ( Throwable t ) { return Observable . error ( t ) ; } } } ) ;
public class Reflect { /** * Wrap an object returned from a method */ private static Reflect on ( Method method , Object object , Object ... args ) throws ReflectException { } }
try { accessible ( method ) ; if ( method . getReturnType ( ) == void . class ) { method . invoke ( object , args ) ; return on ( object ) ; } else { return on ( method . invoke ( object , args ) ) ; } } catch ( Exception e ) { throw new ReflectException ( e ) ; }
public class LineBufferedReader { /** * Read the rest of input from the reader , and get the lines from there . * This will consume the rest of the content of the reader . * @ param trimAndSkipEmpty If lines should be trimmed and empty lines should * be skipped . * @ return List of lines after the current . * @ throws IOException When failing to read stream to end . */ @ Nonnull public List < String > getRemainingLines ( boolean trimAndSkipEmpty ) throws IOException { } }
List < String > out = new ArrayList < > ( ) ; StringBuilder builder = new StringBuilder ( ) ; while ( bufferOffset <= bufferLimit || ! bufferLineEnd ) { if ( ! readNextChar ( ) ) { break ; } if ( lastChar == '\n' ) { String line = builder . toString ( ) ; if ( ! trimAndSkipEmpty || ! line . trim ( ) . isEmpty ( ) ) { out . add ( trimAndSkipEmpty ? line . trim ( ) : line ) ; } builder = new StringBuilder ( ) ; } else { builder . append ( ( char ) lastChar ) ; } } if ( builder . length ( ) > 0 ) { String line = builder . toString ( ) ; if ( ! trimAndSkipEmpty || ! line . trim ( ) . isEmpty ( ) ) { out . add ( builder . toString ( ) ) ; } } return out ;
public class DomainsInner {
    /**
     * Creates or updates a domain.
     *
     * @param resourceGroupName Name of the resource group to which the resource belongs.
     * @param domainName Name of the domain.
     * @param domain Domain registration information.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws DefaultErrorResponseException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the DomainInner object if successful.
     */
    public DomainInner update(String resourceGroupName, String domainName, DomainPatchResource domain) {
        // Synchronous facade: delegate to the async variant and block until
        // the single response arrives, then unwrap its body.
        return updateWithServiceResponseAsync(resourceGroupName, domainName, domain).toBlocking().single().body();
    }
}
public class NumberExpression { /** * Create a { @ code this * - 1 } expression * < p > Get the negation of this expression < / p > * @ return this * - 1 */ public NumberExpression < T > negate ( ) { } }
if ( negation == null ) { negation = Expressions . numberOperation ( getType ( ) , Ops . NEGATE , mixin ) ; } return negation ;
public class GeometryRendererImpl {
    /**
     * Redraws the geometry currently being edited: clears cached groups,
     * removes the geometry's group from the vector context, then draws the
     * geometry again from scratch.
     */
    public void redraw() {
        groups.clear();
        // Remove the old rendering before drawing the fresh one.
        mapWidget.getVectorContext().deleteGroup(editingService.getGeometry());
        draw(editingService.getGeometry());
    }
}
public class MnistImageFile { /** * Read the specified number of images from the current position , to a byte [ nImages ] [ rows * cols ] * Note that MNIST data set is stored as unsigned bytes ; this method returns signed bytes without conversion * ( i . e . , same bits , but requires conversion before use ) * @ param nImages Number of images */ public byte [ ] [ ] readImagesUnsafe ( int nImages ) throws IOException { } }
byte [ ] [ ] out = new byte [ nImages ] [ 0 ] ; for ( int i = 0 ; i < nImages ; i ++ ) { out [ i ] = new byte [ rows * cols ] ; read ( out [ i ] ) ; } return out ;
public class HtmlOutcomeTargetLink {
    /**
     * <p>Set the value of the <code>onclick</code> property.</p>
     *
     * @param onclick the JavaScript to run when the element is clicked
     */
    public void setOnclick(java.lang.String onclick) {
        // Store via the JSF state helper so the value participates in state saving.
        getStateHelper().put(PropertyKeys.onclick, onclick);
    }
}
public class PGQueryDriver {
    /**
     * Runs the query described by the builder against the datastore.
     *
     * @param builder  the query definition
     * @param keysOnly when {@code true}, only entity keys are fetched
     * @return the matching entities
     * @throws FalsePredicateException if the query predicate can never match
     */
    private List<Entity> generateResults(QueryBuilder<?> builder, boolean keysOnly) throws FalsePredicateException {
        return datastore.query(new Query(builder, keysOnly));
    }
}
public class RingBuffer { /** * Attempts to publish an event to the ring buffer . It handles claiming the next sequence , getting the current ( uninitialised ) event from the ring buffer * and publishing the claimed sequence after translation . Will return false if specified capacity was not available . * @ param translator The user specified translation for the event * @ return true if the value was published , false if there was insufficient capacity . */ public boolean tryPublishEvent ( EventTranslator < E > translator ) { } }
try { final long sequence = sequencer . tryNext ( ) ; translateAndPublish ( translator , sequence ) ; return true ; } catch ( InsufficientCapacityException e ) { return false ; }
public class SurefireMojoInterceptor { /** * This method is invoked before SurefirePlugin execute method . * @ param mojo * Surefire plugin . * @ throws Exception * Always MojoExecutionException . */ public static void execute ( Object mojo ) throws Exception { } }
// Note that the object can be an instance of // AbstractSurefireMojo . if ( ! ( isSurefirePlugin ( mojo ) || isFailsafePlugin ( mojo ) ) ) { return ; } // Check if the same object is already invoked . This may // happen ( in the future ) if execute method is in both // AbstractSurefire and SurefirePlugin classes . if ( isAlreadyInvoked ( mojo ) ) { return ; } // Check if surefire version is supported . checkSurefireVersion ( mojo ) ; // Check surefire configuration . checkSurefireConfiguration ( mojo ) ; try { // Update argLine . updateArgLine ( mojo ) ; // Update excludes . updateExcludes ( mojo ) ; // Update parallel . updateParallel ( mojo ) ; } catch ( Exception ex ) { // This exception should not happen in theory . throwMojoExecutionException ( mojo , "Unsupported surefire version" , ex ) ; }
public class ntpserver { /** * Use this API to delete ntpserver resources of given names . */ public static base_responses delete ( nitro_service client , String serverip [ ] ) throws Exception { } }
base_responses result = null ; if ( serverip != null && serverip . length > 0 ) { ntpserver deleteresources [ ] = new ntpserver [ serverip . length ] ; for ( int i = 0 ; i < serverip . length ; i ++ ) { deleteresources [ i ] = new ntpserver ( ) ; deleteresources [ i ] . serverip = serverip [ i ] ; } result = delete_bulk_request ( client , deleteresources ) ; } return result ;
public class SpringHttpClientImpl { public < T > T getObject ( final String url , final Class < T > response_type , final Object ... uri_variables ) { } }
_LOG_ . debug ( "HTTP GET: URL=" + url + ", response type=" + response_type + ", variables=" + Arrays . toString ( uri_variables ) ) ; HttpHeaders request_headers = new HttpHeaders ( ) ; request_headers . setContentType ( getObjectMediaType ( ) ) ; HttpEntity < ? > request_entity = new HttpEntity < Void > ( request_headers ) ; HttpEntity < T > response = null ; try { response = _newRestTemplate ( ) . exchange ( url , HttpMethod . GET , request_entity , response_type , uri_variables ) ; // throws RestClientException } catch ( Exception ex ) { throw new HttpException ( ex ) ; } T body = response . getBody ( ) ; return body ;
public class DatatypeConverter { /** * Read a long int from an input stream . * @ param is input stream * @ return long value */ public static final long getLong ( InputStream is ) throws IOException { } }
byte [ ] data = new byte [ 8 ] ; is . read ( data ) ; return getLong ( data , 0 ) ;
public class EncoderFeatureIndex {
    /**
     * Reads a CRF feature-template file, collecting unigram templates (lines
     * starting with 'U') and bigram templates (lines starting with 'B'), then
     * builds the combined template list.
     *
     * @param filename path of the template file (read as UTF-8)
     * @return {@code true} on success, {@code false} if reading or parsing failed
     */
    private boolean openTemplate(String filename) {
        InputStreamReader isr = null;
        try {
            isr = new InputStreamReader(IOUtil.newInputStream(filename), "UTF-8");
            BufferedReader br = new BufferedReader(isr);
            String line;
            while ((line = br.readLine()) != null) {
                // Skip blank lines, lines starting with a space, and '#' comments.
                if (line.length() == 0 || line.charAt(0) == ' ' || line.charAt(0) == '#') {
                    continue;
                } else if (line.charAt(0) == 'U') {
                    unigramTempls_.add(line.trim());
                } else if (line.charAt(0) == 'B') {
                    bigramTempls_.add(line.trim());
                } else {
                    System.err.println("unknown type: " + line);
                }
            }
            br.close();
            templs_ = makeTempls(unigramTempls_, bigramTempls_);
        } catch (Exception e) {
            // Best-effort close of the underlying reader; the wrapping
            // BufferedReader is only closed on the success path above.
            if (isr != null) {
                try {
                    isr.close();
                } catch (Exception e2) {
                }
            }
            e.printStackTrace();
            System.err.println("Error reading " + filename);
            return false;
        }
        return true;
    }
}
public class NameSpace { /** * Locate a variable and return the Variable object with optional recursion * through parent name spaces . * If this namespace is static , return only static variables . * @ param name the name * @ param recurse the recurse * @ return the Variable value or null if it is not defined * @ throws UtilEvalError the util eval error */ protected Variable getVariableImpl ( final String name , final boolean recurse ) throws UtilEvalError { } }
Variable var = null ; if ( this . variables . containsKey ( name ) ) return this . variables . get ( name ) ; else var = this . getImportedVar ( name ) ; // try parent if ( recurse && var == null && this . parent != null ) var = this . parent . getVariableImpl ( name , recurse ) ; return var ;
public class GitlabAPI { /** * Get group badge * @ param groupId The id of the group for which the badge should be retrieved * @ param badgeId The id of the badge that should be retrieved * @ return The badge with a given id * @ throws IOException on GitLab API call error */ public GitlabBadge getGroupBadge ( Integer groupId , Integer badgeId ) throws IOException { } }
String tailUrl = GitlabGroup . URL + "/" + groupId + GitlabBadge . URL + "/" + badgeId ; return retrieve ( ) . to ( tailUrl , GitlabBadge . class ) ;
public class Nfs3 {
    /**
     * Builds an NFSv3 RMDIR request for the named child of the given directory,
     * using this client's credential.
     *
     * @param parentDirectoryFileHandle file handle of the parent directory
     * @param name                      name of the directory entry to remove
     * @return the request, ready to be sent
     * @throws FileNotFoundException if the request cannot be constructed for the handle
     * @see com.emc.ecs.nfsclient.nfs.Nfs#makeRmdirRequest(byte[], java.lang.String)
     */
    public Nfs3RmdirRequest makeRmdirRequest(byte[] parentDirectoryFileHandle, String name) throws FileNotFoundException {
        return new Nfs3RmdirRequest(parentDirectoryFileHandle, name, _credential);
    }
}
public class CmsEditLoginView {
    /**
     * Saves the settings.<p>
     *
     * Writes both bound forms into fresh login-message beans, installs them on
     * the login manager, and persists the change to the system configuration.
     * Failures are logged rather than propagated.
     */
    protected void submit() {
        CmsLoginMessage loginMessage = new CmsLoginMessage();
        CmsLoginMessage beforeLoginMessage = new CmsLoginMessage();
        try {
            // Copy form field values into the beans; may throw on validation failure.
            m_formBinderAfter.writeBean(loginMessage);
            m_formBinderBefore.writeBean(beforeLoginMessage);
            OpenCms.getLoginManager().setLoginMessage(A_CmsUI.getCmsObject(), loginMessage);
            OpenCms.getLoginManager().setBeforeLoginMessage(A_CmsUI.getCmsObject(), beforeLoginMessage);
            // update the system configuration
            OpenCms.writeConfiguration(CmsVariablesConfiguration.class);
        } catch (Exception e) {
            LOG.error("Unable to save Login Message", e);
        }
    }
}
public class Flowable { /** * Asynchronously subscribes Subscribers to this Publisher on the specified { @ link Scheduler } . * If there is a { @ link # create ( FlowableOnSubscribe , BackpressureStrategy ) } type source up in the * chain , it is recommended to use { @ code subscribeOn ( scheduler , false ) } instead * to avoid same - pool deadlock because requests may pile up behind an eager / blocking emitter . * < img width = " 640 " height = " 305 " src = " https : / / raw . github . com / wiki / ReactiveX / RxJava / images / rx - operators / subscribeOn . png " alt = " " > * < dl > * < dt > < b > Backpressure : < / b > < / dt > * < dd > The operator doesn ' t interfere with backpressure which is determined by the source { @ code Publisher } ' s backpressure * behavior . < / dd > * < dt > < b > Scheduler : < / b > < / dt > * < dd > You specify which { @ link Scheduler } this operator will use . < / dd > * < / dl > * @ param scheduler * the { @ link Scheduler } to perform subscription actions on * @ return the source Publisher modified so that its subscriptions happen on the * specified { @ link Scheduler } * @ see < a href = " http : / / reactivex . io / documentation / operators / subscribeon . html " > ReactiveX operators documentation : SubscribeOn < / a > * @ see < a href = " http : / / www . grahamlea . com / 2014/07 / rxjava - threading - examples / " > RxJava Threading Examples < / a > * @ see # observeOn * @ see # subscribeOn ( Scheduler , boolean ) */ @ CheckReturnValue @ BackpressureSupport ( BackpressureKind . PASS_THROUGH ) @ SchedulerSupport ( SchedulerSupport . CUSTOM ) public final Flowable < T > subscribeOn ( @ NonNull Scheduler scheduler ) { } }
ObjectHelper . requireNonNull ( scheduler , "scheduler is null" ) ; return subscribeOn ( scheduler , ! ( this instanceof FlowableCreate ) ) ;
public class PatternFlattener { /** * Create a parameter filler if the given parameter is recognizable . * @ param parameter the given parameter * @ return the created parameter filler , or null if can not recognize the given parameter */ private static ParameterFiller parseParameter ( String parameter ) { } }
String wrappedParameter = "{" + parameter + "}" ; String trimmedParameter = parameter . trim ( ) ; ParameterFiller parameterFiller = parseDateParameter ( wrappedParameter , trimmedParameter ) ; if ( parameterFiller != null ) { return parameterFiller ; } parameterFiller = parseLevelParameter ( wrappedParameter , trimmedParameter ) ; if ( parameterFiller != null ) { return parameterFiller ; } parameterFiller = parseTagParameter ( wrappedParameter , trimmedParameter ) ; if ( parameterFiller != null ) { return parameterFiller ; } parameterFiller = parseMessageParameter ( wrappedParameter , trimmedParameter ) ; if ( parameterFiller != null ) { return parameterFiller ; } return null ;
public class DatabasesInner {
    /**
     * Exports a database to a bacpac.
     *
     * @param resourceGroupName The name of the resource group that contains the resource. You can obtain this value from the Azure Resource Manager API or the portal.
     * @param serverName The name of the server.
     * @param databaseName The name of the database to be exported.
     * @param parameters The required parameters for exporting a database.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the ImportExportResponseInner object if successful.
     */
    public ImportExportResponseInner beginExport(String resourceGroupName, String serverName, String databaseName, ExportRequest parameters) {
        // Synchronous facade over the async call: block on the single response
        // and unwrap its body.
        return beginExportWithServiceResponseAsync(resourceGroupName, serverName, databaseName, parameters).toBlocking().single().body();
    }
}
public class DruidSemiJoin {
    /**
     * Returns a copy of the left rel with a filter applied from the right-hand
     * side. This is an expensive operation since it actually executes the
     * right-hand side query: each distinct right-side key tuple becomes an
     * equality condition on the corresponding left-side expressions, combined
     * with OR into the left query's WHERE clause.
     *
     * @return the filtered left rel, or {@code null} if the right side produced
     *         no usable rows (in which case no filter can be built)
     */
    private DruidRel<?> getLeftRelWithFilter() {
        final DruidRel<?> druidRight = (DruidRel) this.right;
        // Build the set of acceptable key-value tuples from the right side.
        // 'valuess' deduplicates tuples so each distinct combination is
        // converted to a condition exactly once.
        final Set<List<String>> valuess = new HashSet<>();
        final List<RexNode> conditions = druidRight.runQuery().accumulate(
            new ArrayList<>(),
            new Accumulator<List<RexNode>, Object[]>() {
                int numRows;

                @Override
                public List<RexNode> accumulate(final List<RexNode> theConditions, final Object[] row) {
                    final List<String> values = new ArrayList<>(rightKeys.size());
                    for (int i : rightKeys) {
                        final Object value = row[i];
                        if (value == null) {
                            // NULLs are not supposed to match NULLs in a join. So ignore them.
                            continue;
                        }
                        final String stringValue = DimensionHandlerUtils.convertObjectToString(value);
                        values.add(stringValue);
                    }
                    if (valuess.add(values)) {
                        // Enforce the memory cap on distinct right-side tuples.
                        if (++numRows > maxSemiJoinRowsInMemory) {
                            throw new ResourceLimitExceededException(
                                StringUtils.format("maxSemiJoinRowsInMemory[%,d] exceeded", maxSemiJoinRowsInMemory)
                            );
                        }
                        final List<RexNode> subConditions = new ArrayList<>();
                        for (int i = 0; i < values.size(); i++) {
                            final String value = values.get(i);
                            // NULLs are not supposed to match NULLs in a join. So ignore them.
                            if (value != null) {
                                subConditions.add(
                                    getCluster().getRexBuilder().makeCall(
                                        SqlStdOperatorTable.EQUALS,
                                        leftExpressions.get(i),
                                        getCluster().getRexBuilder().makeLiteral(value)
                                    )
                                );
                            }
                            // NOTE(review): this add() sits INSIDE the per-value loop, so a
                            // partial AND is appended for every key column of the tuple.
                            // Upstream Druid performs this add after the loop (one AND per
                            // tuple) — confirm whether this placement is intentional.
                            theConditions.add(makeAnd(subConditions));
                        }
                    }
                    return theConditions;
                }
            }
        );
        valuess.clear();
        if (!conditions.isEmpty()) {
            // Add a filter to the left side, merging with any existing WHERE filter.
            final PartialDruidQuery leftPartialQuery = left.getPartialDruidQuery();
            final Filter whereFilter = leftPartialQuery.getWhereFilter();
            final Filter newWhereFilter;
            if (whereFilter != null) {
                newWhereFilter = whereFilter.copy(
                    whereFilter.getTraitSet(),
                    whereFilter.getInput(),
                    RexUtil.flatten(
                        getCluster().getRexBuilder(),
                        makeAnd(ImmutableList.of(whereFilter.getCondition(), makeOr(conditions)))
                    )
                );
            } else {
                newWhereFilter = LogicalFilter.create(
                    leftPartialQuery.getScan(),
                    makeOr(conditions) // already in flattened form
                );
            }
            // Rebuild the partial query, carrying over every stage that was present.
            PartialDruidQuery newPartialQuery = PartialDruidQuery.create(leftPartialQuery.getScan())
                .withWhereFilter(newWhereFilter)
                .withSelectProject(leftPartialQuery.getSelectProject())
                .withSelectSort(leftPartialQuery.getSelectSort());
            if (leftPartialQuery.getAggregate() != null) {
                newPartialQuery = newPartialQuery.withAggregate(leftPartialQuery.getAggregate());
            }
            if (leftPartialQuery.getHavingFilter() != null) {
                newPartialQuery = newPartialQuery.withHavingFilter(leftPartialQuery.getHavingFilter());
            }
            if (leftPartialQuery.getAggregateProject() != null) {
                newPartialQuery = newPartialQuery.withAggregateProject(leftPartialQuery.getAggregateProject());
            }
            if (leftPartialQuery.getSort() != null) {
                newPartialQuery = newPartialQuery.withSort(leftPartialQuery.getSort());
            }
            if (leftPartialQuery.getSortProject() != null) {
                newPartialQuery = newPartialQuery.withSortProject(leftPartialQuery.getSortProject());
            }
            return left.withPartialQuery(newPartialQuery);
        } else {
            return null;
        }
    }
}
public class Angular { /** * Returns an injector object that can be used to retrieve services . * See also < a href = " https : / / docs . angularjs . org / api / ng / function / angular . injector " > * https : / / docs . angularjs . org / api / ng / function / angular . injector < / a > */ @ SafeVarargs public static < M extends AbstractModule > Injector injector ( Class < M > ... modules ) { } }
List < String > names = new ArrayList < String > ( modules . length ) ; for ( Class < ? > klass : modules ) { names . add ( klass . getName ( ) ) ; } return ngo . injector ( JSArray . create ( names . toArray ( EMPTY_STRING_ARRAY ) ) ) ;
public class ListPipelineExecutionsResult { /** * A list of executions in the history of a pipeline . * @ param pipelineExecutionSummaries * A list of executions in the history of a pipeline . */ public void setPipelineExecutionSummaries ( java . util . Collection < PipelineExecutionSummary > pipelineExecutionSummaries ) { } }
if ( pipelineExecutionSummaries == null ) { this . pipelineExecutionSummaries = null ; return ; } this . pipelineExecutionSummaries = new java . util . ArrayList < PipelineExecutionSummary > ( pipelineExecutionSummaries ) ;
public class TaskManager {
    /**
     * Registers a newly incoming runtime task with the task manager.
     *
     * @param id               the ID of the task to register
     * @param jobConfiguration the job configuration that has been attached to the original job graph
     * @param environment      the environment of the task to be registered
     * @return the task to be started, or <code>null</code> if a task with the same ID was already running
     * @throws InsufficientResourcesException if the required resources cannot be provided
     * @throws IOException if registering the task with the channel manager fails
     */
    private Task createAndRegisterTask(final ExecutionVertexID id, final Configuration jobConfiguration,
            final RuntimeEnvironment environment) throws InsufficientResourcesException, IOException {
        if (id == null) {
            throw new IllegalArgumentException("Argument id is null");
        }
        if (environment == null) {
            throw new IllegalArgumentException("Argument environment is null");
        }
        // Task creation and registration must be atomic: the lookup of any
        // existing task and the insertion of the new one happen under one lock.
        Task task;
        synchronized (this) {
            final Task runningTask = this.runningTasks.get(id);
            boolean registerTask = true;
            if (runningTask == null) {
                task = new Task(id, environment, this);
            } else {
                if (runningTask instanceof Task) {
                    // Task is already running
                    return null;
                } else {
                    // There is already a replay task running, we will simply restart it
                    task = runningTask;
                    registerTask = false;
                }
            }
            if (registerTask) {
                // Register the task with the byte-buffered channel manager
                this.channelManager.register(task);
                boolean enableProfiling = false;
                if (this.profiler != null && jobConfiguration.getBoolean(ProfilingUtils.PROFILE_JOB_KEY, true)) {
                    enableProfiling = true;
                }
                // Register environment, input, and output gates for profiling
                if (enableProfiling) {
                    task.registerProfiler(this.profiler, jobConfiguration);
                }
                this.runningTasks.put(id, task);
            }
        }
        return task;
    }
}
public class AsyncWork {
    /**
     * Reset this AsyncWork point to reuse it.
     * This method removes any previous result, error or cancellation, and marks
     * this AsyncWork as blocked. Any previously registered listener is also removed.
     */
    public final void reset() {
        // Mark as blocked again so waiters will block until a new outcome arrives.
        unblocked = false;
        result = null;
        error = null;
        cancel = null;
        listenersInline = null;
    }
}
public class ON_MATCH { /** * < div color = ' red ' style = " font - size : 24px ; color : red " > < b > < i > < u > JCYPHER < / u > < / i > < / b > < / div > * < div color = ' red ' style = " font - size : 18px ; color : red " > < i > select a node or relation to be the source for copying all properties < / i > < / div > * < div color = ' red ' style = " font - size : 18px ; color : red " > < i > e . g . . . . < b > copyPropertiesFrom ( n ) < / b > . to ( m ) < / i > < / div > * < br / > */ public static CopyProperties < ModifyTerminal > copyPropertiesFrom ( JcElement source ) { } }
CopyProperties < ModifyTerminal > ret = ModifyFactory . copyPropertiesFromInDO ( source ) ; ASTNode an = APIObjectAccess . getAstNode ( ret ) ; an . setClauseType ( ClauseType . ON_MATCH_SET ) ; return ret ;
public class BlockThreadManager { /** * Notify this manager that it may stop as soon as all currently outstanding * requests are completed . Future calls to addRequest ( ) will clear this stop * state . */ synchronized public void stop ( ) { } }
process = false ; BlockEncodeRequest temp = new BlockEncodeRequest ( ) ; temp . setAll ( null , - 1 , - 1 , - 1 , - 1 , null ) ; int count = frameThreadMap . size ( ) ; for ( int i = 0 ; i < count ; i ++ ) { unassignedEncodeRequests . add ( temp ) ; }
public class Size { /** * size ( input ) * Return the size of an array or of an string */ @ Override public Object apply ( Object value , Object ... params ) { } }
if ( super . isArray ( value ) ) { return super . asArray ( value ) . length ; } if ( super . isString ( value ) ) { return super . asString ( value ) . length ( ) ; } if ( super . isNumber ( value ) ) { // we ' re only using 64 bit longs , no BigIntegers or the like . // So just return 8 ( the number of bytes in a long ) . return 8 ; } // boolean or nil return 0 ;
public class MessageSelectorBuilder { /** * Constructs a key value map from selector string representation . * @ return */ public Map < String , String > toKeyValueMap ( ) { } }
Map < String , String > valueMap = new HashMap < String , String > ( ) ; String [ ] tokens ; if ( selectorString . contains ( " AND" ) ) { String [ ] chunks = selectorString . split ( " AND" ) ; for ( String chunk : chunks ) { tokens = escapeEqualsFromXpathNodeTest ( chunk ) . split ( "=" ) ; valueMap . put ( unescapeEqualsFromXpathNodeTest ( tokens [ 0 ] . trim ( ) ) , tokens [ 1 ] . trim ( ) . substring ( 1 , tokens [ 1 ] . trim ( ) . length ( ) - 1 ) ) ; } } else { tokens = escapeEqualsFromXpathNodeTest ( selectorString ) . split ( "=" ) ; valueMap . put ( unescapeEqualsFromXpathNodeTest ( tokens [ 0 ] . trim ( ) ) , tokens [ 1 ] . trim ( ) . substring ( 1 , tokens [ 1 ] . trim ( ) . length ( ) - 1 ) ) ; } return valueMap ;
public class JsonMapper { /** * 把object转出clazz对象 , 比如POJO和Map互换 , 字符串转换成Date * @ param object 原对象 * @ param clazz 目标类型 * @ param < T > * @ return */ public < T > T convert ( Object object , Class < T > clazz ) { } }
if ( object == null ) { return null ; } return mapper . convertValue ( object , clazz ) ;
public class ImagePathTag {
    /**
     * Returns the image resource handler stored in the application scope by the
     * Jawr binary servlet.
     *
     * @param context the faces context
     * @return the image resource handler
     * @throws IllegalStateException if the Jawr binary servlet was never initialized
     */
    protected BinaryResourcesHandler getBinaryResourcesHandler(FacesContext context) {
        Object handler = context.getExternalContext().getApplicationMap().get(JawrConstant.BINARY_CONTEXT_ATTRIBUTE);
        if (handler == null) {
            throw new IllegalStateException("You are using a Jawr image tag while the Jawr Binary servlet has not been initialized. Initialization of Jawr Binary servlet either failed or never occurred.");
        }
        return (BinaryResourcesHandler) handler;
    }
}
public class ResourceUtils { /** * Build input stream resource from string value . * @ param value the value * @ param description the description * @ return the input stream resource */ public static InputStreamResource buildInputStreamResourceFrom ( final String value , final String description ) { } }
val reader = new StringReader ( value ) ; val is = new ReaderInputStream ( reader , StandardCharsets . UTF_8 ) ; return new InputStreamResource ( is , description ) ;
public class TCPProxyResponse { /** * Release the proxy connect write buffer . */ protected void releaseProxyWriteBuffer ( ) { } }
WsByteBuffer buffer = connLink . getWriteInterface ( ) . getBuffer ( ) ; if ( null != buffer ) { if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { Tr . debug ( tc , "Releasing proxy write buffer: " + buffer ) ; } buffer . release ( ) ; connLink . getWriteInterface ( ) . setBuffer ( null ) ; }
public class AbstractAggregatorImpl { /** * Formats and logs an exception log message including the request url , query args , the exception class * name and exception message . * @ param req * the http servlet request * @ param level * the logging level * @ param sourceMethod * the calling method name * @ param t * the exception */ void logException ( HttpServletRequest req , Level level , String sourceMethod , Throwable t ) { } }
if ( log . isLoggable ( level ) ) { StringBuffer sb = new StringBuffer ( ) ; // add the request URI and query args String uri = req . getRequestURI ( ) ; if ( uri != null ) { sb . append ( uri ) ; String queryArgs = req . getQueryString ( ) ; if ( queryArgs != null ) { sb . append ( "?" ) . append ( queryArgs ) ; // $ NON - NLS - 1 $ } } // append the exception class name sb . append ( ": " ) . append ( t . getClass ( ) . getName ( ) ) ; // $ NON - NLS - 1 $ // append the exception message sb . append ( " - " ) . append ( t . getMessage ( ) != null ? t . getMessage ( ) : "null" ) ; // $ NON - NLS - 1 $ / / $ NON - NLS - 2 $ log . logp ( level , AbstractAggregatorImpl . class . getName ( ) , sourceMethod , sb . toString ( ) , t ) ; }
public class CertificatesInner {
    /**
     * Gets a list of integration account certificates.
     *
     * @param resourceGroupName The resource group name.
     * @param integrationAccountName The integration account name.
     * @param top The number of items to be included in the result.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the PagedList&lt;IntegrationAccountCertificateInner&gt; object
     */
    public Observable<ServiceResponse<Page<IntegrationAccountCertificateInner>>> listByIntegrationAccountsWithServiceResponseAsync(final String resourceGroupName, final String integrationAccountName, final Integer top) {
        // Fetch the first page, then lazily chain each following page as long
        // as the service reports a next-page link.
        return listByIntegrationAccountsSinglePageAsync(resourceGroupName, integrationAccountName, top)
            .concatMap(new Func1<ServiceResponse<Page<IntegrationAccountCertificateInner>>, Observable<ServiceResponse<Page<IntegrationAccountCertificateInner>>>>() {
                @Override
                public Observable<ServiceResponse<Page<IntegrationAccountCertificateInner>>> call(ServiceResponse<Page<IntegrationAccountCertificateInner>> page) {
                    String nextPageLink = page.body().nextPageLink();
                    if (nextPageLink == null) {
                        // Last page: emit it and stop.
                        return Observable.just(page);
                    }
                    return Observable.just(page).concatWith(listByIntegrationAccountsNextWithServiceResponseAsync(nextPageLink));
                }
            });
    }
}
public class InvocationDispatcher {
    /**
     * This does a ZK lookup which apparently is full of fail if you run
     * TestRejoinEndToEnd. Kind of lame, but initializing this data immediately
     * is not critical; request routing works without it. Populate the map in
     * the background and it will be used to route requests to local replicas
     * once the info is available.
     *
     * @return the future of the background population task
     */
    public Future<?> asynchronouslyDetermineLocalReplicas() {
        return VoltDB.instance().getSES(false).submit(new Runnable() {
            @Override
            public void run() {
                /*
                 * Assemble a map of all local replicas that will be used to determine
                 * if single part reads can be delivered and executed at local replicas
                 */
                final int thisHostId = CoreUtils.getHostIdFromHSId(m_mailbox.getHSId());
                ImmutableMap.Builder<Integer, Long> localReplicas = ImmutableMap.builder();
                for (int partition : m_cartographer.getPartitions()) {
                    for (Long replica : m_cartographer.getReplicasForPartition(partition)) {
                        // Keep only replicas hosted on this node.
                        if (CoreUtils.getHostIdFromHSId(replica) == thisHostId) {
                            localReplicas.put(partition, replica);
                        }
                    }
                }
                // Atomically publish the completed map for the routing code to pick up.
                m_localReplicas.set(localReplicas.build());
            }
        });
    }
}
public class ZipkinExporterHandler {
    /**
     * Builds the local Zipkin endpoint for this process: the given service
     * name plus, when available, a site-local IP address discovered from the
     * host's network interfaces. Logic borrowed from
     * brave.internal.Platform.produceLocalEndpoint.
     *
     * @param serviceName the service name to report
     * @return the endpoint (IP omitted if none could be determined)
     */
    static Endpoint produceLocalEndpoint(String serviceName) {
        Endpoint.Builder builder = Endpoint.newBuilder().serviceName(serviceName);
        try {
            Enumeration<NetworkInterface> nics = NetworkInterface.getNetworkInterfaces();
            if (nics == null) {
                return builder.build();
            }
            while (nics.hasMoreElements()) {
                NetworkInterface nic = nics.nextElement();
                Enumeration<InetAddress> addresses = nic.getInetAddresses();
                while (addresses.hasMoreElements()) {
                    InetAddress address = addresses.nextElement();
                    if (address.isSiteLocalAddress()) {
                        builder.ip(address);
                        // NOTE(review): this break only exits the inner (per-NIC)
                        // loop; later NICs can overwrite the chosen IP. Confirm
                        // this matches the intended "first site-local" semantics.
                        break;
                    }
                }
            }
        } catch (Exception e) {
            // don't crash the caller if there was a problem reading nics.
            if (logger.isLoggable(Level.FINE)) {
                logger.log(Level.FINE, "error reading nics", e);
            }
        }
        return builder.build();
    }
}
public class ExtendedPseudoRandomGenerator { /** * Get a random point from an hypersphere ( center = 0 , radius = 1) * Code taken from Maurice Clerc ' s implementation * @ param dimension * @ return A pseudo random point */ public double [ ] randSphere ( int dimension ) { } }
int D = dimension ; double [ ] x = new double [ dimension ] ; double length = 0 ; for ( int i = 0 ; i < dimension ; i ++ ) { x [ i ] = 0.0 ; } // - - - - - Step 1 . Direction for ( int i = 0 ; i < D ; i ++ ) { x [ i ] = this . randNormal ( 0 , 1 ) ; length += length + x [ i ] * x [ i ] ; } length = Math . sqrt ( length ) ; // - - - - - Step 2 . Random radius double r = randomGenerator . nextDouble ( 0 , 1 ) ; for ( int i = 0 ; i < D ; i ++ ) { x [ i ] = r * x [ i ] / length ; } return x ;
public class PlyReaderFile { /** * Creates the next element reader . * @ return next element Reader . */ private ElementReader nextElementReaderInternal ( ) { } }
if ( nextElement >= elements . size ( ) ) { return null ; } try { ElementType type = elements . get ( nextElement ) ; switch ( format ) { case ASCII : return new AsciiElementReader ( type , getElementCount ( type . getName ( ) ) , asciiReader ) ; case BINARY_BIG_ENDIAN : case BINARY_LITTLE_ENDIAN : return new BinaryElementReader ( type , getElementCount ( type . getName ( ) ) , binaryStream ) ; default : throw new UnsupportedOperationException ( "PLY format " + format + " is currently not supported." ) ; } } finally { nextElement ++ ; }
public class BackgroundAbstract { /** * Create a sprite from its filename . * @ param media The sprite media . * @ return The sprite instance . * @ throws LionEngineException If media is < code > null < / code > or image cannot be read . */ protected static Sprite createSprite ( Media media ) { } }
final Sprite sprite = Drawable . loadSprite ( media ) ; sprite . load ( ) ; sprite . prepare ( ) ; return sprite ;
public class DatabasesInner { /** * Gets a list of databases in an elastic pool . * @ param resourceGroupName The name of the resource group that contains the resource . You can obtain this value from the Azure Resource Manager API or the portal . * @ param serverName The name of the server . * @ param elasticPoolName The name of the elastic pool . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the PagedList & lt ; DatabaseInner & gt ; object */ public Observable < ServiceResponse < Page < DatabaseInner > > > listByElasticPoolWithServiceResponseAsync ( final String resourceGroupName , final String serverName , final String elasticPoolName ) { } }
return listByElasticPoolSinglePageAsync ( resourceGroupName , serverName , elasticPoolName ) . concatMap ( new Func1 < ServiceResponse < Page < DatabaseInner > > , Observable < ServiceResponse < Page < DatabaseInner > > > > ( ) { @ Override public Observable < ServiceResponse < Page < DatabaseInner > > > call ( ServiceResponse < Page < DatabaseInner > > page ) { String nextPageLink = page . body ( ) . nextPageLink ( ) ; if ( nextPageLink == null ) { return Observable . just ( page ) ; } return Observable . just ( page ) . concatWith ( listByElasticPoolNextWithServiceResponseAsync ( nextPageLink ) ) ; } } ) ;
public class UriUtils { /** * Builds an URI from an URL ( with a handle for URLs not compliant with RFC 2396 ) . * @ param url an URL * @ return an URI * @ throws URISyntaxException if the URI is invalid and could not be repaired */ public static URI urlToUri ( URL url ) throws URISyntaxException { } }
URI uri ; try { // Possible failing step . uri = url . toURI ( ) ; } catch ( Exception e ) { // URL did not comply with RFC 2396 = > illegal non - escaped characters . try { uri = new URI ( url . getProtocol ( ) , url . getUserInfo ( ) , url . getHost ( ) , url . getPort ( ) , url . getPath ( ) , url . getQuery ( ) , url . getRef ( ) ) ; } catch ( Exception e1 ) { throw new URISyntaxException ( String . valueOf ( url ) , "Broken URL." ) ; } } uri = uri . normalize ( ) ; return uri ;
public class Proxy { /** * Generate a proxy class . Must call the checkProxyAccess method * to perform permission checks before calling this . */ private static Class < ? > getProxyClass0 ( ClassLoader loader , Class < ? > ... interfaces ) { } }
if ( interfaces . length > 65535 ) { throw new IllegalArgumentException ( "interface limit exceeded" ) ; } Class < ? > proxyClass = null ; /* collect interface names to use as key for proxy class cache */ String [ ] interfaceNames = new String [ interfaces . length ] ; // for detecting duplicates Set < Class < ? > > interfaceSet = new HashSet < > ( ) ; for ( int i = 0 ; i < interfaces . length ; i ++ ) { /* * Verify that the class loader resolves the name of this * interface to the same Class object . */ String interfaceName = interfaces [ i ] . getName ( ) ; Class < ? > interfaceClass = null ; try { interfaceClass = Class . forName ( interfaceName , false , loader ) ; } catch ( ClassNotFoundException e ) { } if ( interfaceClass != interfaces [ i ] ) { throw new IllegalArgumentException ( interfaces [ i ] + " is not visible from class loader" ) ; } /* * Verify that the Class object actually represents an * interface . */ if ( ! interfaceClass . isInterface ( ) ) { throw new IllegalArgumentException ( interfaceClass . getName ( ) + " is not an interface" ) ; } /* * Verify that this interface is not a duplicate . */ if ( interfaceSet . contains ( interfaceClass ) ) { throw new IllegalArgumentException ( "repeated interface: " + interfaceClass . getName ( ) ) ; } interfaceSet . add ( interfaceClass ) ; interfaceNames [ i ] = interfaceName ; } /* * Using string representations of the proxy interfaces as * keys in the proxy class cache ( instead of their Class * objects ) is sufficient because we require the proxy * interfaces to be resolvable by name through the supplied * class loader , and it has the advantage that using a string * representation of a class makes for an implicit weak * reference to the class . */ List < String > key = Arrays . asList ( interfaceNames ) ; /* * Find or create the proxy class cache for the class loader . */ Map < List < String > , Object > cache ; synchronized ( loaderToCache ) { cache = loaderToCache . 
get ( loader ) ; if ( cache == null ) { cache = new HashMap < > ( ) ; loaderToCache . put ( loader , cache ) ; } /* * This mapping will remain valid for the duration of this * method , without further synchronization , because the mapping * will only be removed if the class loader becomes unreachable . */ } /* * Look up the list of interfaces in the proxy class cache using * the key . This lookup will result in one of three possible * kinds of values : * null , if there is currently no proxy class for the list of * interfaces in the class loader , * the pendingGenerationMarker object , if a proxy class for the * list of interfaces is currently being generated , * or a weak reference to a Class object , if a proxy class for * the list of interfaces has already been generated . */ synchronized ( cache ) { /* * Note that we need not worry about reaping the cache for * entries with cleared weak references because if a proxy class * has been garbage collected , its class loader will have been * garbage collected as well , so the entire cache will be reaped * from the loaderToCache map . */ do { Object value = cache . get ( key ) ; if ( value instanceof Reference ) { proxyClass = ( Class < ? > ) ( ( Reference < ? > ) value ) . get ( ) ; } if ( proxyClass != null ) { // proxy class already generated : return it return proxyClass ; } else if ( value == pendingGenerationMarker ) { // proxy class being generated : wait for it try { cache . wait ( ) ; } catch ( InterruptedException e ) { /* * The class generation that we are waiting for should * take a small , bounded time , so we can safely ignore * thread interrupts here . */ } continue ; } else { /* * No proxy class for this list of interfaces has been * generated or is being generated , so we will go and * generate it now . Mark it as pending generation . */ cache . 
put ( key , pendingGenerationMarker ) ; break ; } } while ( true ) ; } try { String proxyPkg = null ; // package to define proxy class in /* * Record the package of a non - public proxy interface so that the * proxy class will be defined in the same package . Verify that * all non - public proxy interfaces are in the same package . */ for ( int i = 0 ; i < interfaces . length ; i ++ ) { int flags = interfaces [ i ] . getModifiers ( ) ; if ( ! Modifier . isPublic ( flags ) ) { String name = interfaces [ i ] . getName ( ) ; int n = name . lastIndexOf ( '.' ) ; String pkg = ( ( n == - 1 ) ? "" : name . substring ( 0 , n + 1 ) ) ; if ( proxyPkg == null ) { proxyPkg = pkg ; } else if ( ! pkg . equals ( proxyPkg ) ) { throw new IllegalArgumentException ( "non-public interfaces from different packages" ) ; } } } if ( proxyPkg == null ) { // if no non - public proxy interfaces , use the default package . proxyPkg = "" ; } { // Android - changed : Generate the proxy directly instead of calling // through to ProxyGenerator . /* J2ObjC removed . List < Method > methods = getMethods ( interfaces ) ; Collections . sort ( methods , ORDER _ BY _ SIGNATURE _ AND _ SUBTYPE ) ; validateReturnTypes ( methods ) ; List < Class < ? > [ ] > exceptions = deduplicateAndGetExceptions ( methods ) ; Method [ ] methodsArray = methods . toArray ( new Method [ methods . size ( ) ] ) ; Class < ? > [ ] [ ] exceptionsArray = exceptions . toArray ( new Class < ? > [ exceptions . size ( ) ] [ ] ) ; */ /* * Choose a name for the proxy class to generate . */ final long num ; synchronized ( nextUniqueNumberLock ) { num = nextUniqueNumber ++ ; } String proxyName = proxyPkg + proxyClassNamePrefix + num ; proxyClass = generateProxy ( proxyName , interfaces , loader ) ; } // add to set of all generated proxy classes , for isProxyClass proxyClasses . put ( proxyClass , null ) ; } finally { /* * We must clean up the " pending generation " state of the proxy * class cache entry somehow . 
If a proxy class was successfully * generated , store it in the cache ( with a weak reference ) ; * otherwise , remove the reserved entry . In all cases , notify * all waiters on reserved entries in this cache . */ synchronized ( cache ) { if ( proxyClass != null ) { cache . put ( key , new WeakReference < Class < ? > > ( proxyClass ) ) ; } else { cache . remove ( key ) ; } cache . notifyAll ( ) ; } } return proxyClass ;
public class ControllerHandler { /** * Validates that the declared content - types can actually be generated by Fathom . * @ param fathomContentTypes */ protected void validateProduces ( Collection < String > fathomContentTypes ) { } }
Set < String > ignoreProduces = new TreeSet < > ( ) ; ignoreProduces . add ( Produces . TEXT ) ; ignoreProduces . add ( Produces . HTML ) ; ignoreProduces . add ( Produces . XHTML ) ; for ( String produces : declaredProduces ) { if ( ignoreProduces . contains ( produces ) ) { continue ; } if ( ! fathomContentTypes . contains ( produces ) ) { throw new FatalException ( "{} declares @{}(\"{}\") but there is no registered ContentTypeEngine for that type!" , Util . toString ( method ) , Produces . class . getSimpleName ( ) , produces ) ; } }
public class DateTimeFormatterBuilder { /** * Appends the value of a date - time field to the formatter providing full * control over printing . * The value of the field will be output during a print . * If the value cannot be obtained then an exception will be thrown . * This method provides full control of the numeric formatting , including * zero - padding and the positive / negative sign . * The parser for a variable width value such as this normally behaves greedily , * accepting as many digits as possible . * This behavior can be affected by ' adjacent value parsing ' . * See { @ link # appendValue ( TemporalField , int ) } for full details . * In strict parsing mode , the minimum number of parsed digits is { @ code minWidth } . * In lenient parsing mode , the minimum number of parsed digits is one . * If this method is invoked with equal minimum and maximum widths and a sign style of * { @ code NOT _ NEGATIVE } then it delegates to { @ code appendValue ( TemporalField , int ) } . * In this scenario , the printing and parsing behavior described there occur . * @ param field the field to append , not null * @ param minWidth the minimum field width of the printed field , from 1 to 19 * @ param maxWidth the maximum field width of the printed field , from 1 to 19 * @ param signStyle the positive / negative output style , not null * @ return this , for chaining , not null * @ throws IllegalArgumentException if the widths are invalid */ public DateTimeFormatterBuilder appendValue ( TemporalField field , int minWidth , int maxWidth , SignStyle signStyle ) { } }
if ( minWidth == maxWidth && signStyle == SignStyle . NOT_NEGATIVE ) { return appendValue ( field , maxWidth ) ; } Jdk8Methods . requireNonNull ( field , "field" ) ; Jdk8Methods . requireNonNull ( signStyle , "signStyle" ) ; if ( minWidth < 1 || minWidth > 19 ) { throw new IllegalArgumentException ( "The minimum width must be from 1 to 19 inclusive but was " + minWidth ) ; } if ( maxWidth < 1 || maxWidth > 19 ) { throw new IllegalArgumentException ( "The maximum width must be from 1 to 19 inclusive but was " + maxWidth ) ; } if ( maxWidth < minWidth ) { throw new IllegalArgumentException ( "The maximum width must exceed or equal the minimum width but " + maxWidth + " < " + minWidth ) ; } NumberPrinterParser pp = new NumberPrinterParser ( field , minWidth , maxWidth , signStyle ) ; appendValue ( pp ) ; return this ;
public class BaseRenderEngine { /** * Render an input with text markup from a Reader and write the result to a writer * @ param in Reader to read the input from * @ param context Special context for the render engine , e . g . with * configuration information */ public String render ( Reader in , RenderContext context ) throws IOException { } }
StringBuffer buffer = new StringBuffer ( ) ; BufferedReader inputReader = new BufferedReader ( in ) ; String line ; while ( ( line = inputReader . readLine ( ) ) != null ) { buffer . append ( line ) ; } return render ( buffer . toString ( ) , context ) ;
public class WrappedByteBuffer { /** * Returns a hex dump of this buffer . * @ return the hex dump */ public String getHexDump ( ) { } }
if ( _buf . position ( ) == _buf . limit ( ) ) { return "empty" ; } StringBuilder hexDump = new StringBuilder ( ) ; for ( int i = _buf . position ( ) ; i < _buf . limit ( ) ; i ++ ) { hexDump . append ( Integer . toHexString ( _buf . get ( i ) & 0xFF ) ) . append ( ' ' ) ; } return hexDump . toString ( ) ;
public class InstanceAdminClient { /** * Updates an instance , and begins allocating or releasing resources as requested . The returned * [ long - running operation ] [ google . longrunning . Operation ] can be used to track the progress of * updating the instance . If the named instance does not exist , returns ` NOT _ FOUND ` . * < p > Immediately upon completion of this request : * < p > & # 42 ; For resource types for which a decrease in the instance ' s allocation has been * requested , billing is based on the newly - requested level . * < p > Until completion of the returned operation : * < p > & # 42 ; Cancelling the operation sets its metadata ' s * [ cancel _ time ] [ google . spanner . admin . instance . v1 . UpdateInstanceMetadata . cancel _ time ] , and begins * restoring resources to their pre - request values . The operation is guaranteed to succeed at * undoing all resource changes , after which point it terminates with a ` CANCELLED ` status . & # 42; * All other attempts to modify the instance are rejected . & # 42 ; Reading the instance via the API * continues to give the pre - request resource levels . * < p > Upon completion of the returned operation : * < p > & # 42 ; Billing begins for all successfully - allocated resources ( some types may have lower * than the requested levels ) . & # 42 ; All newly - reserved resources are available for serving the * instance ' s tables . & # 42 ; The instance ' s new resource levels are readable via the API . * < p > The returned [ long - running operation ] [ google . longrunning . Operation ] will have a name of the * format ` & lt ; instance _ name & gt ; / operations / & lt ; operation _ id & gt ; ` and can be used to track the * instance modification . The [ metadata ] [ google . longrunning . Operation . metadata ] field type is * [ UpdateInstanceMetadata ] [ google . spanner . admin . instance . v1 . UpdateInstanceMetadata ] . The * [ response ] [ google . longrunning . Operation . 
response ] field type is * [ Instance ] [ google . spanner . admin . instance . v1 . Instance ] , if successful . * < p > Authorization requires ` spanner . instances . update ` permission on resource * [ name ] [ google . spanner . admin . instance . v1 . Instance . name ] . * < p > Sample code : * < pre > < code > * try ( InstanceAdminClient instanceAdminClient = InstanceAdminClient . create ( ) ) { * Instance instance = Instance . newBuilder ( ) . build ( ) ; * FieldMask fieldMask = FieldMask . newBuilder ( ) . build ( ) ; * Instance response = instanceAdminClient . updateInstanceAsync ( instance , fieldMask ) . get ( ) ; * < / code > < / pre > * @ param instance Required . The instance to update , which must always include the instance name . * Otherwise , only fields mentioned in * [ ] [ google . spanner . admin . instance . v1 . UpdateInstanceRequest . field _ mask ] need be included . * @ param fieldMask Required . A mask specifying which fields in * [ ] [ google . spanner . admin . instance . v1 . UpdateInstanceRequest . instance ] should be updated . The * field mask must always be specified ; this prevents any future fields in * [ ] [ google . spanner . admin . instance . v1 . Instance ] from being erased accidentally by clients * that do not know about them . * @ throws com . google . api . gax . rpc . ApiException if the remote call fails */ @ BetaApi ( "The surface for long-running operations is not stable yet and may change in the future." ) public final OperationFuture < Instance , UpdateInstanceMetadata > updateInstanceAsync ( Instance instance , FieldMask fieldMask ) { } }
UpdateInstanceRequest request = UpdateInstanceRequest . newBuilder ( ) . setInstance ( instance ) . setFieldMask ( fieldMask ) . build ( ) ; return updateInstanceAsync ( request ) ;
public class UpdateIntegrationResponseRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( UpdateIntegrationResponseRequest updateIntegrationResponseRequest , ProtocolMarshaller protocolMarshaller ) { } }
if ( updateIntegrationResponseRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( updateIntegrationResponseRequest . getApiId ( ) , APIID_BINDING ) ; protocolMarshaller . marshall ( updateIntegrationResponseRequest . getContentHandlingStrategy ( ) , CONTENTHANDLINGSTRATEGY_BINDING ) ; protocolMarshaller . marshall ( updateIntegrationResponseRequest . getIntegrationId ( ) , INTEGRATIONID_BINDING ) ; protocolMarshaller . marshall ( updateIntegrationResponseRequest . getIntegrationResponseId ( ) , INTEGRATIONRESPONSEID_BINDING ) ; protocolMarshaller . marshall ( updateIntegrationResponseRequest . getIntegrationResponseKey ( ) , INTEGRATIONRESPONSEKEY_BINDING ) ; protocolMarshaller . marshall ( updateIntegrationResponseRequest . getResponseParameters ( ) , RESPONSEPARAMETERS_BINDING ) ; protocolMarshaller . marshall ( updateIntegrationResponseRequest . getResponseTemplates ( ) , RESPONSETEMPLATES_BINDING ) ; protocolMarshaller . marshall ( updateIntegrationResponseRequest . getTemplateSelectionExpression ( ) , TEMPLATESELECTIONEXPRESSION_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class Exceptions { /** * 获取组合本异常信息与底层异常信息的异常描述 , 适用于本异常为统一包装异常类 , 底层异常才是根本原因的情况 。 * @ param t * 异常 * @ return 组合异常信息 */ public static String getMessageWithRootCause ( final Throwable t ) { } }
return ExceptionUtils . getMessage ( t ) + " root cause is " + ExceptionUtils . getRootCauseMessage ( t ) ;
public class AnnotationTypeFieldWriterImpl { /** * { @ inheritDoc } */ public void addAnnotationDetailsTreeHeader ( TypeElement typeElement , Content memberDetailsTree ) { } }
if ( ! writer . printedAnnotationFieldHeading ) { memberDetailsTree . addContent ( writer . getMarkerAnchor ( SectionName . ANNOTATION_TYPE_FIELD_DETAIL ) ) ; Content heading = HtmlTree . HEADING ( HtmlConstants . DETAILS_HEADING , contents . fieldDetailsLabel ) ; memberDetailsTree . addContent ( heading ) ; writer . printedAnnotationFieldHeading = true ; }
public class DetectPolygonBinaryGrayRefine { /** * Returns a list of all polygons with an edge threshold above the minimum * @ param storageInfo Optional storage for info associated with polygons . Pruning is done so the info list * and the returned polygon list are not in synch with each other */ public List < Polygon2D_F64 > getPolygons ( @ Nullable List < Polygon2D_F64 > storage , @ Nullable List < DetectPolygonFromContour . Info > storageInfo ) { } }
if ( storage == null ) storage = new ArrayList < > ( ) ; else storage . clear ( ) ; if ( storageInfo != null ) storageInfo . clear ( ) ; List < DetectPolygonFromContour . Info > detections = detector . getFound ( ) . toList ( ) ; for ( int i = 0 ; i < detections . size ( ) ; i ++ ) { DetectPolygonFromContour . Info d = detections . get ( i ) ; if ( d . computeEdgeIntensity ( ) >= minimumRefineEdgeIntensity ) { storage . add ( d . polygon ) ; if ( storageInfo != null ) { storageInfo . add ( d ) ; } } } return storage ;
public class EventFeatureImpl { /** * Sets the queue . * @ param queue the new queue */ @ Inject public void setQueue ( EventQueue queue ) { } }
if ( epi == null ) { MessageToEventMapper mapper = new MessageToEventMapper ( ) ; mapper . setMaxContentLength ( maxContentLength ) ; epi = new EventProducerInterceptor ( mapper , queue ) ; }
public class BooleanInputStream { /** * Sets the vector element to true if the bit is set . */ public void getSetBits ( Type type , int batchSize , BlockBuilder builder ) throws IOException { } }
for ( int i = 0 ; i < batchSize ; i ++ ) { type . writeBoolean ( builder , nextBit ( ) ) ; }
public class LdapAdapter { /** * Get all the descendants of the given DN . * @ param DN * @ param level * @ return * @ throws WIMException */ private List < LdapEntry > getDescendants ( String DN , int level ) throws WIMException { } }
int scope = SearchControls . ONELEVEL_SCOPE ; if ( level == 0 ) { scope = SearchControls . SUBTREE_SCOPE ; } List < LdapEntry > descendants = new ArrayList < LdapEntry > ( ) ; Set < LdapEntry > ldapEntries = iLdapConn . searchEntities ( DN , "objectClass=*" , null , scope , null , null , false , false ) ; for ( Iterator < LdapEntry > iter = ldapEntries . iterator ( ) ; iter . hasNext ( ) ; ) { LdapEntry entry = iter . next ( ) ; descendants . add ( entry ) ; } return descendants ;
public class DefaultSession { /** * < b > INTERNAL USE ONLY < / b > - - switches the session to a new keyspace . * < p > This is called by the driver when a { @ code USE } query is successfully executed through the * session . Calling it from anywhere else is highly discouraged , as an invalid keyspace would * wreak havoc ( close all connections and make the session unusable ) . */ @ NonNull public CompletionStage < Void > setKeyspace ( @ NonNull CqlIdentifier newKeyspace ) { } }
return poolManager . setKeyspace ( newKeyspace ) ;
public class DistanceTable { /** * Returns the distance between the desired and supported locale , using the * default distance threshold . */ public int distance ( CLDR . Locale desired , CLDR . Locale supported ) { } }
return distance ( desired , supported , DEFAULT_THRESHOLD ) ;
public class MailUtil { /** * 发送基本HTML邮件 * @ param mailContent * @ return * @ throws MessagingException */ public static void sendMail ( MailContent mailContent ) throws MessagingException { } }
// 判断是否需要身份认证 Properties pro = mailContent . getProperties ( ) ; // 如果需要身份认证 , 则创建一个密码验证器 AuthEntity authEntity = new MailUtil ( ) . new AuthEntity ( mailContent . getAuthUsername ( ) , mailContent . getAuthPassword ( ) ) ; // 根据邮件会话属性和密码验证器构造一个发送邮件的session Session sendMailSession = Session . getDefaultInstance ( pro , authEntity ) ; // 根据session创建一个邮件消息 Message mailMessage = new MimeMessage ( sendMailSession ) ; // 创建邮件发送者地址 Address from = new InternetAddress ( mailContent . getFromAddress ( ) ) ; // 设置邮件消息的发送者 mailMessage . setFrom ( from ) ; // 创建邮件的接收者地址 , 并设置到邮件消息中 Address to = new InternetAddress ( mailContent . getToAddress ( ) ) ; // Message . RecipientType . TO属性表示接收者的类型为TO mailMessage . setRecipient ( Message . RecipientType . TO , to ) ; // 设置邮件消息的主题 mailMessage . setSubject ( mailContent . getSubject ( ) ) ; // 设置邮件消息发送的时间 mailMessage . setSentDate ( new Date ( ) ) ; // MiniMultipart类是一个容器类 , 包含MimeBodyPart类型的对象 Multipart mainPart = new MimeMultipart ( ) ; // 创建一个包含HTML内容的MimeBodyPart BodyPart html = new MimeBodyPart ( ) ; // 设置HTML内容 html . setContent ( mailContent . getContent ( ) , "text/html; charset=utf-8" ) ; mainPart . addBodyPart ( html ) ; // 将MiniMultipart对象设置为邮件内容 mailMessage . setContent ( mainPart ) ; // 发送邮件 Transport . send ( mailMessage ) ;