signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class SizeConstraintSet { /** * Specifies the parts of web requests that you want to inspect the size of . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setSizeConstraints ( java . util . Collection ) } or { @ link # withSizeConstraints ( java . util . Collection ) } if you * want to override the existing values . * @ param sizeConstraints * Specifies the parts of web requests that you want to inspect the size of . * @ return Returns a reference to this object so that method calls can be chained together . */ public SizeConstraintSet withSizeConstraints ( SizeConstraint ... sizeConstraints ) { } }
if ( this . sizeConstraints == null ) { setSizeConstraints ( new java . util . ArrayList < SizeConstraint > ( sizeConstraints . length ) ) ; } for ( SizeConstraint ele : sizeConstraints ) { this . sizeConstraints . add ( ele ) ; } return this ;
public class DruidNodeAccessor { /** * Convenient method for GETing . It is the responsibility of the * caller to call returnClient ( ) to ensure clean state of the pool . * @ param url * @ param reqHeaders * @ return * @ throws IOException */ public CloseableHttpResponse get ( String url , Map < String , String > reqHeaders ) throws IOException { } }
CloseableHttpClient req = getClient ( ) ; CloseableHttpResponse resp = null ; HttpGet get = new HttpGet ( url ) ; addHeaders ( get , reqHeaders ) ; resp = req . execute ( get ) ; return resp ;
public class WeekFields { /** * Return the singleton WeekFields associated with the * { @ code firstDayOfWeek } and { @ code minimalDays } . * @ return the singleton WeekFields for the firstDayOfWeek and minimalDays . * @ throws InvalidObjectException if the serialized object has invalid * values for firstDayOfWeek or minimalDays . */ private Object readResolve ( ) throws InvalidObjectException { } }
try { return WeekFields . of ( firstDayOfWeek , minimalDays ) ; } catch ( IllegalArgumentException iae ) { throw new InvalidObjectException ( "Invalid serialized WeekFields: " + iae . getMessage ( ) ) ; }
public class DescribeEgressOnlyInternetGatewaysResult { /** * Information about the egress - only internet gateways . * @ param egressOnlyInternetGateways * Information about the egress - only internet gateways . */ public void setEgressOnlyInternetGateways ( java . util . Collection < EgressOnlyInternetGateway > egressOnlyInternetGateways ) { } }
if ( egressOnlyInternetGateways == null ) { this . egressOnlyInternetGateways = null ; return ; } this . egressOnlyInternetGateways = new com . amazonaws . internal . SdkInternalList < EgressOnlyInternetGateway > ( egressOnlyInternetGateways ) ;
public class DescribeMaintenanceWindowExecutionsResult { /** * Information about the Maintenance Windows execution . * @ return Information about the Maintenance Windows execution . */ public java . util . List < MaintenanceWindowExecution > getWindowExecutions ( ) { } }
if ( windowExecutions == null ) { windowExecutions = new com . amazonaws . internal . SdkInternalList < MaintenanceWindowExecution > ( ) ; } return windowExecutions ;
public class CmsFileTable {
    /**
     * Updates the column widths.<p>
     *
     * The reason this is needed is that the Vaadin table does not support minimum widths for columns,
     * so expanding columns get squished when most of the horizontal space is used by other columns.
     * So we try to determine whether the expanded columns would have enough space, and if not, give them a
     * fixed width.
     *
     * @param estimatedSpace the estimated horizontal space available for the table.
     */
    public void updateColumnWidths(int estimatedSpace) {
        Object[] cols = m_fileTable.getVisibleColumns();
        List<CmsResourceTableProperty> expandCols = Lists.newArrayList();
        int nonExpandWidth = 0;
        int totalExpandMinWidth = 0;
        for (Object colObj : cols) {
            // Collapsed columns take no horizontal space; skip them entirely.
            if (m_fileTable.isColumnCollapsed(colObj)) {
                continue;
            }
            CmsResourceTableProperty prop = (CmsResourceTableProperty) colObj;
            if (0 < m_fileTable.getColumnExpandRatio(prop)) {
                // Expanding column: remember it and accumulate its minimum acceptable width.
                expandCols.add(prop);
                totalExpandMinWidth += getAlternativeWidthForExpandingColumns(prop);
            } else {
                // Fixed-width column: its configured width is always consumed.
                nonExpandWidth += prop.getColumnWidth();
            }
        }
        // If the available space cannot satisfy fixed widths plus the expanding
        // columns' minimums, pin each expanding column to its fallback width so
        // Vaadin does not squish it below usability.
        if (estimatedSpace < (totalExpandMinWidth + nonExpandWidth)) {
            for (CmsResourceTableProperty expandCol : expandCols) {
                m_fileTable.setColumnWidth(expandCol, getAlternativeWidthForExpandingColumns(expandCol));
            }
        }
    }
}
public class CPMeasurementUnitModelImpl { /** * Converts the soap model instance into a normal model instance . * @ param soapModel the soap model instance to convert * @ return the normal model instance */ public static CPMeasurementUnit toModel ( CPMeasurementUnitSoap soapModel ) { } }
if ( soapModel == null ) { return null ; } CPMeasurementUnit model = new CPMeasurementUnitImpl ( ) ; model . setUuid ( soapModel . getUuid ( ) ) ; model . setCPMeasurementUnitId ( soapModel . getCPMeasurementUnitId ( ) ) ; model . setGroupId ( soapModel . getGroupId ( ) ) ; model . setCompanyId ( soapModel . getCompanyId ( ) ) ; model . setUserId ( soapModel . getUserId ( ) ) ; model . setUserName ( soapModel . getUserName ( ) ) ; model . setCreateDate ( soapModel . getCreateDate ( ) ) ; model . setModifiedDate ( soapModel . getModifiedDate ( ) ) ; model . setName ( soapModel . getName ( ) ) ; model . setKey ( soapModel . getKey ( ) ) ; model . setRate ( soapModel . getRate ( ) ) ; model . setPrimary ( soapModel . isPrimary ( ) ) ; model . setPriority ( soapModel . getPriority ( ) ) ; model . setType ( soapModel . getType ( ) ) ; model . setLastPublishDate ( soapModel . getLastPublishDate ( ) ) ; return model ;
public class PutRemediationConfigurationsResult { /** * Returns a list of failed remediation batch objects . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setFailedBatches ( java . util . Collection ) } or { @ link # withFailedBatches ( java . util . Collection ) } if you want * to override the existing values . * @ param failedBatches * Returns a list of failed remediation batch objects . * @ return Returns a reference to this object so that method calls can be chained together . */ public PutRemediationConfigurationsResult withFailedBatches ( FailedRemediationBatch ... failedBatches ) { } }
if ( this . failedBatches == null ) { setFailedBatches ( new com . amazonaws . internal . SdkInternalList < FailedRemediationBatch > ( failedBatches . length ) ) ; } for ( FailedRemediationBatch ele : failedBatches ) { this . failedBatches . add ( ele ) ; } return this ;
public class SelectBase { /** * Setting liveSearchNormalize to < code > true < / code > allows for * accent - insensitive searching . < br > * < br > * Defaults to < code > false < / code > . * @ param liveSearchNormalize */ public void setLiveSearchNormalize ( final boolean liveSearchNormalize ) { } }
if ( liveSearchNormalize ) attrMixin . setAttribute ( LIVE_SEARCH_NORMALIZE , Boolean . toString ( true ) ) ; else attrMixin . removeAttribute ( LIVE_SEARCH_NORMALIZE ) ;
public class WebSiteManagementClientImpl { /** * Gets source control token . * Gets source control token . * @ param sourceControlType Type of source control * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the SourceControlInner object */ public Observable < SourceControlInner > getSourceControlAsync ( String sourceControlType ) { } }
return getSourceControlWithServiceResponseAsync ( sourceControlType ) . map ( new Func1 < ServiceResponse < SourceControlInner > , SourceControlInner > ( ) { @ Override public SourceControlInner call ( ServiceResponse < SourceControlInner > response ) { return response . body ( ) ; } } ) ;
public class CookieHelper { /** * Remove a cookie by setting the max age to 0. * @ param aHttpResponse * The HTTP response . May not be < code > null < / code > . * @ param aCookie * The cookie to be removed . May not be < code > null < / code > . */ public static void removeCookie ( @ Nonnull final HttpServletResponse aHttpResponse , @ Nonnull final Cookie aCookie ) { } }
ValueEnforcer . notNull ( aHttpResponse , "HttpResponse" ) ; ValueEnforcer . notNull ( aCookie , "aCookie" ) ; // expire the cookie ! aCookie . setMaxAge ( 0 ) ; aHttpResponse . addCookie ( aCookie ) ;
public class NaaccrErrorUtils { /** * Returns the error message for the given error code * @ param code error code * @ param msgValues optional values to plug into the message * @ return the corresponding error message , never null ( will throw an runtime exception if unknown code ) */ public static String getValidationError ( String code , Object ... msgValues ) { } }
if ( ! _MESSAGES . containsKey ( code ) ) throw new RuntimeException ( "Unknown code: " + code ) ; return fillMessage ( _MESSAGES . get ( code ) , msgValues ) ;
public class Recipes { /** * An array of custom recipe names to be run following a < code > setup < / code > event . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setSetup ( java . util . Collection ) } or { @ link # withSetup ( java . util . Collection ) } if you want to override the * existing values . * @ param setup * An array of custom recipe names to be run following a < code > setup < / code > event . * @ return Returns a reference to this object so that method calls can be chained together . */ public Recipes withSetup ( String ... setup ) { } }
if ( this . setup == null ) { setSetup ( new com . amazonaws . internal . SdkInternalList < String > ( setup . length ) ) ; } for ( String ele : setup ) { this . setup . add ( ele ) ; } return this ;
public class MetadataServiceListBuilder {
    /**
     * {@inheritDoc}
     *
     * Builds a new {@link MetadataServiceListImpl} for the given XML qualified name.
     */
    @Override
    public MetadataServiceList buildObject(String namespaceURI, String localName, String namespacePrefix) {
        // Pure factory delegation; no state is consulted.
        return new MetadataServiceListImpl(namespaceURI, localName, namespacePrefix);
    }
}
public class NumberUtil { /** * 比较大小 , 参数1 & lt ; 参数2 返回true * @ param bigNum1 数字1 * @ param bigNum2 数字2 * @ return 是否小于 * @ since 3,0.9 */ public static boolean isLess ( BigDecimal bigNum1 , BigDecimal bigNum2 ) { } }
Assert . notNull ( bigNum1 ) ; Assert . notNull ( bigNum2 ) ; return bigNum1 . compareTo ( bigNum2 ) < 0 ;
public class MessageQueue {
    /**
     * Drains up to {@code maxMessages} messages (or {@code maxSize} bytes) from the
     * queue, chaining them via {@code NatsMessage.next}, and returns the head of the
     * chain. Only safe in single-reader mode: if multiple readers are present, you
     * could get out of order message delivery.
     *
     * @param maxSize maximum accumulated byte size; values &lt; 0 appear to disable
     *        the size cap for follow-up messages — TODO confirm intended semantics
     * @param maxMessages maximum number of messages to chain together
     * @param timeout how long to wait for the first message when the queue is empty
     * @return head of the accumulated message chain, or null if not running / timed out
     * @throws InterruptedException if interrupted while waiting for the first message
     */
    NatsMessage accumulate(long maxSize, long maxMessages, Duration timeout) throws InterruptedException {
        if (!this.singleThreadedReader) {
            throw new IllegalStateException("Accumulate is only supported in single reader mode.");
        }

        if (!this.isRunning()) {
            return null;
        }

        // Try a non-blocking poll first; fall back to a timed wait for the first message.
        NatsMessage msg = this.queue.poll();

        if (msg == null) {
            msg = waitForTimeout(timeout);

            if (!this.isRunning() || (msg == null)) {
                return null;
            }
        }

        long size = msg.getSizeInBytes();

        // Single-message fast path: caps already met, account for one message and return.
        if (maxMessages <= 1 || size >= maxSize) {
            this.sizeInBytes.addAndGet(-size);
            this.length.decrementAndGet();
            signalIfNotEmpty();
            return msg;
        }

        long count = 1;
        NatsMessage cursor = msg;

        // Peek-then-poll loop: only dequeue the next message once we know it fits
        // under the byte cap, so a too-large message stays queued for the next call.
        while (cursor != null) {
            NatsMessage next = this.queue.peek();
            if (next != null) {
                long s = next.getSizeInBytes();

                if (maxSize < 0 || (size + s) < maxSize) { // keep going
                    size += s;
                    count++;

                    cursor.next = this.queue.poll();
                    cursor = cursor.next;

                    if (count == maxMessages) {
                        break;
                    }
                } else { // One more is too far
                    break;
                }
            } else { // Didn't meet max condition
                break;
            }
        }

        // Adjust the queue's aggregate counters by everything we removed.
        this.sizeInBytes.addAndGet(-size);
        this.length.addAndGet(-count);
        signalIfNotEmpty();

        return msg;
    }
}
public class CountersTable { /** * Add a list of counted nodes that were built in backwards document * order , or a list of counted nodes that are in forwards document * order . * @ param flist Vector of nodes built in forwards document order * @ param blist Vector of nodes built in backwards document order */ void appendBtoFList ( NodeSetDTM flist , NodeSetDTM blist ) { } }
int n = blist . size ( ) ; for ( int i = ( n - 1 ) ; i >= 0 ; i -- ) { flist . addElement ( blist . item ( i ) ) ; }
public class ParametricCfgParser { /** * Accumulates sufficient statistics for the production rules in { @ code tree } . * Each occurrence of a production rule increments the corresponding * sufficient statistics ( for the rule ) by { @ code weight } . * @ param tree * @ param nonterminalStatistics * @ param terminalStatistics * @ param nonterminalParameters * @ param terminalParameters * @ param weight */ private void accumulateSufficientStatistics ( CfgParseTree tree , SufficientStatistics nonterminalStatistics , SufficientStatistics terminalStatistics , SufficientStatistics nonterminalParameters , SufficientStatistics terminalParameters , double weight ) { } }
if ( tree == CfgParseTree . EMPTY ) { return ; } if ( tree . isTerminal ( ) ) { Assignment terminalRule = parentVar . outcomeArrayToAssignment ( tree . getRoot ( ) ) . union ( terminalVar . outcomeArrayToAssignment ( tree . getTerminalProductions ( ) ) ) . union ( ruleTypeVar . outcomeArrayToAssignment ( tree . getRuleType ( ) ) ) ; terminalFactor . incrementSufficientStatisticsFromAssignment ( terminalStatistics , terminalParameters , terminalRule , weight ) ; // System . out . println ( weight + " " + terminalRule ) ; } else { Assignment nonterminalRule = parentVar . outcomeArrayToAssignment ( tree . getRoot ( ) ) . union ( leftVar . outcomeArrayToAssignment ( tree . getLeft ( ) . getRoot ( ) ) ) . union ( rightVar . outcomeArrayToAssignment ( tree . getRight ( ) . getRoot ( ) ) ) . union ( ruleTypeVar . outcomeArrayToAssignment ( tree . getRuleType ( ) ) ) ; nonterminalFactor . incrementSufficientStatisticsFromAssignment ( nonterminalStatistics , terminalParameters , nonterminalRule , weight ) ; // System . out . println ( weight + " " + nonterminalRule ) ; accumulateSufficientStatistics ( tree . getLeft ( ) , nonterminalStatistics , terminalStatistics , nonterminalParameters , terminalParameters , weight ) ; accumulateSufficientStatistics ( tree . getRight ( ) , nonterminalStatistics , terminalStatistics , nonterminalParameters , terminalParameters , weight ) ; }
public class ExpandableButtonMenu { /** * Returns the menu button container . The first child of the container is * a TextView , the second - an ImageButton * @ param button one of { @ link MenuButton # LEFT } , { @ link MenuButton # MID } , { @ link MenuButton # RIGHT } */ public View getMenuButton ( MenuButton button ) { } }
switch ( button ) { case MID : return mMidContainer ; case LEFT : return mLeftContainer ; case RIGHT : return mRightContainer ; } return null ;
public class ZoomablePane {
    /**
     * Replies the property for the button that serves for starting the mouse scrolling.
     * The property is created lazily on first access and never accepts null (a null
     * assignment is coerced back to {@code DEFAULT_PAN_BUTTON}).
     *
     * @return the property.
     */
    public ObjectProperty<MouseButton> panButtonProperty() {
        if (this.panButton == null) {
            this.panButton = new StyleableObjectProperty<MouseButton>(DEFAULT_PAN_BUTTON) {
                @SuppressWarnings("synthetic-access")
                @Override
                protected void invalidated() {
                    // Reject null by snapping back to the default pan button.
                    final MouseButton button = get();
                    if (button == null) {
                        set(DEFAULT_PAN_BUTTON);
                    }
                }

                @Override
                public CssMetaData<ZoomablePane<?>, MouseButton> getCssMetaData() {
                    return StyleableProperties.PAN_BUTTON;
                }

                @Override
                public Object getBean() {
                    return ZoomablePane.this;
                }

                @Override
                public String getName() {
                    return PAN_BUTTON_PROPERTY;
                }
            };
        }
        return this.panButton;
    }
}
public class CommerceWishListPersistenceImpl {
    /**
     * Returns all the commerce wish lists where groupId = &#63; and userId = &#63; and defaultWishList = &#63;.
     *
     * @param groupId the group ID
     * @param userId the user ID
     * @param defaultWishList the default wish list
     * @return the matching commerce wish lists
     */
    @Override
    public List<CommerceWishList> findByG_U_D(long groupId, long userId, boolean defaultWishList) {
        // Delegates to the paginated overload with an unbounded range and no ordering.
        return findByG_U_D(groupId, userId, defaultWishList, QueryUtil.ALL_POS, QueryUtil.ALL_POS, null);
    }
}
public class BookKeeperJournalOutputStream { /** * Write the buffer to a new entry in a BookKeeper ledger or throw * an IOException if we are unable to successfully write to a quorum * of bookies * @ param buf Buffer to write from * @ param off Offset in the buffer * @ param len How many bytes to write , starting the offset * @ throws IOException If we are interrupted while writing to BookKeeper or * if we are unable to successfully add the entry to * a quorum of bookies . */ private synchronized void addBookKeeperEntry ( byte [ ] buf , int off , int len ) throws IOException { } }
try { ledger . addEntry ( buf , off , len ) ; if ( LOG . isDebugEnabled ( ) ) { LOG . debug ( "Last add pushed to ledger " + ledger . getId ( ) + " is " + ledger . getLastAddPushed ( ) ) ; LOG . debug ( "Last add confirmed to ledger " + ledger . getId ( ) + " is " + ledger . getLastAddConfirmed ( ) ) ; } } catch ( InterruptedException e ) { Thread . currentThread ( ) . interrupt ( ) ; throw new IOException ( "Interrupted writing to BookKeeper" , e ) ; } catch ( BKException e ) { throw new IOException ( "Failed to write to BookKeeper" , e ) ; }
public class TypeUtil {
    /**
     * Creates a mutable list from a varargs parameter array.
     * The generic list is created with the same type as the parameters.
     *
     * @param _entries list entries; a null array yields an empty list
     * @param <T> list type
     * @return a new mutable list containing the entries
     */
    @SafeVarargs
    public static <T> List<T> createList(T... _entries) {
        // A null varargs array (explicit null argument) maps to an empty list.
        return _entries == null
            ? new ArrayList<>()
            : new ArrayList<>(Arrays.asList(_entries));
    }
}
public class DeclarativeEnvironmentRecord { /** * FIXME : describe spec deviance */ public void assignMutableBinding ( ExecutionContext context , String name , Object value , boolean configurable , boolean strict ) { } }
final boolean exists = hasBinding ( context , name ) ; if ( ! exists ) { PropertyDescriptor desc = new PropertyDescriptor ( ) ; desc . setValue ( value ) ; desc . setConfigurable ( configurable ) ; this . mutableBindings . put ( name , desc ) ; } else { PropertyDescriptor desc = this . mutableBindings . get ( name ) ; desc . setValue ( value ) ; return ; }
public class Schedulers { /** * New thread pool scheduler . * @ param poolSize * the pool size * @ return the schedulers */ public static Schedulers newThreadPoolScheduler ( int poolSize ) { } }
createSingleton ( ) ; Schedulers . increaseNoOfSchedullers ( ) ; ThreadPoolTaskScheduler scheduler = new ThreadPoolTaskScheduler ( ) ; CustomizableThreadFactory factory = new CustomizableThreadFactory ( ) ; scheduler . initializeExecutor ( factory , new RejectedExecutionHandler ( ) { @ Override public void rejectedExecution ( Runnable r , ThreadPoolExecutor executor ) { System . out . println ( "asdsa" ) ; } } ) ; scheduler . setPoolSize ( poolSize ) ; instance . setCurrentScheduler ( scheduler ) ; return instance ;
public class H2HttpInboundLinkWrap {
    /**
     * Create Data frames to contain the http body payload.
     * The buffers passed in must not exceed the http2 max frame size.
     *
     * @param wsbb the payload buffers (may be null for an empty body)
     * @param length expected total number of bytes to write
     * @param isFinalWrite whether the last produced frame may carry end-of-stream
     * @return ArrayList<Frame> of FrameData objects containing the buffered payload data
     */
    public ArrayList<Frame> prepareBody(WsByteBuffer[] wsbb, int length, boolean isFinalWrite) {
        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
            Tr.debug(tc, "prepareBody entry: final write: " + isFinalWrite);
        }

        ArrayList<Frame> dataFrames = new ArrayList<Frame>();
        FrameData dataFrame;

        // No payload at all: emit a single empty frame so stream closure can still
        // be signalled.
        if (wsbb == null || length == 0) {
            // this empty data frame will have an end of stream flag set, signalling stream closure
            dataFrame = new FrameData(streamID, null, 0, isFinalWrite);
            dataFrames.add(dataFrame);
            return dataFrames;
        }

        boolean endStream = isFinalWrite;
        boolean lastData = false;
        int lengthWritten = 0;

        // if there's more than one buffer passed in we can't assume it will end the stream
        if (wsbb.length > 1) {
            endStream = false;
        }

        // create a data frame for every buffer in the array
        for (int i = 0; i < wsbb.length; i++) {
            WsByteBuffer b = wsbb[i];
            if (b == null) {
                continue;
            }
            lengthWritten += b.remaining();
            if (b.remaining() != 0) {
                if (lengthWritten >= length) {
                    // the current buffer meets the expected total write length,
                    // so we'll mark this as the last data frame on the stream
                    lastData = true;
                    endStream = lastData && isFinalWrite ? true : false;
                }
                dataFrame = new FrameData(streamID, b, b.remaining(), endStream);
                dataFrames.add(dataFrame);
                if (lastData) {
                    // Remaining buffers (if any) beyond the expected length are dropped.
                    if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                        Tr.debug(tc, "prepareBody exit : " + dataFrames);
                    }
                    return dataFrames;
                }
            }
        }
        return dataFrames;
    }
}
public class SVGUtil { /** * Create a SVG text element . * @ param document document to create in ( factory ) * @ param x first point x * @ param y first point y * @ param text Content of text element . * @ return New text element . */ public static Element svgText ( Document document , double x , double y , String text ) { } }
Element elem = SVGUtil . svgElement ( document , SVGConstants . SVG_TEXT_TAG ) ; SVGUtil . setAtt ( elem , SVGConstants . SVG_X_ATTRIBUTE , x ) ; SVGUtil . setAtt ( elem , SVGConstants . SVG_Y_ATTRIBUTE , y ) ; elem . setTextContent ( text ) ; return elem ;
public class NatureLibrary { /** * 根据字符串得道词性 . 没有就创建一个 * @ param natureStr * @ return */ public static Nature getNature ( String natureStr ) { } }
Nature nature = NATUREMAP . get ( natureStr ) ; if ( nature == null ) { nature = new Nature ( natureStr , FYI , FYI , YI ) ; NATUREMAP . put ( natureStr , nature ) ; return nature ; } return nature ;
public class TangoUtil {
    /**
     * Get the full device name for an attribute.
     *
     * @param attributeName attribute name, alias, or full path separated by '/'
     * @return the resolved device name
     * @throws DevFailed if the name has an unsupported number of path segments
     */
    public static String getfullDeviceNameForAttribute(final String attributeName) throws DevFailed {
        String result;
        final String[] fields = attributeName.split("/");
        final Database db = ApiUtil.get_db_obj();
        if (fields.length == 1) {
            // Single token: treat it as an attribute alias.
            result = db.get_attribute_from_alias(fields[0]);
        } else if (fields.length == 2) {
            // NOTE(review): only fields[0] is resolved as a device alias here;
            // fields[1] (presumably the attribute part) is discarded — confirm
            // this is the intended TANGO naming behavior.
            result = db.get_device_from_alias(fields[0]);
        } else if (fields.length == 4) {
            // Full "domain/family/member/attribute" path: drop the attribute part.
            result = fields[0] + "/" + fields[1] + "/" + fields[2];
        } else {
            // 3 tokens or more than 4 are rejected.
            throw DevFailedUtils.newDevFailed("TANGO_WRONG_DATA_ERROR", "cannot retrieve device name");
        }
        return result;
    }
}
public class AWSServiceCatalogClient { /** * Associates the specified principal ARN with the specified portfolio . * @ param associatePrincipalWithPortfolioRequest * @ return Result of the AssociatePrincipalWithPortfolio operation returned by the service . * @ throws InvalidParametersException * One or more parameters provided to the operation are not valid . * @ throws ResourceNotFoundException * The specified resource was not found . * @ throws LimitExceededException * The current limits of the service would have been exceeded by this operation . Decrease your resource use * or increase your service limits and retry the operation . * @ sample AWSServiceCatalog . AssociatePrincipalWithPortfolio * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / servicecatalog - 2015-12-10 / AssociatePrincipalWithPortfolio " * target = " _ top " > AWS API Documentation < / a > */ @ Override public AssociatePrincipalWithPortfolioResult associatePrincipalWithPortfolio ( AssociatePrincipalWithPortfolioRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeAssociatePrincipalWithPortfolio ( request ) ;
public class LockedObject { /** * helper of checkLocks ( ) . looks if the children are locked * @ param exclusive whether the new lock should be exclusive * @ param depth depth * @ return true if no locks at the children paths are forbidding a new lock */ private boolean checkChildren ( boolean exclusive , int depth ) { } }
if ( children == null ) { // a file return owner == null || ! ( this . exclusive || exclusive ) ; } // a folder if ( owner == null ) { // no owner , checking children if ( depth != 0 ) { boolean canLock = true ; int limit = children . length ; for ( int i = 0 ; i < limit ; i ++ ) { if ( ! children [ i ] . checkChildren ( exclusive , depth - 1 ) ) { canLock = false ; } } return canLock ; } // depth = = 0 - > we don ' t care for children return true ; } // there already is a owner return ! ( this . exclusive || exclusive ) ;
public class AmazonEC2Client { /** * Replaces an existing route within a route table in a VPC . You must provide only one of the following : internet * gateway or virtual private gateway , NAT instance , NAT gateway , VPC peering connection , network interface , or * egress - only internet gateway . * For more information , see < a * href = " https : / / docs . aws . amazon . com / AmazonVPC / latest / UserGuide / VPC _ Route _ Tables . html " > Route Tables < / a > in the * < i > Amazon Virtual Private Cloud User Guide < / i > . * @ param replaceRouteRequest * @ return Result of the ReplaceRoute operation returned by the service . * @ sample AmazonEC2 . ReplaceRoute * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / ec2-2016-11-15 / ReplaceRoute " target = " _ top " > AWS API * Documentation < / a > */ @ Override public ReplaceRouteResult replaceRoute ( ReplaceRouteRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeReplaceRoute ( request ) ;
public class UserInfoGridScreen {
    /**
     * SetupSFields method: configures the default view for the user-name field
     * of the user-info record at the next logical screen location.
     */
    public void setupSFields() {
        // Single chained configuration call: resolve record -> field -> default view,
        // placed at the next logical location with the default anchor and display.
        this.getRecord(UserInfo.USER_INFO_FILE).getField(UserInfo.USER_NAME).setupDefaultView(this.getNextLocation(ScreenConstants.NEXT_LOGICAL, ScreenConstants.ANCHOR_DEFAULT), this, ScreenConstants.DEFAULT_DISPLAY);
    }
}
public class CmsDateConverter { /** * Returns < code > true < / code > if an am is in the given date object < code > false < / code > otherwise . < p > * @ param date the date to check * @ return < code > true < / code > if an am is in the given Date object < code > false < / code > otherwise */ public static boolean isAm ( Date date ) { } }
String time = getTime ( date ) ; return time . toLowerCase ( ) . contains ( AM ) ;
public class CodeJamSession { /** * < p > Indiciates if the contest is currently active , * namely if competition is occuring at the current * time , or not . < / p > * @ return < tt > true < / tt > if the contest is active , < tt > false < / tt > otherwise . */ public boolean isActive ( ) { } }
final long now = System . currentTimeMillis ( ) ; final long start = values . getStart ( ) ; final long end = start + values . getLeft ( ) ; // TODO : Ensure predicate consistency . return now >= start && now <= end ;
public class RequestUtils { /** * Return if a given HTTP method results in a read or write request to a resource * GET = read * POST = write * PUT = write * DELETE = write * PATCH = write * OPTIONS = read * HEAD = read * @ param method The HTTP method * @ return read or write if HTTP method is found , blank otherwise */ public static String getOperation ( HttpString method ) { } }
String operation = "" ; if ( Methods . POST . equals ( method ) ) { operation = WRITE ; } else if ( Methods . PUT . equals ( method ) ) { operation = WRITE ; } else if ( Methods . DELETE . equals ( method ) ) { operation = WRITE ; } else if ( Methods . GET . equals ( method ) ) { operation = READ ; } else if ( Methods . PATCH . equals ( method ) ) { operation = WRITE ; } else if ( Methods . OPTIONS . equals ( method ) ) { operation = READ ; } else if ( Methods . HEAD . equals ( method ) ) { operation = READ ; } else { // ignore everything else } return operation ;
public class CallbackMethod {
    /**
     * Invokes the callback method, resolving each declared parameter from the
     * current CSV processing context.
     *
     * @param record the bean object being processed
     * @param csvContext the current CSV record context
     * @param bindingErrors the binding error holder
     * @param beanMapping the bean mapping information
     * @throws SuperCsvReflectionException if invoking the method fails
     */
    public void invoke(final Object record, final CsvContext csvContext, final CsvBindingErrors bindingErrors,
            final BeanMapping<?> beanMapping) {

        // Build the argument list: each parameter is matched by type, in the same
        // priority order as before (context, errors, groups array, validation
        // context, then the record itself); anything unrecognized becomes null.
        final Class<?>[] paramTypes = method.getParameterTypes();
        final Object[] paramValues = new Object[paramTypes.length];

        for (int i = 0; i < paramTypes.length; i++) {
            final Class<?> paramType = paramTypes[i];
            if (CsvContext.class.isAssignableFrom(paramType)) {
                paramValues[i] = csvContext;
            } else if (CsvBindingErrors.class.isAssignableFrom(paramType)) {
                paramValues[i] = bindingErrors;
            } else if (paramType.isArray() && Class.class.isAssignableFrom(paramType.getComponentType())) {
                paramValues[i] = beanMapping.getGroups();
            } else if (ValidationContext.class.isAssignableFrom(paramType)) {
                paramValues[i] = new ValidationContext<>(csvContext, beanMapping);
            } else if (beanMapping.getType().isAssignableFrom(paramType)) {
                paramValues[i] = record;
            } else {
                paramValues[i] = null;
            }
        }

        execute(record, paramValues);
    }
}
public class Handshaker {
    /**
     * Create a new read cipher and return it to caller.
     * The read cipher decrypts data written by the peer, so the client reads with
     * the server-write key material and vice versa. Key material is nulled out
     * after use so it cannot be reused or leaked.
     *
     * @return the new CipherBox for reading
     * @throws NoSuchAlgorithmException if the cipher cannot be instantiated
     */
    CipherBox newReadCipher() throws NoSuchAlgorithmException {
        BulkCipher cipher = cipherSuite.cipher;
        CipherBox box;
        if (isClient) {
            // Client reads what the server writes: use server-write key/IV.
            box = cipher.newCipher(protocolVersion, svrWriteKey, svrWriteIV,
                                   sslContext.getSecureRandom(), false);
            // Drop references to the key material once consumed.
            svrWriteKey = null;
            svrWriteIV = null;
        } else {
            // Server reads what the client writes: use client-write key/IV.
            box = cipher.newCipher(protocolVersion, clntWriteKey, clntWriteIV,
                                   sslContext.getSecureRandom(), false);
            clntWriteKey = null;
            clntWriteIV = null;
        }
        return box;
    }
}
public class AsciiTable { /** * Sets the left padding for all cells in the table . * @ param paddingLeft new padding , ignored if smaller than 0 * @ return this to allow chaining */ public AsciiTable setPaddingLeft ( int paddingLeft ) { } }
for ( AT_Row row : this . rows ) { if ( row . getType ( ) == TableRowType . CONTENT ) { row . setPaddingLeft ( paddingLeft ) ; } } return this ;
public class WebElementFinder { /** * Creates a new { @ link WebElementFinder } based on this { @ link WebElementFinder } using the specified element locator . * @ param theBy * locates the element to operate on * @ return the new { @ link FormInputHandler } instance */ public WebElementFinder by ( final By theBy ) { } }
Fields fields = new Fields ( this ) ; fields . by = theBy ; return new WebElementFinder ( fields ) ;
public class BaseUseAttributesAuthorizationGenerator { /** * Add profile roles from attributes . * @ param profile the profile * @ param ldapAttribute the ldap attribute * @ param prefix the prefix */ protected void addProfileRolesFromAttributes ( final CommonProfile profile , final LdapAttribute ldapAttribute , final String prefix ) { } }
ldapAttribute . getStringValues ( ) . forEach ( value -> profile . addRole ( prefix . concat ( value . toUpperCase ( ) ) ) ) ;
public class OWLNegativeDataPropertyAssertionAxiomImpl_CustomFieldSerializer {
    /**
     * Serializes the content of the object into the
     * {@link com.google.gwt.user.client.rpc.SerializationStreamWriter}.
     *
     * @param streamWriter the {@link com.google.gwt.user.client.rpc.SerializationStreamWriter} to write the
     *        object's content to
     * @param instance the object instance to serialize
     * @throws com.google.gwt.user.client.rpc.SerializationException
     *         if the serialization operation is not successful
     */
    @Override
    public void serializeInstance(SerializationStreamWriter streamWriter, OWLNegativeDataPropertyAssertionAxiomImpl instance) throws SerializationException {
        // Pure delegation to the shared static serialize helper.
        serialize(streamWriter, instance);
    }
}
public class JDBCConnection { /** * # ifdef JAVA4 */ public synchronized void setHoldability ( int holdability ) throws SQLException { } }
checkClosed ( ) ; switch ( holdability ) { case JDBCResultSet . HOLD_CURSORS_OVER_COMMIT : case JDBCResultSet . CLOSE_CURSORS_AT_COMMIT : break ; default : throw Util . invalidArgument ( ) ; } rsHoldability = holdability ;
public class SGraphSegment { /** * Add a user data in the data associated to this point . * @ param userData the user data to add . * @ return < code > true < / code > if the data was added ; otherwise < code > false < / code > . */ public boolean addUserData ( Object userData ) { } }
if ( this . userData == null ) { this . userData = new ArrayList < > ( ) ; } return this . userData . add ( userData ) ;
public class HCConversionSettings { /** * Change the HTML version . Note : this does NOT change the * { @ link XMLWriterSettings } ! * @ param eHTMLVersion * The HTML version to use . * @ return this */ @ Nonnull public HCConversionSettings setHTMLVersion ( @ Nonnull final EHTMLVersion eHTMLVersion ) { } }
ValueEnforcer . notNull ( eHTMLVersion , "HTMLVersion" ) ; m_eHTMLVersion = eHTMLVersion ; m_sHTMLNamespaceURI = eHTMLVersion . getNamespaceURI ( ) ; return this ;
public class AbstractListPreference { /** * Obtains the the entries of the list preference from a specific typed array . * @ param typedArray * The typed array , the entries should be obtained from , as an instance of the class * { @ link TypedArray } . The typed array may not be null */ private void obtainEntries ( @ NonNull final TypedArray typedArray ) { } }
CharSequence [ ] obtainedEntries = typedArray . getTextArray ( R . styleable . AbstractListPreference_android_entries ) ; if ( obtainedEntries != null ) { setEntries ( obtainedEntries ) ; }
public class StringEncoder {
    /**
     * Encodes the given byte array into a string that can be used by the SQLite database.
     * The database cannot handle NUL (0x00) and the quote character ''' (0x27). The encoding
     * escapes these characters with a reserved escape character (0x01), after applying a
     * per-array shift chosen to minimize the number of escapes required.
     * <p>
     * The encoded size of n input bytes is at most 1 + (n * 257) / 254, i.e. less than
     * 1.2% overhead for large n.
     *
     * @param a the byte array to be encoded; a null reference is handled as an empty array
     * @return the encoded bytes as a string. For a null or empty input a 1-char string with
     *         a bogus shift is returned, which {@code decode} maps back to an empty array.
     */
    public static String encode(byte[] a) {
        // Null/empty input: bogus shift, no data.
        if (a == null || a.length == 0) {
            return "x";
        }
        // Histogram of byte values, used below to pick the cheapest shift.
        int[] cnt = new int[256];
        for (byte b : a) {
            cnt[b & 0xff]++;
        }
        // Choose the shift minimizing the number of escapes. Bytes that would map onto
        // 0x00, 0x01 or the quote (0x27) after shifting are the ones needing escapes.
        int shift = 1;
        int nEscapes = a.length;
        for (int i = 1; i < 256; i++) {
            if (i == '\'') {
                continue; // the shift value itself may not be the quote character
            }
            int sum = cnt[i] + cnt[(i + 1) & 0xff] + cnt[(i + '\'') & 0xff];
            if (sum < nEscapes) {
                nEscapes = sum;
                shift = i;
                if (nEscapes == 0) {
                    break; // cannot become smaller
                }
            }
        }
        // First char records the shift; each escape contributes one extra char.
        // StringBuilder replaces the legacy synchronized StringBuffer.
        StringBuilder out = new StringBuilder(a.length + nEscapes + 1);
        out.append((char) shift);
        for (byte b : a) {
            // Apply the shift, then escape the three forbidden results.
            char c = (char) ((b - shift) & 0xff);
            if (c == 0) {            // forbidden NUL
                out.append((char) 1).append((char) 1);
            } else if (c == 1) {     // the escape character itself
                out.append((char) 1).append((char) 2);
            } else if (c == '\'') {  // forbidden quote
                out.append((char) 1).append((char) 3);
            } else {
                out.append(c);
            }
        }
        return out.toString();
    }
}
public class SpdDoubleAggregate { /** * Check data type and call super to remove data - synchronized in super */ public boolean remove ( SpdData data ) { } }
if ( data == null ) return false ; if ( data instanceof SpdDouble ) { return super . remove ( data ) ; } else { return false ; }
public class CollectionSchemaUpdate { /** * Returns a Map of DELETE operations . * @ return Map of DELETE operations which have a non - null default value specified */ public Map < String , DeleteOperation > getDeleteOperations ( ) { } }
Map < String , DeleteOperation > delOperations = new TreeMap < String , DeleteOperation > ( ) ; for ( Entry < String , IOperation > entry : collectionUpdateData . entrySet ( ) ) { String key = entry . getKey ( ) ; IOperation op = entry . getValue ( ) ; if ( op . getOperationType ( ) . equals ( Type . DELETE ) ) { delOperations . put ( key , ( DeleteOperation ) op ) ; } } return delOperations ;
public class COP { /** * Recompute the centroid of a set . * @ param centroid Scratch buffer * @ param relation Input data * @ param ids IDs to include */ private static void computeCentroid ( double [ ] centroid , Relation < ? extends NumberVector > relation , DBIDs ids ) { } }
Arrays . fill ( centroid , 0 ) ; int dim = centroid . length ; for ( DBIDIter it = ids . iter ( ) ; it . valid ( ) ; it . advance ( ) ) { NumberVector v = relation . get ( it ) ; for ( int i = 0 ; i < dim ; i ++ ) { centroid [ i ] += v . doubleValue ( i ) ; } } timesEquals ( centroid , 1. / ids . size ( ) ) ;
public class RocksDbUtils { /** * Builds RocksDb { @ link Options } . * @ param maxBackgroundThreads * @ param levelZeloFileNumCompactionTrigger * @ param writeBufferSize * @ param targetFileSizeBase * @ return */ public static Options buildOptions ( int maxBackgroundThreads , int levelZeloFileNumCompactionTrigger , long writeBufferSize , long targetFileSizeBase ) { } }
Options rocksOptions = new Options ( ) ; rocksOptions . setCreateIfMissing ( true ) . getEnv ( ) . setBackgroundThreads ( 1 , Env . FLUSH_POOL ) . setBackgroundThreads ( maxBackgroundThreads , Env . COMPACTION_POOL ) ; rocksOptions . setMaxBackgroundFlushes ( 1 ) . setMaxBackgroundCompactions ( maxBackgroundThreads ) ; rocksOptions . setWriteBufferSize ( writeBufferSize ) . setMinWriteBufferNumberToMerge ( 2 ) . setLevelZeroFileNumCompactionTrigger ( levelZeloFileNumCompactionTrigger ) . setTargetFileSizeBase ( targetFileSizeBase ) ; rocksOptions . setMemTableConfig ( new SkipListMemTableConfig ( ) ) ; // rocksOptions . setMemTableConfig ( new HashSkipListMemTableConfig ( ) ) ; // rocksOptions . setMemTableConfig ( new HashLinkedListMemTableConfig ( ) ) ; return rocksOptions ;
public class TldFernClassifier { /** * Renormalizes fern . numN to avoid overflow */ public void renormalizeN ( ) { } }
int targetMax = maxN / 20 ; for ( int i = 0 ; i < managers . length ; i ++ ) { TldFernManager m = managers [ i ] ; for ( int j = 0 ; j < m . table . length ; j ++ ) { TldFernFeature f = m . table [ j ] ; if ( f == null ) continue ; f . numN = targetMax * f . numN / maxN ; } } maxN = targetMax ;
public class Bean { /** * Introspects a { @ link Class } or an interface and learns about all * its { @ link Property } elements . * < p > If the target type has been previously analized then the { @ link Bean } * instance is retrieved from a thread - safe { @ link SoftReference } cache . * @ param beanClass the class or interface to analize * @ param < T > the bean ' s type . * @ return a { @ link Bean } object describing the target class or interface * @ throws NullPointerException if the given beanClass parameter is { @ code null } */ @ SuppressWarnings ( "unchecked" ) public static < T > Bean < T > forClass ( Class < T > beanClass ) { } }
if ( beanClass == null ) { throw new NullPointerException ( "Cannot instrospect a bean with a 'null' beanClass." ) ; } Bean bean ; SoftReference < Bean < ? > > softReference = beansCache . get ( beanClass ) ; if ( softReference == null ) { bean = new Bean ( beanClass ) ; beansCache . put ( beanClass , new SoftReference < Bean < ? > > ( bean ) ) ; } else { bean = softReference . get ( ) ; if ( bean == null ) { bean = new Bean ( beanClass ) ; beansCache . put ( beanClass , new SoftReference < Bean < ? > > ( bean ) ) ; } } return bean ;
public class CheckedDatastoreReaderWriter { /** * Only use this method if the results are small enough that gathering them in list is acceptable . * Otherwise use { @ link # query ( Query , IOConsumer ) } . * @ see DatastoreReaderWriter # run ( Query ) * @ throws IOException if the underlying client throws { @ link DatastoreException } */ < T > List < T > query ( Query < T > query ) throws IOException { } }
return call ( ( ) -> ImmutableList . copyOf ( rw . run ( query ) ) ) ;
public class ListBundlesResult { /** * A list of bundles . * @ param bundleList * A list of bundles . */ public void setBundleList ( java . util . Collection < BundleDetails > bundleList ) { } }
if ( bundleList == null ) { this . bundleList = null ; return ; } this . bundleList = new java . util . ArrayList < BundleDetails > ( bundleList ) ;
import java.util.*;
import java.util.stream.*;

public class FindAnagrams {
    /**
     * Finds all anagrams of a given string within a list of strings.
     * Examples:
     *   findAnagrams(List.of("bcda","abce","cbda","cbea","adcb"), "abcd") -> ["bcda","cbda","adcb"]
     *   findAnagrams(List.of("recitals","python"), "articles")            -> ["recitals"]
     *   findAnagrams(List.of("keep","abcdef","xyz"), "peek")              -> ["keep"]
     *
     * @param inputList   a list of strings in which to search for anagrams
     * @param inputString a string for which to find anagrams
     * @return the strings from inputList that are anagrams of inputString, in input order
     */
    public List<String> findAnagrams(List<String> inputList, String inputString) {
        // Compute the target character histogram once, instead of once per candidate.
        final Map<Character, Long> target = Counter(inputString);
        return inputList.stream()
                .filter(candidate -> target.equals(Counter(candidate)))
                .collect(Collectors.toList());
    }

    /** Builds a character -> occurrence-count histogram for the given string. */
    private Map<Character, Long> Counter(String str) {
        return str.chars()
                .mapToObj(c -> (char) c)
                .collect(Collectors.groupingBy(character -> character, Collectors.counting()));
    }
}
public class AbstractVersionIdentifier {
    /**
     * This method performs the part of {@link #compareTo(VersionIdentifier)} for linear and
     * optional attributes like {@link #getTimestamp()} or {@link #getRevision()}.
     *
     * @param <T>           the generic type of the {@link Comparable} value
     * @param currentResult the result accumulated from previously compared attributes
     * @param thisValue     the value of this {@link VersionIdentifier} (may be null = absent)
     * @param otherValue    the value of the other {@link VersionIdentifier} (may be null = absent)
     * @param otherVersion  the {@link VersionIdentifier} to compare to
     * @return the updated comparison result
     */
    private <T extends Comparable<T>> int compareToLinear(int currentResult, T thisValue, T otherValue,
            VersionIdentifier otherVersion) {
        // Incomparability is sticky: once established it propagates through every later attribute.
        if (currentResult == COMPARE_TO_INCOMPARABLE) {
            return COMPARE_TO_INCOMPARABLE;
        }
        int result = currentResult;
        if (thisValue != null) {
            if (otherValue != null) {
                int diff = thisValue.compareTo(otherValue);
                if (result == 0) {
                    // No ordering decided yet by previous attributes.
                    // NOTE(review): two differing non-snapshot versions become incomparable here -
                    // presumably intentional (released versions must agree on linear attributes);
                    // confirm against the compareTo() contract.
                    if ((diff != 0) && (!isSnapshot()) && (!otherVersion.isSnapshot())) {
                        return COMPARE_TO_INCOMPARABLE;
                    }
                    // NOTE(review): when diff == 0 this still sets STRICT_SUCCESSOR rather than
                    // leaving result at 0 - verify this is the intended tie behavior.
                    if (diff < 0) {
                        result = COMPARE_TO_STRICT_PREDECESSOR;
                    } else {
                        result = COMPARE_TO_STRICT_SUCCESSOR;
                    }
                } else if (result < 0) {
                    // Previous attributes said this < other; a greater value here contradicts that.
                    if (diff > 0) {
                        return COMPARE_TO_INCOMPARABLE;
                    }
                } else {
                    // Previous attributes said this > other; a smaller value here contradicts that.
                    if (diff < 0) {
                        return COMPARE_TO_INCOMPARABLE;
                    }
                }
            }
            // If otherValue is null the attribute is treated as absent on the other side and
            // result is left unchanged - TODO confirm this asymmetry is intended.
        }
        return result;
    }
}
public class FormDataParser {
    /**
     * Get a child JSON array from an incoming JSON object, creating and attaching it
     * if it does not exist yet.
     *
     * @param object the incoming object to look inside
     * @param key    the child node we are looking for
     * @return the child array found or created
     * @throws IOException if the existing child is not an array
     */
    private static JSONArray getArray(JsonObject object, String key) throws IOException {
        if (!object.containsKey(key)) {
            // No child yet: create, attach and return a fresh array.
            JSONArray created = new JSONArray();
            object.put(key, created);
            return created;
        }
        final Object existing = object.get(key);
        if (existing instanceof JSONArray) {
            return (JSONArray) existing;
        }
        throw new IOException("Invalid field structure, '" + key
                + "' expected to be an array, but incompatible data type already present.");
    }
}
public class AvPairs { /** * Remove all occurances of the given type * @ param pairs * @ param type */ public static void remove ( List < AvPair > pairs , int type ) { } }
Iterator < AvPair > it = pairs . iterator ( ) ; while ( it . hasNext ( ) ) { AvPair p = it . next ( ) ; if ( p . getType ( ) == type ) { it . remove ( ) ; } }
public class AWSIotClient { /** * Creates a dynamic thing group . * @ param createDynamicThingGroupRequest * @ return Result of the CreateDynamicThingGroup operation returned by the service . * @ throws InvalidRequestException * The request is not valid . * @ throws ResourceAlreadyExistsException * The resource already exists . * @ throws ResourceNotFoundException * The specified resource does not exist . * @ throws ThrottlingException * The rate exceeds the limit . * @ throws InternalFailureException * An unexpected error has occurred . * @ throws InvalidQueryException * The query is invalid . * @ throws LimitExceededException * A limit has been exceeded . * @ sample AWSIot . CreateDynamicThingGroup */ @ Override public CreateDynamicThingGroupResult createDynamicThingGroup ( CreateDynamicThingGroupRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeCreateDynamicThingGroup ( request ) ;
public class RowKeyUtils { /** * Constructs a row key when the row ' s shard ID is already known , which is rare . Generally this is used for * range queries to construct the lower or upper bound for a query , so it doesn ' t necessarily need to produce * a valid row key . */ static ByteBuffer getRowKeyRaw ( int shardId , long tableUuid , byte [ ] contentKeyBytes ) { } }
checkArgument ( shardId >= 0 && shardId < 256 ) ; // Assemble a single array which is " 1 byte shard id + 8 byte table uuid + n - byte content key " . ByteBuffer rowKey = ByteBuffer . allocate ( 9 + contentKeyBytes . length ) ; rowKey . put ( ( byte ) shardId ) ; rowKey . putLong ( tableUuid ) ; rowKey . put ( contentKeyBytes ) ; rowKey . flip ( ) ; return rowKey ;
public class StringUtils { /** * Concatenates the array of Strings into a single String value delimited by the specified delimiter . * @ param values an array of Strings to concatenate . * @ param delimiter the String used as the delimiter separating the String values from the array . * @ return a single String value containing all Strings from the array concatenated by the specified delimiter . * @ throws NullPointerException if the String array is null . * @ see java . lang . StringBuilder */ public static String concat ( String [ ] values , String delimiter ) { } }
Assert . notNull ( values , "The array of String values to concatenate cannot be null!" ) ; StringBuilder buffer = new StringBuilder ( ) ; for ( String value : values ) { buffer . append ( buffer . length ( ) > 0 ? delimiter : EMPTY_STRING ) ; buffer . append ( value ) ; } return buffer . toString ( ) ;
public class CompactorOutputCommitter {
    /**
     * Commits the task, moving files to their final committed location by delegating to
     * {@link FileOutputCommitter} to perform the actual moving. First, renames the files to
     * include the count of records contained within the file and a timestamp, in the form
     * {recordCount}.{timestamp}.avro. Then, the files are moved to their committed location.
     */
    @Override
    public void commitTask(TaskAttemptContext context) throws IOException {
        Path workPath = getWorkPath();
        FileSystem fs = workPath.getFileSystem(context.getConfiguration());
        if (fs.exists(workPath)) {
            // Prefer the reducer's record counter; a zero means this was a map-only job.
            long recordCount = getRecordCountFromCounter(context, RecordKeyDedupReducerBase.EVENT_COUNTER.RECORD_COUNT);
            String fileNamePrefix;
            if (recordCount == 0) {
                // recordCount == 0 indicates that it is a map-only, non-dedup job, and thus record count should
                // be obtained from mapper counter.
                fileNamePrefix = CompactionRecordCountProvider.M_OUTPUT_FILE_PREFIX;
                recordCount = getRecordCountFromCounter(context, RecordKeyMapperBase.EVENT_COUNTER.RECORD_COUNT);
            } else {
                fileNamePrefix = CompactionRecordCountProvider.MR_OUTPUT_FILE_PREFIX;
            }
            // Target file name embeds the prefix, record count and (per provider) a timestamp.
            String fileName = CompactionRecordCountProvider.constructFileName(fileNamePrefix,
                    "." + compactionFileExtension, recordCount);
            for (FileStatus status : fs.listStatus(workPath, new PathFilter() {
                @Override
                public boolean accept(Path path) {
                    // Only rename files carrying the compaction output extension.
                    return FilenameUtils.isExtension(path.getName(), compactionFileExtension);
                }
            })) {
                // NOTE(review): every matching file is renamed to the same target name; with more
                // than one match the later rename could fail or clobber - presumably each task
                // produces exactly one output file; confirm upstream.
                Path newPath = new Path(status.getPath().getParent(), fileName);
                LOG.info(String.format("Renaming %s to %s", status.getPath(), newPath));
                fs.rename(status.getPath(), newPath);
            }
        }
        // Delegate the actual move to the committed location.
        super.commitTask(context);
    }
}
public class MappingFilterLexer {
    /**
     * $ANTLR start "DASH"
     * Lexer rule for the DASH token: matches a single '-' character.
     * (ANTLR-generated code; regenerate from the grammar rather than editing by hand.)
     */
    public final void mDASH() throws RecognitionException {
        try {
            int _type = DASH;
            int _channel = DEFAULT_TOKEN_CHANNEL;
            // Grammar source: MappingFilter.g:130:5 : ( '-' )
            // Grammar source: MappingFilter.g:130:16 : '-'
            {
                match('-');
            }
            state.type = _type;
            state.channel = _channel;
        } finally {
            // do for sure before leaving
        }
    }
}
public class Seb { /** * Constructs a new instance of { @ link SebEvent } subclass with given context * and local time . * @ param eventCls * The event class to construct * @ param context * The context to use * @ return The event instance */ @ SuppressWarnings ( "unchecked" ) public synchronized < T extends SebEvent > T constructEvent ( Class < T > eventCls , SebContext context ) { } }
try { return ( T ) eventCls . getConstructor ( ) . newInstance ( ) . with ( context , LocalDateTime . now ( ) ) ; } catch ( Exception e ) { throw new EventConstructException ( "Unable to construct event " + eventCls . getName ( ) , e ) ; }
public class JSONViewerActivity { /** * Starts the activity with a json object . * @ param context The context to start the activity . * @ param jsonObject The json object . */ public static void startActivity ( @ NonNull Context context , @ Nullable JSONObject jsonObject ) { } }
Intent intent = new Intent ( context , JSONViewerActivity . class ) ; Bundle bundle = new Bundle ( ) ; if ( jsonObject != null ) { bundle . putString ( JSON_OBJECT_STATE , jsonObject . toString ( ) ) ; } intent . putExtras ( bundle ) ; context . startActivity ( intent ) ;
public class CeylonInstall { /** * Determines dependencies from the Maven project model . * @ param proj The Maven project * @ return String of dependency lines * @ throws MojoExecutionException In case the dependency version could not be determined */ String calculateDependencies ( final MavenProject proj ) throws MojoExecutionException { } }
Module module = new Module ( new ModuleIdentifier ( CeylonUtil . ceylonModuleBaseName ( proj . getGroupId ( ) , proj . getArtifactId ( ) ) , proj . getVersion ( ) , false , false ) ) ; for ( Dependency dep : proj . getDependencies ( ) ) { if ( dep . getVersion ( ) != null && ! "" . equals ( dep . getVersion ( ) ) ) { if ( ! "test" . equals ( dep . getScope ( ) ) && dep . getSystemPath ( ) == null ) { module . addDependency ( new ModuleIdentifier ( CeylonUtil . ceylonModuleBaseName ( dep . getGroupId ( ) , dep . getArtifactId ( ) ) , dep . getVersion ( ) , dep . isOptional ( ) , false ) ) ; } } else { throw new MojoExecutionException ( "Dependency version for " + dep + " in project " + proj + "could not be determined from the POM. Aborting." ) ; } } StringBuilder builder = new StringBuilder ( CeylonUtil . STRING_BUILDER_SIZE ) ; for ( ModuleIdentifier depMod : module . getDependencies ( ) ) { builder . append ( depMod . getName ( ) ) ; if ( depMod . isOptional ( ) ) { builder . append ( "?" ) ; } builder . append ( "=" ) . append ( depMod . getVersion ( ) ) ; builder . append ( System . lineSeparator ( ) ) ; } return builder . toString ( ) ;
public class DetectFiducialSquareImage {
    /**
     * Adds a new image to the detector. Image must be gray-scale and is converted into a binary
     * image using the specified threshold. All input images are rescaled to be square and of the
     * appropriate size. Thus the original shape of the image doesn't matter. Square shapes are
     * highly recommended since that's what the target looks like.
     *
     * @param inputBinary binary input image pattern; 0 = black, 1 = white
     * @param lengthSide  how long one of the sides of the target is in world units
     * @return the ID of the provided image
     * @throws IllegalArgumentException if the image is null, lengthSide is non-positive,
     *                                  or the image contains values other than 0 and 1
     */
    public int addPattern(GrayU8 inputBinary, double lengthSide) {
        if (inputBinary == null) {
            throw new IllegalArgumentException("Input image is null.");
        } else if (lengthSide <= 0) {
            throw new IllegalArgumentException("Parameter lengthSide must be more than zero");
        } else if (ImageStatistics.max(inputBinary) > 1)
            throw new IllegalArgumentException("A binary image is composed on 0 and 1 pixels. This isn't binary!");
        // see if it needs to be resized
        if (inputBinary.width != squareLength || inputBinary.height != squareLength) {
            // need to create a new image and rescale it to better handle the resizing
            GrayF32 inputGray = new GrayF32(inputBinary.width, inputBinary.height);
            ConvertImage.convert(inputBinary, inputGray);
            // Stretch 0/1 values to 0/255 so the threshold at 127.5 below is meaningful.
            PixelMath.multiply(inputGray, 255, inputGray);
            GrayF32 scaled = new GrayF32(squareLength, squareLength);
            // See if it can use the better algorithm for scaling down the image
            if (inputBinary.width > squareLength && inputBinary.height > squareLength) {
                AverageDownSampleOps.down(inputGray, scaled);
            } else {
                new FDistort(inputGray, scaled).scaleExt().apply();
            }
            // Convert the rescaled gray image back into the working binary buffer.
            GThresholdImageOps.threshold(scaled, binary, 255 / 2.0, false);
        } else {
            binary.setTo(inputBinary);
        }
        // describe it in 4 different orientations
        FiducialDef def = new FiducialDef();
        def.lengthSide = lengthSide;
        // CCW rotation so that the index refers to how many CW rotation it takes to put it into the nominal pose
        binaryToDef(binary, def.desc[0]);
        ImageMiscOps.rotateCCW(binary);
        binaryToDef(binary, def.desc[1]);
        ImageMiscOps.rotateCCW(binary);
        binaryToDef(binary, def.desc[2]);
        ImageMiscOps.rotateCCW(binary);
        binaryToDef(binary, def.desc[3]);
        // The returned ID is the pattern's index in the target list.
        int index = targets.size();
        targets.add(def);
        return index;
    }
}
public class LongStream { /** * Returns a stream consisting of the elements of this stream , truncated * to be no longer than { @ code maxSize } in length . * < p > This is a short - circuiting stateful intermediate operation . * < p > Example : * < pre > * maxSize : 3 * stream : [ 1 , 2 , 3 , 4 , 5] * result : [ 1 , 2 , 3] * maxSize : 10 * stream : [ 1 , 2] * result : [ 1 , 2] * < / pre > * @ param maxSize the number of elements the stream should be limited to * @ return the new stream * @ throws IllegalArgumentException if { @ code maxSize } is negative */ @ NotNull public LongStream limit ( final long maxSize ) { } }
if ( maxSize < 0 ) throw new IllegalArgumentException ( "maxSize cannot be negative" ) ; if ( maxSize == 0 ) return LongStream . empty ( ) ; return new LongStream ( params , new LongLimit ( iterator , maxSize ) ) ;
public class SparkUtils { /** * Write a String to a file ( on HDFS or local ) in UTF - 8 format * @ param path Path to write to * @ param toWrite String to write * @ param sc Spark context */ public static void writeStringToFile ( String path , String toWrite , SparkContext sc ) throws IOException { } }
FileSystem fileSystem = FileSystem . get ( sc . hadoopConfiguration ( ) ) ; try ( BufferedOutputStream bos = new BufferedOutputStream ( fileSystem . create ( new Path ( path ) ) ) ) { bos . write ( toWrite . getBytes ( "UTF-8" ) ) ; }
public class AbstractApplication { /** * Initialize the default scene . */ private void initializeScene ( ) { } }
final Stage currentStage = this . stage ; final KeyCode fullKeyCode = fullScreenKeyCode ( ) ; final KeyCode iconKeyCode = iconifiedKeyCode ( ) ; // Attach the handler only if necessary , these 2 method can be overridden to return null if ( fullKeyCode != null && iconKeyCode != null ) { this . scene . addEventFilter ( KeyEvent . KEY_PRESSED , keyEvent -> { // Manage F11 button to switch full screen if ( fullKeyCode != null && fullKeyCode == keyEvent . getCode ( ) ) { currentStage . setFullScreen ( ! currentStage . isFullScreen ( ) ) ; keyEvent . consume ( ) ; // Manage F10 button to iconify } else if ( iconKeyCode != null && iconKeyCode == keyEvent . getCode ( ) ) { currentStage . setIconified ( ! currentStage . isIconified ( ) ) ; keyEvent . consume ( ) ; } } ) ; } // The call customize method to allow extension by sub class customizeScene ( this . scene ) ; // Add the default Style Sheet if none have been added manageDefaultStyleSheet ( this . scene ) ;
public class Transform1D { /** * Translate . * @ param thePath the path to follow . * @ param direction is the direction to follow on the path if the path contains only one segment . * @ param move where < code > x < / code > is the curviline coordinate and < code > y < / code > is the shift coordinate . */ public void translate ( List < ? extends S > thePath , Direction1D direction , Tuple2D < ? > move ) { } }
assert move != null : AssertMessages . notNullParameter ( 2 ) ; this . path = thePath == null || thePath . isEmpty ( ) ? null : new ArrayList < > ( thePath ) ; this . firstSegmentDirection = detectFirstSegmentDirection ( direction ) ; this . curvilineTranslation += move . getX ( ) ; this . shiftTranslation += move . getY ( ) ; this . isIdentity = null ;
public class UIComponentTag { /** * Return true if the specified string contains an EL expression . * UIComponent properties are often required to be value - binding expressions ; this method allows code to check * whether that is the case or not . */ public static boolean isValueReference ( String value ) { } }
if ( value == null ) { throw new NullPointerException ( "value" ) ; } int start = value . indexOf ( "#{" ) ; if ( start < 0 ) { return false ; } int end = value . lastIndexOf ( '}' ) ; return ( end >= 0 && start < end ) ;
public class EXIInflaterInputStream {
    /**
     * Reads uncompressed data into an array of bytes. This method will block until some input
     * can be decompressed.
     *
     * @param b   the buffer into which the data is read
     * @param off the start offset of the data
     * @param len the maximum number of bytes read
     * @return the actual number of bytes read, or -1 if the end of the compressed input is
     *         reached or a preset dictionary is needed
     * @exception ZipException if a ZIP format error has occurred
     * @exception IOException  if an I/O error has occurred
     */
    public int read(byte[] b, int off, int len) throws IOException {
        ensureOpen();
        // Single combined bounds check: any negative term means an invalid (off, len) pair.
        if ((off | len | (off + len) | (b.length - (off + len))) < 0) {
            throw new IndexOutOfBoundsException();
        } else if (len == 0) {
            return 0;
        }
        try {
            int n;
            // Inflate until at least one byte is produced, feeding more input as needed.
            while ((n = inf.inflate(b, off, len)) == 0) {
                if (inf.finished() || inf.needsDictionary()) {
                    // End of compressed data, or a preset dictionary is required.
                    reachEOF = true;
                    return -1;
                }
                if (inf.needsInput()) {
                    fill();
                }
            }
            return n;
        } catch (DataFormatException e) {
            // Surface malformed-deflate data as the documented ZipException.
            String s = e.getMessage();
            throw new ZipException(s != null ? s : "Invalid ZLIB data format");
        }
    }
}
public class UpdateOperation {
    /**
     * ----- interface InvertibleModificationOperation -----
     * Applies this update by copying the captured new node's state onto the existing node.
     *
     * @param app        the application context (not used by this operation)
     * @param sourcePage the source page (not used by this operation)
     * @param newPage    the new page (not used by this operation)
     * @throws FrameworkException if the underlying node update fails
     */
    @Override
    public void apply(final App app, final Page sourcePage, final Page newPage) throws FrameworkException {
        // existingNode / newNode are fields captured when this operation was created -
        // presumably by the diff that produced the operation; TODO confirm.
        existingNode.updateFromNode(newNode);
    }
}
public class ModelServiceClientCache { /** * Get the list of ModelServiceInstances . * @ return the list of ModelServiceInstances */ public List < ModelServiceInstance > getAllModelServiceInstance ( ) { } }
return getData ( ) == null ? Collections . < ModelServiceInstance > emptyList ( ) : getData ( ) . getServiceInstances ( ) ;
public class ImageIOGreyScale { /** * Returns a < code > BufferedImage < / code > as the result of decoding a supplied < code > URL < / code > with an * < code > ImageReader < / code > chosen automatically from among those currently registered . An * < code > InputStream < / code > is obtained from the < code > URL < / code > , which is wrapped in an * < code > ImageInputStream < / code > . If no registered < code > ImageReader < / code > claims to be able to read the * resulting stream , < code > null < / code > is returned . * The current cache settings from < code > getUseCache < / code > and < code > getCacheDirectory < / code > will be used * to control caching in the < code > ImageInputStream < / code > that is created . * This method does not attempt to locate < code > ImageReader < / code > s that can read directly from a * < code > URL < / code > ; that may be accomplished using < code > IIORegistry < / code > and * < code > ImageReaderSpi < / code > . * @ param input * a < code > URL < / code > to read from . * @ return a < code > BufferedImage < / code > containing the decoded contents of the input , or < code > null < / code > * @ exception IllegalArgumentException * if < code > input < / code > is < code > null < / code > . * @ exception IOException * if an error occurs during reading . */ public static BufferedImage read ( URL input ) throws IOException { } }
if ( input == null ) { throw new IllegalArgumentException ( "input == null!" ) ; } InputStream istream = null ; try { istream = input . openStream ( ) ; } catch ( IOException e ) { throw new IIOException ( "Can't get input stream from URL!" , e ) ; } ImageInputStream stream = createImageInputStream ( istream ) ; BufferedImage bi ; try { bi = read ( stream ) ; if ( bi == null ) { stream . close ( ) ; } } finally { istream . close ( ) ; } return bi ;
public class PolymerBehaviorExtractor {
    /**
     * Resolve an identifier, which is presumed to refer to a Polymer Behavior declaration,
     * using the global namespace. Recurses to resolve assignment chains of any length.
     *
     * @param nameNode the NAME, GETPROP, or CAST node containing the identifier
     * @return the behavior declaration node, or null if it couldn't be resolved
     */
    @Nullable
    private ResolveBehaviorNameResult resolveBehaviorName(Node nameNode) {
        String name = getQualifiedNameThroughCast(nameNode);
        if (name == null) {
            return null;
        }
        Name globalName = globalNames.getSlot(name);
        if (globalName == null) {
            return null;
        }
        boolean isGlobalDeclaration = true;
        // Use any set as a backup declaration, even if it's local.
        Ref declarationRef = globalName.getDeclaration();
        if (declarationRef == null) {
            for (Ref ref : globalName.getRefs()) {
                if (ref.isSet()) {
                    isGlobalDeclaration = false;
                    declarationRef = ref;
                    break;
                }
            }
        }
        if (declarationRef == null) {
            return null;
        }
        Node declarationNode = declarationRef.getNode();
        if (declarationNode == null) {
            return null;
        }
        Node rValue = NodeUtil.getRValueOfLValue(declarationNode);
        if (rValue == null) {
            return null;
        }
        if (rValue.isQualifiedName()) {
            // Another identifier; recurse.
            // NOTE(review): recursion is unbounded; a cyclic assignment chain would overflow
            // the stack - presumably prevented upstream; confirm.
            return resolveBehaviorName(rValue);
        }
        JSDocInfo behaviorInfo = NodeUtil.getBestJSDocInfo(declarationNode);
        if (behaviorInfo == null || !behaviorInfo.isPolymerBehavior()) {
            // Missing @polymerBehavior is reported as an error, but resolution still proceeds.
            compiler.report(JSError.make(declarationNode, PolymerPassErrors.POLYMER_UNANNOTATED_BEHAVIOR));
        }
        return new ResolveBehaviorNameResult(rValue, isGlobalDeclaration);
    }
}
public class MemcachedSessionService { /** * Specifies the number of threads that are used if { @ link # setSessionBackupAsync ( boolean ) } * is set to < code > true < / code > . * @ param backupThreadCount the number of threads to use for session backup . */ public void setBackupThreadCount ( final int backupThreadCount ) { } }
final int oldBackupThreadCount = _backupThreadCount ; _backupThreadCount = backupThreadCount ; if ( _manager . isInitialized ( ) ) { _log . info ( "Changed backupThreadCount from " + oldBackupThreadCount + " to " + _backupThreadCount + "." + " Reloading configuration..." ) ; reloadMemcachedConfig ( _memcachedNodes , _failoverNodes ) ; _log . info ( "Finished reloading configuration." ) ; }
public class DataTransformProcess {
    /**
     * Learns the transforms for the given data set. The data set is then
     * altered after each transform is learned so the next transform can be
     * learned as well. <br>
     * The results are equivalent to calling
     * {@link #learnApplyTransforms(jsat.DataSet)} on the data set and then
     * calling {@link DataSet#applyTransform(jsat.datatransform.DataTransform)}
     * with this DataTransformProces.
     *
     * @param dataSet the data set to learn a series of transforms from and
     *                alter into the final transformed form
     */
    public void learnApplyTransforms(DataSet dataSet) {
        learnedTransforms.clear();
        // used to keep track if we can start using in-place transforms
        boolean vecSafe = false;
        boolean catSafe = false;
        int iter = 0;
        // Snapshot the original numeric/categorical storage references so we can
        // later detect whether an earlier transform already replaced them. Only
        // then is mutating in place safe (we would no longer be touching the
        // caller-owned arrays).
        Vec[] origVecs = new Vec[dataSet.size()];
        int[][] origCats = new int[dataSet.size()][];
        for (int i = 0; i < origVecs.length; i++) {
            DataPoint dp = dataSet.getDataPoint(i);
            origVecs[i] = dp.getNumericalValues();
            origCats[i] = dp.getCategoricalValues();
        }
        for (DataTransform dtf : transformSource) {
            DataTransform transform = dtf.clone();
            transform.fit(dataSet);
            if (transform instanceof InPlaceTransform) {
                InPlaceTransform ipt = (InPlaceTransform) transform;
                // check if it is safe to apply mutations
                // NOTE(review): parses as (iter > 0 && !vecSafe) || (mutatesNominal() && !catSafe)
                // due to && binding tighter than || — confirm the grouping is intentional.
                if (iter > 0 && !vecSafe || (ipt.mutatesNominal() && !catSafe)) {
                    boolean vecClear = true, catClear = true;
                    // NOTE(review): the flags are overwritten (not AND-ed) each iteration,
                    // so the loop runs until both are false or all points were checked —
                    // the final values reflect the last point examined; verify intended.
                    for (int i = 0; i < origVecs.length && (vecClear || catClear); i++) {
                        DataPoint dp = dataSet.getDataPoint(i);
                        vecClear = origVecs[i] != dp.getNumericalValues();
                        catClear = origCats[i] != dp.getCategoricalValues();
                    }
                    vecSafe = vecClear;
                    catSafe = catClear;
                }
                // Now we know if we can apply the mutations or not
                if (vecSafe && (!ipt.mutatesNominal() || catSafe))
                    dataSet.applyTransformMutate(ipt, true);
                else // go back to normal
                    dataSet.applyTransform(transform);
            } else
                dataSet.applyTransform(transform);
            learnedTransforms.add(transform);
            iter++;
        }
        consolidateTransforms();
    }
}
public class SchemaBuilder { /** * Shortcut for { @ link # dropFunction ( CqlIdentifier , CqlIdentifier ) * dropFunction ( CqlIdentifier . fromCql ( keyspace ) , CqlIdentifier . fromCql ( functionName ) } . */ @ NonNull public static Drop dropFunction ( @ Nullable String keyspace , @ NonNull String functionName ) { } }
return new DefaultDrop ( keyspace == null ? null : CqlIdentifier . fromCql ( keyspace ) , CqlIdentifier . fromCql ( functionName ) , "FUNCTION" ) ;
public class UTFDataInputStream { /** * Read a fragmented UTF - 8 String * @ return a String written with * { @ link UTFDataOutputStream # writeFragmentedUTF ( String ) } * @ throws IOException * @ see UTFDataOutputStream # writeFragmentedUTF ( String ) */ public String readFragmentedUTF ( ) throws IOException { } }
// String result = super . readUTF ( ) ; StringBuffer result = new StringBuffer ( super . readUTF ( ) ) ; boolean fragmentFlag = super . readBoolean ( ) ; while ( fragmentFlag != END_REACHED ) { // result = result . concat ( super . readUTF ( ) ) ; result . append ( super . readUTF ( ) ) ; fragmentFlag = super . readBoolean ( ) ; } return result . toString ( ) ;
public class ConvertUtils {
    /**
     * Converts a byte array to a string of hexadecimal values: two uppercase
     * hex digits per byte, each byte followed by a single space (including a
     * trailing space after the last byte).
     *
     * @param in the byte array to convert.
     * @return the hexadecimal string, or {@code null} if {@code in} is
     *         {@code null} or empty.
     */
    public static String toHexString(byte in[]) {
        if (in == null || in.length <= 0) {
            return null;
        }
        // Fix: the previous version contained a corrupted duplicate declaration
        // of the nibble variable; rewritten with a hex lookup table.
        final char[] hexDigits = "0123456789ABCDEF".toCharArray();
        // 3 chars per byte: high nibble, low nibble, separator space.
        StringBuilder out = new StringBuilder(in.length * 3);
        for (byte b : in) {
            out.append(hexDigits[(b >>> 4) & 0x0F]); // high nibble (mask after shift handles sign extension)
            out.append(hexDigits[b & 0x0F]);         // low nibble
            out.append(' ');
        }
        return out.toString();
    }
}
public class TCConversationMessageImpl { /** * ( non - Javadoc ) * @ seeorg . restcomm . protocols . ss7 . tcap . asn . comp . TCBeginMessage # setOriginatingTransactionId ( java . lang . String ) */ public void setOriginatingTransactionId ( byte [ ] t ) { } }
if ( t != null && t . length != 4 ) throw new IllegalArgumentException ( "TransactionId leng must be 4 bytes, found: " + t . length ) ; this . originatingTransactionId = t ;
public class CriteriaVisitor { /** * { @ inheritDoc } */ @ Override public Object visit ( Id filter , Object userData ) { } }
String idName ; try { idName = featureModel . getEntityMetadata ( ) . getIdentifierPropertyName ( ) ; } catch ( LayerException e ) { log . warn ( "Cannot read idName, defaulting to 'id'" , e ) ; idName = HIBERNATE_ID ; } Collection < ? > c = ( Collection < ? > ) castLiteral ( filter . getIdentifiers ( ) , idName ) ; return Restrictions . in ( idName , c ) ;
public class DateTerm { /** * 経過時間を計算するときの基準日を取得する 。 * ・ 1900/2/28までは 、 - 1日ずれる 。 Excelは1900年は1月0日 ( = 1899年12月31日 ) から始まるため 、 1日多い 。 * ・ 1900/3/1以降は 、 - 2日ずれず 。 Excel は 、 閏日ではない1900年2月29日 ( = 1900年3月1日 ) が存在するため1日多い 。 * @ param date * @ param isStartDate1904 * @ return */ private static long getElapsedZeroTime ( final Date date , final boolean isStartDate1904 ) { } }
if ( isStartDate1904 ) { return ExcelDateUtils . getExcelZeroDateTime ( isStartDate1904 ) ; } else { if ( ExcelDateUtils . MILLISECONDS_19000301 <= date . getTime ( ) ) { // 1900-03-01以降 return ExcelDateUtils . MILLISECONDS_19000101 - TimeUnit . DAYS . toMillis ( 2 ) ; } else { return ExcelDateUtils . MILLISECONDS_19000101 - TimeUnit . DAYS . toMillis ( 1 ) ; } }
public class ScalarStatistics { /** * Add . * @ param v the v */ public final synchronized void add ( final double v ) { } }
sum0 += 1 ; sum1 += v ; sum2 += v * v ; min = Math . min ( min , v ) ; max = Math . max ( max , v ) ; if ( Math . abs ( v ) < com . simiacryptus . util . data . ScalarStatistics . zeroTol ) { zeros ++ ; } else { if ( v < 0 ) { negatives ++ ; } else { positives ++ ; } sumLog += Math . log10 ( Math . abs ( v ) ) ; }
public class BugInstance { /** * Add a field annotation for the field which is being visited by given * visitor . * @ param visitor * the visitor * @ return this object */ @ Nonnull public BugInstance addVisitedField ( PreorderVisitor visitor ) { } }
FieldAnnotation f = FieldAnnotation . fromVisitedField ( visitor ) ; addField ( f ) ; return this ;
public class DefaultConnectionFactory { /** * Returns the stored { @ link ExecutorService } for listeners . * By default , a { @ link ThreadPoolExecutor } is used that acts exactly * like a default cachedThreadPool , but defines the upper limit of * Threads to be created as the number of available processors to * prevent resource exhaustion . * @ return the stored { @ link ExecutorService } . */ @ Override public ExecutorService getListenerExecutorService ( ) { } }
if ( executorService == null ) { ThreadFactory threadFactory = new ThreadFactory ( ) { @ Override public Thread newThread ( Runnable r ) { return new Thread ( r , "FutureNotifyListener" ) ; } } ; executorService = new ThreadPoolExecutor ( 0 , Runtime . getRuntime ( ) . availableProcessors ( ) , 60L , TimeUnit . SECONDS , new LinkedBlockingQueue < Runnable > ( ) , threadFactory ) ; } return executorService ;
public class ModuleIdentifier { /** * Returns an identifier for a Closure namespace . * @ param name The Closure namespace . It may be in one of the formats ` name . space ` , * ` goog : name . space ` or ` goog : moduleName : name . space ` , where the latter specifies that the * module and namespace names are different . */ public static ModuleIdentifier forClosure ( String name ) { } }
String normalizedName = name ; if ( normalizedName . startsWith ( "goog:" ) ) { normalizedName = normalizedName . substring ( "goog:" . length ( ) ) ; } String namespace = normalizedName ; String moduleName = normalizedName ; int splitPoint = normalizedName . indexOf ( ':' ) ; if ( splitPoint != - 1 ) { moduleName = normalizedName . substring ( 0 , splitPoint ) ; namespace = normalizedName . substring ( Math . min ( splitPoint + 1 , normalizedName . length ( ) - 1 ) ) ; } return new AutoValue_ModuleIdentifier ( normalizedName , namespace , moduleName ) ;
public class PackingPlan { /** * Get the formatted String describing component RAM distribution from PackingPlan , * used by executor * @ return String describing component RAM distribution */ public String getComponentRamDistribution ( ) { } }
// Generate a map with the minimal RAM size for each component Map < String , ByteAmount > ramMap = new HashMap < > ( ) ; for ( ContainerPlan containerPlan : this . getContainers ( ) ) { for ( InstancePlan instancePlan : containerPlan . getInstances ( ) ) { ByteAmount newRam = instancePlan . getResource ( ) . getRam ( ) ; ByteAmount currentRam = ramMap . get ( instancePlan . getComponentName ( ) ) ; if ( currentRam == null || currentRam . asBytes ( ) > newRam . asBytes ( ) ) { ramMap . put ( instancePlan . getComponentName ( ) , newRam ) ; } } } // Convert it into a formatted String StringBuilder ramMapBuilder = new StringBuilder ( ) ; for ( String component : ramMap . keySet ( ) ) { ramMapBuilder . append ( String . format ( "%s:%d," , component , ramMap . get ( component ) . asBytes ( ) ) ) ; } // Remove the duplicated " , " at the end ramMapBuilder . deleteCharAt ( ramMapBuilder . length ( ) - 1 ) ; return ramMapBuilder . toString ( ) ;
public class EndpointActivationService {
    /**
     * Registers RRS XA resource information with the transaction manager. This is used for inbound.
     *
     * @param actSpecId The id of the activation spec
     * @return the recovery ID (or -1 if an error occurs)
     */
    public final int registerRRSXAResourceInfo(String actSpecId) {
        // NOTE(review): actSpecId is never used below, and the javadoc's "-1 on
        // error" contract is not implemented (failures throw instead) — confirm
        // both are intentional.
        RRSXAResourceFactory xaFactory = rrsXAResFactorySvcRef.getService();
        // Make sure that the bundle is active.
        if (xaFactory == null) {
            String formattedMessage = Utils.getMessage("J2CA8807.native.rrs.not.available", new Object[0]);
            throw new IllegalStateException(formattedMessage);
        }
        // Create a filter for the transaction manager to be able to find the native
        // transaction factory in the service registry during recovery.
        String filter = FilterUtils.createPropertyFilter("native.xa.factory", (xaFactory.getClass().getCanonicalName()));
        // NOTE: At this point in time, the transaction manager does not support logging
        // XAResourceInfo type objects; however, they do allow generic serializable objects
        // such as a String to be logged and retrieved during recovery. So, a String is what
        // is currently passed as resource info to the registerResourceInfo call.
        // NOTE(review): getXAResourceInfo is passed null rather than actSpecId — verify.
        Serializable xaResInfo = xaFactory.getXAResourceInfo(null);
        int recoveryToken = transactionManager.registerResourceInfo(filter, xaResInfo);
        return recoveryToken;
    }
}
public class TimeZoneGenericNames { /** * Private method returning LocaleDisplayNames instance for the locale of this * instance . Because LocaleDisplayNames is only used for generic * location formant and partial location format , the LocaleDisplayNames * is instantiated lazily . * @ return the instance of LocaleDisplayNames for the locale of this object . */ private synchronized LocaleDisplayNames getLocaleDisplayNames ( ) { } }
LocaleDisplayNames locNames = null ; if ( _localeDisplayNamesRef != null ) { locNames = _localeDisplayNamesRef . get ( ) ; } if ( locNames == null ) { locNames = LocaleDisplayNames . getInstance ( _locale ) ; _localeDisplayNamesRef = new WeakReference < LocaleDisplayNames > ( locNames ) ; } return locNames ;
public class ResourceAdapterParser {
    /**
     * Store a -ra.xml file.
     *
     * @param metadata The resource adapter definitions
     * @param writer The writer
     * @exception Exception Thrown if an error occurs
     */
    public void store(Activations metadata, XMLStreamWriter writer) throws Exception {
        // Silently no-op when either argument is null.
        if (metadata != null && writer != null) {
            writer.writeStartElement(XML.ELEMENT_RESOURCE_ADAPTERS);
            // One <resource-adapter> element per activation.
            for (Activation a : metadata.getActivations()) {
                writer.writeStartElement(XML.ELEMENT_RESOURCE_ADAPTER);
                // Optional id attribute; getValue presumably resolves overrides/expressions
                // for the raw value — confirm against Activation.getValue semantics.
                if (a.getId() != null)
                    writer.writeAttribute(XML.ATTRIBUTE_ID, a.getValue(XML.ATTRIBUTE_ID, a.getId()));
                // <archive> child element.
                writer.writeStartElement(XML.ELEMENT_ARCHIVE);
                writer.writeCharacters(a.getValue(XML.ELEMENT_ARCHIVE, a.getArchive()));
                writer.writeEndElement();
                // Elements shared with other parsers are written by the common helper.
                storeCommon(a, writer);
                writer.writeEndElement();
            }
            writer.writeEndElement();
        }
    }
}
public class DescriptorExtensionList { /** * Finds the descriptor that describes the given type . * That is , if this method returns d , { @ code d . clazz = = type } */ public D find ( Class < ? extends T > type ) { } }
for ( D d : this ) if ( d . clazz == type ) return d ; return null ;
public class ExceptionUtil { /** * d632115 */ public static NoSuchEJBException NoSuchEJBException ( String message , Throwable cause ) { } }
NoSuchEJBException nsejb ; if ( cause == null ) { nsejb = new NoSuchEJBException ( message ) ; } else { if ( cause instanceof Exception ) { nsejb = new NoSuchEJBException ( message , ( Exception ) cause ) ; } else { Exception wrappedCause = new Exception ( "See nested Throwable" , cause ) ; nsejb = new NoSuchEJBException ( message , wrappedCause ) ; cause = wrappedCause ; } // And finally . . . insure the cause is set on Throwable . // Geronimo EJBException . getCause returns getCausedbyException , so // we do not expect this code to be used . F53643 if ( nsejb . getCause ( ) == null ) { nsejb . initCause ( cause ) ; } } return nsejb ;
public class HtmlMessages {
    /**
     * <p>Set the value of the <code>title</code> property.</p>
     *
     * @param title the new title; stored in the component's state helper and
     *              also routed through {@code handleAttribute} — presumably the
     *              JSF pass-through/attribute bookkeeping hook; confirm against
     *              the component base class.
     */
    public void setTitle(java.lang.String title) {
        getStateHelper().put(PropertyKeys.title, title);
        handleAttribute("title", title);
    }
}
public class AbstractGrid { /** * Creates a new container instance by calling the required * template - methods . * A new container is created on initialization as well as when container * content fundamentally changes ( e . g . if container content depends on a * selection as common in master - details relations ) */ protected void addNewContainerDS ( ) { } }
final T container = createContainer ( ) ; Indexed indexedContainer = container ; if ( hasGeneratedPropertySupport ( ) ) { indexedContainer = getGeneratedPropertySupport ( ) . decorate ( container ) ; setContainerDataSource ( indexedContainer ) ; getGeneratedPropertySupport ( ) . addGeneratedContainerProperties ( ) ; } else { setContainerDataSource ( indexedContainer ) ; } addContainerProperties ( ) ; setColumnProperties ( ) ; setColumnHeaderNames ( ) ; setColumnsHidable ( ) ; addColumnRenderers ( ) ; setColumnExpandRatio ( ) ; setHiddenColumns ( ) ; final CellDescriptionGenerator cellDescriptionGenerator = getDescriptionGenerator ( ) ; if ( getDescriptionGenerator ( ) != null ) { setCellDescriptionGenerator ( cellDescriptionGenerator ) ; } if ( indexedContainer != null && indexedContainer . size ( ) == 0 ) { setData ( i18n . getMessage ( UIMessageIdProvider . MESSAGE_NO_DATA ) ) ; }
public class CreateIconChoicePopup { /** * / * popup display */ public void show ( int clientX , int clientY ) { } }
if ( onlyOneChoice ( ) ) { controller . createIconInContainer ( selectedIconUrl , selectedMarkerShape ) ; } else { dialog . setPopupPosition ( clientX , clientY ) ; dialog . show ( ) ; }
public class PolicyMappingsExtension { /** * Return an enumeration of names of attributes existing within this * attribute . */ public Enumeration < String > getElements ( ) { } }
AttributeNameEnumeration elements = new AttributeNameEnumeration ( ) ; elements . addElement ( MAP ) ; return elements . elements ( ) ;