signature
stringlengths 43
39.1k
| implementation
stringlengths 0
450k
|
|---|---|
public class ArrowConverter { /** * Create a field given the input { @ link ColumnType }
* and name
* @ param name the name of the field
* @ param columnType the column type to add
* @ return */
public static Field getFieldForColumn ( String name , ColumnType columnType ) { } }
|
switch ( columnType ) { case Long : return field ( name , new ArrowType . Int ( 64 , false ) ) ; case Integer : return field ( name , new ArrowType . Int ( 32 , false ) ) ; case Double : return field ( name , new ArrowType . FloatingPoint ( FloatingPointPrecision . DOUBLE ) ) ; case Float : return field ( name , new ArrowType . FloatingPoint ( FloatingPointPrecision . SINGLE ) ) ; case Boolean : return field ( name , new ArrowType . Bool ( ) ) ; case Categorical : return field ( name , new ArrowType . Utf8 ( ) ) ; case Time : return field ( name , new ArrowType . Date ( DateUnit . MILLISECOND ) ) ; case Bytes : return field ( name , new ArrowType . Binary ( ) ) ; case NDArray : return field ( name , new ArrowType . Binary ( ) ) ; case String : return field ( name , new ArrowType . Utf8 ( ) ) ; default : throw new IllegalArgumentException ( "Column type invalid " + columnType ) ; }
|
public class StencilEngine { /** * Loads a template from the given path
* @ param path Path to load template from
* @ return Loaded template
* @ throws IOException
* @ throws ParseException */
public Template load ( String path ) throws IOException , ParseException { } }
|
try ( TemplateSource source = sourceLoader . find ( path ) ) { if ( source != null ) { return load ( path , source ) ; } } return null ;
|
public class FacebookRestClient { /** * Retrieves whether the logged - in user has granted the specified permission
* to this application .
* @ param permission an extended permission ( e . g . FacebookExtendedPerm . MARKETPLACE ,
* " photo _ upload " )
* @ return boolean indicating whether the user has the permission
* @ see FacebookExtendedPerm
* @ see < a href = " http : / / wiki . developers . facebook . com / index . php / Users . hasAppPermission " >
* Developers Wiki : Users . hasAppPermission < / a > */
public boolean users_hasAppPermission ( CharSequence permission ) throws FacebookException , IOException { } }
|
return extractBoolean ( this . callMethod ( FacebookMethod . USERS_HAS_APP_PERMISSION , new Pair < String , CharSequence > ( "ext_perm" , permission ) ) ) ;
|
public class AbstractFontManager { /** * Move the font size pointer up . If this would move the pointer past
* the maximum font size , track this increase with a virtual size . */
public void increaseFontSize ( ) { } }
|
// move INTO range if we have just moved OUT of lower virtual
if ( inRange ( ) || ( atMin ( ) && atLowerBoundary ( ) ) ) { currentFontIndex ++ ; } else if ( atMax ( ) ) { upperVirtualCount ++ ; } else if ( atMin ( ) && inLower ( ) ) { lowerVirtualCount ++ ; }
|
public class IbanUtil { /** * format iban to four character blocks .
* @ param pstring string to format
* @ return formated string */
public static String ibanFormat ( final String pstring ) { } }
|
if ( pstring == null ) { return null ; } final ValueWithPos < String > formatedValue = ibanFormatWithPos ( new ValueWithPos < > ( pstring , - 1 ) ) ; return formatedValue . getValue ( ) ;
|
public class PaymentSession { /** * Returns a future that will be notified with a PaymentSession object after it is fetched using the provided url .
* url is an address where the { @ link Protos . PaymentRequest } object may be fetched .
* If the payment request object specifies a PKI method , then the system trust store will
* be used to verify the signature provided by the payment request . An exception is thrown by the future if the
* signature cannot be verified .
* If trustStoreLoader is null , the system default trust store is used . */
public static ListenableFuture < PaymentSession > createFromUrl ( final String url , final boolean verifyPki , @ Nullable final TrustStoreLoader trustStoreLoader ) throws PaymentProtocolException { } }
|
if ( url == null ) throw new PaymentProtocolException . InvalidPaymentRequestURL ( "null paymentRequestUrl" ) ; try { return fetchPaymentRequest ( new URI ( url ) , verifyPki , trustStoreLoader ) ; } catch ( URISyntaxException e ) { throw new PaymentProtocolException . InvalidPaymentRequestURL ( e ) ; }
|
public class RuleOrganizer { /** * Finds out cluster names .
* @ param cluster the cluster .
* @ return pattern names . */
private ArrayList < String > getNameInCluster ( Cluster cluster ) { } }
|
ArrayList < String > itemsInCluster = new ArrayList < String > ( ) ; String nodeName ; if ( cluster . isLeaf ( ) ) { nodeName = cluster . getName ( ) ; itemsInCluster . add ( nodeName ) ; } else { // String [ ] clusterName = cluster . getName ( ) . split ( " # " ) ;
// nodeName = clusterName [ 1 ] ;
} for ( Cluster child : cluster . getChildren ( ) ) { ArrayList < String > childrenNames = getNameInCluster ( child ) ; itemsInCluster . addAll ( childrenNames ) ; } return itemsInCluster ;
|
public class ListPopupWindow { /** * Show the popup list . If the list is already showing , this method
* will recalculate the popup ' s size and position . */
public void show ( ) { } }
|
int height = buildDropDown ( ) ; int widthSpec = 0 ; int heightSpec = 0 ; boolean noInputMethod = isInputMethodNotNeeded ( ) ; if ( mPopup . isShowing ( ) ) { if ( mDropDownWidth == ViewGroup . LayoutParams . MATCH_PARENT ) { // The call to PopupWindow ' s update method below can accept - 1 for any
// value you do not want to update .
widthSpec = - 1 ; } else if ( mDropDownWidth == ViewGroup . LayoutParams . WRAP_CONTENT ) { widthSpec = getAnchorView ( ) . getWidth ( ) ; } else { widthSpec = mDropDownWidth ; } if ( mDropDownHeight == ViewGroup . LayoutParams . MATCH_PARENT ) { // The call to PopupWindow ' s update method below can accept - 1 for any
// value you do not want to update .
heightSpec = noInputMethod ? height : ViewGroup . LayoutParams . MATCH_PARENT ; if ( noInputMethod ) { mPopup . setWindowLayoutMode ( mDropDownWidth == ViewGroup . LayoutParams . MATCH_PARENT ? ViewGroup . LayoutParams . MATCH_PARENT : 0 , 0 ) ; } else { mPopup . setWindowLayoutMode ( mDropDownWidth == ViewGroup . LayoutParams . MATCH_PARENT ? ViewGroup . LayoutParams . MATCH_PARENT : 0 , ViewGroup . LayoutParams . MATCH_PARENT ) ; } } else if ( mDropDownHeight == ViewGroup . LayoutParams . WRAP_CONTENT ) { heightSpec = height ; } else { heightSpec = mDropDownHeight ; } mPopup . setOutsideTouchable ( ! mForceIgnoreOutsideTouch && ! mDropDownAlwaysVisible ) ; mPopup . update ( getAnchorView ( ) , mDropDownHorizontalOffset , mDropDownVerticalOffset , widthSpec , heightSpec ) ; } else { if ( mDropDownWidth == ViewGroup . LayoutParams . MATCH_PARENT ) { widthSpec = ViewGroup . LayoutParams . MATCH_PARENT ; } else { if ( mDropDownWidth == ViewGroup . LayoutParams . WRAP_CONTENT ) { mPopup . setWidth ( getAnchorView ( ) . getWidth ( ) ) ; } else { mPopup . setWidth ( mDropDownWidth ) ; } } if ( mDropDownHeight == ViewGroup . LayoutParams . MATCH_PARENT ) { heightSpec = ViewGroup . LayoutParams . MATCH_PARENT ; } else { if ( mDropDownHeight == ViewGroup . LayoutParams . WRAP_CONTENT ) { mPopup . setHeight ( height ) ; } else { mPopup . setHeight ( mDropDownHeight ) ; } } mPopup . setWindowLayoutMode ( widthSpec , heightSpec ) ; setPopupClipToScreenEnabled ( true ) ; // use outside touchable to dismiss drop down when touching outside of it , so
// only set this if the dropdown is not always visible
mPopup . setOutsideTouchable ( ! mForceIgnoreOutsideTouch && ! mDropDownAlwaysVisible ) ; mPopup . setTouchInterceptor ( mTouchInterceptor ) ; PopupWindowCompat . showAsDropDown ( mPopup , getAnchorView ( ) , mDropDownHorizontalOffset , mDropDownVerticalOffset , mDropDownGravity ) ; mDropDownList . setSelection ( ListView . INVALID_POSITION ) ; if ( ! mModal || mDropDownList . isInTouchMode ( ) ) { clearListSelection ( ) ; } if ( ! mModal ) { mHandler . post ( mHideSelector ) ; } // show item animation
if ( mItemAnimationId != 0 ) mPopup . getContentView ( ) . getViewTreeObserver ( ) . addOnPreDrawListener ( new ViewTreeObserver . OnPreDrawListener ( ) { @ Override public boolean onPreDraw ( ) { mPopup . getContentView ( ) . getViewTreeObserver ( ) . removeOnPreDrawListener ( this ) ; for ( int i = 0 , count = mDropDownList . getChildCount ( ) ; i < count ; i ++ ) { View v = mDropDownList . getChildAt ( i ) ; Animation anim = AnimationUtils . loadAnimation ( mContext , mItemAnimationId ) ; anim . setStartOffset ( mItemAnimationOffset * i ) ; v . startAnimation ( anim ) ; } return false ; } } ) ; }
|
public class AWSElasticsearchClient { /** * Creates a new Elasticsearch domain . For more information , see < a href =
* " http : / / docs . aws . amazon . com / elasticsearch - service / latest / developerguide / es - createupdatedomains . html # es - createdomains "
* target = " _ blank " > Creating Elasticsearch Domains < / a > in the < i > Amazon Elasticsearch Service Developer Guide < / i > .
* @ param createElasticsearchDomainRequest
* @ return Result of the CreateElasticsearchDomain operation returned by the service .
* @ throws BaseException
* An error occurred while processing the request .
* @ throws DisabledOperationException
* An error occured because the client wanted to access a not supported operation . Gives http status code of
* 409.
* @ throws InternalException
* The request processing has failed because of an unknown error , exception or failure ( the failure is
* internal to the service ) . Gives http status code of 500.
* @ throws InvalidTypeException
* An exception for trying to create or access sub - resource that is either invalid or not supported . Gives
* http status code of 409.
* @ throws LimitExceededException
* An exception for trying to create more than allowed resources or sub - resources . Gives http status code of
* 409.
* @ throws ResourceAlreadyExistsException
* An exception for creating a resource that already exists . Gives http status code of 400.
* @ throws ValidationException
* An exception for missing / invalid input fields . Gives http status code of 400.
* @ sample AWSElasticsearch . CreateElasticsearchDomain */
@ Override public CreateElasticsearchDomainResult createElasticsearchDomain ( CreateElasticsearchDomainRequest request ) { } }
|
request = beforeClientExecution ( request ) ; return executeCreateElasticsearchDomain ( request ) ;
|
public class PaginatedDbService { /** * Get the { @ link DTO } for the given ID .
* @ param id the ID of the object
* @ return an Optional containing the found object or an empty Optional if no object can be found for the given ID */
public Optional < DTO > get ( String id ) { } }
|
return Optional . ofNullable ( db . findOneById ( new ObjectId ( id ) ) ) ;
|
public class OperationManager { /** * 根据persistentId查询任务状态
* 返回结果的 class */
public < T > T prefop ( String persistentId , Class < T > retClass ) throws QiniuException { } }
|
StringMap params = new StringMap ( ) . put ( "id" , persistentId ) ; byte [ ] data = StringUtils . utf8Bytes ( params . formString ( ) ) ; String url = String . format ( "%s/status/get/prefop" , configuration . apiHost ( ) ) ; Response response = this . client . post ( url , data , null , Client . FormMime ) ; if ( ! response . isOK ( ) ) { throw new QiniuException ( response ) ; } T object = response . jsonToObject ( retClass ) ; response . close ( ) ; return object ;
|
public class StoredChannel { /** * Sets the opaque ID for the subscribed resource that is stable across API versions or
* { @ code null } for none . */
public StoredChannel setTopicId ( String topicId ) { } }
|
lock . lock ( ) ; try { this . topicId = topicId ; } finally { lock . unlock ( ) ; } return this ;
|
public class Monetary { /** * Access a new instance based on the { @ link Locale } . Currencies are
* available as provided by { @ link CurrencyProviderSpi } instances registered
* with the { @ link javax . money . spi . Bootstrap } .
* @ param locale the target { @ link Locale } , typically representing an ISO
* country , not { @ code null } .
* @ param providers the ( optional ) specification of providers to consider .
* @ return the corresponding { @ link CurrencyUnit } instance .
* @ throws UnknownCurrencyException if no such currency exists . */
public static Set < CurrencyUnit > getCurrencies ( Locale locale , String ... providers ) { } }
|
return Optional . ofNullable ( MONETARY_CURRENCIES_SINGLETON_SPI ( ) ) . orElseThrow ( ( ) -> new MonetaryException ( "No MonetaryCurrenciesSingletonSpi loaded, check your system setup." ) ) . getCurrencies ( locale , providers ) ;
|
public class PersistenceApi { /** * Bulk Save Relationships
* This is used to batch save an entity & # 39 ; s relationship in order to offer more throughput than persisting .
* @ param request Save Relationships Request ( required )
* @ return ApiResponse & lt ; Void & gt ;
* @ throws ApiException If fail to call the API , e . g . server error or cannot deserialize the response body */
public ApiResponse < Void > saveRelationshipsPostWithHttpInfo ( SaveRelationshipRequest request ) throws ApiException { } }
|
com . squareup . okhttp . Call call = saveRelationshipsPostValidateBeforeCall ( request , null , null ) ; return apiClient . execute ( call ) ;
|
public class AbstractQueue { /** * { @ inheritDoc }
* @ since 0.6.0 */
@ Override public IQueueMessage < ID , DATA > createMessage ( DATA content ) { } }
|
return messageFactory . createMessage ( content ) ;
|
public class ExtendableService { /** * Installs a list of service filters . This method is designed to facilitate Spring bean assembly . */
public void setFilters ( List < Service . Filter > filters ) { } }
|
for ( Service . Filter filter : filters ) setFilter ( filter ) ;
|
public class SQLRunner { /** * Execute SQL stmt .
* @ param _ sqlProvider the sql provider
* @ param _ complStmt the compl stmt
* @ return true , if successful
* @ throws EFapsException the e faps exception */
@ SuppressWarnings ( "unchecked" ) protected boolean executeSQLStmt ( final ISelectionProvider _sqlProvider , final String _complStmt ) throws EFapsException { } }
|
SQLRunner . LOG . debug ( "SQL-Statement: {}" , _complStmt ) ; boolean ret = false ; List < Object [ ] > rows = new ArrayList < > ( ) ; boolean cached = false ; if ( runnable . has ( StmtFlag . REQCACHED ) ) { final QueryKey querykey = QueryKey . get ( Context . getThreadContext ( ) . getRequestId ( ) , _complStmt ) ; final Cache < QueryKey , Object > cache = QueryCache . getSqlCache ( ) ; if ( cache . containsKey ( querykey ) ) { final Object object = cache . get ( querykey ) ; if ( object instanceof List ) { rows = ( List < Object [ ] > ) object ; } cached = true ; } } if ( ! cached ) { ConnectionResource con = null ; try { con = Context . getThreadContext ( ) . getConnectionResource ( ) ; final Statement stmt = con . createStatement ( ) ; final ResultSet rs = stmt . executeQuery ( _complStmt ) ; final ArrayListHandler handler = new ArrayListHandler ( Context . getDbType ( ) . getRowProcessor ( ) ) ; rows = handler . handle ( rs ) ; rs . close ( ) ; stmt . close ( ) ; } catch ( final SQLException e ) { throw new EFapsException ( SQLRunner . class , "executeOneCompleteStmt" , e ) ; } if ( runnable . has ( StmtFlag . REQCACHED ) ) { final ICacheDefinition cacheDefinition = new ICacheDefinition ( ) { @ Override public long getLifespan ( ) { return 5 ; } @ Override public TimeUnit getLifespanUnit ( ) { return TimeUnit . MINUTES ; } } ; QueryCache . put ( cacheDefinition , QueryKey . get ( Context . getThreadContext ( ) . getRequestId ( ) , _complStmt ) , rows ) ; } } for ( final Object [ ] row : rows ) { for ( final Select select : _sqlProvider . getSelection ( ) . getAllSelects ( ) ) { select . addObject ( row ) ; } ret = true ; } return ret ;
|
public class AmazonCloudDirectoryClient { /** * Creates a new < a > Facet < / a > in a schema . Facet creation is allowed only in development or applied schemas .
* @ param createFacetRequest
* @ return Result of the CreateFacet operation returned by the service .
* @ throws InternalServiceException
* Indicates a problem that must be resolved by Amazon Web Services . This might be a transient error in
* which case you can retry your request until it succeeds . Otherwise , go to the < a
* href = " http : / / status . aws . amazon . com / " > AWS Service Health Dashboard < / a > site to see if there are any
* operational issues with the service .
* @ throws InvalidArnException
* Indicates that the provided ARN value is not valid .
* @ throws RetryableConflictException
* Occurs when a conflict with a previous successful write is detected . For example , if a write operation
* occurs on an object and then an attempt is made to read the object using “ SERIALIZABLE ” consistency , this
* exception may result . This generally occurs when the previous write did not have time to propagate to the
* host serving the current request . A retry ( with appropriate backoff logic ) is the recommended response to
* this exception .
* @ throws ValidationException
* Indicates that your request is malformed in some manner . See the exception message .
* @ throws LimitExceededException
* Indicates that limits are exceeded . See < a
* href = " https : / / docs . aws . amazon . com / clouddirectory / latest / developerguide / limits . html " > Limits < / a > for more
* information .
* @ throws AccessDeniedException
* Access denied . Check your permissions .
* @ throws ResourceNotFoundException
* The specified resource could not be found .
* @ throws FacetAlreadyExistsException
* A facet with the same name already exists .
* @ throws InvalidRuleException
* Occurs when any of the rule parameter keys or values are invalid .
* @ throws FacetValidationException
* The < a > Facet < / a > that you provided was not well formed or could not be validated with the schema .
* @ sample AmazonCloudDirectory . CreateFacet
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / clouddirectory - 2017-01-11 / CreateFacet " target = " _ top " > AWS API
* Documentation < / a > */
@ Override public CreateFacetResult createFacet ( CreateFacetRequest request ) { } }
|
request = beforeClientExecution ( request ) ; return executeCreateFacet ( request ) ;
|
public class ValueTransformer { /** * / * ( non - Javadoc )
* @ see com . oath . cyclops . types . MonadicValue # forEach3 ( java . util . function . Function , java . util . function . BiFunction , com . oath . cyclops . util . function . TriFunction ) */
public < T2 , R1 , R2 , R > ValueTransformer < W , R > forEach3 ( Function < ? super T , ? extends MonadicValue < R1 > > value1 , BiFunction < ? super T , ? super R1 , ? extends MonadicValue < R2 > > value2 , Function3 < ? super T , ? super R1 , ? super R2 , ? extends R > yieldingFunction ) { } }
|
return unitAnyM ( this . transformerStream ( ) . map ( v -> v . forEach3 ( value1 , value2 , yieldingFunction ) ) ) ;
|
public class SsmlSayAs { /** * Attributes to set on the generated XML element
* @ return A Map of attribute keys to values */
protected Map < String , String > getElementAttributes ( ) { } }
|
// Preserve order of attributes
Map < String , String > attrs = new HashMap < > ( ) ; if ( this . getInterpretAs ( ) != null ) { attrs . put ( "interpret-as" , this . getInterpretAs ( ) . toString ( ) ) ; } if ( this . getRole ( ) != null ) { attrs . put ( "role" , this . getRole ( ) . toString ( ) ) ; } return attrs ;
|
public class Governator { /** * Add Guice modules to Governator .
* @ param modules Guice modules to add .
* @ return this */
public Governator addModules ( List < Module > modules ) { } }
|
if ( modules != null ) { this . modules . addAll ( modules ) ; } return this ;
|
public class HttpServerBuilder { /** * Defines a file to be hosted on the specified path . The file ' s content is provided by the specified URL .
* @ param path
* the path where the file is accessible from the server
* @ param resource
* the resource providing the content for the file
* @ return
* this builder */
public HttpServerBuilder contentFrom ( final String path , final URL resource ) { } }
|
resources . put ( path , resource ) ; return this ;
|
public class ScriptRuntime { /** * If d is exact int value , return its value wrapped as Integer
* and othewise return d converted to String . */
static Object getIndexObject ( double d ) { } }
|
int i = ( int ) d ; if ( i == d ) { return Integer . valueOf ( i ) ; } return toString ( d ) ;
|
public class ExtendedBufferedReader { /** * Non - blocking reading of len chars into buffer buf starting
* at bufferposition off .
* performs an iteratative read on the underlying stream
* as long as the following conditions hold :
* - less than len chars have been read
* - end of stream has not been reached
* - next read is not blocking
* @ return nof chars actually read or END _ OF _ STREAM */
public int read ( char [ ] buf , int off , int len ) throws IOException { } }
|
// do not claim if len = = 0
if ( len == 0 ) { return 0 ; } // init lookahead , but do not block ! !
if ( lookaheadChar == UNDEFINED ) { if ( ready ( ) ) { lookaheadChar = super . read ( ) ; } else { return - 1 ; } } // ' first read of underlying stream '
if ( lookaheadChar == - 1 ) { return - 1 ; } // continue until the lookaheadChar would block
int cOff = off ; while ( len > 0 && ready ( ) ) { if ( lookaheadChar == - 1 ) { // eof stream reached , do not continue
return cOff - off ; } else { buf [ cOff ++ ] = ( char ) lookaheadChar ; if ( lookaheadChar == '\n' ) { lineCounter ++ ; } lastChar = lookaheadChar ; lookaheadChar = super . read ( ) ; len -- ; } } return cOff - off ;
|
public class ApiOvhOverTheBox { /** * Delete a remote access
* REST : DELETE / overTheBox / { serviceName } / remoteAccesses / { remoteAccessId }
* @ param serviceName [ required ] The internal name of your overTheBox offer
* @ param remoteAccessId [ required ] The id of the remote access */
public void serviceName_remoteAccesses_remoteAccessId_DELETE ( String serviceName , String remoteAccessId ) throws IOException { } }
|
String qPath = "/overTheBox/{serviceName}/remoteAccesses/{remoteAccessId}" ; StringBuilder sb = path ( qPath , serviceName , remoteAccessId ) ; exec ( qPath , "DELETE" , sb . toString ( ) , null ) ;
|
public class JBBPUtils { /** * Check that an object is null and throw NullPointerException in the case .
* @ param object an object to be checked
* @ param message message to be used as the exception message
* @ throws NullPointerException it will be thrown if the object is null */
public static void assertNotNull ( final Object object , final String message ) { } }
|
if ( object == null ) { throw new NullPointerException ( message == null ? "Object is null" : message ) ; }
|
public class PrivateTaskScheduler { /** * Constructor .
* @ param application The parent application .
* @ param iMaxThreads The maximum number of threads to run ( - 1 = default ) .
* @ param bKeepAlive Keep the task alive after execution . */
public void init ( App application , int iMaxThreads , boolean bKeepAlive ) { } }
|
iMaxThreads = 0 ; super . init ( application , iMaxThreads ) ; m_vPrivateJobs = new Vector < Object > ( ) ; // List of my private jobs
m_bKeepAlive = bKeepAlive ; // Keep the task alive after execution .
if ( m_bKeepAlive ) new Thread ( this , "KeepAliveThread" ) . start ( ) ;
|
public class RelationalOperations { /** * Returns true if polygon _ a crosses envelope _ b . */
private static boolean polygonCrossesEnvelope_ ( Polygon polygon_a , Envelope envelope_b , double tolerance , ProgressTracker progress_tracker ) { } }
|
Envelope2D env_a = new Envelope2D ( ) , env_b = new Envelope2D ( ) ; polygon_a . queryEnvelope2D ( env_a ) ; envelope_b . queryEnvelope2D ( env_b ) ; if ( envelopeInfContainsEnvelope_ ( env_b , env_a , tolerance ) ) return false ; if ( env_b . getHeight ( ) > tolerance && env_b . getWidth ( ) > tolerance ) return false ; // when treated as an area , areas cannot cross areas .
if ( env_b . getHeight ( ) <= tolerance && env_b . getWidth ( ) <= tolerance ) return false ; // when treated as a point , areas cannot cross points .
// Treat as polyline
Polyline polyline_b = new Polyline ( ) ; Point p = new Point ( ) ; envelope_b . queryCornerByVal ( 0 , p ) ; polyline_b . startPath ( p ) ; envelope_b . queryCornerByVal ( 2 , p ) ; polyline_b . lineTo ( p ) ; return polygonCrossesPolylineImpl_ ( polygon_a , polyline_b , tolerance , progress_tracker ) ;
|
public class Compiler { /** * Simplistic implementation of the java . nio . file . Path resolveSibling method that works with GWT .
* @ param fromPath - must be a file ( not directory )
* @ param toPath - must be a file ( not directory ) */
private static String resolveSibling ( String fromPath , String toPath ) { } }
|
// If the destination is an absolute path , nothing to do .
if ( toPath . startsWith ( "/" ) ) { return toPath ; } List < String > fromPathParts = new ArrayList < > ( Arrays . asList ( fromPath . split ( "/" ) ) ) ; List < String > toPathParts = new ArrayList < > ( Arrays . asList ( toPath . split ( "/" ) ) ) ; if ( ! fromPathParts . isEmpty ( ) ) { fromPathParts . remove ( fromPathParts . size ( ) - 1 ) ; } while ( ! fromPathParts . isEmpty ( ) && ! toPathParts . isEmpty ( ) ) { if ( toPathParts . get ( 0 ) . equals ( "." ) ) { toPathParts . remove ( 0 ) ; } else if ( toPathParts . get ( 0 ) . equals ( ".." ) ) { toPathParts . remove ( 0 ) ; fromPathParts . remove ( fromPathParts . size ( ) - 1 ) ; } else { break ; } } fromPathParts . addAll ( toPathParts ) ; return String . join ( "/" , fromPathParts ) ;
|
public class RtfColorList { /** * Returns the index of the given RtfColor in the color list . If the RtfColor
* is not in the list of colors , then it is added .
* @ param color The RtfColor for which to get the index
* @ return The index of the RtfColor */
public int getColorNumber ( RtfColor color ) { } }
|
int colorIndex = - 1 ; for ( int i = 0 ; i < colorList . size ( ) ; i ++ ) { if ( colorList . get ( i ) . equals ( color ) ) { colorIndex = i ; } } if ( colorIndex == - 1 ) { colorIndex = colorList . size ( ) ; colorList . add ( color ) ; } return colorIndex ;
|
public class FormatUtilities { /** * Returns the given date formatted using the given format .
* @ param dt The date to be formatted
* @ param format The format to use to display the date
* @ param tolerance The tolerance for the date to be formatted
* @ return The given date formatted using the given format */
static public String getFormattedDateTime ( long dt , String format , long tolerance ) { } }
|
return getFormattedDateTime ( dt , format , true , tolerance ) ;
|
public class CommerceDiscountRelPersistenceImpl { /** * Returns an ordered range of all the commerce discount rels where commerceDiscountId = & # 63 ; .
* Useful when paginating results . Returns a maximum of < code > end - start < / code > instances . < code > start < / code > and < code > end < / code > are not primary keys , they are indexes in the result set . Thus , < code > 0 < / code > refers to the first result in the set . Setting both < code > start < / code > and < code > end < / code > to { @ link QueryUtil # ALL _ POS } will return the full result set . If < code > orderByComparator < / code > is specified , then the query will include the given ORDER BY logic . If < code > orderByComparator < / code > is absent and pagination is required ( < code > start < / code > and < code > end < / code > are not { @ link QueryUtil # ALL _ POS } ) , then the query will include the default ORDER BY logic from { @ link CommerceDiscountRelModelImpl } . If both < code > orderByComparator < / code > and pagination are absent , for performance reasons , the query will not have an ORDER BY clause and the returned result set will be sorted on by the primary key in an ascending order .
* @ param commerceDiscountId the commerce discount ID
* @ param start the lower bound of the range of commerce discount rels
* @ param end the upper bound of the range of commerce discount rels ( not inclusive )
* @ param orderByComparator the comparator to order the results by ( optionally < code > null < / code > )
* @ return the ordered range of matching commerce discount rels */
@ Override public List < CommerceDiscountRel > findByCommerceDiscountId ( long commerceDiscountId , int start , int end , OrderByComparator < CommerceDiscountRel > orderByComparator ) { } }
|
return findByCommerceDiscountId ( commerceDiscountId , start , end , orderByComparator , true ) ;
|
public class CasConfigurationEventListener { /** * Handle configuration modified event .
* @ param event the event */
@ EventListener public void handleConfigurationModifiedEvent ( final CasConfigurationModifiedEvent event ) { } }
|
if ( this . contextRefresher == null ) { LOGGER . warn ( "Unable to refresh application context, since no refresher is available" ) ; return ; } if ( event . isEligibleForContextRefresh ( ) ) { LOGGER . info ( "Received event [{}]. Refreshing CAS configuration..." , event ) ; Collection < String > keys = null ; try { keys = contextRefresher . refresh ( ) ; LOGGER . debug ( "Refreshed the following settings: [{}]." , keys ) ; } catch ( final Exception e ) { LOGGER . trace ( e . getMessage ( ) , e ) ; } finally { rebind ( ) ; LOGGER . info ( "CAS finished rebinding configuration with new settings [{}]" , ObjectUtils . defaultIfNull ( keys , new ArrayList < > ( 0 ) ) ) ; } }
|
public class CmsGalleryControllerHandler { /** * Updates the gallery data . < p >
* @ param searchObj the current search object
* @ param dialogBean the gallery data
* @ param controller he gallery controller */
public void updateGalleryData ( CmsGallerySearchBean searchObj , CmsGalleryDataBean dialogBean , CmsGalleryController controller ) { } }
|
if ( ( m_galleryDialog . getGalleriesTab ( ) != null ) && ( dialogBean . getGalleries ( ) != null ) ) { Collections . sort ( dialogBean . getGalleries ( ) , new CmsComparatorTitle ( true ) ) ; setGalleriesTabContent ( dialogBean . getGalleries ( ) , searchObj . getGalleries ( ) ) ; } if ( ( m_galleryDialog . getTypesTab ( ) != null ) && ( dialogBean . getTypes ( ) != null ) ) { setTypesTabContent ( controller . getSearchTypes ( ) , searchObj . getTypes ( ) ) ; } if ( ( m_galleryDialog . getCategoriesTab ( ) != null ) && ( dialogBean . getCategories ( ) != null ) ) { setCategoriesTabContent ( dialogBean . getCategories ( ) , searchObj . getCategories ( ) ) ; }
|
public class EffectSize { /** * Original Cohen ' s d formulation , as in Cohen ( 1988 ) , Statistical power
* analysis for the behavioral sciences .
* @ param < V > type of the keys of each map .
* @ param baselineN number of samples of baseline method .
* @ param baselineMean mean of baseline method .
* @ param baselineStd standard deviation of baseline method .
* @ param testN number of samples of test method .
* @ param testMean mean of test method .
* @ param testStd standard deviation of test method .
* @ return Cohen ' s d without least squares estimation . */
public static < V > double getCohenD ( final int baselineN , final double baselineMean , final double baselineStd , final int testN , final double testMean , final double testStd ) { } }
|
double pooledStd = Math . sqrt ( ( ( testN - 1 ) * Math . pow ( testStd , 2 ) + ( baselineN - 1 ) * Math . pow ( baselineStd , 2 ) ) / ( baselineN + testN ) ) ; double d = Math . abs ( testMean - baselineMean ) / pooledStd ; return d ;
|
public class CommunicationManager {
    /**
     * Adds a torrent to storage with any piece storage and metadata source.
     *
     * @param metadataProvider specified metadata source
     * @param pieceStorage specified storage of pieces
     * @return {@link TorrentManager} instance for monitoring torrent state
     * @throws IOException if IO error occurs in reading metadata file
     */
    public TorrentManager addTorrent(TorrentMetadataProvider metadataProvider, PieceStorage pieceStorage) throws IOException {
        // Delegate to the listener-aware overload, registering no listeners.
        return addTorrent(metadataProvider, pieceStorage, Collections.<TorrentListener>emptyList());
    }
}
|
public class EnumBindTransform {
    /**
     * Generates the Jackson parse code for an enum-typed property: reads the raw
     * token text and assigns {@code Enum.valueOf(text)} when the text is non-blank,
     * {@code null} otherwise.
     *
     * @see com.abubusoft.kripton.processor.bind.transform.BindTransform#generateParseOnJackson(com.abubusoft.kripton.processor.bind.BindTypeContext,
     *      com.squareup.javapoet.MethodSpec.Builder, java.lang.String, com.squareup.javapoet.TypeName, java.lang.String,
     *      com.abubusoft.kripton.processor.bind.model.BindProperty)
     */
    @Override
    public void generateParseOnJackson(BindTypeContext context, Builder methodBuilder, String parserName, TypeName beanClass, String beanName, BindProperty property) {
        // Nullable properties are read inside a VALUE_NULL guard so a JSON null
        // leaves the bean field untouched.
        if (property.isNullable()) {
            methodBuilder.beginControlFlow("if ($L.currentToken()!=$T.VALUE_NULL)", parserName, JsonToken.class);
        } else {
            // NOTE(review): beginControlFlow("") emits an empty header; presumably only
            // present to balance the unconditional endControlFlow() below — confirm
            // against the generated output.
            methodBuilder.beginControlFlow("");
        }
        // Read the raw enum name from the parser...
        methodBuilder.addStatement("String tempEnum=$L.getText()", parserName);
        // ...and assign valueOf(tempEnum) when non-blank, null otherwise.
        methodBuilder.addStatement(setter(beanClass, beanName, property, "$T.hasText(tempEnum)?$T.valueOf(tempEnum):null"), StringUtils.class, typeName);
        methodBuilder.endControlFlow();
    }
}
|
public class TxRecoveryAgentImpl { /** * Creates a Filesystem TranLogConfiguration object appropriate for storing transaction logs in a filesystem .
* @ param recoveredServerIdentity
* @ param fs
* @ param logDir
* @ param logSize
* @ param isPeerRecoverySupported
* @ return
* @ throws URISyntaxException */
private TranLogConfiguration createFileTranLogConfiguration ( String recoveredServerIdentity , FailureScope fs , String logDir , int logSize , boolean isPeerRecoverySupported ) throws URISyntaxException { } }
|
if ( tc . isEntryEnabled ( ) ) Tr . entry ( tc , "createFileTranLogConfiguration" , new java . lang . Object [ ] { recoveredServerIdentity , fs , logDir , logSize , this } ) ; TranLogConfiguration tlc = null ; if ( _isPeerRecoverySupported ) { if ( tc . isDebugEnabled ( ) ) Tr . debug ( tc , "Work with server recovery identity - " , recoveredServerIdentity ) ; // Do we need to reset the logdir ?
if ( recoveredServerIdentity . equals ( localRecoveryIdentity ) ) { if ( tc . isDebugEnabled ( ) ) Tr . debug ( tc , "Local server recovery identity so no need to reset the logDir" ) ; } else { // Reset the logdir
if ( fs != null && fs instanceof FileFailureScope ) { FileFailureScope ffs = ( FileFailureScope ) fs ; if ( ffs != null ) { LeaseInfo li = ffs . getLeaseInfo ( ) ; if ( li != null ) { logDir = li . getLeaseDetail ( ) ; if ( tc . isDebugEnabled ( ) ) Tr . debug ( tc , "Have reset the logDir to " , logDir ) ; } } } } } tlc = new TranLogConfiguration ( logDir , logDir , logSize ) ; if ( tc . isEntryEnabled ( ) ) Tr . exit ( tc , "createFileTranLogConfiguration" , tlc ) ; return tlc ;
|
public class LUnaryOperatorBuilder { /** * One of ways of creating builder . This is possibly the least verbose way where compiler should be able to guess the generic parameters . */
@ Nonnull public static < T > LUnaryOperator < T > unaryOperatorFrom ( Consumer < LUnaryOperatorBuilder < T > > buildingFunction ) { } }
|
LUnaryOperatorBuilder builder = new LUnaryOperatorBuilder ( ) ; buildingFunction . accept ( builder ) ; return builder . build ( ) ;
|
public class SelectExtension { /** * selects an item by it ' s identifier
* @ param identifier the identifier of the item to select
* @ param fireEvent true if the onClick listener should be called
* @ param considerSelectableFlag true if the select method should not select an item if its not selectable */
public void selectByIdentifier ( final long identifier , final boolean fireEvent , final boolean considerSelectableFlag ) { } }
|
mFastAdapter . recursive ( new AdapterPredicate < Item > ( ) { @ Override public boolean apply ( @ NonNull IAdapter < Item > lastParentAdapter , int lastParentPosition , Item item , int position ) { if ( item . getIdentifier ( ) == identifier ) { select ( lastParentAdapter , item , position , fireEvent , considerSelectableFlag ) ; return true ; } return false ; } } , true ) ;
|
public class JavacFileManager { /** * container is a directory , a zip file , or a non - existant path .
* Insert all files in subdirectory subdirectory of container which
* match fileKinds into resultList */
private void listContainer ( File container , RelativeDirectory subdirectory , Set < JavaFileObject . Kind > fileKinds , boolean recurse , ListBuffer < JavaFileObject > resultList ) { } }
|
Archive archive = archives . get ( container ) ; if ( archive == null ) { // archives are not created for directories .
if ( fsInfo . isDirectory ( container ) ) { listDirectory ( container , subdirectory , fileKinds , recurse , resultList ) ; return ; } // Not a directory ; either a file or non - existant , create the archive
try { archive = openArchive ( container ) ; } catch ( IOException ex ) { log . error ( "error.reading.file" , container , getMessage ( ex ) ) ; return ; } } listArchive ( archive , subdirectory , fileKinds , recurse , resultList ) ;
|
public class FormValidation { /** * Makes sure that the given string is a base64 encoded text .
* @ param allowWhitespace
* if you allow whitespace ( CR , LF , etc ) in base64 encoding
* @ param allowEmpty
* Is empty string allowed ?
* @ param errorMessage
* Error message .
* @ since 1.305 */
public static FormValidation validateBase64 ( String value , boolean allowWhitespace , boolean allowEmpty , String errorMessage ) { } }
|
try { String v = value ; if ( ! allowWhitespace ) { if ( v . indexOf ( ' ' ) >= 0 || v . indexOf ( '\n' ) >= 0 ) return error ( errorMessage ) ; } v = v . trim ( ) ; if ( ! allowEmpty && v . length ( ) == 0 ) return error ( errorMessage ) ; Base64 . getDecoder ( ) . decode ( v . getBytes ( StandardCharsets . UTF_8 ) ) ; return ok ( ) ; } catch ( IllegalArgumentException e ) { return error ( errorMessage ) ; }
|
public class AdminDevice { /** * remove logging to a device
* @ param argin
* @ throws DevFailed */
@ Command ( name = "RemoveLoggingTarget" , inTypeDesc = "Str[i]=Device-name. Str[i+1]=Target-type::Target-name" ) public void removeLoggingTarget ( final String [ ] argin ) throws DevFailed { } }
|
if ( argin . length % 2 != 0 ) { throw DevFailedUtils . newDevFailed ( INPUT_ERROR , "argin must be of even size" ) ; } for ( int i = 0 ; i < argin . length - 1 ; i = i + 2 ) { final String deviceName = argin [ i ] ; final String [ ] config = argin [ i + 1 ] . split ( LoggingManager . LOGGING_TARGET_SEPARATOR ) ; if ( config . length != 2 ) { throw DevFailedUtils . newDevFailed ( INPUT_ERROR , "config must be of size 2: targetType::targetName" ) ; } LoggingManager . getInstance ( ) . removeAppender ( deviceName , config [ 0 ] ) ; }
|
public class GlobalSuffixFinders { /** * Returns all suffixes of the counterexample word as distinguishing suffixes , as suggested by Maler & amp ; Pnueli .
* @ param ceQuery
* the counterexample query
* @ return all suffixes of the counterexample input */
public static < I , D > List < Word < I > > findMalerPnueli ( Query < I , D > ceQuery ) { } }
|
return ceQuery . getInput ( ) . suffixes ( false ) ;
|
public class Grid { /** * Replies the grid cells that are intersecting the specified bounds .
* @ param bounds the bounds
* @ param createCells indicates if the not already created cells should be created .
* @ return the grid cells . */
protected Iterable < GridCell < P > > getGridCellsOn ( Rectangle2afp < ? , ? , ? , ? , ? , ? > bounds , boolean createCells ) { } }
|
if ( bounds . intersects ( this . bounds ) ) { final int c1 = getColumnFor ( bounds . getMinX ( ) ) ; final int r1 = getRowFor ( bounds . getMinY ( ) ) ; final int c2 = getColumnFor ( bounds . getMaxX ( ) ) ; final int r2 = getRowFor ( bounds . getMaxY ( ) ) ; return new CellIterable ( r1 , c1 , r2 , c2 , createCells ) ; } return Collections . emptyList ( ) ;
|
public class IfcSurfaceStyleWithTexturesImpl {
    /**
     * <!-- begin-user-doc -->
     * Returns the textures list via EMF's reflective {@code eGet}.
     * <!-- end-user-doc -->
     * @generated
     */
    @SuppressWarnings("unchecked")
    @Override
    public EList<IfcSurfaceTexture> getTextures() {
        // Reflective EMF accessor; the boolean argument requests proxy resolution
        // (per the EMF eGet(feature, resolve) contract).
        return (EList<IfcSurfaceTexture>) eGet(Ifc4Package.Literals.IFC_SURFACE_STYLE_WITH_TEXTURES__TEXTURES, true);
    }
}
|
public class InvokeDynamicBytecodeGeneratingVisitor {
    /**
     * Emits bytecode that dereferences the value currently on top of the operand
     * stack: if it is a {@link Reference}, it is resolved via an
     * {@code invokedynamic "dyn:getProperty|getMethod"} call; a non-Reference is
     * left on the stack as-is. Unresolvable references raise a ReferenceError.
     *
     * When {@code throwIfNot} is non-null, the resolved value is additionally
     * checked with {@code instanceof throwIfNot}; a TypeError is emitted when the
     * check fails.
     *
     * (A long commented-out assembly listing for assignment handling previously
     * lived here; removed as dead commented-out code.)
     *
     * @param throwIfNot optional expected class of the resolved value; null to skip the check
     * @return the assembled code block
     */
    @Override
    public CodeBlock jsGetValue(final Class<?> throwIfNot) {
        LabelNode end = new LabelNode();
        LabelNode throwRef = new LabelNode();
        // Stack comments below track the operand stack left-to-right (bottom..top).
        CodeBlock codeBlock = new CodeBlock() // IN: reference
                .dup() // ref ref
                .instance_of(p(Reference.class)) // ref isref?
                .iffalse(end) // not a Reference: leave value untouched
                .checkcast(p(Reference.class)) // ref
                .dup() // ref ref
                .invokevirtual(p(Reference.class), "isUnresolvableReference", sig(boolean.class)) // ref unresolv?
                .iftrue(throwRef) // unresolvable -> reference error
                .dup() // ref ref
                .invokevirtual(p(Reference.class), "getReferencedName", sig(String.class)) // ref name
                .aload(Arities.EXECUTION_CONTEXT) // ref name context
                .swap() // ref context name
                .invokedynamic("dyn:getProperty|getMethod", sig(Object.class, Object.class, ExecutionContext.class, String.class), DynJSBootstrapper.HANDLE, DynJSBootstrapper.ARGS); // value
        if (throwIfNot != null) {
            // Optional type guard: throw a TypeError unless the resolved value is
            // an instance of throwIfNot.
            codeBlock.dup() // value value
                    .instance_of(p(throwIfNot)) // value bool
                    .iftrue(end) // value
                    .pop()
                    .append(jsThrowTypeError("expected " + throwIfNot.getName()));
        } // result
        codeBlock.go_to(end)
                .label(throwRef)
                .append(jsThrowReferenceError("unable to dereference"))
                .label(end) // value
                .nop();
        return codeBlock;
    }
}
|
public class AWSBackupClient { /** * Returns the backup plan that is specified by the plan ID as a backup template .
* @ param exportBackupPlanTemplateRequest
* @ return Result of the ExportBackupPlanTemplate operation returned by the service .
* @ throws InvalidParameterValueException
* Indicates that something is wrong with a parameter ' s value . For example , the value is out of range .
* @ throws MissingParameterValueException
* Indicates that a required parameter is missing .
* @ throws ServiceUnavailableException
* The request failed due to a temporary failure of the server .
* @ throws ResourceNotFoundException
* A resource that is required for the action doesn ' t exist .
* @ sample AWSBackup . ExportBackupPlanTemplate
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / backup - 2018-11-15 / ExportBackupPlanTemplate "
* target = " _ top " > AWS API Documentation < / a > */
@ Override public ExportBackupPlanTemplateResult exportBackupPlanTemplate ( ExportBackupPlanTemplateRequest request ) { } }
|
request = beforeClientExecution ( request ) ; return executeExportBackupPlanTemplate ( request ) ;
|
public class MarginalLogLikelihood { /** * Gets the " expected " feature counts . */
public static FeatureVector getExpectedFeatureCounts ( FgExampleList data , FgInferencerFactory infFactory , FgModel model , double [ ] params ) { } }
|
model . updateModelFromDoubles ( params ) ; FgModel feats = model . getDenseCopy ( ) ; feats . zero ( ) ; for ( int i = 0 ; i < data . size ( ) ; i ++ ) { LFgExample ex = data . get ( i ) ; FactorGraph fgLatPred = ex . getFactorGraph ( ) ; fgLatPred . updateFromModel ( model ) ; FgInferencer infLatPred = infFactory . getInferencer ( fgLatPred ) ; infLatPred . run ( ) ; addExpectedPartials ( fgLatPred , infLatPred , 1.0 * ex . getWeight ( ) , feats ) ; } double [ ] f = new double [ model . getNumParams ( ) ] ; feats . updateDoublesFromModel ( f ) ; return new FeatureVector ( f ) ;
|
public class CssEscape { /** * Perform a ( configurable ) CSS String < strong > escape < / strong > operation on a < tt > String < / tt > input .
* This method will perform an escape operation according to the specified
* { @ link CssStringEscapeType } and
* { @ link CssStringEscapeLevel } argument values .
* All other < tt > String < / tt > - based < tt > escapeCssString * ( . . . ) < / tt > methods call this one with preconfigured
* < tt > type < / tt > and < tt > level < / tt > values .
* This method is < strong > thread - safe < / strong > .
* @ param text the < tt > String < / tt > to be escaped .
* @ param type the type of escape operation to be performed , see
* { @ link CssStringEscapeType } .
* @ param level the escape level to be applied , see { @ link CssStringEscapeLevel } .
* @ return The escaped result < tt > String < / tt > . As a memory - performance improvement , will return the exact
* same object as the < tt > text < / tt > input argument if no escaping modifications were required ( and
* no additional < tt > String < / tt > objects will be created during processing ) . Will
* return < tt > null < / tt > if input is < tt > null < / tt > . */
public static String escapeCssString ( final String text , final CssStringEscapeType type , final CssStringEscapeLevel level ) { } }
|
if ( type == null ) { throw new IllegalArgumentException ( "The 'type' argument cannot be null" ) ; } if ( level == null ) { throw new IllegalArgumentException ( "The 'level' argument cannot be null" ) ; } return CssStringEscapeUtil . escape ( text , type , level ) ;
|
public class SwaggerBuilder { /** * Register authentication security .
* @ param swagger
* @ param operation
* @ param method */
protected void registerSecurity ( Swagger swagger , Operation operation , Method method ) { } }
|
RequireToken requireToken = ClassUtil . getAnnotation ( method , RequireToken . class ) ; if ( requireToken != null ) { String apiKeyName = requireToken . value ( ) ; if ( swagger . getSecurityDefinitions ( ) == null || ! swagger . getSecurityDefinitions ( ) . containsKey ( apiKeyName ) ) { ApiKeyAuthDefinition security = new ApiKeyAuthDefinition ( ) ; security . setName ( apiKeyName ) ; security . setIn ( In . HEADER ) ; security . setType ( "apiKey" ) ; swagger . addSecurityDefinition ( apiKeyName , security ) ; } operation . addSecurity ( apiKeyName , Collections . emptyList ( ) ) ; } BasicAuth basicAuth = ClassUtil . getAnnotation ( method , BasicAuth . class ) ; if ( basicAuth != null ) { if ( swagger . getSecurityDefinitions ( ) == null || ! swagger . getSecurityDefinitions ( ) . containsKey ( "basic" ) ) { BasicAuthDefinition security = new BasicAuthDefinition ( ) ; swagger . addSecurityDefinition ( "basic" , security ) ; } operation . addSecurity ( "basic" , Collections . emptyList ( ) ) ; }
|
public class Sql {
    /**
     * Creates a new Sql instance given a JDBC connection URL and a driver class name.
     *
     * @param url a database url of the form
     *            <code>jdbc:<em>subprotocol</em>:<em>subname</em></code>
     * @param driverClassName the fully qualified class name of the driver class
     * @return a new Sql instance with a connection
     * @throws SQLException if a database access error occurs
     * @throws ClassNotFoundException if the driver class cannot be found or loaded
     */
    public static Sql newInstance(String url, String driverClassName) throws SQLException, ClassNotFoundException {
        // Ensure the JDBC driver class is loaded (and thus registered) first,
        // then delegate connection creation to the URL-only factory.
        loadDriver(driverClassName);
        return newInstance(url);
    }
}
|
public class AgentInput {
    /**
     * Return an array of bytes from the ssh-agent representing data signed by a
     * private SSH key. Reads and validates an SSH2_AGENT_SIGN_RESPONSE message.
     *
     * @return the raw signature bytes from the agent's signature blob
     * @throws IOException if reading from the agent stream fails
     */
    byte[] readSignResponse() throws IOException {
        // Read the first 9 bytes from the InputStream, which are the
        // SSH2_AGENT_SIGN_RESPONSE headers (message length + response code).
        final byte[] headerBytes = readBytes(9, "SSH2_AGENT_SIGN_RESPONSE");
        log.debug("Received SSH2_AGENT_SIGN_RESPONSE message from ssh-agent.");
        final SignResponseHeaders headers = SignResponseHeaders.from(headerBytes);
        // Read the rest of the SSH2_AGENT_SIGN_RESPONSE message from ssh-agent.
        // 5 is the sum of the number of bytes of response code and response length,
        // already consumed as part of the header.
        final byte[] bytes = readBytes(headers.getLength() - 5);
        // The payload is a sequence of length-prefixed fields:
        // first the signature format identifier, then the signature itself.
        final ByteIterator iterator = new ByteIterator(bytes);
        final byte[] responseType = iterator.next();
        // NOTE(review): new String(byte[]) uses the platform default charset;
        // presumably the format id is ASCII — confirm.
        final String signatureFormatId = new String(responseType);
        if (!signatureFormatId.equals(Rsa.RSA_LABEL)) {
            throw new RuntimeException("I unexpectedly got a non-Rsa signature format ID in the " + "SSH2_AGENT_SIGN_RESPONSE's signature blob.");
        }
        // Second field: the actual signature bytes.
        return iterator.next();
    }
}
|
public class StringParser { /** * Checks if the given string is a numeric string that can be converted to an
* unsigned long value with radix { @ value # DEFAULT _ RADIX } .
* @ param sStr
* The string to check . May be < code > null < / code > .
* @ return < code > true < / code > if the value can be converted to a valid value */
public static boolean isUnsignedLong ( @ Nullable final String sStr ) { } }
|
if ( sStr != null ) try { final long ret = Long . parseLong ( sStr , DEFAULT_RADIX ) ; return ret >= 0 ; } catch ( final NumberFormatException ex ) { // fall through
} return false ;
|
public class DescribeComplianceByConfigRuleResult {
    /**
     * Indicates whether each of the specified AWS Config rules is compliant.
     *
     * @return Indicates whether each of the specified AWS Config rules is compliant.
     */
    public java.util.List<ComplianceByConfigRule> getComplianceByConfigRules() {
        // Lazily initialize to an empty SdkInternalList so callers never see null.
        if (complianceByConfigRules == null) {
            complianceByConfigRules = new com.amazonaws.internal.SdkInternalList<ComplianceByConfigRule>();
        }
        return complianceByConfigRules;
    }
}
|
public class Viewport {
    /**
     * Adds a {@link FocusEvent} handler.
     *
     * @param handler the handler
     * @return returns the handler registration
     */
    @Override
    public HandlerRegistration addFocusHandler(FocusHandler handler) {
        // Lazily create the handler manager (ensureHandlers) and register the
        // focus handler against the FocusEvent type.
        return ensureHandlers().addHandler(FocusEvent.getType(), handler);
    }
}
|
public class CmsElementUtil { /** * Returns the rendered element content for all the given containers .
* @ param element the element to render
* @ param containers the containers the element appears in
* @ return a map from container names to rendered page contents */
private Map < String , String > getContentsByContainerName ( CmsContainerElementBean element , Collection < CmsContainer > containers ) { } }
|
CmsFormatterConfiguration configs = getFormatterConfiguration ( element . getResource ( ) ) ; Map < String , String > result = new HashMap < String , String > ( ) ; for ( CmsContainer container : containers ) { String content = getContentByContainer ( element , container , configs ) ; if ( content != null ) { content = removeScriptTags ( content ) ; } result . put ( container . getName ( ) , content ) ; } return result ;
|
public class InjectionUtils {
    /**
     * Replaces {@code paramCls} with the declared {@code type} when the latter is
     * a plain Class and is either a subtype of {@code paramCls} (widening the
     * parameter to the more specific declared type) or a concrete, non-Object,
     * non-interface supertype of it. Otherwise {@code paramCls} is returned
     * unchanged.
     *
     * @param paramCls the parameter class inferred so far
     * @param type the declared generic type
     * @return the class to use for the parameter
     */
    public static Class<?> updateParamClassToTypeIfNeeded(Class<?> paramCls, Type type) {
        // Only a plain Class (not a parameterized/wildcard type) can replace paramCls.
        if (paramCls == type || !(type instanceof Class)) {
            return paramCls;
        }
        final Class<?> declared = (Class<?>) type;
        // Declared type is a subtype of paramCls: prefer the more specific class.
        if (paramCls.isAssignableFrom(declared)) {
            return declared;
        }
        // Declared type is a concrete supertype (not Object, not an interface).
        if (declared != Object.class && !declared.isInterface() && declared.isAssignableFrom(paramCls)) {
            return declared;
        }
        return paramCls;
    }
}
|
public class HttpInputStreamImpl {
    /**
     * Closes this stream, releasing or rewinding the backing buffer depending on
     * whether multi-read of POST data is enabled. Idempotent: a second close is a
     * no-op.
     *
     * @see java.io.InputStream#close()
     */
    @Override
    public void close() throws IOException {
        // Already closed: nothing to do.
        if (isClosed()) {
            return;
        }
        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
            Tr.debug(tc, "Closing stream: " + this);
        }
        // adding MultiRead option
        if (!this.enableMultiReadofPostData) {
            // Single-read mode: release the buffer outright.
            if (null != this.buffer) {
                this.buffer.release();
                this.buffer = null;
            }
            validate();
        } else {
            // Multi-read mode: after the first complete read the buffer is rewound
            // so the POST data can be read again; otherwise it is released.
            if (null != this.buffer) {
                if (firstReadCompleteforMulti) {
                    this.buffer.rewind(); // make position 0, the buffer is ready for next read
                } else {
                    this.buffer.release();
                }
                // NOTE(review): the local reference is dropped even after rewind();
                // presumably the observer re-acquires the buffer — confirm.
                this.buffer = null;
            }
            validate();
            if (obs != null) {
                if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                    Tr.debug(tc, "Alert for close , obs -->" + obs + " buffer ->" + this);
                }
                // Notify the observer that the input stream has been closed.
                obs.alertISClose();
            }
            // Reset multi-read bookkeeping for the next read cycle.
            this.firstReadCompleteforMulti = true;
            this.postDataIndex = 0;
        }
        this.closed = true;
    }
}
|
public class Table { /** * If there is an identity column in the table , sets
* the max identity value . */
protected void systemUpdateIdentityValue ( Object [ ] data ) { } }
|
if ( identityColumn != - 1 ) { Number id = ( Number ) data [ identityColumn ] ; if ( id != null ) { identitySequence . systemUpdate ( id . longValue ( ) ) ; } }
|
public class Status { /** * Converts a < code > String < / code > name to a status .
* @ param name the name of the status
* @ return the status or < code > UNDEFINDED < / code > if the conversion fails . */
public static Status valueOf ( String name ) { } }
|
if ( name == null ) { return UNDEFINED ; } for ( Status status : INSTANCES . values ( ) ) { if ( name . equals ( status . getName ( ) ) ) { return status ; } } return UNDEFINED ;
|
public class Schema { /** * Get ColumnFamily metadata by its identifier
* @ param cfId The ColumnFamily identifier
* @ return metadata about ColumnFamily */
public CFMetaData getCFMetaData ( UUID cfId ) { } }
|
Pair < String , String > cf = getCF ( cfId ) ; return ( cf == null ) ? null : getCFMetaData ( cf . left , cf . right ) ;
|
public class ZoneOffsetTransition {
    /**
     * Obtains an instance defining a transition between two offsets.
     * Applications should normally obtain an instance from {@link ZoneRules}.
     * This factory is only intended for use when creating {@link ZoneRules}.
     *
     * @param transition the transition date-time at the transition, which never
     *            actually occurs, expressed local to the before offset, not null
     * @param offsetBefore the offset before the transition, not null
     * @param offsetAfter the offset at and after the transition, not null
     * @return the transition, not null
     * @throws IllegalArgumentException if {@code offsetBefore} and {@code offsetAfter}
     *             are equal, or {@code transition.getNano()} returns non-zero value
     */
    public static ZoneOffsetTransition of(LocalDateTime transition, ZoneOffset offsetBefore, ZoneOffset offsetAfter) {
        // Null checks first, in argument order (NPE messages name the argument).
        Jdk8Methods.requireNonNull(transition, "transition");
        Jdk8Methods.requireNonNull(offsetBefore, "offsetBefore");
        Jdk8Methods.requireNonNull(offsetAfter, "offsetAfter");
        // A transition must actually change the offset.
        if (offsetBefore.equals(offsetAfter)) {
            throw new IllegalArgumentException("Offsets must not be equal");
        }
        // The local transition time is defined at whole-second precision only.
        if (transition.getNano() != 0) {
            throw new IllegalArgumentException("Nano-of-second must be zero");
        }
        return new ZoneOffsetTransition(transition, offsetBefore, offsetAfter);
    }
}
|
public class StylesheetHandler {
    /**
     * Resolve an external entity.
     *
     * @param publicId The public identifier, or null if none is available.
     * @param systemId The system identifier provided in the XML document.
     * @return The new input source, or null to require the default behaviour.
     * @throws org.xml.sax.SAXException if the entity can not be resolved.
     */
    public InputSource resolveEntity(String publicId, String systemId) throws org.xml.sax.SAXException {
        // Delegate resolution to whichever processor handles the current parse state.
        return getCurrentProcessor().resolveEntity(this, publicId, systemId);
    }
}
|
public class Dialog { /** * Set the title of this Dialog .
* @ param title The title text .
* @ return The Dialog for chaining methods . */
public Dialog title ( CharSequence title ) { } }
|
mTitle . setText ( title ) ; mTitle . setVisibility ( TextUtils . isEmpty ( title ) ? View . GONE : View . VISIBLE ) ; return this ;
|
public class SimpleXMLParser { /** * Does the actual parsing . Perform this immediately
* after creating the parser object . */
private void go ( Reader r ) throws IOException { } }
|
BufferedReader reader ; if ( r instanceof BufferedReader ) reader = ( BufferedReader ) r ; else reader = new BufferedReader ( r ) ; doc . startDocument ( ) ; while ( true ) { // read a new character
if ( previousCharacter == - 1 ) { character = reader . read ( ) ; } // or re - examine the previous character
else { character = previousCharacter ; previousCharacter = - 1 ; } // the end of the file was reached
if ( character == - 1 ) { if ( html ) { if ( html && state == TEXT ) flush ( ) ; doc . endDocument ( ) ; } else { throwException ( "Missing end tag" ) ; } return ; } // dealing with \ n and \ r
if ( character == '\n' && eol ) { eol = false ; continue ; } else if ( eol ) { eol = false ; } else if ( character == '\n' ) { lines ++ ; columns = 0 ; } else if ( character == '\r' ) { eol = true ; character = '\n' ; lines ++ ; columns = 0 ; } else { columns ++ ; } switch ( state ) { // we are in an unknown state before there ' s actual content
case UNKNOWN : if ( character == '<' ) { saveState ( TEXT ) ; state = TAG_ENCOUNTERED ; } break ; // we can encounter any content
case TEXT : if ( character == '<' ) { flush ( ) ; saveState ( state ) ; state = TAG_ENCOUNTERED ; } else if ( character == '&' ) { saveState ( state ) ; entity . setLength ( 0 ) ; state = ENTITY ; nowhite = true ; } else if ( Character . isWhitespace ( ( char ) character ) ) { if ( nowhite ) text . append ( ( char ) character ) ; nowhite = false ; } else { text . append ( ( char ) character ) ; nowhite = true ; } break ; // we have just seen a < and are wondering what we are looking at
// < foo > , < / foo > , < ! - - . . . - - - > , etc .
case TAG_ENCOUNTERED : initTag ( ) ; if ( character == '/' ) { state = IN_CLOSETAG ; } else if ( character == '?' ) { restoreState ( ) ; state = PI ; } else { text . append ( ( char ) character ) ; state = EXAMIN_TAG ; } break ; // we are processing something like this < foo . . . > .
// It could still be a < ! - - . . . - - > or something .
case EXAMIN_TAG : if ( character == '>' ) { doTag ( ) ; processTag ( true ) ; initTag ( ) ; state = restoreState ( ) ; } else if ( character == '/' ) { state = SINGLE_TAG ; } else if ( character == '-' && text . toString ( ) . equals ( "!-" ) ) { flush ( ) ; state = COMMENT ; } else if ( character == '[' && text . toString ( ) . equals ( "![CDATA" ) ) { flush ( ) ; state = CDATA ; } else if ( character == 'E' && text . toString ( ) . equals ( "!DOCTYP" ) ) { flush ( ) ; state = PI ; } else if ( Character . isWhitespace ( ( char ) character ) ) { doTag ( ) ; state = TAG_EXAMINED ; } else { text . append ( ( char ) character ) ; } break ; // we know the name of the tag now .
case TAG_EXAMINED : if ( character == '>' ) { processTag ( true ) ; initTag ( ) ; state = restoreState ( ) ; } else if ( character == '/' ) { state = SINGLE_TAG ; } else if ( Character . isWhitespace ( ( char ) character ) ) { // empty
} else { text . append ( ( char ) character ) ; state = ATTRIBUTE_KEY ; } break ; // we are processing a closing tag : e . g . < / foo >
case IN_CLOSETAG : if ( character == '>' ) { doTag ( ) ; processTag ( false ) ; if ( ! html && nested == 0 ) return ; state = restoreState ( ) ; } else { if ( ! Character . isWhitespace ( ( char ) character ) ) text . append ( ( char ) character ) ; } break ; // we have just seen something like this : < foo a = " b " /
// and are looking for the final > .
case SINGLE_TAG : if ( character != '>' ) throwException ( "Expected > for tag: <" + tag + "/>" ) ; doTag ( ) ; processTag ( true ) ; processTag ( false ) ; initTag ( ) ; if ( ! html && nested == 0 ) { doc . endDocument ( ) ; return ; } state = restoreState ( ) ; break ; // we are processing CDATA
case CDATA : if ( character == '>' && text . toString ( ) . endsWith ( "]]" ) ) { text . setLength ( text . length ( ) - 2 ) ; flush ( ) ; state = restoreState ( ) ; } else text . append ( ( char ) character ) ; break ; // we are processing a comment . We are inside
// the < ! - - . . . . - - > looking for the - - > .
case COMMENT : if ( character == '>' && text . toString ( ) . endsWith ( "--" ) ) { text . setLength ( text . length ( ) - 2 ) ; flush ( ) ; state = restoreState ( ) ; } else text . append ( ( char ) character ) ; break ; // We are inside one of these < ? . . . ? > or one of these < ! DOCTYPE . . . >
case PI : if ( character == '>' ) { state = restoreState ( ) ; if ( state == TEXT ) state = UNKNOWN ; } break ; // we are processing an entity , e . g . & lt ; , & # 187 ; , etc .
case ENTITY : if ( character == ';' ) { state = restoreState ( ) ; String cent = entity . toString ( ) ; entity . setLength ( 0 ) ; char ce = EntitiesToUnicode . decodeEntity ( cent ) ; if ( ce == '\0' ) text . append ( '&' ) . append ( cent ) . append ( ';' ) ; else text . append ( ce ) ; } else if ( ( character != '#' && ( character < '0' || character > '9' ) && ( character < 'a' || character > 'z' ) && ( character < 'A' || character > 'Z' ) ) || entity . length ( ) >= 7 ) { state = restoreState ( ) ; previousCharacter = character ; text . append ( '&' ) . append ( entity . toString ( ) ) ; entity . setLength ( 0 ) ; } else { entity . append ( ( char ) character ) ; } break ; // We are processing the quoted right - hand side of an element ' s attribute .
case QUOTE : if ( html && quoteCharacter == ' ' && character == '>' ) { flush ( ) ; processTag ( true ) ; initTag ( ) ; state = restoreState ( ) ; } else if ( html && quoteCharacter == ' ' && Character . isWhitespace ( ( char ) character ) ) { flush ( ) ; state = TAG_EXAMINED ; } else if ( html && quoteCharacter == ' ' ) { text . append ( ( char ) character ) ; } else if ( character == quoteCharacter ) { flush ( ) ; state = TAG_EXAMINED ; } else if ( " \r\n\u0009" . indexOf ( character ) >= 0 ) { text . append ( ' ' ) ; } else if ( character == '&' ) { saveState ( state ) ; state = ENTITY ; entity . setLength ( 0 ) ; } else { text . append ( ( char ) character ) ; } break ; case ATTRIBUTE_KEY : if ( Character . isWhitespace ( ( char ) character ) ) { flush ( ) ; state = ATTRIBUTE_EQUAL ; } else if ( character == '=' ) { flush ( ) ; state = ATTRIBUTE_VALUE ; } else if ( html && character == '>' ) { text . setLength ( 0 ) ; processTag ( true ) ; initTag ( ) ; state = restoreState ( ) ; } else { text . append ( ( char ) character ) ; } break ; case ATTRIBUTE_EQUAL : if ( character == '=' ) { state = ATTRIBUTE_VALUE ; } else if ( Character . isWhitespace ( ( char ) character ) ) { // empty
} else if ( html && character == '>' ) { text . setLength ( 0 ) ; processTag ( true ) ; initTag ( ) ; state = restoreState ( ) ; } else if ( html && character == '/' ) { flush ( ) ; state = SINGLE_TAG ; } else if ( html ) { flush ( ) ; text . append ( ( char ) character ) ; state = ATTRIBUTE_KEY ; } else { throwException ( "Error in attribute processing." ) ; } break ; case ATTRIBUTE_VALUE : if ( character == '"' || character == '\'' ) { quoteCharacter = character ; state = QUOTE ; } else if ( Character . isWhitespace ( ( char ) character ) ) { // empty
} else if ( html && character == '>' ) { flush ( ) ; processTag ( true ) ; initTag ( ) ; state = restoreState ( ) ; } else if ( html ) { text . append ( ( char ) character ) ; quoteCharacter = ' ' ; state = QUOTE ; } else { throwException ( "Error in attribute processing" ) ; } break ; } }
|
public class RequestSpotFleetRequest { /** * This method is intended for internal use only . Returns the marshaled request configured with additional
* parameters to enable operation dry - run . */
@ Override public Request < RequestSpotFleetRequest > getDryRunRequest ( ) { } }
|
Request < RequestSpotFleetRequest > request = new RequestSpotFleetRequestMarshaller ( ) . marshall ( this ) ; request . addParameter ( "DryRun" , Boolean . toString ( true ) ) ; return request ;
|
public class JdbcQueue {
    /**
     * Takes a message from the queue, retrying if a deadlock occurs.
     *
     * <p>Note: http://dev.mysql.com/doc/refman/5.0/en/innodb-deadlocks.html
     * InnoDB uses automatic row-level locking. You can get deadlocks even in
     * the case of transactions that just insert or delete a single row. That is
     * because these operations are not really "atomic"; they automatically set
     * locks on the (possibly several) index records of the row inserted or deleted.
     *
     * @param conn the JDBC connection on which the transactional take is performed
     * @param numRetries the number of retries performed so far
     * @param maxRetries the maximum number of retries before giving up
     * @return the message taken from queue storage, or {@code null} if the queue is empty
     *         or the transaction could not be committed
     * @throws QueueException if the retry budget is exhausted on repeated concurrency
     *         failures, or on any other unexpected error
     */
    protected IQueueMessage<ID, DATA> _takeWithRetries(Connection conn, int numRetries, int maxRetries) {
        try {
            jdbcHelper.startTransaction(conn);
            conn.setTransactionIsolation(transactionIsolationLevel);
            boolean result = true;
            IQueueMessage<ID, DATA> msg = readFromQueueStorage(conn);
            if (msg != null) {
                // remove the message from the queue table...
                result = result && removeFromQueueStorage(conn, msg);
                if (!isEphemeralDisabled()) {
                    // ...and move it to the ephemeral (in-flight) store; a duplicate there
                    // is logged and tolerated rather than treated as a failure
                    try {
                        result = result && putToEphemeralStorage(conn, msg);
                    } catch (DuplicatedValueException dve) {
                        LOGGER.warn(dve.getMessage(), dve);
                    } catch (DaoException de) {
                        if (de.getCause() instanceof DuplicatedValueException) {
                            LOGGER.warn(de.getMessage(), de);
                        } else {
                            throw de;
                        }
                    }
                }
            }
            // commit only if every storage operation succeeded; otherwise roll back
            if (result) {
                jdbcHelper.commitTransaction(conn);
                return msg;
            } else {
                jdbcHelper.rollbackTransaction(conn);
                return null;
            }
        } catch (DaoException de) {
            // a deadlock/lock timeout surfaces as ConcurrencyFailureException: roll back and retry
            if (de.getCause() instanceof ConcurrencyFailureException) {
                jdbcHelper.rollbackTransaction(conn);
                if (numRetries > maxRetries) {
                    throw new QueueException(de);
                } else {
                    incRetryCounter("_takeWithRetries");
                    return _takeWithRetries(conn, numRetries + 1, maxRetries);
                }
            }
            throw de;
        } catch (Exception e) {
            jdbcHelper.rollbackTransaction(conn);
            throw e instanceof QueueException ? (QueueException) e : new QueueException(e);
        }
    }
}
|
public class SingleLaneProcessor { /** * Processes the batch by calling the { @ link # process ( Batch , SingleLaneBatchMaker ) } method .
* @ param batch the batch of records to process .
* @ param batchMaker records created by the < code > Processor < / code > stage must be added to the < code > BatchMaker < / code >
* for them to be available to the rest of the pipeline .
* @ throws StageException if the < code > Processor < / code > had an error while processing records . */
@ Override public void process ( final Batch batch , final BatchMaker batchMaker ) throws StageException { } }
|
SingleLaneBatchMaker slBatchMaker = new SingleLaneBatchMaker ( ) { @ Override public void addRecord ( Record record ) { batchMaker . addRecord ( record , outputLane ) ; } } ; process ( batch , slBatchMaker ) ;
|
public class MetricsServlet { /** * Prints metrics data in a multi - line text form . */
void printMap ( PrintWriter out , Map < String , Map < String , List < TagsMetricsPair > > > map ) { } }
|
for ( Map . Entry < String , Map < String , List < TagsMetricsPair > > > context : map . entrySet ( ) ) { out . println ( context . getKey ( ) ) ; for ( Map . Entry < String , List < TagsMetricsPair > > record : context . getValue ( ) . entrySet ( ) ) { indent ( out , 1 ) ; out . println ( record . getKey ( ) ) ; for ( TagsMetricsPair pair : record . getValue ( ) ) { indent ( out , 2 ) ; // Prints tag values in the form " { key = value , key = value } : "
out . print ( "{" ) ; boolean first = true ; for ( Map . Entry < String , Object > tagValue : pair . tagMap . entrySet ( ) ) { if ( first ) { first = false ; } else { out . print ( "," ) ; } out . print ( tagValue . getKey ( ) ) ; out . print ( "=" ) ; out . print ( tagValue . getValue ( ) . toString ( ) ) ; } out . println ( "}:" ) ; // Now print metric values , one per line
for ( Map . Entry < String , Number > metricValue : pair . metricMap . entrySet ( ) ) { indent ( out , 3 ) ; out . print ( metricValue . getKey ( ) ) ; out . print ( "=" ) ; out . println ( metricValue . getValue ( ) . toString ( ) ) ; } } } }
|
public class PythonDualInputSender { /** * Extracts records from an iterator and writes them to the memory - mapped file . This method assumes that all values
* in the iterator are of the same type . This method does NOT take care of synchronization . The caller must
* guarantee that the file may be written to before calling this method .
* @ param input iterator containing records
* @ return size of the written buffer
* @ throws IOException */
public int sendBuffer1 ( SingleElementPushBackIterator < IN1 > input ) throws IOException { } }
|
if ( serializer1 == null ) { IN1 value = input . next ( ) ; serializer1 = getSerializer ( value ) ; input . pushBack ( value ) ; } return sendBuffer ( input , serializer1 ) ;
|
public class XMLReader { /** * * * * * * EVERYTHING BELOW HERE IS FOR TESTING PURPOSES AND NOT CRITICAL * * * * * */
static public void main ( String ... args ) throws Exception { } }
|
FileInputStream fin = new FileInputStream ( args [ 0 ] ) ; XMLReader < Sample > reader = new XMLReader < Sample > ( ) ; for ( Map < String , Object > map : reader . read ( fin , Sample . class ) ) { System . out . println ( "Next..." ) ; for ( Map . Entry < String , Object > entry : map . entrySet ( ) ) { System . out . println ( "\t" + entry . getKey ( ) + " (" + ( entry . getValue ( ) != null ? entry . getValue ( ) . getClass ( ) . getName ( ) : "" ) + "): " + entry . getValue ( ) ) ; } }
|
public class MessageUtil { /** * A convenience method for calling { @ link # compose ( String , String [ ] ) } with an array of argument
* that will be automatically tainted . */
public static String tcompose ( String key , String ... args ) { } }
|
for ( int ii = 0 , nn = args . length ; ii < nn ; ii ++ ) { args [ ii ] = taint ( args [ ii ] ) ; } return compose ( key , args ) ;
|
public class MavenJDOMWriter { /** * Method updateExtension .
* @ param value
* @ param element
* @ param counter
* @ param xmlTag */
protected void updateExtension ( Extension value , String xmlTag , Counter counter , Element element ) { } }
|
Element root = element ; Counter innerCount = new Counter ( counter . getDepth ( ) + 1 ) ; findAndReplaceSimpleElement ( innerCount , root , "groupId" , value . getGroupId ( ) , null ) ; findAndReplaceSimpleElement ( innerCount , root , "artifactId" , value . getArtifactId ( ) , null ) ; findAndReplaceSimpleElement ( innerCount , root , "version" , value . getVersion ( ) , null ) ;
|
public class NaryTree { /** * Gets the lexical item ids comprising the sentence . */
public int [ ] getSentenceIds ( IntObjectBimap < String > lexAlphabet ) { } }
|
ArrayList < NaryTree > leaves = getLexicalLeaves ( ) ; int [ ] sent = new int [ leaves . size ( ) ] ; for ( int i = 0 ; i < sent . length ; i ++ ) { sent [ i ] = lexAlphabet . lookupIndex ( leaves . get ( i ) . symbol ) ; } return sent ;
|
public class CmsAliasErrorColumn { /** * Static helper method to get the value to display in the column from a row . < p >
* @ param row the row
* @ return the value to display */
protected static String getValueInternal ( CmsAliasTableRow row ) { } }
|
if ( row . getAliasError ( ) != null ) { return row . getAliasError ( ) ; } if ( row . getPathError ( ) != null ) { return row . getPathError ( ) ; } return null ;
|
public class RaftLock { /** * Releases the lock if the current lock holder ' s session is closed . */
@ Override protected void onSessionClose ( long sessionId , Map < Long , Object > responses ) { } }
|
removeInvocationRefUids ( sessionId ) ; if ( owner != null && owner . sessionId ( ) == sessionId ) { ReleaseResult result = doRelease ( owner . endpoint ( ) , newUnsecureUUID ( ) , lockCount ) ; for ( LockInvocationKey key : result . completedWaitKeys ( ) ) { responses . put ( key . commitIndex ( ) , result . ownership ( ) . getFence ( ) ) ; } }
|
public class JSATData {
    /**
     * This loads a JSAT dataset from an input stream, and will not do any of
     * its own buffering. The DataSet will be returned as either a
     * {@link SimpleDataSet}, {@link ClassificationDataSet}, or
     * {@link RegressionDataSet} depending on what type of dataset was
     * originally written out.<br>
     *
     * @param inRaw the input stream, caller should buffer it
     * @param backingStore the data store to put all datapoints in
     * @return a dataset
     * @throws IOException if reading from the stream fails
     */
    public static DataSet<?> load(InputStream inRaw, DataStore backingStore) throws IOException {
        // Delegate to the three-argument overload; the boolean flag is passed as false
        // (its semantics are defined by that overload — confirm there before changing).
        return load(inRaw, false, backingStore);
    }
}
|
public class FactoryInterpolation {
    /**
     * Creates an interpolation class of the specified type for the specified image type.
     *
     * @param min Minimum possible pixel value. Inclusive.
     * @param max Maximum possible pixel value. Inclusive.
     * @param type Interpolation type
     * @param borderType Border type. If null then it will not be set here.
     * @param imageType Type of input image
     * @return Interpolation
     * @throws IllegalArgumentException if the interpolation type is not handled
     */
    public static <T extends ImageGray<T>> InterpolatePixelS<T> createPixelS(double min, double max,
            InterpolationType type, BorderType borderType, Class<T> imageType) {
        InterpolatePixelS<T> alg;
        switch (type) {
            case NEAREST_NEIGHBOR:
                alg = nearestNeighborPixelS(imageType);
                break;
            case BILINEAR:
                // NOTE(review): returns immediately — the bilinear factory receives borderType
                // directly, so the common setBorder() call below is skipped for this case.
                // Confirm that bilinearPixelS applies the border itself.
                return bilinearPixelS(imageType, borderType);
            case BICUBIC:
                // bicubic with a fixed -0.5 coefficient, clamped to [min, max]
                alg = bicubicS(-0.5f, (float) min, (float) max, imageType);
                break;
            case POLYNOMIAL4:
                alg = polynomialS(4, min, max, imageType);
                break;
            default:
                throw new IllegalArgumentException("Add type: " + type);
        }
        // Attach the requested border handling, if any was specified.
        if (borderType != null)
            alg.setBorder(FactoryImageBorder.single(imageType, borderType));
        return alg;
    }
}
|
public class UpdateTrustRequestMarshaller {
    /**
     * Marshall the given parameter object.
     *
     * @param updateTrustRequest the request to marshall; must not be {@code null}
     * @param protocolMarshaller the marshaller receiving the bound fields
     * @throws SdkClientException if the request is {@code null} or marshalling fails
     */
    public void marshall(UpdateTrustRequest updateTrustRequest, ProtocolMarshaller protocolMarshaller) {
        if (updateTrustRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // Bind each field of the request to its protocol location.
            protocolMarshaller.marshall(updateTrustRequest.getTrustId(), TRUSTID_BINDING);
            protocolMarshaller.marshall(updateTrustRequest.getSelectiveAuth(), SELECTIVEAUTH_BINDING);
        } catch (Exception e) {
            // Wrap any failure, preserving the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
|
public class TrailerDocumentRepositoryMongoImpl { /** * { @ inheritDoc } */
@ Override public final TrailerDocument findByFileAndString ( final String filename , final String string ) { } }
|
final Query searchQuery = new Query ( Criteria . where ( "string" ) . is ( string ) . and ( "filename" ) . is ( filename ) ) ; final TrailerDocument trailerDocument = mongoTemplate . findOne ( searchQuery , TrailerDocumentMongo . class ) ; if ( trailerDocument == null ) { return null ; } final Trailer trailer = ( Trailer ) toObjConverter . createGedObject ( null , trailerDocument ) ; trailerDocument . setGedObject ( trailer ) ; return trailerDocument ;
|
public class Position {
    /**
     * Computes the (up to two) longitudes at which the great circle through this
     * position and {@code position} crosses the given latitude.
     * TODO add unit test
     *
     * @param position the second point defining the great circle
     * @param latitudeDegrees latitude of the crossing parallel, in degrees
     * @return the pair of candidate longitudes in degrees (normalized via {@code to180}),
     *         or {@code null} if the great circle never reaches that latitude
     */
    public LongitudePair getLongitudeOnGreatCircle(Position position, double latitudeDegrees) {
        // work in radians throughout
        double lat3 = toRadians(latitudeDegrees);
        double lat1 = toRadians(lat);
        double lon1 = toRadians(lon);
        double lat2 = toRadians(position.getLat());
        double lon2 = toRadians(position.getLon());
        double l12 = lon1 - lon2;
        double sinLat1 = sin(lat1);
        double cosLat2 = cos(lat2);
        double cosLat3 = cos(lat3);
        double cosLat1 = cos(lat1);
        double sinL12 = sin(l12);
        // A, B, C are coefficients of the latitude-crossing equation
        // (spherical-trigonometry form — TODO confirm exact derivation/reference)
        double A = sinLat1 * cosLat2 * cosLat3 * sinL12;
        double B = sinLat1 * cosLat2 * cosLat3 * cos(l12) - cosLat1 * sin(lat2) * cosLat3;
        double C = cosLat1 * cosLat2 * sin(lat3) * sinL12;
        double longitude = atan2(B, A);
        double v = sqrt(sqr(A) + sqr(B));
        if (abs(C) >= v) {
            // not found! — |C| >= sqrt(A^2 + B^2) means no crossing exists
            return null;
        } else {
            // two symmetric solutions around the circle's extremum
            double dlon = acos(C / v);
            double lonCandidate1Degrees = to180(FastMath.toDegrees(lon1 + dlon + longitude));
            double lonCandidate2Degrees = to180(FastMath.toDegrees(lon1 - dlon + longitude));
            return new LongitudePair(lonCandidate1Degrees, lonCandidate2Degrees);
        }
    }
}
|
public class ListTargetsForPolicyRequestMarshaller {
    /**
     * Marshall the given parameter object.
     *
     * @param listTargetsForPolicyRequest the request to marshall; must not be {@code null}
     * @param protocolMarshaller the marshaller receiving the bound fields
     * @throws SdkClientException if the request is {@code null} or marshalling fails
     */
    public void marshall(ListTargetsForPolicyRequest listTargetsForPolicyRequest, ProtocolMarshaller protocolMarshaller) {
        if (listTargetsForPolicyRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // Bind each field of the request to its protocol location.
            protocolMarshaller.marshall(listTargetsForPolicyRequest.getPolicyId(), POLICYID_BINDING);
            protocolMarshaller.marshall(listTargetsForPolicyRequest.getNextToken(), NEXTTOKEN_BINDING);
            protocolMarshaller.marshall(listTargetsForPolicyRequest.getMaxResults(), MAXRESULTS_BINDING);
        } catch (Exception e) {
            // Wrap any failure, preserving the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
|
public class ApiOvhDomain { /** * List all your SMD files
* REST : GET / domain / data / smd
* @ param protectedLabels _ label [ required ] Filter the value of protectedLabels . label property ( = ) */
public ArrayList < Long > data_smd_GET ( String protectedLabels_label ) throws IOException { } }
|
String qPath = "/domain/data/smd" ; StringBuilder sb = path ( qPath ) ; query ( sb , "protectedLabels.label" , protectedLabels_label ) ; String resp = exec ( qPath , "GET" , sb . toString ( ) , null ) ; return convertTo ( resp , t1 ) ;
|
public class SlicedFileConsumer {
    /**
     * Initialization. Sets up the scheduler executor and, depending on the record mode,
     * opens an FLV writer for append or record, or removes any stale file when neither
     * mode applies. Runs at most once, guarded by the {@code initialized} flag.
     *
     * @throws IOException I/O exception
     */
    private void init() throws IOException {
        // only the first caller performs initialization
        if (initialized.compareAndSet(false, true)) {
            log.debug("Init: {}", mode);
            // instance an executor for queue handling
            scheduledExecutorService = Executors.newScheduledThreadPool(schedulerThreadSize,
                    new CustomizableThreadFactory("FileConsumerExecutor-"));
            // if the path is null, the consumer has been uninitialized
            if (path != null) {
                if (log.isDebugEnabled()) {
                    Path parent = path.getParent();
                    log.debug("Parent abs: {} dir: {}", parent.isAbsolute(), Files.isDirectory(parent));
                }
                if (IClientStream.MODE_APPEND.equals(mode)) {
                    // append mode requires the target file to already exist
                    if (Files.notExists(path)) {
                        throw new IOException("File to be appended doesnt exist, verify the record mode");
                    }
                    log.debug("Path: {}\nRead: {} write: {} size: {}", path, Files.isReadable(path), Files.isWritable(path), Files.size(path));
                    writer = new FLVWriter(path, true);
                } else if (IClientStream.MODE_RECORD.equals(mode)) {
                    try {
                        // delete existing file
                        if (Files.deleteIfExists(path)) {
                            log.debug("File deleted");
                        }
                        // ensure parent dirs exist
                        Files.createDirectories(path.getParent());
                        // create the file
                        path = Files.createFile(path);
                    } catch (IOException ioe) {
                        log.error("File creation error: {}", ioe);
                    }
                    if (!Files.isWritable(path)) {
                        throw new IOException("File is not writable");
                    }
                    log.debug("Path: {}\nRead: {} write: {}", path, Files.isReadable(path), Files.isWritable(path));
                    writer = new FLVWriter(path, false);
                    // pre-write codec configuration tags so the FLV header carries them up front
                    if (audioConfigurationTag != null) {
                        writer.writeTag(audioConfigurationTag);
                    }
                    if (videoConfigurationTag != null) {
                        writer.writeTag(videoConfigurationTag);
                        gotVideoKeyframe = true;
                    }
                } else {
                    // throw new IllegalStateException(String.format("Illegal mode type: %s", mode));
                    try {
                        // delete existing file since we're not recording nor appending
                        if (Files.deleteIfExists(path)) {
                            log.debug("File deleted");
                        }
                    } catch (IOException ioe) {
                        log.error("File creation error: {}", ioe);
                    }
                }
            } else {
                log.warn("Consumer is uninitialized");
            }
            log.debug("Init - complete");
        }
    }
}
|
public class SoundManager { /** * removes the LocaleDateTime and Thread ( if exisiting ) */
private void endWaitingForUsage ( ) { } }
|
synchronized ( permanentUserReadWriteLock ) { if ( permissionWithoutUsageLimit != null ) permissionWithoutUsageLimit = null ; if ( permissionWithoutUsageCloseThread != null ) { permissionWithoutUsageCloseThread . cancel ( true ) ; permissionWithoutUsageLimit = null ; } }
|
public class DefaultOperationCandidatesProvider {
    /**
     * Resolves a dotted property name (e.g. {@code a.b[0].c}) against an attribute
     * description tree, descending one level into nested value-type descriptions
     * when a segment is not found directly.
     * package for testing purpose
     *
     * @param propName dotted property path; segments may carry a {@code [..]} list suffix
     * @param attrs attribute-description node to resolve against
     * @return the property paired with its resolved description node, or {@code null}
     *         if any path segment cannot be resolved
     */
    static Property getProperty(String propName, ModelNode attrs) {
        String[] arr = propName.split("\\.");
        ModelNode attrDescr = attrs;
        for (String item : arr) {
            // Remove list part: "name[3]" -> "name".
            if (item.endsWith("]")) {
                int i = item.indexOf("[");
                if (i < 0) {
                    // malformed segment: closing "]" without an opening "["
                    return null;
                }
                item = item.substring(0, i);
            }
            ModelNode descr = attrDescr.get(item);
            if (!descr.isDefined()) {
                // not found directly; try one level down inside the value-type description
                if (attrDescr.has(Util.VALUE_TYPE)) {
                    ModelNode vt = attrDescr.get(Util.VALUE_TYPE);
                    if (vt.has(item)) {
                        attrDescr = vt.get(item);
                        continue;
                    }
                }
                return null;
            }
            attrDescr = descr;
        }
        return new Property(propName, attrDescr);
    }
}
|
public class SparseMatrixT {
    /**
     * Converts a multi-dimensional index into a flat column-ordered index.
     * (Original javadoc was in Chinese: "将多维索引转换为列排序索引".)
     *
     * @param indices one index per dimension
     * @return the flattened index
     * Jul 29, 2009
     */
    public long getIdx(int[] indices) {
        long idx = 0;
        // Walk indices from the last element down to the second, pairing each with
        // dim[0], dim[1], ... from the front.
        // NOTE(review): pairing indices[i] with dim[j] from opposite ends suggests
        // column-major flattening, but this depends on how `dim` is laid out — confirm
        // against the class's dim field before relying on this description.
        int i = indices.length - 1;
        for (int j = 0; i > 0 && j < indices.length - 1; i--, j++)
            idx += indices[i] * dim[j];
        // indices[0] contributes without a stride factor
        idx += indices[0];
        return idx;
    }
}
|
public class NetUtil { /** * Checks if given String is either a valid IPv4 or IPv6 address .
* @ param _ ipAddress
* @ return true if valid address , false otherwise */
public static boolean isIPv4orIPv6Address ( String _ipAddress ) { } }
|
return IPV4_PATTERN . matcher ( _ipAddress ) . matches ( ) || IPV6_PATTERN . matcher ( _ipAddress ) . matches ( ) ;
|
public class JsonSerializer { /** * Writes the object out as json .
* @ param out
* output writer
* @ param json
* a { @ link JsonElement }
* @ param pretty
* if true , a properly indented version of the json is written
* @ throws IOException
* if there is a problem writing to the stream */
public static void serialize ( OutputStream out , @ Nonnull JsonElement json , boolean pretty ) throws IOException { } }
|
Validate . notNull ( out ) ; BufferedOutputStream bufferedOut = new BufferedOutputStream ( out ) ; OutputStreamWriter w = new OutputStreamWriter ( bufferedOut , UTF8 ) ; if ( pretty ) { serialize ( w , json , pretty ) ; } else { json . serialize ( w ) ; // subtle bug where not flushing this results in empty string when serializing to a ByteArrayOutputStream
w . flush ( ) ; bufferedOut . flush ( ) ; }
|
public class MarcField { /** * A MARC field can be denoted by a key , independent of values .
* This key is a string , consisting of tag and indicator delimited by a dollar sign .
* @ return the tag / indicator - based key of this MARC field */
public String toTagIndicatorKey ( ) { } }
|
return ( tag == null ? EMPTY_STRING : tag ) + KEY_DELIMITER + ( indicator == null ? EMPTY_STRING : indicator ) ;
|
public class PDBStatus {
    /**
     * Gets the PDB which superseded oldPdbId. For CURRENT IDs, this will
     * be itself. For obsolete IDs, the behavior depends on the recursion
     * parameter. If false, only IDs which directly supersede oldPdbId are
     * returned. If true, the replacements for obsolete records are recursively
     * fetched, yielding a list of all current replacements of oldPdbId.
     *
     * @param oldPdbId A pdb ID
     * @param recurse Indicates whether the replacements for obsolete records
     *     should be fetched.
     * @param includeObsolete Indicates whether obsolete records should be
     *     included in the results.
     * @return The PDB which replaced oldPdbId. This may be oldPdbId itself, for
     *     current records. A return value of null indicates that the ID has
     *     been removed from the PDB or that an error has occurred.
     */
    public static List<String> getReplacement(String oldPdbId, boolean recurse, boolean includeObsolete) {
        List<Map<String, String>> attrList = getStatusIdRecords(new String[] { oldPdbId });
        // Expect a single record
        if (attrList == null || attrList.size() != 1) {
            logger.error("Error getting Status for {} from the PDB website.", oldPdbId);
            return null;
        }
        Map<String, String> attrs = attrList.get(0);
        // Check that the record matches pdbId
        String id = attrs.get("structureId");
        if (id == null || !id.equalsIgnoreCase(oldPdbId)) {
            logger.error("Results returned from the query don't match {}", oldPdbId);
            return null;
        }
        // Check that the status is given
        String statusStr = attrs.get("status");
        if (statusStr == null) {
            logger.error("No status returned for {}", oldPdbId);
            return null;
        }
        Status status = Status.fromString(statusStr);
        if (status == null) {
            logger.error("Unknown status '{}'", statusStr);
            return null;
        }
        // If we're current, just return
        LinkedList<String> results = new LinkedList<String>();
        switch (status) {
            case CURRENT:
                results.add(oldPdbId);
                return results;
            case OBSOLETE: {
                String replacementStr = attrs.get("replacedBy");
                if (replacementStr == null) {
                    logger.error("{} is OBSOLETE but lacks a replacedBy attribute.", oldPdbId);
                    return null;
                }
                replacementStr = replacementStr.toUpperCase();
                // include this result
                if (includeObsolete) {
                    results.add(oldPdbId);
                }
                // Some PDBs are not replaced.
                if (replacementStr.equals("NONE")) {
                    return results; // empty
                }
                // replacedBy may list several space-separated ids; sort them in
                // reverse case-insensitive order before merging
                String[] replacements = replacementStr.split(" ");
                Arrays.sort(replacements, new Comparator<String>() {
                    @Override
                    public int compare(String o1, String o2) {
                        return o2.compareToIgnoreCase(o1);
                    }
                });
                for (String replacement : replacements) {
                    // Return the replacement.
                    if (recurse) {
                        // follow the replacement chain to its current end
                        List<String> others = PDBStatus.getReplacement(replacement, recurse, includeObsolete);
                        mergeReversed(results, others);
                    } else {
                        if (includeObsolete) {
                            mergeReversed(results, Arrays.asList(replacement));
                        } else {
                            // check status of replacement
                            Status replacementStatus = getStatus(replacement);
                            switch (replacementStatus) {
                                case OBSOLETE:
                                    // ignore obsolete
                                    break;
                                case CURRENT:
                                default:
                                    // include it
                                    mergeReversed(results, Arrays.asList(replacement));
                            }
                        }
                    }
                }
                return results;
            }
            case UNKNOWN:
                return null;
            default: {
                // TODO handle other cases explicitly. They might have other syntax than "replacedBy"
                String replacementStr = attrs.get("replacedBy");
                if (replacementStr == null) {
                    // If no "replacedBy" attribute, treat like we're current
                    // TODO is this correct?
                    results.add(oldPdbId);
                    return results;
                }
                replacementStr = replacementStr.toUpperCase();
                // Some PDBs are not replaced.
                if (replacementStr.equals("NONE")) {
                    return null;
                }
                // include this result, since it's not obsolete
                results.add(oldPdbId);
                String[] replacements = replacementStr.split(" ");
                Arrays.sort(replacements, new Comparator<String>() {
                    @Override
                    public int compare(String o1, String o2) {
                        return o2.compareToIgnoreCase(o1);
                    }
                });
                for (String replacement : replacements) {
                    // Return the replacement.
                    if (recurse) {
                        List<String> others = PDBStatus.getReplacement(replacement, recurse, includeObsolete);
                        mergeReversed(results, others);
                    } else {
                        mergeReversed(results, Arrays.asList(replacement));
                    }
                }
                return results;
            }
        }
    }
}
|
public class BarPlot { /** * Create a plot canvas with the bar plot of given data .
* @ param id the id of the plot .
* @ param data a vector of which values will determine the heights of bars . */
public static PlotCanvas plot ( String id , double [ ] data ) { } }
|
double [ ] lowerBound = { 0 , Math . min ( data ) } ; double [ ] upperBound = { data . length , Math . max ( data ) } ; PlotCanvas canvas = new PlotCanvas ( lowerBound , upperBound ) ; BarPlot plot = new BarPlot ( data ) ; plot . setID ( id ) ; canvas . add ( plot ) ; canvas . getAxis ( 0 ) . setGridVisible ( false ) ; canvas . getAxis ( 0 ) . setLabelVisible ( false ) ; return canvas ;
|
public class PasswordEditText { /** * Returns the color of the helper text , which corresponds to a specific password strength .
* @ param score
* The password strength as a { @ link Float } value between 0.0 and 1.0 , which represents
* the fraction of constraints , which are satisfied
* @ return The color of the helper text as an { @ link Integer } value */
private int getHelperTextColor ( final float score ) { } }
|
if ( ! helperTextColors . isEmpty ( ) ) { float interval = 1.0f / helperTextColors . size ( ) ; int index = ( int ) Math . floor ( score / interval ) - 1 ; index = Math . max ( index , 0 ) ; index = Math . min ( index , helperTextColors . size ( ) - 1 ) ; return helperTextColors . get ( index ) ; } return regularHelperTextColor ;
|
public class PermissionsInner {
    /**
     * Gets all permissions the caller has for a resource.
     *
     * @param nextPageLink The NextLink from the previous successful call to List operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the PagedList&lt;PermissionInner&gt; object
     */
    public Observable<Page<PermissionInner>> listForResourceNextAsync(final String nextPageLink) {
        // Delegate to the ServiceResponse-returning variant and unwrap the body for callers.
        return listForResourceNextWithServiceResponseAsync(nextPageLink)
                .map(new Func1<ServiceResponse<Page<PermissionInner>>, Page<PermissionInner>>() {
                    @Override
                    public Page<PermissionInner> call(ServiceResponse<Page<PermissionInner>> response) {
                        return response.body();
                    }
                });
    }
}
|
public class JDBC4DatabaseMetaData {
    /**
     * Retrieves a description of the system and user functions available in the given catalog.
     *
     * @param catalog catalog name filter (unused; feature unsupported)
     * @param schemaPattern schema name pattern (unused; feature unsupported)
     * @param functionNamePattern function name pattern (unused; feature unsupported)
     * @return never returns normally
     * @throws SQLException always — this driver does not support function metadata
     */
    @Override
    public ResultSet getFunctions(String catalog, String schemaPattern, String functionNamePattern) throws SQLException {
        // fail fast if this metadata object has been closed
        checkClosed();
        // function metadata is not implemented by this driver
        throw SQLError.noSupport();
    }
}
|
public class POIUtils {
    /**
     * Converts a coordinate to an Excel-style cell address such as 'A1'.
     * (Original javadoc was in Japanese: 座標をExcelのアドレス形式 'A1' になどに変換する。)
     *
     * @param cellAddress the cell position
     * @return the formatted cell address
     * @throws IllegalArgumentException address == null.
     */
    public static String formatCellAddress(final Point cellAddress) {
        ArgUtils.notNull(cellAddress, "cellAddress");
        // Delegates with (y, x) — presumably (row, column); confirm against the
        // two-argument overload's parameter order.
        return formatCellAddress(cellAddress.y, cellAddress.x);
    }
}
|
public class RegExFileFilter { /** * Checks whether a file satisfies the selection filter .
* @ param file The file
* @ return true if the file is acceptable */
public boolean accept ( File file ) { } }
|
Matcher m = pattern . matcher ( file . getName ( ) ) ; return m . matches ( ) ;
|
public class Address { /** * Appends the given address to this address .
* This lets you build up addresses in a step - wise fashion .
* @ param address new address to appen to this address .
* @ return this address ( which now has the new address appended ) . */
public Address add ( Address address ) { } }
|
// if address is null or is the root address then there is nothing to append
if ( address == null || address . isRoot ( ) ) { return this ; } // if we are the root address then the given address just is our new address ,
// otherwise , append all parts from " address " to us .
if ( isRoot ( ) ) { this . addressNode = address . addressNode . clone ( ) ; } else { List < Property > parts = address . addressNode . asPropertyList ( ) ; for ( Property part : parts ) { this . addressNode . add ( part ) ; } } return this ;
|
public class CmsNewResourceTypeDialog {
    /**
     * Gets the message bundle.<p>
     *
     * First scans the module's resources for an i18n resource ending with the default
     * locale; failing that, falls back to the conventional bundle path inside the
     * module folder.
     *
     * @return Message bundle resource, or {@code null} if none can be found or read
     */
    private CmsResource getMessageBundle() {
        // NOTE(review): return value discarded — presumably invoked for an
        // initialization side effect; confirm whether this call is needed at all.
        OpenCms.getLocaleManager();
        String localString = CmsLocaleManager.getDefaultLocale().toString();
        List<String> moduleResource = m_module.getResources();
        for (String resourcePath : moduleResource) {
            // a resource under the i18n path whose name ends with the locale is the bundle
            if (resourcePath.contains(PATH_I18N) && resourcePath.endsWith(localString)) {
                try {
                    return m_cms.readResource(resourcePath);
                } catch (CmsException e) {
                    LOG.error("Can not read message bundle", e);
                }
            }
        }
        // fall back to the conventional bundle location inside the module folder
        String moduleFolder = getModuleFolder(m_module.getName());
        if (CmsStringUtil.isEmptyOrWhitespaceOnly(moduleFolder)) {
            return null;
        }
        String bundlePath = CmsStringUtil.joinPaths(moduleFolder, PATH_I18N,
                m_module.getName() + SUFFIX_BUNDLE_FILE + "_" + localString);
        if (m_cms.existsResource(bundlePath)) {
            try {
                return m_cms.readResource(bundlePath);
            } catch (CmsException e) {
                LOG.error("No bundle found for module", e);
            }
        }
        return null;
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.