signature stringlengths 43 39.1k | implementation stringlengths 0 450k |
|---|---|
public class Tick { /** * Check if specific time has been elapsed ( in tick referential ) .
* @ param rate The rate reference .
* @ param milli The milliseconds to check ( based on frame time ) .
* @ return < code > true < / code > if time elapsed , < code > false < / code > else .
* @ throws LionEngineException If invalid argument . */
public boolean elapsedTime ( int rate , long milli ) { } } | if ( started && rate > 0 ) { final double frameTime = ONE_SECOND_IN_MILLI / rate ; return Double . compare ( milli , StrictMath . floor ( currentTicks * frameTime ) ) <= 0 ; } return false ; |
public class MojoExecutor { /** * Builds the configuration for the goal using Elements
* @ param elements A list of elements for the configuration section
* @ return The elements transformed into the Maven - native XML format */
public static Xpp3Dom configuration ( Element ... elements ) { } } | Xpp3Dom dom = new Xpp3Dom ( "configuration" ) ; for ( Element e : elements ) { dom . addChild ( e . toDom ( ) ) ; } return dom ; |
public class MemorySession { /** * @ see com . ibm . wsspi . session . ISession # removeAttribute ( java . lang . Object ) */
@ Override public synchronized Object removeAttribute ( Object name ) { } } | if ( com . ibm . ejs . ras . TraceComponent . isAnyTracingEnabled ( ) && LoggingUtil . SESSION_LOGGER_CORE . isLoggable ( Level . FINER ) ) { String s = name + appNameAndIdString ; LoggingUtil . SESSION_LOGGER_CORE . entering ( methodClassName , methodNames [ REMOVE_ATTRIBUTE ] , s ) ; } Object oldValue = _attributes . remove ( name ) ; Boolean oldIsBindingListener = ( Boolean ) _attributeNames . get ( name ) ; _attributeNames . remove ( name ) ; _storeCallback . sessionAttributeRemoved ( this , name , oldValue , oldIsBindingListener ) ; if ( com . ibm . ejs . ras . TraceComponent . isAnyTracingEnabled ( ) && LoggingUtil . SESSION_LOGGER_CORE . isLoggable ( Level . FINE ) ) { if ( ! SessionManagerConfig . isHideSessionValues ( ) ) { LoggingUtil . SESSION_LOGGER_CORE . exiting ( methodClassName , methodNames [ REMOVE_ATTRIBUTE ] , oldValue ) ; } else { LoggingUtil . SESSION_LOGGER_CORE . exiting ( methodClassName , methodNames [ REMOVE_ATTRIBUTE ] ) ; } } return oldValue ; |
public class ContentNegotiator { /** * Return true if the transport and content negotiators have finished . */
public boolean isFullyEstablished ( ) { } } | boolean result = true ; MediaNegotiator mediaNeg = getMediaNegotiator ( ) ; if ( ( mediaNeg == null ) || ! mediaNeg . isFullyEstablished ( ) ) { result = false ; } TransportNegotiator transNeg = getTransportNegotiator ( ) ; if ( ( transNeg == null ) || ! transNeg . isFullyEstablished ( ) ) { result = false ; } return result ; |
public class JsonObject { /** * Retrieves the decrypted value from the field name and casts it to { @ link Boolean } .
* Note : Use of the Field Level Encryption functionality provided in the
* com . couchbase . client . encryption namespace provided by Couchbase is
* subject to the Couchbase Inc . Enterprise Subscription License Agreement
* at https : / / www . couchbase . com / ESLA - 11132015.
* @ param name the name of the field .
* @ param providerName the provider name of the field .
* @ return the result or null if it does not exist . */
public Boolean getAndDecryptBoolean ( String name , String providerName ) throws Exception { } } | return ( Boolean ) getAndDecrypt ( name , providerName ) ; |
public class MediaMarkupBuilderUtil { /** * Adds CSS classes that denote the changes to the media element when compared to a different version .
* If no diff has been requested by the WCM UI , there won ' t be any changes to the element .
* @ param mediaElement Element to be decorated
* @ param resource Resource pointing to JCR node
* @ param refProperty Name of property for media library item reference . If null , default name is used .
* @ param request Servlet request
* @ deprecated Use
* { @ link # addDiffDecoration ( HtmlElement , Resource , String , SlingHttpServletRequest , MediaHandlerConfig ) } */
@ Deprecated public static void addDiffDecoration ( @ NotNull HtmlElement < ? > mediaElement , @ NotNull Resource resource , @ NotNull String refProperty , @ NotNull SlingHttpServletRequest request ) { } } | addDiffDecoration ( mediaElement , resource , refProperty , request , null ) ; |
public class CommerceShipmentLocalServiceUtil { /** * Creates a new commerce shipment with the primary key . Does not add the commerce shipment to the database .
* @ param commerceShipmentId the primary key for the new commerce shipment
* @ return the new commerce shipment */
public static com . liferay . commerce . model . CommerceShipment createCommerceShipment ( long commerceShipmentId ) { } } | return getService ( ) . createCommerceShipment ( commerceShipmentId ) ; |
public class AuditServiceImpl { /** * ( non Java - doc )
* Validates the supplied non - custom event name is a valid event name . Return true if valid ; false otherwise */
public boolean validateEventName ( String eventName ) { } } | boolean isValid = false ; if ( ( AuditConstants . validEventNamesList ) . contains ( eventName ) ) isValid = true ; return isValid ; |
public class AnnotatedMethodInvoker { public Method getInvokableMethodForClass ( final Class < ? > valueClass ) { } } | if ( valueClass == null ) return null ; Method method = methods . get ( valueClass ) ; if ( method != null ) return method ; // get the list of all classes and interfaces .
// first classes .
Class < ? > clazz = valueClass . getSuperclass ( ) ; while ( clazz != null ) { method = methods . get ( clazz ) ; if ( method != null ) { methods . put ( valueClass , method ) ; return method ; } clazz = clazz . getSuperclass ( ) ; } // now look through the interfaces .
for ( final Class < ? > iface : valueClass . getInterfaces ( ) ) { method = methods . get ( iface ) ; if ( method != null ) { methods . put ( valueClass , method ) ; return method ; } } return null ; |
public class WordVectorsImpl { /** * Words nearest based on positive and negative words
* @ param positive the positive words
* @ param negative the negative words
* @ param top the top n words
* @ return the words nearest the mean of the words */
@ Override public Collection < String > wordsNearest ( Collection < String > positive , Collection < String > negative , int top ) { } } | return modelUtils . wordsNearest ( positive , negative , top ) ; |
public class GraphiteSanitize { /** * Trims the string and replaces all whitespace characters with the provided symbol */
static String sanitize ( String string ) { } } | return WHITESPACE . matcher ( string . trim ( ) ) . replaceAll ( DASH ) ; |
public class Contract { /** * syntactic sugar */
public Contract addAction ( CodeableConcept t ) { } } | if ( t == null ) return this ; if ( this . action == null ) this . action = new ArrayList < CodeableConcept > ( ) ; this . action . add ( t ) ; return this ; |
public class CmsJspInstanceDateBean { /** * Returns the start and end dates / times as " start - end " in the provided date / time format specific for the request locale .
* @ param dateTimeFormat the format to use for date ( time is always short ) .
* @ return the formatted date / time string . */
private String getFormattedDate ( int dateTimeFormat ) { } } | DateFormat df ; String result ; if ( isWholeDay ( ) ) { df = DateFormat . getDateInstance ( dateTimeFormat , m_series . getLocale ( ) ) ; result = df . format ( getStart ( ) ) ; if ( getLastDay ( ) . after ( getStart ( ) ) ) { result += DATE_SEPARATOR + df . format ( getLastDay ( ) ) ; } } else { df = DateFormat . getDateTimeInstance ( dateTimeFormat , DateFormat . SHORT , m_series . getLocale ( ) ) ; result = df . format ( getStart ( ) ) ; if ( getEnd ( ) . after ( getStart ( ) ) ) { if ( isMultiDay ( ) ) { result += DATE_SEPARATOR + df . format ( getEnd ( ) ) ; } else { df = DateFormat . getTimeInstance ( DateFormat . SHORT , m_series . getLocale ( ) ) ; result += DATE_SEPARATOR + df . format ( getEnd ( ) ) ; } } } return result ; |
public class LogFormatter { /** * Format message and write to the output stream .
* @ param out The output stream to write to .
* @ param message The message to be written .
* @ param < Message > The message type .
* @ param < Field > The field type . */
public < Message extends PMessage < Message , Field > , Field extends PField > void formatTo ( OutputStream out , Message message ) { } } | IndentedPrintWriter builder = new IndentedPrintWriter ( out , indent , newline ) ; if ( message == null ) { builder . append ( null ) ; } else { builder . append ( message . descriptor ( ) . getQualifiedName ( ) ) . append ( space ) ; appendMessage ( builder , message ) ; } builder . flush ( ) ; |
public class GosuStyleContext { /** * Fetch the font to use for a given attribute set . */
@ Override public Font getFont ( AttributeSet attr ) { } } | boolean bUnderline = StyleConstants . isUnderline ( attr ) ; boolean bStrikethrough = StyleConstants . isStrikeThrough ( attr ) ; if ( ! bUnderline && ! bStrikethrough ) { // StyleContext ignores the Underline and Strikethrough attribute
return getFont ( attr , getFontFamily ( attr ) ) ; } // Must build the font via TextAttribute map to support Underlined and Strikethrough text
Map < TextAttribute , Object > map = new HashMap < TextAttribute , Object > ( ) ; map . put ( TextAttribute . FAMILY , getFontFamily ( attr ) ) ; map . put ( TextAttribute . SIZE , ( float ) getFontSize ( ) ) ; map . put ( TextAttribute . WEIGHT , StyleConstants . isBold ( attr ) ? TextAttribute . WEIGHT_BOLD : TextAttribute . WEIGHT_REGULAR ) ; map . put ( TextAttribute . POSTURE , StyleConstants . isItalic ( attr ) ? TextAttribute . POSTURE_OBLIQUE : TextAttribute . POSTURE_REGULAR ) ; if ( bUnderline ) { map . put ( TextAttribute . UNDERLINE , TextAttribute . UNDERLINE_ON ) ; if ( attr . getAttribute ( DASHED ) != null ) { map . put ( TextAttribute . UNDERLINE , TextAttribute . UNDERLINE_LOW_GRAY ) ; } } if ( bStrikethrough ) { map . put ( TextAttribute . STRIKETHROUGH , TextAttribute . STRIKETHROUGH_ON ) ; } Font font = _fontCache . get ( attr ) ; if ( font == null ) { font = new Font ( map ) ; _fontCache . put ( attr , font ) ; } return font ; |
public class DevicesManagementApi { /** * Updates a device type information
* Updates a device type information
* @ param dtid Device type ID . ( required )
* @ param deviceTypeInfo Device type info object to be set ( required )
* @ return ApiResponse & lt ; DeviceTypesInfoEnvelope & gt ;
* @ throws ApiException If fail to call the API , e . g . server error or cannot deserialize the response body */
public ApiResponse < DeviceTypesInfoEnvelope > updateDeviceTypesInfoWithHttpInfo ( String dtid , DeviceTypesInfo deviceTypeInfo ) throws ApiException { } } | com . squareup . okhttp . Call call = updateDeviceTypesInfoValidateBeforeCall ( dtid , deviceTypeInfo , null , null ) ; Type localVarReturnType = new TypeToken < DeviceTypesInfoEnvelope > ( ) { } . getType ( ) ; return apiClient . execute ( call , localVarReturnType ) ; |
public class BankCountryValidator { /** * { @ inheritDoc } check if given object is valid .
* @ see javax . validation . ConstraintValidator # isValid ( Object ,
* javax . validation . ConstraintValidatorContext ) */
@ Override public final boolean isValid ( final Object pvalue , final ConstraintValidatorContext pcontext ) { } } | if ( pvalue == null ) { return true ; } try { String valueCountry = BeanPropertyReaderUtil . getNullSaveStringProperty ( pvalue , this . fieldCountryCode ) ; final String valueIban = BeanPropertyReaderUtil . getNullSaveStringProperty ( pvalue , this . fieldIban ) ; final String valueBic = BeanPropertyReaderUtil . getNullSaveStringProperty ( pvalue , this . fieldBic ) ; final String bicOfIban = IbanUtil . getBicOfIban ( valueIban ) ; if ( StringUtils . isEmpty ( valueIban ) && StringUtils . isEmpty ( valueBic ) ) { return true ; } else if ( StringUtils . isEmpty ( valueIban ) ) { pcontext . disableDefaultConstraintViolation ( ) ; pcontext . buildConstraintViolationWithTemplate ( NOT_EMPTY_MESSAGE ) . addPropertyNode ( this . fieldIban ) . addConstraintViolation ( ) ; return false ; } else if ( StringUtils . isEmpty ( valueBic ) ) { pcontext . disableDefaultConstraintViolation ( ) ; pcontext . buildConstraintViolationWithTemplate ( NOT_EMPTY_MESSAGE ) . addPropertyNode ( this . fieldBic ) . addConstraintViolation ( ) ; return false ; } else if ( StringUtils . length ( valueIban ) >= IbanValidator . IBAN_LENGTH_MIN && StringUtils . length ( valueBic ) >= BicValidator . BIC_LENGTH_MIN ) { String countryIban = valueIban . replaceAll ( "\\s+" , StringUtils . EMPTY ) . substring ( 0 , 2 ) ; String countryBic = valueBic . replaceAll ( "\\s+" , StringUtils . EMPTY ) . substring ( 4 , 6 ) ; if ( StringUtils . length ( valueCountry ) != 2 ) { // missing country selection , us country of iban
valueCountry = countryIban ; } if ( this . allowLowerCaseCountryCode ) { valueCountry = StringUtils . upperCase ( valueCountry ) ; countryIban = StringUtils . upperCase ( countryIban ) ; countryBic = StringUtils . upperCase ( countryIban ) ; } boolean ibanCodeMatches = false ; boolean bicCodeMatches = false ; final boolean bicIbanMatches = bicOfIban == null || StringUtils . equals ( bicOfIban , valueBic ) ; switch ( valueCountry ) { case "GF" : // French Guyana
case "GP" : // Guadeloupe
case "MQ" : // Martinique
case "RE" : // Reunion
case "PF" : // French Polynesia
case "TF" : // French Southern Territories
case "YT" : // Mayotte
case "NC" : // New Caledonia
case "BL" : // Saint Barthelemy
case "MF" : // Saint Martin
case "PM" : // Saint Pierre et Miquelon
case "WF" : // Wallis and Futuna Islands
// special solution for French oversea teritorials with french registry
ibanCodeMatches = "FR" . equals ( countryIban ) ; bicCodeMatches = "FR" . equals ( countryBic ) ; break ; case "JE" : // Jersey
case "GG" : // Guernsey
// they can use GB or FR registry , but iban and bic code must match
ibanCodeMatches = ( "GB" . equals ( countryIban ) || "FR" . equals ( countryIban ) ) && countryBic . equals ( countryIban ) ; bicCodeMatches = "GB" . equals ( countryBic ) || "FR" . equals ( countryBic ) ; break ; default : ibanCodeMatches = valueCountry . equals ( countryIban ) ; bicCodeMatches = valueCountry . equals ( countryBic ) ; break ; } if ( ibanCodeMatches && bicCodeMatches && bicIbanMatches ) { return true ; } pcontext . disableDefaultConstraintViolation ( ) ; if ( ! ibanCodeMatches ) { pcontext . buildConstraintViolationWithTemplate ( this . message ) . addPropertyNode ( this . fieldIban ) . addConstraintViolation ( ) ; } if ( ! bicCodeMatches ) { pcontext . buildConstraintViolationWithTemplate ( this . message ) . addPropertyNode ( this . fieldBic ) . addConstraintViolation ( ) ; } if ( ! bicIbanMatches ) { pcontext . buildConstraintViolationWithTemplate ( this . messageWrongBic ) . addPropertyNode ( this . fieldBic ) . addConstraintViolation ( ) ; } return false ; } else { // wrong format , should be handled by other validators
return true ; } } catch ( final Exception ignore ) { return false ; } |
public class CacheImpl { /** * Checks if the given { @ link CacheItem } has expired . If so , it is removed from its scope in the
* { @ link CacheStoreAdapter } .
* @ return true if the item has expired and was removed from the scope , false otherwise
* @ throws Exception */
protected boolean checkForExpiration ( CacheItem item ) throws Exception { } } | if ( item . isExpired ( ticks . get ( ) ) ) { cacheStoreAdapter . remove ( item . getScope ( ) , item . getKey ( ) ) ; return true ; } else { return false ; } |
public class AbstractObjectTable { /** * Construct the table model for this table . The default implementation of
* this creates a GlazedTableModel using an Advanced format .
* @ param eventList
* on which to build the model
* @ return table model */
protected GlazedTableModel createTableModel ( EventList eventList ) { } } | return new GlazedTableModel ( eventList , getColumnPropertyNames ( ) , modelId ) { protected TableFormat createTableFormat ( ) { return new DefaultAdvancedTableFormat ( ) ; } } ; |
public class vlan { /** * Use this API to add vlan . */
public static base_response add ( nitro_service client , vlan resource ) throws Exception { } } | vlan addresource = new vlan ( ) ; addresource . id = resource . id ; addresource . aliasname = resource . aliasname ; addresource . ipv6dynamicrouting = resource . ipv6dynamicrouting ; return addresource . add_resource ( client ) ; |
public class RuleClassifier { /** * This function This expands the rule */
public void expandeRule ( RuleClassification rl , Instance inst , int ruleIndex ) { } } | int remainder = ( int ) Double . MAX_VALUE ; int numInstanciaObservers = ( int ) rl . obserClassDistrib . sumOfValues ( ) ; // Number of instances for this rule observers .
this . updateRuleAttribStatistics ( inst , rl , ruleIndex ) ; if ( numInstanciaObservers != 0 && this . gracePeriodOption . getValue ( ) != 0 ) { remainder = ( numInstanciaObservers ) % ( this . gracePeriodOption . getValue ( ) ) ; } if ( remainder == 0 ) { this . saveBestValGlobalEntropy = new ArrayList < ArrayList < Double > > ( ) ; this . saveBestGlobalEntropy = new DoubleVector ( ) ; this . saveTheBest = new ArrayList < Double > ( ) ; this . minEntropyTemp = Double . MAX_VALUE ; this . minEntropyNominalAttrib = Double . MAX_VALUE ; theBestAttributes ( inst , rl . observers ) ; // The best value of entropy for each attribute .
boolean HB = checkBestAttrib ( numInstanciaObservers , rl . observers , rl . obserClassDistrib ) ; // Check if the best attribute value is really the best .
if ( HB == true ) { double attributeValue = this . saveTheBest . get ( 3 ) ; double symbol = this . saveTheBest . get ( 2 ) ; // = , < = , > ( 0.0 , - 1.0 , 1.0 ) .
double value = this . saveTheBest . get ( 0 ) ; // Value of the attribute .
this . pred = new Predicates ( attributeValue , symbol , value ) ; int countPred = 0 ; for ( int i = 0 ; i < rl . predicateSet . size ( ) ; i ++ ) { // Checks if the new predicate is not yet in the predicateSet .
if ( this . pred . getSymbol ( ) == 0.0 ) { // Nominal Attribute .
if ( rl . predicateSet . get ( i ) . getAttributeValue ( ) != this . pred . getAttributeValue ( ) ) { countPred = countPred + 1 ; } } else { if ( rl . predicateSet . get ( i ) . getAttributeValue ( ) != this . pred . getAttributeValue ( ) || rl . predicateSet . get ( i ) . getSymbol ( ) != this . pred . getSymbol ( ) || rl . predicateSet . get ( i ) . getValue ( ) != this . pred . getValue ( ) ) { countPred = countPred + 1 ; } } } if ( countPred == rl . predicateSet . size ( ) ) { int countDifPred = 0 ; ArrayList < Predicates > predicSetTemp = new ArrayList < Predicates > ( ) ; for ( int x = 0 ; x < rl . predicateSet . size ( ) ; x ++ ) { predicSetTemp . add ( rl . predicateSet . get ( x ) ) ; } predicSetTemp . add ( this . pred ) ; for ( int f = 0 ; f < this . ruleSet . size ( ) ; f ++ ) { int countDifPredTemp = 0 ; if ( this . ruleSet . get ( f ) . predicateSet . size ( ) == predicSetTemp . size ( ) ) { for ( int x = 0 ; x < this . ruleSet . get ( f ) . predicateSet . size ( ) ; x ++ ) { if ( this . ruleSet . get ( f ) . predicateSet . get ( x ) . getAttributeValue ( ) == predicSetTemp . get ( x ) . getAttributeValue ( ) && this . ruleSet . get ( f ) . predicateSet . get ( x ) . getSymbol ( ) == predicSetTemp . get ( x ) . getSymbol ( ) && this . ruleSet . get ( f ) . predicateSet . get ( x ) . getValue ( ) == predicSetTemp . get ( x ) . getValue ( ) ) { countDifPredTemp = countDifPredTemp + 1 ; } } if ( countDifPredTemp == predicSetTemp . size ( ) ) { break ; } else { countDifPred = countDifPred + 1 ; } } else { countDifPred = countDifPred + 1 ; } } if ( countDifPred == this . ruleSet . size ( ) ) { if ( this . pred . getSymbol ( ) == 0.0 ) { initializeRuleStatistics ( rl , pred , inst ) ; } else if ( this . pred . getSymbol ( ) == 1.0 ) { int countIqualPred = 0 ; for ( int f = 0 ; f < rl . predicateSet . size ( ) ; f ++ ) { if ( this . pred . getAttributeValue ( ) == rl . predicateSet . get ( f ) . getAttributeValue ( ) && this . pred . getSymbol ( ) == rl . 
predicateSet . get ( f ) . getSymbol ( ) ) { countIqualPred = countIqualPred + 1 ; if ( this . pred . getValue ( ) > rl . predicateSet . get ( f ) . getValue ( ) ) { rl . predicateSet . remove ( f ) ; initializeRuleStatistics ( rl , pred , inst ) ; } } } if ( countIqualPred == 0 ) { initializeRuleStatistics ( rl , pred , inst ) ; } } else { int countIqualPred = 0 ; for ( int f = 0 ; f < rl . predicateSet . size ( ) ; f ++ ) { if ( this . pred . getAttributeValue ( ) == rl . predicateSet . get ( f ) . getAttributeValue ( ) && this . pred . getSymbol ( ) == rl . predicateSet . get ( f ) . getSymbol ( ) ) { countIqualPred = countIqualPred + 1 ; if ( this . pred . getValue ( ) < rl . predicateSet . get ( f ) . getValue ( ) ) { rl . predicateSet . remove ( f ) ; initializeRuleStatistics ( rl , pred , inst ) ; } } } if ( countIqualPred == 0 ) { initializeRuleStatistics ( rl , pred , inst ) ; } } } } } } |
public class CmsSitemapController { /** * Recomputes the properties for a client sitemap entry . < p >
* @ param entry the entry for whose descendants the properties should be recomputed */
protected void recomputeProperties ( CmsClientSitemapEntry entry ) { } } | for ( I_CmsPropertyUpdateHandler handler : m_propertyUpdateHandlers ) { handler . handlePropertyUpdate ( entry ) ; } for ( CmsClientSitemapEntry child : entry . getSubEntries ( ) ) { recomputeProperties ( child ) ; } |
public class ArrayHelper { /** * Check if the passed array contains only < code > null < / code > element .
* @ param < T >
* element type
* @ param aArray
* The array to check . May be < code > null < / code > .
* @ return < code > true < / code > only if the passed array is neither
* < code > null < / code > nor empty and if at least one < code > null < / code >
* element is contained . */
public static < T > boolean containsOnlyNullElements ( @ Nullable final T [ ] aArray ) { } } | if ( isEmpty ( aArray ) ) return false ; for ( final Object aObj : aArray ) if ( aObj != null ) return false ; return true ; |
public class InefficientStringBuffering { /** * implements the visitor to create and clear the stack
* @ param obj
* the context object of the currently parsed code block */
@ Override public void visitCode ( final Code obj ) { } } | if ( obj . getCode ( ) != null ) { stack . resetForMethodEntry ( this ) ; sawLDCEmpty = false ; super . visitCode ( obj ) ; } |
public class JsonParser { /** * < p > parse . < / p >
* @ param key a { @ link java . lang . String } object .
* @ param value a { @ link java . lang . Object } object .
* @ param environment a { @ link org . configureme . environments . DynamicEnvironment } object .
* @ return a { @ link java . util . List } object .
* @ throws org . json . JSONException if any . */
public static List < ? extends ParsedAttribute < ? > > parse ( final String key , final Object value , final DynamicEnvironment environment ) throws JSONException { } } | // an object value means a change in environment , let ' s see what it is
if ( value instanceof JSONObject && key . startsWith ( COMPOSITE_ATTR_PREFIX ) ) return Collections . singletonList ( parseComposite ( key , ( JSONObject ) value , environment ) ) ; if ( value instanceof JSONArray && key . startsWith ( COMPOSITE_ATTR_PREFIX ) ) return Collections . singletonList ( parseArray ( key , ( JSONArray ) value , environment ) ) ; if ( value instanceof String && ( ( String ) value ) . startsWith ( INCLUDE_ATTR_PREFIX ) ) return Collections . singletonList ( parseInclude ( key , ( String ) value , environment ) ) ; if ( value instanceof JSONObject ) return parseObject ( key , ( JSONObject ) value , environment ) ; if ( value instanceof JSONArray ) return Collections . singletonList ( parseArray ( key , ( JSONArray ) value , environment ) ) ; return Collections . singletonList ( new PlainParsedAttribute ( key , ( Environment ) environment . clone ( ) , JSONObject . NULL . equals ( value ) ? null : value . toString ( ) ) ) ; |
public class MapOperation { /** * This method helps to add clearing Near Cache event only from one - partition which matches partitionId of the map name . */
protected final void invalidateAllKeysInNearCaches ( ) { } } | if ( mapContainer . hasInvalidationListener ( ) ) { int partitionId = getPartitionId ( ) ; Invalidator invalidator = getNearCacheInvalidator ( ) ; if ( partitionId == getNodeEngine ( ) . getPartitionService ( ) . getPartitionId ( name ) ) { invalidator . invalidateAllKeys ( name , getCallerUuid ( ) ) ; } invalidator . resetPartitionMetaData ( name , getPartitionId ( ) ) ; } |
public class MessageInitialProcess { /** * Init Method . */
public void init ( RecordOwnerParent taskParent , Record recordMain , Map < String , Object > properties ) { } } | super . init ( taskParent , recordMain , properties ) ; this . registerInitialProcesses ( ) ; |
public class QueryCache { /** * Obtain the cache . Start it lazily when needed . */
private Cache < QueryCacheKey , Object > getCache ( ) { } } | final Cache < QueryCacheKey , Object > cache = lazyCache ; // Most likely branch first :
if ( cache != null ) { return cache ; } synchronized ( this ) { if ( lazyCache == null ) { // define the query cache configuration if it does not already exist ( from a previous call or manually defined by the user )
internalCacheRegistry . registerInternalCache ( QUERY_CACHE_NAME , getQueryCacheConfig ( ) . build ( ) , EnumSet . noneOf ( InternalCacheRegistry . Flag . class ) ) ; lazyCache = cacheManager . getCache ( QUERY_CACHE_NAME ) ; } return lazyCache ; } |
public class YahooFinance { /** * Sends a request with the historical quotes included
* starting from the specified { @ link Calendar } date
* at the specified interval .
* Returns null if the data can ' t be retrieved from Yahoo Finance .
* @ param symbol the symbol of the stock for which you want to retrieve information
* @ param from start date of the historical data
* @ param interval the interval of the included historical data
* @ return a { @ link Stock } object containing the requested information
* @ throws java . io . IOException when there ' s a connection problem */
public static Stock get ( String symbol , Calendar from , Interval interval ) throws IOException { } } | return YahooFinance . get ( symbol , from , HistQuotesRequest . DEFAULT_TO , interval ) ; |
public class AbstractParamContainerPanel { /** * Expands the node of the param panel with the given name .
* @ param panelName the name of the panel whose node should be expanded , should not be { @ code null } .
* @ since TODO add version */
public void expandParamPanelNode ( String panelName ) { } } | DefaultMutableTreeNode node = getTreeNodeFromPanelName ( panelName ) ; if ( node != null ) { getTreeParam ( ) . expandPath ( new TreePath ( node . getPath ( ) ) ) ; } |
public class responderaction { /** * Use this API to fetch all the responderaction resources that are configured on netscaler . */
public static responderaction [ ] get ( nitro_service service ) throws Exception { } } | responderaction obj = new responderaction ( ) ; responderaction [ ] response = ( responderaction [ ] ) obj . get_resources ( service ) ; return response ; |
public class CssSkinGenerator { /** * ( non - Javadoc )
* @ see
* net . jawr . web . resource . handler . reader . ResourceBrowser # getFilePath ( java .
* lang . String ) */
@ Override public String getFilePath ( String resourcePath ) { } } | String path = getResolver ( ) . getResourcePath ( resourcePath ) ; return rsBrowser . getFilePath ( path ) ; |
public class BooleanUtils { /** * < p > Negates the specified boolean . < / p >
* < p > If { @ code null } is passed in , { @ code null } will be returned . < / p >
* < p > NOTE : This returns null and will throw a NullPointerException if unboxed to a boolean . < / p >
* < pre >
* BooleanUtils . negate ( Boolean . TRUE ) = Boolean . FALSE ;
* BooleanUtils . negate ( Boolean . FALSE ) = Boolean . TRUE ;
* BooleanUtils . negate ( null ) = null ;
* < / pre >
* @ param bool the Boolean to negate , may be null
* @ return the negated Boolean , or { @ code null } if { @ code null } input */
public static Boolean negate ( final Boolean bool ) { } } | if ( bool == null ) { return null ; } return bool . booleanValue ( ) ? Boolean . FALSE : Boolean . TRUE ; |
public class GetMetricStatisticsRequest { /** * The percentile statistics . Specify values between p0.0 and p100 . When calling < code > GetMetricStatistics < / code > ,
* you must specify either < code > Statistics < / code > or < code > ExtendedStatistics < / code > , but not both . Percentile
* statistics are not available for metrics when any of the metric values are negative numbers .
* < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use
* { @ link # setExtendedStatistics ( java . util . Collection ) } or { @ link # withExtendedStatistics ( java . util . Collection ) } if
* you want to override the existing values .
* @ param extendedStatistics
* The percentile statistics . Specify values between p0.0 and p100 . When calling
* < code > GetMetricStatistics < / code > , you must specify either < code > Statistics < / code > or
* < code > ExtendedStatistics < / code > , but not both . Percentile statistics are not available for metrics when
* any of the metric values are negative numbers .
* @ return Returns a reference to this object so that method calls can be chained together . */
public GetMetricStatisticsRequest withExtendedStatistics ( String ... extendedStatistics ) { } } | if ( this . extendedStatistics == null ) { setExtendedStatistics ( new com . amazonaws . internal . SdkInternalList < String > ( extendedStatistics . length ) ) ; } for ( String ele : extendedStatistics ) { this . extendedStatistics . add ( ele ) ; } return this ; |
public class ServerStatsEncoding { /** * Decodes serialized byte array to create { @ link ServerStats } as per Opencensus Summary Span
* specification .
* @ param serialized encoded { @ code ServerStats } in byte array .
* @ return decoded { @ code ServerStats } . null if decoding fails .
* @ since 0.16 */
public static ServerStats parseBytes ( byte [ ] serialized ) throws ServerStatsDeserializationException { } } | final ByteBuffer bb = ByteBuffer . wrap ( serialized ) ; bb . order ( ByteOrder . LITTLE_ENDIAN ) ; long serviceLatencyNs = 0L ; long lbLatencyNs = 0L ; byte traceOption = ( byte ) 0 ; // Check the version first .
if ( ! bb . hasRemaining ( ) ) { throw new ServerStatsDeserializationException ( "Serialized ServerStats buffer is empty" ) ; } byte version = bb . get ( ) ; if ( version > CURRENT_VERSION || version < 0 ) { throw new ServerStatsDeserializationException ( "Invalid ServerStats version: " + version ) ; } while ( bb . hasRemaining ( ) ) { ServerStatsFieldEnums . Id id = ServerStatsFieldEnums . Id . valueOf ( ( int ) bb . get ( ) & 0xFF ) ; if ( id == null ) { // Skip remaining ;
bb . position ( bb . limit ( ) ) ; } else { switch ( id ) { case SERVER_STATS_LB_LATENCY_ID : lbLatencyNs = bb . getLong ( ) ; break ; case SERVER_STATS_SERVICE_LATENCY_ID : serviceLatencyNs = bb . getLong ( ) ; break ; case SERVER_STATS_TRACE_OPTION_ID : traceOption = bb . get ( ) ; break ; } } } try { return ServerStats . create ( lbLatencyNs , serviceLatencyNs , traceOption ) ; } catch ( IllegalArgumentException e ) { throw new ServerStatsDeserializationException ( "Serialized ServiceStats contains invalid values: " + e . getMessage ( ) ) ; } |
public class PayIn { /** * Gets the structure that maps which property depends on other property .
* @ return */
@ Override public Map < String , Map < String , Map < String , Class < ? > > > > getDependentObjects ( ) { } } | return new HashMap < String , Map < String , Map < String , Class < ? > > > > ( ) { { put ( "PaymentType" , new HashMap < String , Map < String , Class < ? > > > ( ) { { put ( "CARD" , new HashMap < String , Class < ? > > ( ) { { put ( "PaymentDetails" , PayInPaymentDetailsCard . class ) ; } } ) ; put ( "PREAUTHORIZED" , new HashMap < String , Class < ? > > ( ) { { put ( "PaymentDetails" , PayInPaymentDetailsPreAuthorized . class ) ; } } ) ; put ( "BANK_WIRE" , new HashMap < String , Class < ? > > ( ) { { put ( "PaymentDetails" , PayInPaymentDetailsBankWire . class ) ; } } ) ; put ( "DIRECT_DEBIT" , new HashMap < String , Class < ? > > ( ) { { put ( "PaymentDetails" , PayInPaymentDetailsDirectDebit . class ) ; } } ) ; put ( "PAYPAL" , new HashMap < String , Class < ? > > ( ) { { put ( "PaymentDetails" , PayInPaymentDetailsPayPal . class ) ; } } ) ; // . . . and more in future . . .
} } ) ; put ( "ExecutionType" , new HashMap < String , Map < String , Class < ? > > > ( ) { { put ( "WEB" , new HashMap < String , Class < ? > > ( ) { { put ( "ExecutionDetails" , PayInExecutionDetailsWeb . class ) ; } } ) ; put ( "DIRECT" , new HashMap < String , Class < ? > > ( ) { { put ( "ExecutionDetails" , PayInExecutionDetailsDirect . class ) ; } } ) ; put ( "EXTERNAL_INSTRUCTION" , new HashMap < String , Class < ? > > ( ) { { put ( "ExecutionDetails" , PayInExecutionDetailsBankingAlias . class ) ; } } ) ; // . . . and more in future . . .
} } ) ; } } ; |
public class InstanceNetworkInterface { /** * One or more security groups .
* @ param groups
* One or more security groups . */
public void setGroups ( java . util . Collection < GroupIdentifier > groups ) { } } | if ( groups == null ) { this . groups = null ; return ; } this . groups = new com . amazonaws . internal . SdkInternalList < GroupIdentifier > ( groups ) ; |
public class GroupsPieChart {
    /**
     * Updates the state of the chart.
     *
     * @param groupTargetCounts
     *            list of target counts, one entry per pie slice
     * @param totalTargetsCount
     *            total count of targets that are represented by the pie
     */
    public void setChartState(final List<Long> groupTargetCounts, final Long totalTargetsCount) {
        getState().setGroupTargetCounts(groupTargetCounts);
        getState().setTotalTargetCount(totalTargetsCount);
        // Flag the component so the framework repaints it on the client side.
        markAsDirty();
    }
}
public class DebugMolecularFormula {
    /**
     * {@inheritDoc}
     *
     * <p>Logs the isotope being added at debug level, then delegates to the
     * superclass implementation unchanged.
     */
    @Override
    public IMolecularFormula addIsotope(IIsotope isotope) {
        logger.debug("Adding isotope: ", isotope);
        return super.addIsotope(isotope);
    }
}
public class StringUtility {
    /**
     * Joins the string representation of objects on the provided delimiter.
     *
     * <p>The iterable is traversed twice: a first pass computes the exact length
     * of the result so the buffer can be sized up front, and a second pass builds
     * the string. If the two passes disagree (the iterable changed in between),
     * a {@link ConcurrentModificationException} is thrown.
     *
     * @param objects the elements to join; {@code null} elements render as "null"
     * @param delimiter the separator placed between consecutive elements
     * @return the joined string (empty for an empty iterable)
     * @throws ConcurrentModificationException if iteration is not consistent between passes
     * @see #join(java.lang.Iterable, java.lang.String, java.lang.Appendable)
     * @see #join(java.lang.Object[], java.lang.String)
     */
    public static String join(Iterable<?> objects, String delimiter) throws ConcurrentModificationException {
        final int separatorLength = delimiter.length();

        // Pass 1: compute the exact size of the output.
        int expectedLength = 0;
        boolean first = true;
        for (Object element : objects) {
            if (!first) {
                expectedLength += separatorLength;
            }
            expectedLength += String.valueOf(element).length();
            first = false;
        }

        // Pass 2: build the output into a buffer sized by pass 1.
        final StringBuilder result = new StringBuilder(expectedLength);
        first = true;
        for (Object element : objects) {
            if (!first) {
                result.append(delimiter);
            }
            result.append(element);
            first = false;
        }

        // A length mismatch means the iterable changed between the two passes.
        if (expectedLength != result.length()) {
            throw new ConcurrentModificationException();
        }
        return result.toString();
    }
}
public class GrassLegacyUtilities { /** * return the rectangle of the cell of the active region , that surrounds the given coordinates
* @ param activeRegion
* @ param x the given easting coordinate
* @ param y given northing coordinate
* @ return the rectangle localizing the cell inside which the x and y stay */
public static Window getRectangleAroundPoint ( Window activeRegion , double x , double y ) { } } | double minx = activeRegion . getRectangle ( ) . getBounds2D ( ) . getMinX ( ) ; double ewres = activeRegion . getWEResolution ( ) ; double snapx = minx + ( Math . round ( ( x - minx ) / ewres ) * ewres ) ; double miny = activeRegion . getRectangle ( ) . getBounds2D ( ) . getMinY ( ) ; double nsres = activeRegion . getNSResolution ( ) ; double snapy = miny + ( Math . round ( ( y - miny ) / nsres ) * nsres ) ; double xmin = 0.0 ; double xmax = 0.0 ; double ymin = 0.0 ; double ymax = 0.0 ; if ( x >= snapx ) { xmin = snapx ; xmax = xmin + ewres ; } else { xmax = snapx ; xmin = xmax - ewres ; } if ( y <= snapy ) { ymax = snapy ; ymin = ymax - nsres ; } else { ymin = snapy ; ymax = ymin + nsres ; } // why do I have to put ymin , Rectangle requires the upper left
// corner ? ? ? ? ? ! ! ! ! Is it a BUG
// in the Rectangle2D class ? or docu ?
// Rectangle2D rect = new Rectangle2D . Double ( xmin , ymin , ewres , nsres ) ;
return new Window ( xmin , xmax , ymin , ymax , 1 , 1 ) ; |
public class ElemLiteralResult { /** * Return the raw value of the attribute .
* @ param namespaceURI : localName or localName if the namespaceURI is null of
* the attribute to get
* @ return The Attr value as a string , or the empty string if that attribute
* does not have a specified or default value */
public String getAttribute ( String rawName ) { } } | AVT avt = getLiteralResultAttribute ( rawName ) ; if ( ( null != avt ) ) { return avt . getSimpleString ( ) ; } return EMPTYSTRING ; |
public class JSLocalConsumerPoint {
    /**
     * Locks up to a batch of eligible messages to this consumer, returning the
     * number of messages locked.
     *
     * <p>NOTE: Callers to this method will have the JSLocalConsumerPoint locked
     * (and their JSKeyGroup locked if applicable).
     *
     * <p>Side effects observed in this method: may set/unset the consumer's ready
     * state, may clear {@code _drainQueue}, and may hand a single locked message
     * over to another member of the key group via {@code _transferredConsumer}.
     *
     * @param isolatedRun whether this is an isolated (one-off) run rather than
     *        normal ready-consumer processing
     * @return the number of messages locked to a consumer by this call
     * @throws SIResourceException if a locked message cannot be unlocked again
     *         when it turns out to belong to another key-group member
     */
    private int lockMessages(boolean isolatedRun) throws SIResourceException {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(tc, "lockMessages", new Object[] { this, Boolean.valueOf(isolatedRun) });

        int numOfMsgs = 0;
        _transferredConsumer = null;

        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
            SibTr.debug(tc, "GatherMessages: " + Boolean.valueOf(_gatherMessages)
                + ", instance isolatedRun: " + Boolean.valueOf(_isolatedRun)
                + ", _hasLocalQP: " + Boolean.valueOf(_hasLocalQP));

        // If we're doing an isolated run then we don't want to become ready
        // but we do still want to drain the queue. Unfortunately, if we're remote
        // from the queue we probably won't have a message sat there waiting and
        // therefore we do need to make ourselves ready and issue a request back
        // to the DME (unless we are gathering, in which case we have to treat
        // the consumer as if it is local).
        if (!isolatedRun || (!_hasLocalQP || _gatherMessages)) {
            // If the LCP is stopped then do nothing (unless we're an isolated run).
            if (_stopped && !isolatedRun) {
                // We're not ready for more messages...
                unsetReady();
                // ...and don't try to get any more.
                _drainQueue = false;
            } else {
                // We are ready for new messages. Before we look for one on the
                // ItemStream we tell the ConsumerDispatcher that we're available for
                // messages, just to make sure we don't miss one coming in while
                // we're looking on the ItemStream (and not finding any). If we do
                // find a suitable message on the ItemStream we mark ourselves as
                // unready. This obviously introduces the possibility of the
                // ConsumerDispatcher trying to give us a message, but better a
                // false positive than a message falling down the gap.
                try {
                    setReady();
                } catch (SINotPossibleInCurrentConfigurationException e) {
                    // No FFDC code needed
                    // Don't try to get any more.
                    _drainQueue = false;
                }
            }
            // Remember we're in the middle of an isolated run (so that put() knows
            // what to do).
            if (isolatedRun)
                _isolatedRun = isolatedRun;
            if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                SibTr.debug(tc, "Instance isolatedRun: " + Boolean.valueOf(_isolatedRun));
        }

        // A stopped key group means we must not pull messages for it.
        if ((_keyGroup != null) && !_keyGroup.isStarted())
            _drainQueue = false;

        boolean consumerReady = true;

        // If the consumer has asked for batches of messages we'll look for that
        // number of available messages on the queue and give them to the asynch
        // consumer all in one go. If this is a stoppable consumer we'd like to keep
        // the number of messages being processed within the number of possible
        // hidden messages (which equals the maxSequentialFailuresThreshold). So we
        // crop the batch size to the remaining space we have left for hidden
        // messages (or one, whichever is the larger). However, that would limit
        // the batch size when there are no problems, which is a little rough, so
        // we only do the cropping once we've started hiding messages (which can
        // result in a few more messages being processed/hidden to start with, but
        // that's life).
        int currentMaxBatchSize = _maxBatchSize;
        if (_consumerStoppable && (currentMaxBatchSize > 1) && (_hiddenMessages.size() > 0)) {
            int remainingHiddenSpace = _maxSequentialFailuresThreshold - _hiddenMessages.size();
            if (remainingHiddenSpace < 1)
                remainingHiddenSpace = 1;
            if (remainingHiddenSpace < currentMaxBatchSize)
                currentMaxBatchSize = remainingHiddenSpace;
            if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                SibTr.debug(tc, "lockMessages", "Cropped maxBatchSize to " + currentMaxBatchSize);
        }

        // While we still want more messages...
        while ((numOfMsgs < currentMaxBatchSize) &&            // filled the batch
               _drainQueue &&                                  // emptied the queue
               (!_lockRequested || (numOfMsgs == 0)) &&        // asked to interrupt
               (!_interruptConsumer || (numOfMsgs == 0)) &&    // interrupted (ignore first time round)
               (_transferredConsumer == null))                 // switched ordered consumer
        {
            SIMPMessage msg = null;
            if (_keyGroup != null) {
                // If we are part of a group, we want to remember which member of
                // the group actually gets matched when the filter is called.
                _keyGroup.setConsumerActive(true);
            }
            // Try to get a locked eligible message.
            msg = getEligibleMsgLocked(null);
            if (msg != null) {
                JSLocalConsumerPoint matchingConsumer = this;
                // We've found a message that matches. If we are a member of an
                // ordering group it is possible that this message matches a
                // different member of the group from us. If that is the case we
                // have two possibilities:
                //   i)  If this is the first message in the batch we can pass the
                //       locked message to the matching consumer for them to process.
                //   ii) If we already have messages in our batch we will have to
                //       unlock this message and process the batch first.
                if (_keyGroup != null) {
                    // Find out which member of the group actually got matched...
                    matchingConsumer =
                        (JSLocalConsumerPoint) _keyGroup.getMatchingMember(_consumerKey).getConsumerPoint();
                    // ...and show that we're done with the filter for now.
                    _keyGroup.setConsumerActive(false);
                }
                // The message is intended for this consumer.
                if (matchingConsumer == this) {
                    // If we've got a message then we're no longer interested in
                    // messages straight from the ConsumerDispatcher, but if looking
                    // for a batch of messages we may take a long time to exit this
                    // loop, so take ourselves out of the running as soon as possible.
                    if (consumerReady) {
                        consumerReady = false;
                        unsetReady();
                    }
                    // Do we really need this message kept recoverable in the MS?
                    boolean isRecoverable = true;
                    if ((_unrecoverableOptions != Reliability.NONE)
                        && (msg.getReliability().compareTo(_unrecoverableOptions) <= 0))
                        isRecoverable = false;
                    registerForEvents(msg);
                    // Store a reference to the locked message in the LME.
                    _allLockedMessages.addNewMessage(msg, true, isRecoverable);
                    // If we were successful, increment our counter.
                    numOfMsgs++;
                    // We check for an interrupt after every message is locked.
                    if (_externalConsumerLock != null)
                        _interruptConsumer = _externalConsumerLock.isLockYieldRequested();
                }
                // The message is intended for another member of the key group.
                else {
                    // If we already have messages locked to this consumer we must
                    // unlock the last message and process the others first. Then we
                    // can come back and pick this one up later (if it is still
                    // available).
                    if (numOfMsgs > 0) {
                        try {
                            msg.unlockMsg(msg.getLockID(), null, true);
                            msg = null;
                        } catch (MessageStoreException e) {
                            // MessageStoreException shouldn't occur so FFDC.
                            FFDCFilter.processException(e,
                                "com.ibm.ws.sib.processor.impl.JSLocalConsumerPoint.lockMessages",
                                "1:2995:1.22.5.1", this);
                            SibTr.exception(tc, e);
                            if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
                                SibTr.exit(tc, "lockMessages", e);
                            throw new SIResourceException(
                                nls.getFormattedMessage("INTERNAL_MESSAGING_ERROR_CWSIP0002",
                                    new Object[] { "com.ibm.ws.sib.processor.impl.JSLocalConsumerPoint",
                                        "1:3008:1.22.5.1", e },
                                    null),
                                e);
                        }
                    }
                    // This is the only message locked, so we can just drive the
                    // correct consumer.
                    else {
                        numOfMsgs++;
                        // Mark the fact that we've transferred ownership of a locked
                        // message so that it can be processed by the calling method
                        // (outside of any LCP locks).
                        _transferredConsumer = matchingConsumer;
                        _transferredConsumer.lock();
                        try {
                            _transferredConsumer.unsetReady();
                            _transferredConsumer.addLockedMessage(msg);
                        } finally {
                            _transferredConsumer.unlock();
                        }
                        // We now want to drop out of the loop so we can process this
                        // message with the correct consumer.
                        // We check for an interrupt after every message is locked.
                        if (_externalConsumerLock != null)
                            _interruptConsumer = _externalConsumerLock.isLockYieldRequested();
                    }
                }
            }
            if (msg == null) {
                // We have all the messages currently on the queue, so we are about
                // to call back to get them processed.
                // Stop trying to get any more.
                _drainQueue = false;
            }
            // We check for an interrupt after every message is locked.
            if (_externalConsumerLock != null)
                _interruptConsumer = _externalConsumerLock.isLockYieldRequested();
        } // end while

        // If we're an isolated run and we found a message (i.e. not ready) then
        // no-one will try to deliver a message to us and therefore we can forget
        // that we were ever here. On the other hand, if we couldn't find a message
        // and the queue is remote we need to signal that we're still waiting.
        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
            SibTr.debug(tc, "Instance isolatedRun: " + Boolean.valueOf(_isolatedRun)
                + ", method isolatedRun: " + Boolean.valueOf(isolatedRun)
                + ", ready: " + Boolean.valueOf(_ready));
        if (isolatedRun && !_ready)
            _isolatedRun = false;

        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(tc, "lockMessages", Integer.valueOf(numOfMsgs));
        return numOfMsgs;
    }
}
public class ApiOvhMe { /** * Get this object properties
* REST : GET / me / order / { orderId } / details / { orderDetailId } / extension
* @ param orderId [ required ]
* @ param orderDetailId [ required ]
* API beta */
public OvhItemDetail order_orderId_details_orderDetailId_extension_GET ( Long orderId , Long orderDetailId ) throws IOException { } } | String qPath = "/me/order/{orderId}/details/{orderDetailId}/extension" ; StringBuilder sb = path ( qPath , orderId , orderDetailId ) ; String resp = exec ( qPath , "GET" , sb . toString ( ) , null ) ; return convertTo ( resp , OvhItemDetail . class ) ; |
public class SibRaProducerSession {
    /**
     * Sends a message. Checks that the session is valid, then maps the given
     * transaction through the parent connection before delegating the send.
     *
     * @param msg
     *            the message to send
     * @param tran
     *            the transaction to send the message under
     * @throws SIConnectionUnavailableException
     *             if the connection is not valid
     * @throws SINotPossibleInCurrentConfigurationException
     *             if the delegation fails
     * @throws SIIncorrectCallException
     *             if the transaction parameter is not valid given the current
     *             application and container transactions
     * @throws SIErrorException
     *             if the delegation fails
     * @throws SIResourceException
     *             if the current container transaction cannot be determined
     * @throws SINotAuthorizedException
     *             if the delegation fails
     * @throws SILimitExceededException
     *             if the delegation fails
     * @throws SIConnectionLostException
     *             if the delegation fails
     * @throws SISessionUnavailableException
     *             if the delegation fails
     * @throws SIConnectionDroppedException
     *             if the delegation fails
     * @throws SISessionDroppedException
     *             if the delegation fails
     */
    public void send(final SIBusMessage msg, final SITransaction tran)
        throws SISessionDroppedException, SIConnectionDroppedException, SISessionUnavailableException,
        SIConnectionUnavailableException, SIConnectionLostException, SILimitExceededException,
        SINotAuthorizedException, SIResourceException, SIErrorException, SIIncorrectCallException,
        SINotPossibleInCurrentConfigurationException {
        // Fail fast if this session (or its connection) is no longer usable.
        checkValid();
        // Map the application-level transaction to the core SPI one before sending.
        _delegate.send(msg, _parentConnection.mapTransaction(tran));
    }
}
public class UIResults { /** * Create a self - referencing URL that will perform a query for all copies
* of the given URL .
* < p > This method builds URL that passes target URL in CGI parameter ,
* along with other parameters unnecessary for making simple capture
* query request . It is not suitable for simple links .
* { @ link # makePlainCaptureQueryUrl ( String ) } generates clean and
* plain URL . < / p >
* @ param url to search for copies of
* @ return String url that will make a query for all captures of an URL . */
public String makeCaptureQueryUrl ( String url ) { } } | WaybackRequest newWBR = wbRequest . clone ( ) ; newWBR . setCaptureQueryRequest ( ) ; newWBR . setRequestUrl ( url ) ; return newWBR . getAccessPoint ( ) . getQueryPrefix ( ) + "query?" + newWBR . getQueryArguments ( 1 ) ; |
public class PdfContentByte { /** * Sets the fill color . < CODE > color < / CODE > can be an
* < CODE > ExtendedColor < / CODE > .
* @ param color the color */
public void setColorFill ( Color color ) { } } | PdfXConformanceImp . checkPDFXConformance ( writer , PdfXConformanceImp . PDFXKEY_COLOR , color ) ; int type = ExtendedColor . getType ( color ) ; switch ( type ) { case ExtendedColor . TYPE_GRAY : { setGrayFill ( ( ( GrayColor ) color ) . getGray ( ) ) ; break ; } case ExtendedColor . TYPE_CMYK : { CMYKColor cmyk = ( CMYKColor ) color ; setCMYKColorFillF ( cmyk . getCyan ( ) , cmyk . getMagenta ( ) , cmyk . getYellow ( ) , cmyk . getBlack ( ) ) ; break ; } case ExtendedColor . TYPE_SEPARATION : { SpotColor spot = ( SpotColor ) color ; setColorFill ( spot . getPdfSpotColor ( ) , spot . getTint ( ) ) ; break ; } case ExtendedColor . TYPE_PATTERN : { PatternColor pat = ( PatternColor ) color ; setPatternFill ( pat . getPainter ( ) ) ; break ; } case ExtendedColor . TYPE_SHADING : { ShadingColor shading = ( ShadingColor ) color ; setShadingFill ( shading . getPdfShadingPattern ( ) ) ; break ; } default : setRGBColorFill ( color . getRed ( ) , color . getGreen ( ) , color . getBlue ( ) ) ; } |
public class RenderUtils { /** * Render disabled class as : - class - simple - name ( class - package ) .
* @ param type class
* @ return rendered disabled class line */
public static String renderDisabledClassLine ( final Class < ? > type ) { } } | return String . format ( "-%-27s %-26s" , getClassName ( type ) , brackets ( renderPackage ( type ) ) ) ; |
public class Seconds {
    /**
     * Obtains a {@code Seconds} representing the number of seconds
     * equivalent to a number of minutes.
     *
     * <p>The resulting amount will be second-based, with the number of seconds
     * equal to the number of minutes multiplied by 60.
     *
     * @param minutes the number of minutes, positive or negative
     * @return the amount with the input minutes converted to seconds, not null
     * @throws ArithmeticException if numeric overflow occurs
     */
    public static Seconds ofMinutes(int minutes) {
        if (minutes == 0) {
            // Reuse the shared zero constant instead of allocating.
            return ZERO;
        }
        // multiplyExact throws ArithmeticException on int overflow.
        return new Seconds(Math.multiplyExact(minutes, SECONDS_PER_MINUTE));
    }
}
public class FileUpload { /** * Base support for the attribute tag . This is overridden to prevent setting the < code > type < / code >
* attribute .
* @ param name The name of the attribute . This value may not be null or the empty string .
* @ param value The value of the attribute . This may contain an expression .
* @ param facet The name of a facet to which the attribute will be applied . This is optional .
* @ throws JspException A JspException may be thrown if there is an error setting the attribute . */
public void setAttribute ( String name , String value , String facet ) throws JspException { } } | if ( name != null ) { if ( name . equals ( TYPE ) ) { String s = Bundle . getString ( "Tags_AttributeMayNotBeSet" , new Object [ ] { name } ) ; registerTagError ( s , null ) ; } else if ( name . equals ( READONLY ) ) { _state . readonly = Boolean . valueOf ( value ) . booleanValue ( ) ; return ; } } super . setAttribute ( name , value , facet ) ; |
public class JFapUtils {
    /**
     * Generate a JFAPSUMMARY message in trace.
     *
     * It is a good idea for the caller to check the main trace
     * TraceComponent.isAnyTracingEnabled() before calling this method.
     *
     * The data output by this message can be searched for in several ways:
     * <nl>
     * <li>Searching on JFAPSUMMARY will output all segments sent and received</li>
     * <li>On a per connection basis. This is done by doing a search of the form
     * "[client dotted ip address:client port number:server dotted ip address:server port number".
     * This information is displayed the same on the client and server side of the connection
     * and so can be used to match up client and server traces</li>
     * <li>On a per conversation basis. This is done by doing a search of the form
     * "[client dotted ip address:client port number:server dotted ip address:server port number:conversation id".
     * This information is displayed the same on the client and server side of the connection
     * and so can be used to match up client and server traces</li>
     * </nl>
     *
     * @param callersTrace the caller's trace component to write the summary to
     * @param connection the connection the summary line describes
     * @param conversation the conversation the summary line describes (may be null)
     * @param remark free-form text appended to the summary line
     */
    public static void debugSummaryMessage(TraceComponent callersTrace, Connection connection, ConversationImpl conversation, String remark) {
        // Use a request number of -1 to distinguish from valid request numbers
        // which are non-negative.
        debugSummaryMessage(callersTrace, connection, conversation, remark, -1);
    }
}
public class CmsImportVersion5 {
    /**
     * Imports the relations.<p>
     *
     * Iterates the previously collected {@code m_importedRelations} map
     * (resource path -> relations to create), reporting progress per resource.
     * Individual relation failures are recorded as report warnings and logged,
     * but do not abort the import of the remaining relations.
     */
    protected void importRelations() {
        if (m_importedRelations.isEmpty()) {
            // Nothing collected during the import pass, so skip the report section entirely.
            return;
        }
        m_report.println(Messages.get().container(Messages.RPT_START_IMPORT_RELATIONS_0), I_CmsReport.FORMAT_HEADLINE);
        int i = 0;
        Iterator<Entry<String, List<CmsRelation>>> it = m_importedRelations.entrySet().iterator();
        while (it.hasNext()) {
            Entry<String, List<CmsRelation>> entry = it.next();
            String resourcePath = entry.getKey();
            List<CmsRelation> relations = entry.getValue();
            // Report "n/total" progress prefix for this resource.
            m_report.print(
                org.opencms.report.Messages.get().container(
                    org.opencms.report.Messages.RPT_SUCCESSION_2,
                    String.valueOf(i + 1),
                    String.valueOf(m_importedRelations.size())),
                I_CmsReport.FORMAT_NOTE);
            m_report.print(
                Messages.get().container(
                    Messages.RPT_IMPORTING_RELATIONS_FOR_2,
                    resourcePath,
                    new Integer(relations.size())),
                I_CmsReport.FORMAT_NOTE);
            m_report.print(org.opencms.report.Messages.get().container(org.opencms.report.Messages.RPT_DOTS_0));
            boolean withErrors = false;
            Iterator<CmsRelation> itRelations = relations.iterator();
            while (itRelations.hasNext()) {
                CmsRelation relation = itRelations.next();
                try {
                    // Add the relation to the resource.
                    m_cms.importRelation(
                        m_cms.getSitePath(relation.getSource(m_cms, CmsResourceFilter.ALL)),
                        m_cms.getSitePath(relation.getTarget(m_cms, CmsResourceFilter.ALL)),
                        relation.getType().getName());
                } catch (CmsException e) {
                    // Record the failure but keep importing the remaining relations.
                    m_report.addWarning(e);
                    withErrors = true;
                    if (LOG.isWarnEnabled()) {
                        LOG.warn(e.getLocalizedMessage());
                    }
                    if (LOG.isDebugEnabled()) {
                        LOG.debug(e.getLocalizedMessage(), e);
                    }
                }
            }
            // Per-resource outcome: OK only if every relation was created.
            if (!withErrors) {
                m_report.println(
                    org.opencms.report.Messages.get().container(org.opencms.report.Messages.RPT_OK_0),
                    I_CmsReport.FORMAT_OK);
            } else {
                m_report.println(
                    org.opencms.report.Messages.get().container(org.opencms.report.Messages.RPT_FAILED_0),
                    I_CmsReport.FORMAT_ERROR);
            }
            i++;
        }
        m_report.println(Messages.get().container(Messages.RPT_END_IMPORT_RELATIONS_0), I_CmsReport.FORMAT_HEADLINE);
    }
}
public class DateTimePatternParser { /** * Render the compiled pattern back into string form . */
public static String render ( List < Node > nodes ) { } } | StringBuilder buf = new StringBuilder ( ) ; for ( Node node : nodes ) { if ( node instanceof Text ) { String text = ( ( Text ) node ) . text ; boolean inquote = false ; for ( int i = 0 ; i < text . length ( ) ; i ++ ) { char ch = text . charAt ( i ) ; switch ( ch ) { case 'G' : case 'y' : case 'Y' : case 'u' : case 'U' : case 'r' : case 'Q' : case 'q' : case 'M' : case 'L' : case 'l' : case 'w' : case 'W' : case 'd' : case 'D' : case 'F' : case 'g' : case 'E' : case 'e' : case 'c' : case 'a' : case 'b' : case 'B' : case 'h' : case 'H' : case 'K' : case 'k' : case 'j' : case 'J' : case 'C' : case 'm' : case 's' : case 'S' : case 'A' : case 'z' : case 'Z' : case 'O' : case 'v' : case 'V' : case 'X' : case 'x' : if ( ! inquote ) { buf . append ( '\'' ) ; } buf . append ( ch ) ; break ; default : if ( inquote ) { buf . append ( '\'' ) ; } buf . append ( ch ) ; break ; } } } else if ( node instanceof Field ) { Field field = ( Field ) node ; for ( int i = 0 ; i < field . width ; i ++ ) { buf . append ( field . ch ) ; } } } return buf . toString ( ) ; |
public class AbstractQueuedSynchronizer {
    /**
     * Releases in exclusive mode. Implemented by unblocking one or
     * more threads if {@link #tryRelease} returns true.
     * This method can be used to implement method {@link Lock#unlock}.
     *
     * @param arg the release argument. This value is conveyed to
     *        {@link #tryRelease} but is otherwise uninterpreted and
     *        can represent anything you like.
     * @return the value returned from {@link #tryRelease}
     */
    public final boolean release(int arg) {
        if (tryRelease(arg)) {
            Node h = head;
            // A non-zero waitStatus on the head indicates a successor may be
            // (or is about to be) parked and needs a signal; zero means no
            // thread requires unparking, so the wake-up can be skipped.
            if (h != null && h.waitStatus != 0)
                unparkSuccessor(h);
            return true;
        }
        return false;
    }
}
public class aaauser { /** * Use this API to fetch aaauser resources of given names . */
public static aaauser [ ] get ( nitro_service service , String username [ ] ) throws Exception { } } | if ( username != null && username . length > 0 ) { aaauser response [ ] = new aaauser [ username . length ] ; aaauser obj [ ] = new aaauser [ username . length ] ; for ( int i = 0 ; i < username . length ; i ++ ) { obj [ i ] = new aaauser ( ) ; obj [ i ] . set_username ( username [ i ] ) ; response [ i ] = ( aaauser ) obj [ i ] . get_resource ( service ) ; } return response ; } return null ; |
public class MkCoPTree {
    /**
     * Auxiliary function for approxKdist methods: sum of squared residuals of
     * the linear fit {@code m * logk[i] + t} against {@code log_kDist[i]} over
     * the first {@code kmax - k0} entries.
     *
     * @param k0 lower k bound (only the difference kmax - k0 is used as the count)
     * @param kmax upper k bound
     * @param logk log of the k values (x coordinates)
     * @param log_kDist log of the k-distances (y coordinates)
     * @param m slope of the fitted line
     * @param t intercept of the fitted line
     * @return the sum of squared errors of the fit
     */
    private double ssqerr(int k0, int kmax, double[] logk, double[] log_kDist, double m, double t) {
        final int count = kmax - k0;
        double sum = 0;
        for (int i = 0; i < count; i++) {
            // double h = log_kDist[i] - (m * (logk[i] - logk[0]) + t); ???
            final double residual = log_kDist[i] - m * logk[i] - t;
            sum += residual * residual;
        }
        return sum;
    }
}
public class ZKStream { /** * region private helpers */
@ VisibleForTesting String getActiveTxPath ( final int epoch , final String txId ) { } } | return ZKPaths . makePath ( ZKPaths . makePath ( activeTxRoot , Integer . toString ( epoch ) ) , txId ) ; |
public class Channel {
    /**
     * ///// Transaction monitoring /////
     *
     * Starts the single background thread that drains the channel event queue
     * and fans each block event out to the registered block listeners.
     * Idempotent: returns immediately if the event queue thread already exists.
     */
    private void startEventQue() {
        if (eventQueueThread != null) {
            // Already running; never start a second dispatcher thread.
            return;
        }
        client.getExecutorService().execute(() -> {
            eventQueueThread = Thread.currentThread();
            while (!shutdown) {
                if (!initialized) {
                    try {
                        logger.debug("not intialized:" + initialized);
                        Thread.sleep(1);
                    } catch (InterruptedException e) {
                        logger.warn(e);
                    }
                    continue; // wait on sending events till the channel is initialized.
                }
                final BlockEvent blockEvent;
                try {
                    // Blocks until the next event is available.
                    blockEvent = channelEventQue.getNextEvent();
                } catch (EventingException e) {
                    if (!shutdown) {
                        logger.error(e);
                    }
                    continue;
                }
                if (blockEvent == null) {
                    logger.warn("GOT null block event.");
                    continue;
                }
                try {
                    final String blockchainID = blockEvent.getChannelId();
                    final String from = format(
                        "Channel %s eventqueue got block event with block number: %d for channel: %s, from %s",
                        name, blockEvent.getBlockNumber(), blockchainID,
                        blockEvent.getPeer() != null ? ("" + blockEvent.getPeer()) : "");
                    logger.trace(from);
                    if (!Objects.equals(name, blockchainID)) {
                        logger.warn(format("Channel %s eventqueue got block event NOT FOR ME channelId %s from %s",
                            name, blockchainID, from));
                        continue; // not targeted for this channel
                    }
                    // Snapshot the listener set under its lock so listeners may
                    // register/unregister while we dispatch.
                    final ArrayList<BL> blcopy = new ArrayList<>(blockListeners.size() + 3);
                    synchronized (blockListeners) {
                        blcopy.addAll(blockListeners.values());
                    }
                    for (BL l : blcopy) {
                        try {
                            logger.trace(format("Sending block event '%s' to block listener %s", from, l.handle));
                            if (l.listener != null) {
                                // Callback listeners run on the executor so a slow
                                // listener cannot stall the dispatch loop.
                                client.getExecutorService().execute(() -> l.listener.received(blockEvent));
                            } else if (l.blockingQueue != null) {
                                if (l.timeout < 0 || l.timeUnit == null) {
                                    // No timeout configured: block until the queue accepts it.
                                    l.blockingQueue.put(new QueuedBlockEvent(l.handle, blockEvent));
                                } else {
                                    // Bounded wait; drop (with a warning) if the queue stays full.
                                    if (!l.blockingQueue.offer(new QueuedBlockEvent(l.handle, blockEvent), l.timeout, l.timeUnit)) {
                                        logger.warn(format(
                                            "Error calling block listener %s on channel: %s event: %s could not be added in time %d %s ",
                                            l.handle, name, from, l.timeout, l.timeUnit));
                                    }
                                }
                            }
                        } catch (Throwable e) {
                            // Don't let one register stop rest.
                            logger.error(format("Error calling block listener %s on channel: %s event: %s ",
                                l.handle, name, from), e);
                        }
                    }
                } catch (Exception e) {
                    logger.error("Unable to parse event", e);
                    logger.debug("event:\n)");
                    logger.debug(blockEvent.toString());
                }
            }
            logger.info(format("Channel %s eventThread shutting down. shutdown: %b thread: %s ",
                name, shutdown, Thread.currentThread().getName()));
        });
    }
}
public class AlertEntityConditionService {
    /**
     * Returns the alert conditions for the given entity.
     *
     * <p>Convenience overload that delegates to {@code list(id, type)} using the
     * entity's own id and type.
     *
     * @param entity The entity to look up
     * @return The alert conditions for the entity
     */
    public Collection<AlertCondition> list(Entity entity) {
        return list(entity.getId(), entity.getType());
    }
}
public class HttpResponseMessageImpl { /** * Query whether this response message ' s status code represents a temporary
* status of 1xx .
* @ return boolean */
public boolean isTemporaryStatusCode ( ) { } } | int code = this . myStatusCode . getIntCode ( ) ; // We need to check if the WebContainer spec level is greater than 3.0
// If it is then we don ' t want to treat the 101 status code as a temp
// one as it changes our behavior
if ( HttpDispatcher . useEE7Streams ( ) && ( code == 101 ) ) return false ; return ( 100 <= code && 200 > code ) ; |
public class SkinSwitcherJsGenerator { /** * ( non - Javadoc )
* @ see net . jawr . web . resource . bundle . generator . AbstractCachedGenerator #
* generateResource ( net . jawr . web . resource . bundle . generator . GeneratorContext ,
* java . lang . String ) */
@ Override public Reader generateResource ( String path , GeneratorContext context ) { } } | JawrConfig ctxConfig = context . getConfig ( ) ; String skinCookieName = ctxConfig . getSkinCookieName ( ) ; String script = createScript ( skinCookieName ) ; return new StringReader ( script ) ; |
public class DefaultClusterManager { /** * Creates a redeploy handler . */
private Handler < AsyncResult < String > > createRedeployHandler ( final JsonObject deploymentInfo , final CountDownLatch latch ) { } } | return new Handler < AsyncResult < String > > ( ) { @ Override public void handle ( AsyncResult < String > result ) { if ( result . failed ( ) ) { log . error ( result . cause ( ) ) ; latch . countDown ( ) ; } else { addMappedDeployment ( result . result ( ) , deploymentInfo , new Handler < AsyncResult < String > > ( ) { @ Override public void handle ( AsyncResult < String > result ) { latch . countDown ( ) ; } } ) ; } } } ; |
public class Bugsnag { /** * Set a timeout ( in ms ) to use when delivering Bugsnag error reports and sessions .
* This is a convenient shorthand for bugsnag . getDelivery ( ) . setTimeout ( ) ;
* @ param timeout the timeout to set ( in ms )
* @ see # setDelivery */
public void setTimeout ( int timeout ) { } } | if ( config . delivery instanceof HttpDelivery ) { ( ( HttpDelivery ) config . delivery ) . setTimeout ( timeout ) ; } if ( config . sessionDelivery instanceof HttpDelivery ) { ( ( HttpDelivery ) config . sessionDelivery ) . setTimeout ( timeout ) ; } |
public class RemoteConsumerDispatcher { /** * Overriding super class method to do nothing */
protected void eventPostCommitAdd ( SIMPMessage msg , TransactionCommon transaction ) { } } | if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) { SibTr . entry ( tc , "eventPostCommitAdd" , new Object [ ] { msg , transaction } ) ; SibTr . exit ( tc , "eventPostCommitAdd" ) ; } |
public class Matrix4d { /** * Set this matrix to a rotation matrix which rotates the given radians about a given axis .
* The axis described by the < code > axis < / code > vector needs to be a unit vector .
* When used with a right - handed coordinate system , the produced rotation will rotate a vector
* counter - clockwise around the rotation axis , when viewing along the negative axis direction towards the origin .
* When used with a left - handed coordinate system , the rotation is clockwise .
* @ param angle
* the angle in radians
* @ param axis
* the axis to rotate about
* @ return this */
public Matrix4d rotation ( double angle , Vector3fc axis ) { } } | return rotation ( angle , axis . x ( ) , axis . y ( ) , axis . z ( ) ) ; |
public class InputUtils { /** * Defines order of @ parameters based on @ processedInput
* @ param processedInput InputHandler processedInput
* @ param parameters Query Parameters ordering of which would be updated */
public static void defineOrder ( ProcessedInput processedInput , QueryParameters parameters ) { } } | String parameterName = null ; for ( int i = 0 ; i < processedInput . getAmountOfParameters ( ) ; i ++ ) { parameterName = processedInput . getParameterName ( i ) ; if ( parameterName != null ) { parameters . updatePosition ( parameterName , i ) ; } } |
public class Analyzer { /** * Reflectively examine the view , gathering , filtering , and sorting fields . The results are
* assigned to { @ link # requiredLists } and { @ link # requiredObjects } ; fields that are lists and
* objects of fields that are not lists , respectively . This method is idempotent ; subsequent
* calls after the first have no effect ( fields need only be analyzed once ) .
* < p > Fields cannot be analyzed before they are assigned , which is why this analyze is delayed
* until needed . This way you can instantiate an Analyzer in a constructor or { @ code < init > }
* without worrying about whether your class or subclass fields are assigned yet . */
private void analyze ( ) { } } | if ( requiredLists != null && requiredObjects != null ) { return ; } requiredLists = required . stream ( ) . filter ( this :: isList ) . map ( f -> new RequiredList < > ( f , view ) ) . filter ( l -> Element . class . isAssignableFrom ( l . genericType ( ) ) || View . class . isAssignableFrom ( l . genericType ( ) ) || Findable . class . isAssignableFrom ( l . genericType ( ) ) ) . collect ( Collectors . toList ( ) ) ; requiredObjects = required . stream ( ) . filter ( f -> ! isList ( f ) ) . map ( this :: fieldToObject ) . collect ( Collectors . toList ( ) ) ; if ( requiredLists . isEmpty ( ) && requiredObjects . isEmpty ( ) ) { throw new NoRequiredElementsException ( view ) ; } |
public class FilterUtilities { /** * Creates an intersect filter .
* @ param geomName the name of the geom field to filter .
* @ param geometry the geometry to use as filtering geom .
* @ return the filter .
* @ throws CQLException */
public static Filter getIntersectsGeometryFilter ( String geomName , Geometry geometry ) throws CQLException { } } | Filter result = CQL . toFilter ( "INTERSECTS(" + geomName + ", " + geometry . toText ( ) + " )" ) ; return result ; |
public class AnnotatedMethodInvoker { /** * Examines the passed class and extracts a single method that is annotated with the specified annotation type , < code > null < / code > if not methods are so annotated . Behavior is undefined if multiple methods
* have the specified annotation . */
public static < T extends Annotation > Method introspectAnnotationSingle ( final Class < ? > klass , final Class < T > annotationType ) { } } | final List < Method > methods = introspectAnnotationMultiple ( klass , annotationType ) ; return ( methods . size ( ) > 0 ) ? methods . get ( 0 ) : null ; |
public class AuthService { /** * Logs into Argus .
* @ param username The username .
* @ param password The password .
* @ throws IOException If the server is unavailable . */
public void login ( String username , String password ) throws IOException { } } | String requestUrl = RESOURCE + "/login" ; Credentials creds = new Credentials ( ) ; creds . setPassword ( password ) ; creds . setUsername ( username ) ; ArgusResponse response = getClient ( ) . executeHttpRequest ( ArgusHttpClient . RequestType . POST , requestUrl , creds ) ; try { assertValidResponse ( response , requestUrl ) ; } catch ( TokenExpiredException e ) { // This should never happen
throw new RuntimeException ( "This should never happen. login() method should never throw a TokenExpiredException" , e ) ; } Map < String , String > tokens = fromJson ( response . getResult ( ) , new TypeReference < Map < String , String > > ( ) { } ) ; getClient ( ) . accessToken = tokens . get ( "accessToken" ) ; getClient ( ) . refreshToken = tokens . get ( "refreshToken" ) ; |
public class ListBuilder { /** * Returns an immutable { @ link C . List } from a collection
* @ param col the collection specified
* @ param < T > element type
* @ return an immutable list contains all elements in the collection */
public static < T > C . List < T > toList ( Collection < ? extends T > col ) { } } | if ( col . size ( ) == 0 ) { return Nil . list ( ) ; } if ( col instanceof C . List ) { C . List < T > list = $ . cast ( col ) ; if ( list . is ( C . Feature . IMMUTABLE ) ) { return list ; } } return new ListBuilder < T > ( col ) . toList ( ) ; |
public class DescribeAgentsRequest { /** * The agent or the Connector IDs for which you want information . If you specify no IDs , the system returns
* information about all agents / Connectors associated with your AWS user account .
* < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use
* { @ link # setAgentIds ( java . util . Collection ) } or { @ link # withAgentIds ( java . util . Collection ) } if you want to override
* the existing values .
* @ param agentIds
* The agent or the Connector IDs for which you want information . If you specify no IDs , the system returns
* information about all agents / Connectors associated with your AWS user account .
* @ return Returns a reference to this object so that method calls can be chained together . */
public DescribeAgentsRequest withAgentIds ( String ... agentIds ) { } } | if ( this . agentIds == null ) { setAgentIds ( new java . util . ArrayList < String > ( agentIds . length ) ) ; } for ( String ele : agentIds ) { this . agentIds . add ( ele ) ; } return this ; |
public class CassandraClientBase {
    /**
     * Finds a {@link List} of entities from the database by their row keys.
     * <p>
     * If the entity type has sub-managed types (an inheritance hierarchy),
     * each subtype's metadata is tried in turn until one yields a non-empty
     * result; otherwise the lookup uses the supplied metadata directly. Any
     * failure is logged and rethrown wrapped in a {@link KunderaException}.
     *
     * @param entityClass   the entity class
     * @param relationNames the relation names
     * @param isWrapReq     the is wrap req
     * @param metadata      the metadata
     * @param rowIds        the row ids
     * @return the list of matching entities (may be {@code null} if nothing
     *         was found)
     */
    public final List findByRowKeys(Class entityClass, List<String> relationNames, boolean isWrapReq, EntityMetadata metadata, Object... rowIds) {
        List entities = null;
        MetamodelImpl metaModel = (MetamodelImpl) kunderaMetadata.getApplicationMetadata().getMetamodel(metadata.getPersistenceUnit());
        EntityType entityType = metaModel.entity(metadata.getEntityClazz());
        List<AbstractManagedType> subManagedType = ((AbstractManagedType) entityType).getSubManagedType();
        try {
            if (!subManagedType.isEmpty()) {
                // Inheritance case: probe each subtype's table/metadata until
                // a non-empty result is found, then stop.
                for (AbstractManagedType subEntity : subManagedType) {
                    EntityMetadata subEntityMetadata = KunderaMetadataManager.getEntityMetadata(kunderaMetadata, subEntity.getJavaType());
                    entities = getDataHandler().fromThriftRow(entityClass, subEntityMetadata, subEntityMetadata.getRelationNames(), isWrapReq, getConsistencyLevel(), rowIds);
                    if (entities != null && !entities.isEmpty()) {
                        break;
                    }
                }
            } else {
                // Plain (non-inherited) entity: single lookup with the
                // caller-supplied metadata and relation names.
                entities = getDataHandler().fromThriftRow(entityClass, metadata, relationNames, isWrapReq, getConsistencyLevel(), rowIds);
            }
        } catch (Exception e) {
            log.error("Error while retrieving records for entity {}, row keys {}", entityClass, rowIds);
            throw new KunderaException(e);
        }
        return entities;
    }
}
public class MemberBox { /** * Writes a Constructor or Method object .
* Methods and Constructors are not serializable , so we must serialize
* information about the class , the name , and the parameters and
* recreate upon deserialization . */
private static void writeMember ( ObjectOutputStream out , Executable member ) throws IOException { } } | if ( member == null ) { out . writeBoolean ( false ) ; return ; } out . writeBoolean ( true ) ; if ( ! ( member instanceof Method || member instanceof Constructor ) ) throw new IllegalArgumentException ( "not Method or Constructor" ) ; out . writeBoolean ( member instanceof Method ) ; out . writeObject ( member . getName ( ) ) ; out . writeObject ( member . getDeclaringClass ( ) ) ; writeParameters ( out , member . getParameterTypes ( ) ) ; |
public class FibonacciHeap {
    /**
     * Dequeues and returns the minimum element of the Fibonacci heap. If the
     * heap is empty, this throws a NoSuchElementException.
     *
     * @return The smallest element of the Fibonacci heap.
     * @throws java.util.NoSuchElementException If the heap is empty.
     */
    public Entry<T> dequeueMin() {
        /* Check for whether we're empty. */
        if (isEmpty())
            throw new NoSuchElementException("Heap is empty.");

        /* Otherwise, we're about to lose an element, so decrement the number
         * of entries in this heap. */
        --mSize;

        /* Grab the minimum element so we know what to return. */
        Entry<T> minElem = mMin;

        /* Now, we need to get rid of this element from the list of roots.
         * There are two cases to consider. First, if this is the only element
         * in the list of roots, we set the list of roots to be null by
         * clearing mMin. Otherwise, if it's not null, then we write the
         * elements next to the min element around the min element to remove
         * it, then arbitrarily reassign the min. */
        if (mMin.mNext == mMin) { // Case one: min is the sole root.
            mMin = null;
        } else { // Case two: splice min out of the circular root list.
            mMin.mPrev.mNext = mMin.mNext;
            mMin.mNext.mPrev = mMin.mPrev;
            mMin = mMin.mNext; // Arbitrary element of the root list.
        }

        /* Next, clear the parent fields of all of the min element's children,
         * since they're about to become roots. Because the elements are
         * stored in a circular list, the traversal is a bit complex. */
        if (minElem.mChild != null) {
            /* Keep track of the first visited node. */
            Entry<?> curr = minElem.mChild;
            do {
                curr.mParent = null;
                /* Walk to the next node, then stop if this is the node we
                 * started at. */
                curr = curr.mNext;
            } while (curr != minElem.mChild);
        }

        /* Next, splice the children of the root node into the topmost list,
         * then set mMin to point somewhere in that list. */
        mMin = mergeLists(mMin, minElem.mChild);

        /* If there are no entries left, we're done. */
        if (mMin == null)
            return minElem;

        /* Next, we need to coalesce all of the roots so that there is only
         * one tree of each degree. To track trees of each size, we allocate
         * an ArrayList where the entry at position i is either null or the
         * unique tree of degree i. */
        List<Entry<T>> treeTable = new ArrayList<Entry<T>>();

        /* We need to traverse the entire list, but since we're going to be
         * messing around with it we have to be careful not to break our
         * traversal order mid-stream. One major challenge is how to detect
         * whether we're visiting the same node twice. To do this, we'll spend
         * a bit of overhead adding all of the nodes to a list, and then will
         * visit each element of this list in order. */
        List<Entry<T>> toVisit = new ArrayList<Entry<T>>();

        /* To add everything, we'll iterate across the elements until we find
         * the first element twice. We check this by looping while the list is
         * empty or while the current element isn't the first element of that
         * list. */
        for (Entry<T> curr = mMin; toVisit.isEmpty() || toVisit.get(0) != curr; curr = curr.mNext)
            toVisit.add(curr);

        /* Traverse this list and perform the appropriate unioning steps. */
        for (Entry<T> curr : toVisit) {
            /* Keep merging until a match arises. */
            while (true) {
                /* Ensure that the list is long enough to hold an element of
                 * this degree. */
                while (curr.mDegree >= treeTable.size())
                    treeTable.add(null);

                /* If nothing's here, we can record that this tree has this
                 * size and are done processing. */
                if (treeTable.get(curr.mDegree) == null) {
                    treeTable.set(curr.mDegree, curr);
                    break;
                }

                /* Otherwise, merge with what's there. */
                Entry<T> other = treeTable.get(curr.mDegree);
                treeTable.set(curr.mDegree, null); // Clear the slot

                /* Determine which of the two trees has the smaller root,
                 * storing the two trees accordingly. */
                Entry<T> min = (other.mPriority < curr.mPriority) ? other : curr;
                Entry<T> max = (other.mPriority < curr.mPriority) ? curr : other;

                /* Break max out of the root list, then merge it into min's
                 * child list. */
                max.mNext.mPrev = max.mPrev;
                max.mPrev.mNext = max.mNext;

                /* Make it a singleton so that we can merge it. */
                max.mNext = max.mPrev = max;
                min.mChild = mergeLists(min.mChild, max);

                /* Reparent max appropriately. */
                max.mParent = min;

                /* Clear max's mark, since it can now lose another child. */
                max.mIsMarked = false;

                /* Increase min's degree; it now has another child. */
                ++min.mDegree;

                /* Continue merging this tree. */
                curr = min;
            }

            /* Update the global min based on this node. Note that we compare
             * for <= instead of < here. That's because if we just did a
             * reparent operation that merged two different trees of equal
             * priority, we need to make sure that the min pointer points to
             * the root-level one. */
            if (curr.mPriority <= mMin.mPriority)
                mMin = curr;
        }
        return minElem;
    }
}
public class StreamingJobsInner {
    /**
     * Creates a streaming job or replaces an already existing streaming job.
     * <p>
     * Async delegation: wraps the service-response observable in a
     * {@link ServiceFuture} that also invokes the supplied callback.
     *
     * @param resourceGroupName The name of the resource group that contains
     *        the resource. You can obtain this value from the Azure Resource
     *        Manager API or the portal.
     * @param jobName The name of the streaming job.
     * @param streamingJob The definition of the streaming job that will be
     *        used to create a new streaming job or replace the existing one.
     * @param ifMatch The ETag of the streaming job. Omit this value to always
     *        overwrite the current record set. Specify the last-seen ETag
     *        value to prevent accidentally overwriting concurrent changes.
     * @param ifNoneMatch Set to '*' to allow a new streaming job to be
     *        created, but to prevent updating an existing record set. Other
     *        values will result in a 412 Pre-condition Failed response.
     * @param serviceCallback the async ServiceCallback to handle successful
     *        and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<StreamingJobInner> createOrReplaceAsync(String resourceGroupName, String jobName, StreamingJobInner streamingJob, String ifMatch, String ifNoneMatch, final ServiceCallback<StreamingJobInner> serviceCallback) {
        return ServiceFuture.fromHeaderResponse(createOrReplaceWithServiceResponseAsync(resourceGroupName, jobName, streamingJob, ifMatch, ifNoneMatch), serviceCallback);
    }
}
public class SqlExistStatement { /** * Return SELECT clause for object existence call */
public String getStatement ( ) { } } | if ( sql == null ) { StringBuffer stmt = new StringBuffer ( 128 ) ; ClassDescriptor cld = getClassDescriptor ( ) ; FieldDescriptor [ ] fieldDescriptors = cld . getPkFields ( ) ; if ( fieldDescriptors == null || fieldDescriptors . length == 0 ) { throw new OJBRuntimeException ( "No PK fields defined in metadata for " + cld . getClassNameOfObject ( ) ) ; } FieldDescriptor field = fieldDescriptors [ 0 ] ; stmt . append ( SELECT ) ; stmt . append ( field . getColumnName ( ) ) ; stmt . append ( FROM ) ; stmt . append ( cld . getFullTableName ( ) ) ; appendWhereClause ( cld , false , stmt ) ; sql = stmt . toString ( ) ; } return sql ; |
public class Exceptions { /** * TODO : was package - level initially */
public static RuntimeException launderCacheListenerException ( CacheListenerException e ) { } } | Throwable cause = e . getCause ( ) ; if ( cause instanceof CacheEntryListenerException ) return ( CacheEntryListenerException ) cause ; if ( cause instanceof Exception ) return new CacheEntryListenerException ( cause ) ; if ( cause instanceof Error ) throw ( Error ) cause ; return e ; |
public class ZPicture {
    /**
     * Queues a 'picture' message to the socket (or actor), so it can be sent.
     *
     * @param picture The picture is a string that defines the type of each
     *        frame. This makes it easy to send a complex multiframe message
     *        in one call. The picture can contain any of these characters,
     *        each corresponding to zero or one arguments:
     *        <table>
     *        <caption></caption>
     *        <tr><td>i = int (stores signed integer)</td></tr>
     *        <tr><td>1 = byte (stores 8-bit unsigned integer)</td></tr>
     *        <tr><td>2 = int (stores 16-bit unsigned integer)</td></tr>
     *        <tr><td>4 = long (stores 32-bit unsigned integer)</td></tr>
     *        <tr><td>8 = long (stores 64-bit unsigned integer)</td></tr>
     *        <tr><td>s = String</td></tr>
     *        <tr><td>b = byte[]</td></tr>
     *        <tr><td>c = byte[]</td></tr>
     *        <tr><td>f = ZFrame</td></tr>
     *        <tr><td>m = ZMsg (sends all frames in the ZMsg) <b>Has to be the last element of the picture</b></td></tr>
     *        <tr><td>z = sends zero-sized frame (0 arguments)</td></tr>
     *        </table>
     *        Note that s, b, f and m are encoded the same way and the choice
     *        is offered as a convenience to the sender, which may or may not
     *        already have data in a ZFrame or ZMsg. Does not change or take
     *        ownership of any arguments. Also see
     *        {@link #recvPicture(Socket, String)} how to recv a multiframe
     *        picture.
     * @param args Arguments according to the picture
     * @return true if successful, false if sending failed for any reason
     */
    @Draft
    public boolean sendPicture(Socket socket, String picture, Object... args) {
        // Reject pictures that do not match the grammar up front.
        if (!FORMAT.matcher(picture).matches()) {
            throw new ZMQException(picture + " is not in expected format " + FORMAT.pattern(), ZError.EPROTO);
        }
        ZMsg msg = new ZMsg();
        // argIndex tracks the consumed argument; 'z' consumes none and
        // compensates by decrementing it before the shared increment.
        for (int pictureIndex = 0, argIndex = 0; pictureIndex < picture.length(); pictureIndex++, argIndex++) {
            char pattern = picture.charAt(pictureIndex);
            switch (pattern) {
                case 'i': { // signed int, rendered as decimal text
                    msg.add(String.format("%d", (int) args[argIndex]));
                    break;
                }
                case '1': { // 8-bit unsigned, masked from an int
                    msg.add(String.format("%d", (0xff) & (int) args[argIndex]));
                    break;
                }
                case '2': { // 16-bit unsigned, masked from an int
                    msg.add(String.format("%d", (0xffff) & (int) args[argIndex]));
                    break;
                }
                case '4': {
                    // NOTE(review): 0xffffffff & (int) is an identity on int,
                    // so 32-bit unsigned values above Integer.MAX_VALUE would
                    // print as negative — confirm this matches the wire spec.
                    msg.add(String.format("%d", (0xffffffff) & (int) args[argIndex]));
                    break;
                }
                case '8': { // 64-bit value taken from a long argument
                    msg.add(String.format("%d", (long) args[argIndex]));
                    break;
                }
                case 's': {
                    msg.add((String) args[argIndex]);
                    break;
                }
                case 'b':
                case 'c': { // both map to a raw byte[] frame
                    msg.add((byte[]) args[argIndex]);
                    break;
                }
                case 'f': {
                    msg.add((ZFrame) args[argIndex]);
                    break;
                }
                case 'm': { // drain every frame of the ZMsg argument
                    ZMsg msgParm = (ZMsg) args[argIndex];
                    while (msgParm.size() > 0) {
                        msg.add(msgParm.pop());
                    }
                    break;
                }
                case 'z': { // zero-sized frame; consumes no argument
                    msg.add((byte[]) null);
                    argIndex--;
                    break;
                }
                default:
                    // Unreachable when FORMAT matched; kept as a safety net.
                    assert (false) : "invalid picture element '" + pattern + "'";
            }
        }
        return msg.send(socket, false);
    }
}
public class DaoUtils { /** * Returns an escaped value in parameter , with the desired wildcards . Suitable to be used in a like sql query < br / >
* Escapes the " / " , " % " and " _ " characters . < br / >
* You < strong > must < / strong > add " ESCAPE ' / ' " after your like query . It defines ' / ' as the escape character . */
public static String buildLikeValue ( String value , WildcardPosition wildcardPosition ) { } } | String escapedValue = escapePercentAndUnderscore ( value ) ; String wildcard = "%" ; switch ( wildcardPosition ) { case BEFORE : escapedValue = wildcard + escapedValue ; break ; case AFTER : escapedValue += wildcard ; break ; case BEFORE_AND_AFTER : escapedValue = wildcard + escapedValue + wildcard ; break ; default : throw new UnsupportedOperationException ( "Unhandled WildcardPosition: " + wildcardPosition ) ; } return escapedValue ; |
public class Reflection { /** * Allows to gracefully create a new instance of class , without having to try - catch exceptions .
* @ param ofClass instance of this class will be constructed using reflection .
* @ return a new instance of passed class .
* @ throws GdxRuntimeException when unable to create a new instance .
* @ param < Type > type of constructed value . */
public static < Type > Type newInstance ( final Class < Type > ofClass ) { } } | try { return ClassReflection . newInstance ( ofClass ) ; } catch ( final Throwable exception ) { throw new GdxRuntimeException ( "Unable to create a new instance of class: " + ofClass , exception ) ; } |
public class RAMJobStore { /** * Get all of the Triggers that are associated to the given Job .
* < p > If there are no matches , a zero - length array should be returned . */
@ Override public List < Trigger > getTriggersForJob ( String jobKey ) { } } | ArrayList < Trigger > trigList = new ArrayList < Trigger > ( ) ; synchronized ( lock ) { for ( int i = 0 ; i < wrappedTriggers . size ( ) ; i ++ ) { TriggerWrapper tw = wrappedTriggers . get ( i ) ; if ( tw . jobKey . equals ( jobKey ) ) { trigList . add ( ( OperableTrigger ) tw . trigger . clone ( ) ) ; } } } return trigList ; |
public class AbstractResult { /** * Computes the confidence 05 interval - factor . This value has to be combined with the mean to get the
* confidence - interval .
* @ param meter the meter for the 05 - confidence interval factor
* @ return the 99 % confidence */
public final double getConf05 ( final AbstractMeter meter ) { } } | checkIfMeterExists ( meter ) ; final AbstractUnivariateStatistic conf05 = new Percentile ( 5.0 ) ; final CollectionDoubleCollection doubleColl = new CollectionDoubleCollection ( this . meterResults . get ( meter ) ) ; return conf05 . evaluate ( doubleColl . toArray ( ) , 0 , doubleColl . toArray ( ) . length ) ; |
public class SillynessPotPourri {
    /**
     * Implements the visitor to look for various silly bugs.
     * <p>
     * Dispatches per-opcode checks, records branch targets, tags stack items
     * with user values for later detectors, and maintains last-opcode/PC
     * history in the finally block.
     *
     * @param seen the opcode of the currently parsed instruction
     */
    @edu.umd.cs.findbugs.annotations.SuppressFBWarnings(value = "SF_SWITCH_FALLTHROUGH", justification = "This fall-through is deliberate and documented")
    @Override
    public void sawOpcode(int seen) {
        int reg = -1;
        SPPUserValue userValue = null;
        try {
            stack.precomputation(this);
            checkTrimLocations();
            // Record which PCs branch to each target for later analysis.
            if (isBranchByteCode(seen)) {
                Integer branchTarget = Integer.valueOf(getBranchTarget());
                BitSet branchInsSet = branchTargets.get(branchTarget);
                if (branchInsSet == null) {
                    branchInsSet = new BitSet();
                    branchTargets.put(branchTarget, branchInsSet);
                }
                branchInsSet.set(getPC());
            }
            // not an else if, because some of the opcodes in the previous
            // branch also matter here.
            if ((seen == Const.IFEQ) || (seen == Const.IFLE) || (seen == Const.IFNE)) {
                checkForEmptyStringAndNullChecks(seen);
            }
            // see above, several opcodes hit multiple branches.
            if ((seen == Const.IFEQ) || (seen == Const.IFNE) || (seen == Const.IFGT)) {
                checkSizeEquals0();
            }
            if (seen == Const.IFEQ) {
                checkNullAndInstanceOf();
            }
            switch (seen) {
                case Const.IFNE:
                    checkNotEqualsStringBuilderLength();
                    break;
                case Const.IFEQ:
                    checkEqualsStringBufferLength();
                    break;
                case Const.IRETURN: {
                    if (lastIfEqWasBoolean) {
                        checkForUselessTernaryReturn();
                    }
                }
                //$FALL-THROUGH$ -- all return opcodes clear trim locations.
                case Const.LRETURN:
                case Const.DRETURN:
                case Const.FRETURN:
                case Const.ARETURN:
                    trimLocations.clear();
                    break;
                case Const.LDC2_W:
                    checkApproximationsOfMathConstants();
                    break;
                case Const.DCMPL:
                    checkCompareToNaNDouble();
                    break;
                case Const.FCMPL:
                    checkCompareToNaNFloat();
                    break;
                case Const.ICONST_0:
                case Const.ICONST_1:
                case Const.ICONST_2:
                case Const.ICONST_3:
                    userValue = sawIntConst();
                    break;
                case Const.CALOAD:
                    checkImproperToCharArrayUse();
                    break;
                case Const.INVOKESTATIC:
                    userValue = sawInvokeStatic();
                    break;
                case Const.INVOKEVIRTUAL:
                    userValue = sawInvokeVirtual();
                    break;
                case Const.INVOKESPECIAL:
                    sawInvokeSpecial();
                    break;
                case Const.INVOKEINTERFACE:
                    userValue = sawInvokeInterface();
                    break;
                case Const.IF_ICMPEQ:
                case Const.IF_ICMPGE:
                case Const.IF_ICMPGT:
                case Const.IF_ICMPLE:
                case Const.IF_ICMPLT:
                case Const.IF_ICMPNE:
                    // Detect compareTo() results compared to a constant other
                    // than zero (only the sign of compareTo is specified).
                    if (stack.getStackDepth() >= 2) {
                        OpcodeStack.Item first = stack.getStackItem(1);
                        OpcodeStack.Item second = stack.getStackItem(0);
                        SPPUserValue uv = (SPPUserValue) first.getUserValue();
                        Integer c = null;
                        if ((uv != null) && (uv.getMethod() == SPPMethod.COMPARETO)) {
                            c = (Integer) second.getConstant();
                        } else {
                            uv = (SPPUserValue) second.getUserValue();
                            if ((uv != null) && (uv.getMethod() == SPPMethod.COMPARETO)) {
                                c = (Integer) first.getConstant();
                            }
                        }
                        if ((uv != null) && (uv.getMethod() == SPPMethod.COMPARETO) && ((c == null) || (c.intValue() != 0))) {
                            bugReporter.reportBug(new BugInstance(this, BugType.SPP_USE_ZERO_WITH_COMPARATOR.name(), NORMAL_PRIORITY).addClass(this).addMethod(this).addSourceLine(this));
                        }
                    }
                    break;
                default:
                    if (OpcodeUtils.isALoad(seen)) {
                        sawLoad(seen);
                    } else if (OpcodeUtils.isAStore(seen)) {
                        reg = RegisterUtils.getAStoreReg(this, seen);
                        checkTrimDupStore();
                        checkStutterdAssignment(seen, reg);
                        checkImmutableUsageOfStringBuilder(reg);
                    }
            }
        } catch (ClassNotFoundException cnfe) {
            bugReporter.reportMissingClass(cnfe);
        } finally {
            // Always keep the opcode stack and history bookkeeping in sync,
            // even when a check above threw.
            TernaryPatcher.pre(stack, seen);
            stack.sawOpcode(this, seen);
            TernaryPatcher.post(stack, seen);
            if ((stack.getStackDepth() > 0)) {
                OpcodeStack.Item item = stack.getStackItem(0);
                if (userValue != null) {
                    item.setUserValue(userValue);
                } else {
                    SPPUserValue uv = (SPPUserValue) item.getUserValue();
                    // Clear stale ITERATOR tags when the value is reloaded.
                    if ((((uv != null) && (uv.getMethod() == SPPMethod.ITERATOR)) && (seen == Const.GETFIELD)) || (seen == Const.ALOAD) || ((seen >= Const.ALOAD_0) && (seen <= Const.ALOAD_3))) {
                        item.setUserValue(null);
                    }
                }
            }
            lastOpcode = seen;
            lastReg = reg;
            // Shift the last-4-PCs window and append the current PC.
            System.arraycopy(lastPCs, 1, lastPCs, 0, 3);
            lastPCs[3] = getPC();
        }
    }
}
public class Scs_gaxpy { /** * Sparse matrix times dense column vector , y = A * x + y .
* @ param A
* column - compressed matrix
* @ param x
* size n , vector x
* @ param y
* size m , vector y
* @ return true if successful , false on error */
public static boolean cs_gaxpy ( Scs A , float [ ] x , float [ ] y ) { } } | int p , j , n , Ap [ ] , Ai [ ] ; float Ax [ ] ; if ( ! Scs_util . CS_CSC ( A ) || x == null || y == null ) return ( false ) ; /* check inputs */
n = A . n ; Ap = A . p ; Ai = A . i ; Ax = A . x ; for ( j = 0 ; j < n ; j ++ ) { for ( p = Ap [ j ] ; p < Ap [ j + 1 ] ; p ++ ) { y [ Ai [ p ] ] += Ax [ p ] * x [ j ] ; } } return ( true ) ; |
public class CommerceShipmentItemUtil {
    /**
     * Returns a range of all the commerce shipment items where
     * commerceShipmentId = &#63;.
     * <p>
     * Useful when paginating results. Returns a maximum of
     * <code>end - start</code> instances. <code>start</code> and
     * <code>end</code> are not primary keys, they are indexes in the result
     * set. Thus, <code>0</code> refers to the first result in the set. Setting
     * both <code>start</code> and <code>end</code> to
     * {@link QueryUtil#ALL_POS} will return the full result set.
     * <p>
     * Static facade that delegates to the persistence implementation.
     *
     * @param commerceShipmentId the commerce shipment ID
     * @param start the lower bound of the range of commerce shipment items
     * @param end the upper bound of the range of commerce shipment items (not
     *        inclusive)
     * @return the range of matching commerce shipment items
     */
    public static List<CommerceShipmentItem> findByCommerceShipment(long commerceShipmentId, int start, int end) {
        return getPersistence().findByCommerceShipment(commerceShipmentId, start, end);
    }
}
public class DistributedObjectCacheAdapter { /** * Adds one or more aliases for the given key in the cache ' s mapping table . If the alias is already
* associated with another key , it will be changed to associate with the new key .
* @ param key the key assoicated with alias
* @ param aliasArray the aliases to use for lookups
* @ throws IllegalArgumentException if the key is not in the cache ' s mapping table . */
@ Override public void addAlias ( Object key , Object [ ] aliasArray ) { } } | final String methodName = "addAlias(key, aliasArray)" ; functionNotAvailable ( methodName ) ; |
public class ReadWriteMultipleRequest { /** * getMessage - - return a prepared message .
* @ return prepared message */
public byte [ ] getMessage ( ) { } } | byte results [ ] = new byte [ 9 + 2 * getWriteWordCount ( ) ] ; results [ 0 ] = ( byte ) ( readReference >> 8 ) ; results [ 1 ] = ( byte ) ( readReference & 0xFF ) ; results [ 2 ] = ( byte ) ( readCount >> 8 ) ; results [ 3 ] = ( byte ) ( readCount & 0xFF ) ; results [ 4 ] = ( byte ) ( writeReference >> 8 ) ; results [ 5 ] = ( byte ) ( writeReference & 0xFF ) ; results [ 6 ] = ( byte ) ( writeCount >> 8 ) ; results [ 7 ] = ( byte ) ( writeCount & 0xFF ) ; results [ 8 ] = ( byte ) ( writeCount * 2 ) ; int offset = 9 ; for ( int i = 0 ; i < writeCount ; i ++ ) { Register reg = getRegister ( i ) ; byte [ ] bytes = reg . toBytes ( ) ; results [ offset ++ ] = bytes [ 0 ] ; results [ offset ++ ] = bytes [ 1 ] ; } return results ; |
public class OWLObjectPropertyRangeAxiomImpl_CustomFieldSerializer { /** * Deserializes the content of the object from the
* { @ link com . google . gwt . user . client . rpc . SerializationStreamReader } .
* @ param streamReader the { @ link com . google . gwt . user . client . rpc . SerializationStreamReader } to read the
* object ' s content from
* @ param instance the object instance to deserialize
* @ throws com . google . gwt . user . client . rpc . SerializationException
* if the deserialization operation is not
* successful */
@ Override public void deserializeInstance ( SerializationStreamReader streamReader , OWLObjectPropertyRangeAxiomImpl instance ) throws SerializationException { } } | deserialize ( streamReader , instance ) ; |
public class CmsRequestContext { /** * Adds the given site root of this context to the given resource name ,
* taking into account special folders like " / system " where no site root must be added ,
* and also translates the resource name with the configured the directory translator . < p >
* @ param siteRoot the site root to add
* @ param resourcename the resource name
* @ return the translated resource name including site root */
public String addSiteRoot ( String siteRoot , String resourcename ) { } } | if ( ( resourcename == null ) || ( siteRoot == null ) ) { return null ; } siteRoot = getAdjustedSiteRoot ( siteRoot , resourcename ) ; StringBuffer result = new StringBuffer ( 128 ) ; result . append ( siteRoot ) ; if ( ( ( siteRoot . length ( ) == 0 ) || ( siteRoot . charAt ( siteRoot . length ( ) - 1 ) != '/' ) ) && ( ( resourcename . length ( ) == 0 ) || ( resourcename . charAt ( 0 ) != '/' ) ) ) { // add slash between site root and resource if required
result . append ( '/' ) ; } result . append ( resourcename ) ; return m_directoryTranslator . translateResource ( result . toString ( ) ) ; |
public class SymoplibParser { /** * Load all SpaceGroup information from the file spacegroups . xml
* @ return a map providing information for all spacegroups */
private static TreeMap < Integer , SpaceGroup > parseSpaceGroupsXML ( ) { } } | // NOTE : if the space group file is requested by some part of the code ( i . e . this method is called ) and
// there is a problem in reading it , then that ' s truly a FATAL problem , since this is not a user file
// but a file that ' s part of the distribution : it MUST be there and MUST have the right format . A failure
// to read it is more of a " compilation " error than a runtime error . That ' s the reason that System . exit
// is called ( which otherwise usually is not a good idea ) .
// The rest of the application will simply not work : there are 3 options to handle it
// a ) returning null and then a NullPointer will happen down the line and thus a not very clear
// error message will be printed
// b ) throw the exception forward and catch it in the final main but that would also be bad because
// this is a file that the user didn ' t input but that should be part of the distribution
// c ) call System . exit ( 1 ) and " crash " the application with a human - understandable error message
InputStream spaceGroupIS = SymoplibParser . class . getClassLoader ( ) . getResourceAsStream ( SPACE_GROUPS_FILE ) ; if ( spaceGroupIS == null ) { logger . error ( "Fatal error! Could not find resource: " + SPACE_GROUPS_FILE + ". This probably means that your biojava jar file is corrupt or incorrectly built." ) ; System . exit ( 1 ) ; } TreeMap < Integer , SpaceGroup > map = new TreeMap < Integer , SpaceGroup > ( ) ; try { map = parseSpaceGroupsXML ( spaceGroupIS ) ; } catch ( IOException e ) { logger . error ( "Fatal error! Could not parse resource: " + SPACE_GROUPS_FILE + ". Error: " + e . getMessage ( ) ) ; System . exit ( 1 ) ; } catch ( JAXBException e ) { logger . error ( "Fatal error! Could not parse resource: " + SPACE_GROUPS_FILE + ". Problem in xml formatting: " + e . getMessage ( ) ) ; System . exit ( 1 ) ; } name2sgs = new HashMap < String , SpaceGroup > ( ) ; for ( SpaceGroup sg : map . values ( ) ) { sg . initializeCellTranslations ( ) ; name2sgs . put ( sg . getShortSymbol ( ) , sg ) ; if ( sg . getAltShortSymbol ( ) != null ) { // we add also alternative name to map so we can look it up
name2sgs . put ( sg . getAltShortSymbol ( ) , sg ) ; } } return map ; |
public class AsynchronousRequest { /** * For more info on guild log API go < a href = " https : / / wiki . guildwars2 . com / wiki / API : 2 / guild / : id / log " > here < / a > < br / >
* Give user the access to { @ link Callback # onResponse ( Call , Response ) } and { @ link Callback # onFailure ( Call , Throwable ) } methods for custom interactions < br / >
* @ param id guild id
* @ param api Guild leader ' s Guild Wars 2 API key
* @ param since log id used to filter log entries
* @ param callback callback that is going to be used for { @ link Call # enqueue ( Callback ) }
* @ throws NullPointerException if given { @ link Callback } is empty
* @ see GuildLog guild log info */
public void getFilteredGuildLogInfo ( String id , String api , int since , Callback < List < GuildLog > > callback ) throws GuildWars2Exception , NullPointerException { } } | isParamValid ( new ParamChecker ( ParamType . GUILD , id ) , new ParamChecker ( ParamType . API , api ) ) ; gw2API . getFilteredGuildLogInfo ( id , api , Integer . toString ( since ) ) . enqueue ( callback ) ; |
public class FontDescriptorSpecificationImpl {
    /**
     * Sets the FtDsFlags attribute and, when any adapters are attached,
     * fires an EMF SET notification carrying the old and new values.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public void setFtDsFlags(Integer newFtDsFlags) {
        Integer oldFtDsFlags = ftDsFlags;
        ftDsFlags = newFtDsFlags;
        // notify observers only when someone is actually listening
        if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, AfplibPackage.FONT_DESCRIPTOR_SPECIFICATION__FT_DS_FLAGS, oldFtDsFlags, ftDsFlags));
    }
}
public class CsvWriter { /** * Create a DSL on the specified type .
* @ param typeReference the type of object to write
* @ param < T > the type
* @ return a DSL on the specified type */
public static < T > CsvWriterDSL < T > from ( TypeReference < T > typeReference ) { } } | return from ( typeReference . getType ( ) ) ; |
public class TraceNLS { /** * Return the message obtained by looking up the localized text
* corresponding to the specified key in the specified ResourceBundle using
* the specified Locale and formatting the resultant text using the
* specified substitution arguments .
* The message is formatted using the java . text . MessageFormat class .
* Substitution parameters are handled according to the rules of that class .
* Most noteably , that class does special formatting for native java Date and
* Number objects .
* If an error occurs in obtaining the localized text corresponding to this
* key , then the defaultString is used as the message text . If all else fails ,
* this class will provide one of the default English messages to indicate
* what occurred .
* @ param aClass
* Class object calling this method
* @ param bundle
* the ResourceBundle to use for lookups . Null is tolerated . If
* null is passed , the resource bundle will be looked up from
* bundleName . If not null , bundleName must match .
* @ param bundleName
* the fully qualified name of the ResourceBundle . Must not be
* null .
* @ param aClass
* the class representing the caller of the method - - used for
* loading the right resource bundle
* @ param key
* the key to use in the ResourceBundle lookup . Must not be null .
* @ param args
* substitution parameters that are inserted into the message
* text . Null is tolerated
* @ param defaultString
* text to use if the localized text cannot be found . Must not be
* null .
* @ param locale
* the Locale object to use when looking up the ResourceBundle .
* If null is passed , the default Locale will be used .
* @ param quiet
* indicates whether or not errors will be logged when
* encountered , and must be used in conjunction with com . ibm .
* @ return a non - null message that is localized and formatted as
* appropriate . */
public static String getFormattedMessage ( Class < ? > aClass , ResourceBundle bundle , String bundleName , String key , Object [ ] args , String defaultString , Locale locale , boolean quiet ) { } } | return TraceNLSResolver . getInstance ( ) . getMessage ( aClass , bundle , bundleName , key , args , defaultString , true , locale , quiet ) ; |
public class RequestHttpWeb { /** * Service is the main call when data is available from the socket . */
@ Override public StateConnection service ( ) { } } | try { StateConnection nextState = _state . service ( this ) ; // return StateConnection . CLOSE ;
return nextState ; /* if ( _ invocation = = null & & getRequestHttp ( ) . parseInvocation ( ) ) {
if ( _ invocation = = null ) {
return NextState . CLOSE ;
return _ invocation . service ( this , getResponse ( ) ) ;
else
if ( _ upgrade ! = null ) {
return _ upgrade . service ( ) ;
else if ( _ invocation ! = null ) {
return _ invocation . service ( this ) ;
else {
return StateConnection . CLOSE ; */
} catch ( Throwable e ) { log . warning ( e . toString ( ) ) ; log . log ( Level . FINER , e . toString ( ) , e ) ; // e . printStackTrace ( ) ;
toClose ( ) ; return StateConnection . CLOSE_READ_A ; } |
public class ServerInfoMBeanImpl { /** * { @ inheritDoc } */
@ Override public String getDefaultHostname ( ) { } } | final String hostname = varReg . resolveString ( VAR_DEFAULTHOSTNAME ) ; // If we have no specified host name ( we get the raw variable back ) , or the host name is empty , default to localhost
if ( VAR_DEFAULTHOSTNAME . equals ( hostname ) || hostname . trim ( ) . isEmpty ( ) ) { return "localhost" ; } else if ( "*" . equals ( hostname ) ) { return null ; // Carried over behaviour from CollectiveHostName
} else { return hostname . toLowerCase ( ) ; } |
public class Serializables { /** * Utility for turning a serializable object into a byte array . If the { @ code compress } option is selected
* then the bytes will be run through gzip compression . */
public static byte [ ] serialize ( Serializable serializable , boolean compress ) throws IOException { } } | requireNonNull ( serializable ) ; try ( ByteArrayOutputStream o = new ByteArrayOutputStream ( ) ; ObjectOutputStream oos = new ObjectOutputStream ( compress ? new GZIPOutputStream ( o ) : o ) ) { oos . writeObject ( serializable ) ; oos . flush ( ) ; oos . close ( ) ; // must close before getting bytes because GZip streams require it .
return o . toByteArray ( ) ; } |
public class BaseTemplate { /** * Imports a template and renders it using the specified model , allowing fine grained composition of templates and
* layouting . This works similarily to a template include but allows a distinct model to be used . If the layout
* inherits from the parent model , a new model is created , with the values from the parent model , eventually
* overridden with those provided specifically for this layout .
* @ param model model to be passed to the template
* @ param templateName the name of the template to be used as a layout
* @ param inheritModel a boolean indicating if we should inherit the parent model
* @ return this template instance
* @ throws IOException
* @ throws ClassNotFoundException */
public Object layout ( Map model , String templateName , boolean inheritModel ) throws IOException , ClassNotFoundException { } } | Map submodel = inheritModel ? forkModel ( model ) : model ; URL resource = engine . resolveTemplate ( templateName ) ; engine . createTypeCheckedModelTemplate ( resource , modelTypes ) . make ( submodel ) . writeTo ( out ) ; return this ; |
public class Jwts { /** * Creates a new { @ link Header } instance suitable for < em > plaintext < / em > ( not digitally signed ) JWTs , populated
* with the specified name / value pairs . As this is a less common use of JWTs , consider using the
* { @ link # jwsHeader ( java . util . Map ) } factory method instead if you will later digitally sign the JWT .
* @ return a new { @ link Header } instance suitable for < em > plaintext < / em > ( not digitally signed ) JWTs . */
public static Header header ( Map < String , Object > header ) { } } | return Classes . newInstance ( "io.jsonwebtoken.impl.DefaultHeader" , MAP_ARG , header ) ; |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.