signature stringlengths 43 39.1k | implementation stringlengths 0 450k |
|---|---|
public class OptionsCertificatePanel {
    /**
     * Initializes this panel: obtains the SSL context manager from the global
     * options, builds the list/table models, sets up the layout, the certificate
     * sub-panel and the driver configuration, and seeds the UI from current state.
     */
    private void initialize() {
        contextManager = Model.getSingleton().getOptionsParam().getCertificateParam().getSSLContextManager();
        keyStoreListModel = new DefaultListModel<>();
        aliasTableModel = new AliasTableModel(contextManager);
        this.setLayout(new CardLayout());
        this.setName(Constant.messages.getString("options.cert.title"));
        JPanel certificatePanel = getPanelCertificate();
        this.add(certificatePanel, certificatePanel.getName());
        driverConfig = createDriverConfiguration();
        updateDriverComboBox();
        // Keep the driver combo box in sync with later changes to the driver configuration.
        driverConfig.addChangeListener(e -> updateDriverComboBox());
        Certificate cert = contextManager.getDefaultCertificate();
        if (cert != null) {
            certificateTextField.setText(cert.toString());
        }
        // An already-populated key store implies client certificates should be enabled.
        if (contextManager.getKeyStoreCount() != 0) {
            overrideEnableClientCertificate = true;
        }
    }
}
public class Coin { /** * < p > Parses an amount expressed in the way humans are used to . < / p >
* < p > This takes string in a format understood by { @ link BigDecimal # BigDecimal ( String ) } , for example " 0 " , " 1 " , " 0.10 " ,
* " 1.23E3 " , " 1234.5E - 5 " . < / p >
* @ throws IllegalArgumentException
* if you try to specify fractional satoshis , or a value out of range . */
public static Coin parseCoin ( final String str ) { } } | try { long satoshis = new BigDecimal ( str ) . movePointRight ( SMALLEST_UNIT_EXPONENT ) . longValueExact ( ) ; return Coin . valueOf ( satoshis ) ; } catch ( ArithmeticException e ) { throw new IllegalArgumentException ( e ) ; // Repackage exception to honor method contract
} |
public class ConcurrentWebDriverFactory { /** * enables Webdriver usage of the same from different threads ( handles locks internally ) . This locks an already in use instance .
* < i > Difference to < tt > ThreadGuard < / tt > : < / i > It does not forbid usage of a single webdriver by different threads .
* based on { @ link org . openqa . selenium . support . ThreadGuard } */
public static WebDriver createSyncronized ( WebDriver webDriver ) { } } | ClassLoader classLoader = webDriver . getClass ( ) . getClassLoader ( ) ; List < Class < ? > > interfaces = ClassUtils . getAllInterfaces ( webDriver . getClass ( ) ) ; InvocationHandler invocationHandler = new SynchronizedWebDriverInvocationHandler ( webDriver ) ; return ( WebDriver ) Proxy . newProxyInstance ( classLoader , interfaces . toArray ( new Class [ interfaces . size ( ) ] ) , invocationHandler ) ; |
public class DefaultQueryParamsParser { /** * < strong > Important ! < / strong > Katharsis implementation differs form JSON API
* < a href = " http : / / jsonapi . org / format / # fetching - filtering " > definition of filtering < / a >
* in order to fit standard query parameter serializing strategy and maximize effective processing of data .
* Filter params can be send with following format ( Katharsis does not specify or implement any operators ) : < br >
* < strong > filter [ ResourceType ] [ property | operator ] ( [ property | operator ] ) * = " value " < / strong > < br >
* Examples of accepted filtering of resources :
* < ul >
* < li > { @ code GET / tasks / ? filter [ tasks ] [ name ] = Super task } < / li >
* < li > { @ code GET / tasks / ? filter [ tasks ] [ name ] = Super task & filter [ tasks ] [ dueDate ] = 2015-10-01 } < / li >
* < li > { @ code GET / tasks / ? filter [ tasks ] [ name ] [ $ startWith ] = Super task } < / li >
* < li > { @ code GET / tasks / ? filter [ tasks ] [ name ] [ ] [ $ startWith ] = Super & filter [ tasks ] [ name ] [ ] [ $ endWith ] = task } < / li >
* < / ul >
* @ param context No idea . I didn ' t write this code .
* @ return { @ link TypedParams } Map of filtering params passed to a request grouped by type of resource */
protected TypedParams < FilterParams > parseFiltersParameters ( final QueryParamsParserContext context ) { } } | String filterKey = RestrictedQueryParamsMembers . filter . name ( ) ; Map < String , Set < String > > filters = filterQueryParamsByKey ( context , filterKey ) ; Map < String , Map < String , Set < String > > > temporaryFiltersMap = new LinkedHashMap < > ( ) ; for ( Map . Entry < String , Set < String > > entry : filters . entrySet ( ) ) { List < String > propertyList = buildPropertyListFromEntry ( entry , filterKey ) ; String resourceType = propertyList . get ( 0 ) ; String propertyPath = StringUtils . join ( "." , propertyList . subList ( 1 , propertyList . size ( ) ) ) ; if ( temporaryFiltersMap . containsKey ( resourceType ) ) { Map < String , Set < String > > resourceParams = temporaryFiltersMap . get ( resourceType ) ; resourceParams . put ( propertyPath , Collections . unmodifiableSet ( entry . getValue ( ) ) ) ; } else { Map < String , Set < String > > resourceParams = new LinkedHashMap < > ( ) ; temporaryFiltersMap . put ( resourceType , resourceParams ) ; resourceParams . put ( propertyPath , entry . getValue ( ) ) ; } } Map < String , FilterParams > decodedFiltersMap = new LinkedHashMap < > ( ) ; for ( Map . Entry < String , Map < String , Set < String > > > resourceTypesMap : temporaryFiltersMap . entrySet ( ) ) { Map < String , Set < String > > filtersMap = Collections . unmodifiableMap ( resourceTypesMap . getValue ( ) ) ; decodedFiltersMap . put ( resourceTypesMap . getKey ( ) , new FilterParams ( filtersMap ) ) ; } return new TypedParams < > ( Collections . unmodifiableMap ( decodedFiltersMap ) ) ; |
public class CoverTree { /** * Splits a given pointSet into near and far based on the given
* scale / level . All points with distance > base ^ maxScale would be moved
* to far set . In other words , all those points that are not covered by the
* next child ball of a point p ( ball made of the same point p but of
* smaller radius at the next lower level ) are removed from the supplied
* current pointSet and put into farSet .
* @ param pointSet the supplied set from which all far points
* would be removed .
* @ param farSet the set in which all far points having distance
* > base ^ maxScale would be put into .
* @ param maxScale the given scale based on which the distances
* of points are judged to be far or near . */
private void split ( ArrayList < DistanceSet > pointSet , ArrayList < DistanceSet > farSet , int maxScale ) { } } | double fmax = getCoverRadius ( maxScale ) ; ArrayList < DistanceSet > newSet = new ArrayList < > ( ) ; for ( int i = 0 ; i < pointSet . size ( ) ; i ++ ) { DistanceSet n = pointSet . get ( i ) ; if ( n . dist . get ( n . dist . size ( ) - 1 ) <= fmax ) { newSet . add ( n ) ; } else { farSet . add ( n ) ; } } pointSet . clear ( ) ; pointSet . addAll ( newSet ) ; |
public class OqlBuilder { /** * groupBy .
* @ param what a { @ link java . lang . String } object .
* @ return a { @ link org . beangle . commons . dao . query . builder . OqlBuilder } object . */
public OqlBuilder < T > groupBy ( final String what ) { } } | if ( Strings . isNotEmpty ( what ) ) { groups . add ( what ) ; } return this ; |
public class GVRTextureParameters { /** * Returns an integer array that contains the current values for all the
* texture parameters .
* @ return an integer array that contains the current values for all the
* texture parameters . */
public int [ ] getCurrentValuesArray ( ) { } } | int [ ] currentValues = new int [ 5 ] ; currentValues [ 0 ] = getMinFilterType ( ) . getFilterValue ( ) ; // MIN FILTER
currentValues [ 1 ] = getMagFilterType ( ) . getFilterValue ( ) ; // MAG FILTER
currentValues [ 2 ] = getAnisotropicValue ( ) ; // ANISO FILTER
currentValues [ 3 ] = getWrapSType ( ) . getWrapValue ( ) ; // WRAP S
currentValues [ 4 ] = getWrapTType ( ) . getWrapValue ( ) ; // WRAP T
return currentValues ; |
public class TrainingsImpl { /** * Get a specific iteration .
* @ param projectId The id of the project the iteration belongs to
* @ param iterationId The id of the iteration to get
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the observable to the Iteration object */
public Observable < Iteration > getIterationAsync ( UUID projectId , UUID iterationId ) { } } | return getIterationWithServiceResponseAsync ( projectId , iterationId ) . map ( new Func1 < ServiceResponse < Iteration > , Iteration > ( ) { @ Override public Iteration call ( ServiceResponse < Iteration > response ) { return response . body ( ) ; } } ) ; |
public class STUNResolver { /** * Load the STUN configuration from a stream .
* @ param stunConfigStream An InputStream with the configuration file .
* @ return A list of loaded servers */
public ArrayList < STUNService > loadSTUNServers ( java . io . InputStream stunConfigStream ) { } } | ArrayList < STUNService > serversList = new ArrayList < > ( ) ; String serverName ; int serverPort ; try { XmlPullParser parser = XmlPullParserFactory . newInstance ( ) . newPullParser ( ) ; parser . setFeature ( XmlPullParser . FEATURE_PROCESS_NAMESPACES , true ) ; parser . setInput ( stunConfigStream , "UTF-8" ) ; int eventType = parser . getEventType ( ) ; do { if ( eventType == XmlPullParser . START_TAG ) { // Parse a STUN server definition
if ( parser . getName ( ) . equals ( "stunServer" ) ) { serverName = null ; serverPort = - 1 ; // Parse the hostname
parser . next ( ) ; parser . next ( ) ; serverName = parser . nextText ( ) ; // Parse the port
parser . next ( ) ; parser . next ( ) ; try { serverPort = Integer . parseInt ( parser . nextText ( ) ) ; } catch ( Exception e ) { } // If we have a valid hostname and port , add
// it to the list .
if ( serverName != null && serverPort != - 1 ) { STUNService service = new STUNService ( serverName , serverPort ) ; serversList . add ( service ) ; } } } eventType = parser . next ( ) ; } while ( eventType != XmlPullParser . END_DOCUMENT ) ; } catch ( XmlPullParserException e ) { LOGGER . log ( Level . SEVERE , "Exception" , e ) ; } catch ( IOException e ) { LOGGER . log ( Level . SEVERE , "Exception" , e ) ; } currentServer = bestSTUNServer ( serversList ) ; return serversList ; |
public class OagBuilder { /** * Computes DS , the completion of IDS using A . ( Definition 5 ) . */
public Graph < Attribute > [ ] createDS ( Graph < Attribute > [ ] ids , List < Attribute > [ ] [ ] a ) { } } | int i ; Graph < Attribute > [ ] ds ; ds = new Graph [ ids . length ] ; for ( i = 0 ; i < ds . length ; i ++ ) { ds [ i ] = createDSx ( ids [ i ] , a [ i ] ) ; } return ds ; |
public class BaasDocument {
    /**
     * Synchronously fetches a document from the server.
     *
     * @param collection the collection to retrieve the document from. Not <code>null</code>
     * @param id the id of the document to retrieve. Not <code>null</code>
     * @return the result of the request
     */
    public static BaasResult<BaasDocument> fetchSync(String collection, String id) {
        // Delegates to the three-argument overload; 'false' is the extra boolean flag's
        // default — presumably "do not refresh/force", confirm against that overload.
        return fetchSync(collection, id, false);
    }
}
public class FNCImpl {
    /**
     * Sets the FNPRG length and emits an EMF SET notification if observers are attached.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public void setFNPRGLen(Integer newFNPRGLen) {
        Integer oldFNPRGLen = fnprgLen;
        fnprgLen = newFNPRGLen;
        // Standard EMF generated setter pattern: notify adapters only when required,
        // passing both the old and new value.
        if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, AfplibPackage.FNC__FNPRG_LEN, oldFNPRGLen, fnprgLen));
    }
}
public class ObjectWrapper { /** * Internal : Static version of { @ link ObjectWrapper # getIndexedValue ( Property , int ) } . */
@ SuppressWarnings ( "unchecked" ) private static Object getIndexedValue ( Object obj , Property property , int index , ObjectWrapper options ) { } } | if ( property == null ) { throw new IllegalArgumentException ( "Cannot get the indexed value from 'null' property." ) ; } Object propertyValue = property . get ( obj ) ; if ( propertyValue == null ) { throw new NullPointerException ( "Invalid 'null' value found for indexed '" + property + "' in " + obj . getClass ( ) . getName ( ) + "." ) ; } if ( propertyValue instanceof List ) { List list = ( List ) propertyValue ; int size = list . size ( ) ; if ( size < index ) { if ( options . isOutOfBoundsSafety ) { return null ; } else { throw new IndexOutOfBoundsException ( "The indexed " + property + " in " + obj . getClass ( ) . getSimpleName ( ) + " object has only '" + size + "' elements," + " but index '" + index + "' requested." ) ; } } return list . get ( index ) ; } else if ( propertyValue . getClass ( ) . isArray ( ) ) { int lenght = Array . getLength ( propertyValue ) ; if ( lenght < index && options . isOutOfBoundsSafety ) { return null ; } try { return Array . get ( propertyValue , index ) ; } catch ( ArrayIndexOutOfBoundsException e ) { throw new IndexOutOfBoundsException ( "The indexed " + property + " in " + obj . getClass ( ) . getSimpleName ( ) + " object has only '" + lenght + "' elements," + " but index '" + index + "' requested." ) ; } } else if ( propertyValue instanceof Iterable ) { Iterable iterable = ( Iterable ) propertyValue ; int k = 0 ; for ( Object object : iterable ) { if ( k == index ) { return object ; } k ++ ; // Hopefully not an infinite iteration
} // Nothing found
if ( options . isOutOfBoundsSafety ) { return null ; } else { throw new IndexOutOfBoundsException ( "The indexed " + property + " in " + obj . getClass ( ) . getSimpleName ( ) + " object has less than '" + index + "'" + " elements." ) ; } } else { throw new IllegalArgumentException ( "Cannot get an indexed value from the not indexed " + property + ". Only List, array and Iterable types are supported, but " + property . getType ( ) . getSimpleName ( ) + " found." ) ; } |
public class Solo { /** * Swipes with two fingers in a linear path determined by starting and ending points . Requires API level > = 14.
* @ param startPoint1 First " finger " down on the screen
* @ param startPoint2 Second " finger " down on the screen
* @ param endPoint1 Corresponding ending point of startPoint1
* @ param endPoint2 Corresponding ending point of startPoint2 */
public void swipe ( PointF startPoint1 , PointF startPoint2 , PointF endPoint1 , PointF endPoint2 ) { } } | if ( config . commandLogging ) { Log . d ( config . commandLoggingTag , "swipe(" + startPoint1 + ", " + startPoint2 + ", " + endPoint1 + ", " + endPoint2 + ")" ) ; } if ( android . os . Build . VERSION . SDK_INT < 14 ) { throw new RuntimeException ( "swipe() requires API level >= 14" ) ; } swiper . generateSwipeGesture ( startPoint1 , startPoint2 , endPoint1 , endPoint2 ) ; |
public class JSLocalConsumerPoint {
    /**
     * Returns true if this LCP is closed.
     *
     * @return whether this local consumer point has been closed
     */
    public boolean isClosed() {
        // Entry and exit trace are emitted together since the body is a single field read.
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) {
            SibTr.entry(tc, "isClosed", this);
            SibTr.exit(tc, "isClosed", Boolean.valueOf(_closed));
        }
        return _closed;
    }
}
public class FullScreenImageGalleryActivity {
    /**
     * region FullScreenImageGalleryAdapter.FullScreenImageLoader Methods
     * Loads a full-screen image into the given view by delegating to the
     * configured loader; the loading strategy itself lives in fullScreenImageLoader.
     */
    @Override
    public void loadFullScreenImage(ImageView iv, String imageUrl, int width, LinearLayout bglinearLayout) {
        fullScreenImageLoader.loadFullScreenImage(iv, imageUrl, width, bglinearLayout);
    }
}
public class DomConfigurationWriter { /** * Externalizes a { @ link HadoopClusterInformation } to a XML element .
* @ param hadoopClusterInformation the hadoopClusterInformation to externalize
* @ return a XML element representing the datastore . */
public Element toElement ( final HadoopClusterInformation hadoopClusterInformation ) { } } | final Element hadoopClusterElement = getDocument ( ) . createElement ( "hadoop-cluster" ) ; hadoopClusterElement . setAttribute ( "name" , hadoopClusterInformation . getName ( ) ) ; final String description = hadoopClusterInformation . getDescription ( ) ; if ( ! Strings . isNullOrEmpty ( description ) ) { hadoopClusterElement . setAttribute ( "description" , description ) ; } // These inherit each other , so order is important
if ( hadoopClusterInformation instanceof DirectConnectionHadoopClusterInformation ) { appendElement ( hadoopClusterElement , "namenode-url" , ( ( DirectConnectionHadoopClusterInformation ) hadoopClusterInformation ) . getNameNodeUri ( ) . toString ( ) ) ; } else if ( hadoopClusterInformation instanceof EnvironmentBasedHadoopClusterInformation ) { appendElement ( hadoopClusterElement , "environment-configured" , "" ) ; } else if ( hadoopClusterInformation instanceof DirectoryBasedHadoopClusterInformation ) { final DirectoryBasedHadoopClusterInformation directoryBasedHadoopClusterInformation = ( DirectoryBasedHadoopClusterInformation ) hadoopClusterInformation ; final Element directoriesElement = getDocument ( ) . createElement ( "directories" ) ; hadoopClusterElement . appendChild ( directoriesElement ) ; for ( final String directory : directoryBasedHadoopClusterInformation . getDirectories ( ) ) { appendElement ( directoriesElement , "directory" , directory ) ; } } else { throw new UnsupportedOperationException ( "Unknown Hadoop cluster configuration" ) ; } return hadoopClusterElement ; |
public class AbstractCircleController { /** * Return the circle ' s radius in world length units . */
protected double getWorldRadius ( ) { } } | if ( center != null ) { Coordinate screenEndPoint = new Coordinate ( center . getX ( ) + radius , center . getY ( ) ) ; Coordinate worldEndPoint = mapWidget . getMapModel ( ) . getMapView ( ) . getWorldViewTransformer ( ) . viewToWorld ( screenEndPoint ) ; double deltaX = worldEndPoint . getX ( ) - getWorldCenter ( ) . getX ( ) ; double deltaY = worldEndPoint . getY ( ) - getWorldCenter ( ) . getY ( ) ; return ( float ) Math . sqrt ( ( deltaX * deltaX ) + ( deltaY * deltaY ) ) ; } return 0 ; |
public class GuildController {
    /**
     * Used to move a {@link net.dv8tion.jda.core.entities.Member Member} from one
     * {@link net.dv8tion.jda.core.entities.VoiceChannel VoiceChannel} to another in the
     * same Guild. You cannot move a Member that isn't already in a VoiceChannel.
     *
     * <p>Possible {@link net.dv8tion.jda.core.requests.ErrorResponse ErrorResponses}:
     * MISSING_PERMISSIONS (permission discrepancy), MISSING_ACCESS (we were removed from
     * the Guild), UNKNOWN_MEMBER (Member left), UNKNOWN_CHANNEL (channel deleted).
     *
     * @param member the {@link net.dv8tion.jda.core.entities.Member Member} that you are moving
     * @param voiceChannel the destination {@link net.dv8tion.jda.core.entities.VoiceChannel VoiceChannel}
     * @throws IllegalStateException
     *         If the Member isn't currently in a VoiceChannel in this Guild, or
     *         {@link net.dv8tion.jda.core.utils.cache.CacheFlag#VOICE_STATE} is disabled.
     * @throws IllegalArgumentException
     *         If any argument is {@code null}, or the Member/VoiceChannel isn't part of this Guild.
     * @throws net.dv8tion.jda.core.exceptions.InsufficientPermissionException
     *         If this account lacks VOICE_MOVE_OTHERS in the Member's current channel, or
     *         both this account AND the Member lack VOICE_CONNECT for the destination.
     * @return {@link net.dv8tion.jda.core.requests.RestAction RestAction}
     */
    @CheckReturnValue
    public RestAction<Void> moveVoiceMember(Member member, VoiceChannel voiceChannel) {
        // Argument and guild-membership validation first.
        Checks.notNull(member, "Member");
        Checks.notNull(voiceChannel, "VoiceChannel");
        checkGuild(member.getGuild(), "Member");
        checkGuild(voiceChannel.getGuild(), "VoiceChannel");
        GuildVoiceState vState = member.getVoiceState();
        if (vState == null)
            throw new IllegalStateException("Cannot move a Member with disabled CacheFlag.VOICE_STATE");
        if (!vState.inVoiceChannel())
            throw new IllegalStateException("You cannot move a Member who isn't in a VoiceChannel!");
        // The bot needs MOVE_OTHERS in the channel the member currently occupies...
        if (!PermissionUtil.checkPermission(vState.getChannel(), getGuild().getSelfMember(), Permission.VOICE_MOVE_OTHERS))
            throw new InsufficientPermissionException(Permission.VOICE_MOVE_OTHERS, "This account does not have Permission to MOVE_OTHERS out of the channel that the Member is currently in.");
        // ...and either the bot or the member must be able to CONNECT to the destination.
        if (!PermissionUtil.checkPermission(voiceChannel, getGuild().getSelfMember(), Permission.VOICE_CONNECT)
                && !PermissionUtil.checkPermission(voiceChannel, member, Permission.VOICE_CONNECT))
            throw new InsufficientPermissionException(Permission.VOICE_CONNECT, "Neither this account nor the Member that is attempting to be moved have the VOICE_CONNECT permission " + "for the destination VoiceChannel, so the move cannot be done.");
        // The move itself is a guild-member modification carrying the new channel_id.
        JSONObject body = new JSONObject().put("channel_id", voiceChannel.getId());
        Route.CompiledRoute route = Route.Guilds.MODIFY_MEMBER.compile(getGuild().getId(), member.getUser().getId());
        return new RestAction<Void>(getGuild().getJDA(), route, body) {
            @Override
            protected void handleResponse(Response response, Request<Void> request) {
                if (response.isOk())
                    request.onSuccess(null);
                else
                    request.onFailure(response);
            }
        };
    }
}
public class AbstractComponentDecoration { /** * Gets the effective clipping ancestor .
* If no custom clipping ancestor is set , the parent container of the decorated component will be returned .
* @ return Effective clipping ancestor . */
private JComponent getEffectiveClippingAncestor ( ) { } } | JComponent clippingComponent = clippingAncestor ; if ( ( clippingComponent == null ) && ( decoratedComponent != null ) ) { // No specific clipping ancestor specified by the programmer , so try to find one
// Keep a reference to the best alternative candidate if no other proper ancestor can be found
Container lastNonNullParent = decoratedComponent ; // Look for the first viewport or layered pane in the component hierarchy tree
Container parent = decoratedComponent . getParent ( ) ; while ( ( parent != null ) && ( clippingComponent == null ) ) { lastNonNullParent = parent ; if ( ( parent instanceof JViewport ) || ( parent instanceof JLayeredPane ) ) { clippingComponent = ( JComponent ) parent ; } parent = parent . getParent ( ) ; } if ( clippingComponent == null ) { // This may be a normal case depending on the application logic to hide a panel ( tabbed pane like )
if ( lastNonNullParent instanceof JComponent ) { clippingComponent = ( JComponent ) lastNonNullParent ; } } } return clippingComponent ; |
public class ConfigPropertyUtils {
    /**
     * Creates a case-insensitive regular expression which will match any of the
     * constant names of the supplied enum type.
     */
    public static <T extends Enum<T>> Pattern createValidationPatternFromEnumType(Class<T> enumType) {
        final StringBuilder regEx = new StringBuilder("(?i)");
        final T[] constants = enumType.getEnumConstants();
        for (int i = 0; i < constants.length; i++) {
            if (i > 0) {
                regEx.append('|');
            }
            // '$' is the only regex metacharacter that may appear in a Java identifier;
            // escape it so constant names are matched literally.
            regEx.append(constants[i].name().replace("$", "\\$"));
        }
        return Pattern.compile(regEx.toString());
    }
}
public class AWSMediaStoreDataClient {
    /**
     * Provides a list of metadata entries about folders and objects in the specified folder.
     *
     * @param request the ListItems request
     * @return Result of the ListItems operation returned by the service.
     * @throws ContainerNotFoundException
     *         The specified container was not found for the specified account.
     * @throws InternalServerErrorException
     *         The service is temporarily unavailable.
     * @sample AWSMediaStoreData.ListItems
     * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/mediastore-data-2017-09-01/ListItems" target="_top">AWS API Documentation</a>
     */
    @Override
    public ListItemsResult listItems(ListItemsRequest request) {
        // Standard AWS SDK pattern: run pre-execution handlers, then dispatch the call.
        request = beforeClientExecution(request);
        return executeListItems(request);
    }
}
public class PSBroker {
    /**
     * Subscribes the subscriber to the given type of message.
     *
     * @param <T> The class to subscribe to.
     * @param subscriber The subscriber.
     * @param messageType The type of message to subscribe to.
     * @exception SubscriberTypeMismatchException
     *            the type the subscriber was bound to is incompatible with
     *            the type that is being received.
     */
    public <T> void subscribe(Subscriber<?> subscriber, Class<T> messageType) {
        // Delegates to the pluggable strategy; 'mapping' is presumably the broker's
        // type-to-subscriber registry — confirm against the strategy implementation.
        subscribeStrategy.subscribe(mapping, subscriber, messageType);
    }
}
public class FileOperations {
    /**
     * Copy the content of the source file to the destination file using
     * {@link FileChannel}. This version seems to fail with UNC paths.
     *
     * @param aSrcFile Source file. May not be <code>null</code>.
     * @param aDestFile Destination file. May not be <code>null</code>.
     * @return {@link ESuccess}
     */
    @Nonnull
    private static ESuccess _copyFileViaChannel(@Nonnull final File aSrcFile, @Nonnull final File aDestFile) {
        final FileChannel aSrcChannel = FileChannelHelper.getFileReadChannel(aSrcFile);
        if (aSrcChannel == null)
            return ESuccess.FAILURE;
        try {
            final FileChannel aDstChannel = FileChannelHelper.getFileWriteChannel(aDestFile, EAppend.TRUNCATE);
            if (aDstChannel == null)
                return ESuccess.FAILURE;
            try {
                FileLock aSrcLock = null;
                FileLock aDestLock = null;
                try {
                    final long nBytesToRead = aSrcChannel.size();
                    // Shared read lock on the source and exclusive write lock on the destination.
                    aSrcLock = aSrcChannel.lock(0, nBytesToRead, true);
                    aDestLock = aDstChannel.lock();
                    // Main copying - the loop version is much quicker than
                    // transferTo with full size!
                    long nBytesWritten = 0;
                    final long nChunkSize = 1L * CGlobal.BYTES_PER_MEGABYTE;
                    while (nBytesWritten < nBytesToRead)
                        nBytesWritten += aSrcChannel.transferTo(nBytesWritten, nChunkSize, aDstChannel);
                    if (nBytesToRead != nBytesWritten) {
                        if (LOGGER.isErrorEnabled())
                            LOGGER.error("Failed to copy file. Meant to read " + nBytesToRead + " bytes but wrote " + nBytesWritten);
                        return ESuccess.FAILURE;
                    }
                    return ESuccess.SUCCESS;
                } catch (final IOException ex) {
                    throw new IllegalStateException("Failed to copy from " + aSrcFile + " to " + aDestFile, ex);
                } finally {
                    // Unlock in reverse acquisition order; release() presumably tolerates
                    // null locks (reached when lock acquisition threw) — confirm in ChannelHelper.
                    ChannelHelper.release(aDestLock);
                    ChannelHelper.release(aSrcLock);
                }
            } finally {
                ChannelHelper.close(aDstChannel);
            }
        } finally {
            ChannelHelper.close(aSrcChannel);
        }
    }
}
public class AggregationIntervalHelperImpl { /** * Return a sorted list of AcademicTermDetail objects where the the first element of the list
* where the first element is the first term that starts after the specified start DateTime . */
protected List < AcademicTermDetail > getAcademicTermsAfter ( DateTime start ) { } } | final List < AcademicTermDetail > terms = this . eventAggregationManagementDao . getAcademicTermDetails ( ) ; final int index = Collections . binarySearch ( terms , new AcademicTermDetailImpl ( start . toDateMidnight ( ) , start . plusDays ( 1 ) . toDateMidnight ( ) , "" ) ) ; if ( index > 0 ) { return terms . subList ( index , terms . size ( ) ) ; } else if ( index < 0 ) { return terms . subList ( - ( index + 1 ) , terms . size ( ) ) ; } return terms ; |
public class web { /** * Check if can connect to the server , also this method will need the
* permissions " android . permission . INTERNET "
* @ param serverURL - server url
* @ return true if the connection returned a successful code */
public static boolean checkServerConnection ( String serverURL ) { } } | boolean value = false ; try { value = new RetrieveCheckServerConnectionString ( ) . execute ( serverURL ) . get ( ) ; } catch ( InterruptedException e ) { QuickUtils . log . e ( "InterruptedException" , e ) ; } catch ( ExecutionException e ) { QuickUtils . log . e ( "ExecutionException" , e ) ; } return value ; |
public class ParseUtils { /** * Read an element which contains only a single list attribute of a given
* type .
* @ param reader the reader
* @ param attributeName the attribute name , usually " value "
* @ param type the value type class
* @ param < T > the value type
* @ return the value list
* @ throws javax . xml . stream . XMLStreamException if an error occurs or if the
* element does not contain the specified attribute , contains other
* attributes , or contains child elements . */
@ SuppressWarnings ( { } } | "unchecked" , "WeakerAccess" } ) public static < T > List < T > readListAttributeElement ( final XMLExtendedStreamReader reader , final String attributeName , final Class < T > type ) throws XMLStreamException { requireSingleAttribute ( reader , attributeName ) ; // todo : fix this when this method signature is corrected
final List < T > value = ( List < T > ) reader . getListAttributeValue ( 0 , type ) ; requireNoContent ( reader ) ; return value ; |
public class SyncWriter {
    /**
     * Completes the restore step: on a clean run, marks the restore as transferred
     * to DuraCloud and puts the snapshot props file back; on errors, fails the step
     * and reports every error in the exit description.
     *
     * @see org.springframework.batch.core.StepExecutionListener#afterStep(org.springframework.batch.core.StepExecution)
     */
    @Override
    public ExitStatus afterStep(StepExecution stepExecution) {
        ExitStatus status = stepExecution.getExitStatus();
        List<String> errors = getErrors();
        if (errors.isEmpty()) {
            try {
                RestoreStatus newStatus = RestoreStatus.TRANSFER_TO_DURACLOUD_COMPLETE;
                restoreManager.transitionRestoreStatus(restorationId, newStatus, "");
                // Restore the snapshot props file to the data directory.
                restoreFile(new File(this.watchDir.getParentFile(), Constants.SNAPSHOT_PROPS_FILENAME), watchDir.getParentFile());
                return status.and(ExitStatus.COMPLETED);
            } catch (Exception e) {
                String message = "failed to transition restore status: " + e.getMessage();
                log.error(message, e);
                return status.and(ExitStatus.FAILED).addExitDescription(message);
            }
        } else {
            status = status.and(ExitStatus.FAILED);
            // NOTE(review): Spring Batch ExitStatus.addExitDescription returns a NEW
            // instance; the discarded return values below may make these appends
            // no-ops — verify against the ExitStatus implementation in use.
            status.addExitDescription("Transfer to DuraCloud failed: " + errors.size() + " items failed.");
            for (String error : errors) {
                status.addExitDescription(error);
            }
            resetContextState();
            return status;
        }
    }
}
public class NormalizedKeySorter {
    /**
     * Writes the records in this buffer in their logical order to the given output.
     *
     * @param output The output view to write the records to.
     * @throws IOException Thrown, if an I/O exception occurred writing to the output view.
     */
    @Override
    public void writeToOutput(final ChannelWriterOutputView output) throws IOException {
        int recordsLeft = this.numRecords;
        int currentMemSeg = 0;
        while (recordsLeft > 0) {
            final MemorySegment currentIndexSegment = this.sortIndex.get(currentMemSeg++);
            int offset = 0;
            // Check whether we have a full or partially full segment.
            if (recordsLeft >= this.indexEntriesPerSegment) {
                // Full segment: visit every index entry up to the last entry's offset.
                for (; offset <= this.lastIndexEntryOffset; offset += this.indexEntrySize) {
                    // The first 8 bytes of each index entry hold the pointer into the record buffer.
                    final long pointer = currentIndexSegment.getLong(offset);
                    this.recordBuffer.setReadPosition(pointer);
                    this.serializer.copy(this.recordBuffer, output);
                }
                recordsLeft -= this.indexEntriesPerSegment;
            } else {
                // Partially filled (final) segment: only 'recordsLeft' entries are valid.
                for (; recordsLeft > 0; recordsLeft--, offset += this.indexEntrySize) {
                    final long pointer = currentIndexSegment.getLong(offset);
                    this.recordBuffer.setReadPosition(pointer);
                    this.serializer.copy(this.recordBuffer, output);
                }
            }
        }
    }
}
public class AbstractLIBORCovarianceModelParametric {

    /**
     * Performs a generic calibration of the parametric model by trying to match
     * a given vector of calibration products to a given vector of target values,
     * using a given vector of weights.
     *
     * Optional calibration parameters may be passed using the map
     * calibrationParameters. The keys are ({@code String}s):
     * <ul>
     * <li><tt>brownianMotion</tt>: Under this key an object implementing {@link net.finmath.montecarlo.BrownianMotionInterface} may be provided. If so, this Brownian motion is used to build the valuation model.</li>
     * <li><tt>maxIterations</tt>: Under this key an object of type Integer may be provided specifying the maximum number of iterations.</li>
     * <li><tt>accuracy</tt>: Under this key an object of type Double may be provided specifying the desired accuracy. Note that this is understood in the sense that the solver will stop if the iteration does not improve by more than this number.</li>
     * </ul>
     *
     * @param calibrationModel The LIBOR market model to be used for calibrations (specifies forward curve and tenor discretization).
     * @param calibrationProducts The array of calibration products.
     * @param calibrationTargetValues The array of target values.
     * @param calibrationWeights The array of weights. NOTE(review): this parameter is not referenced anywhere in this body — confirm whether weighting is intentionally delegated to the optimizer.
     * @param calibrationParameters A map of type Map&lt;String, Object&gt; specifying some (optional) calibration parameters.
     * @return A new parametric model of the same type as <code>this</code> one, but with calibrated parameters.
     * @throws CalculationException Thrown if calibration has failed.
     */
    public AbstractLIBORCovarianceModelParametric getCloneCalibrated(final LIBORMarketModelInterface calibrationModel, final AbstractLIBORMonteCarloProduct[] calibrationProducts, final RandomVariableInterface[] calibrationTargetValues, double[] calibrationWeights, Map<String, Object> calibrationParameters) throws CalculationException {
        // Normalize to an empty map so the optional-parameter lookups below are safe.
        if (calibrationParameters == null) {
            calibrationParameters = new HashMap<String, Object>();
        }
        // Optional overrides; each is null when not supplied by the caller.
        Integer numberOfPathsParameter = (Integer) calibrationParameters.get("numberOfPaths");
        Integer seedParameter = (Integer) calibrationParameters.get("seed");
        Integer maxIterationsParameter = (Integer) calibrationParameters.get("maxIterations");
        Double parameterStepParameter = (Double) calibrationParameters.get("parameterStep");
        Double accuracyParameter = (Double) calibrationParameters.get("accuracy");
        BrownianMotionInterface brownianMotionParameter = (BrownianMotionInterface) calibrationParameters.get("brownianMotion");

        // Unconstrained optimization: bounds are +/- infinity, targets are zero
        // (the objective returns value-minus-target per product).
        double[] initialParameters = this.getParameter();
        double[] lowerBound = new double[initialParameters.length];
        double[] upperBound = new double[initialParameters.length];
        double[] parameterStep = new double[initialParameters.length];
        double[] zero = new double[calibrationTargetValues.length];
        Arrays.fill(lowerBound, Double.NEGATIVE_INFINITY);
        Arrays.fill(upperBound, Double.POSITIVE_INFINITY);
        Arrays.fill(parameterStep, parameterStepParameter != null ? parameterStepParameter.doubleValue() : 1E-4);
        Arrays.fill(zero, 0);

        /*
         * We allow for 2 simultaneous calibration models.
         * Note: In the case of a Monte-Carlo calibration, the memory requirement is that of
         * one model with 2 times the number of paths. In the case of an analytic calibration
         * memory requirement is not the limiting factor.
         */
        int numberOfThreads = 2;
        OptimizerFactoryInterface optimizerFactoryParameter = (OptimizerFactoryInterface) calibrationParameters.get("optimizerFactory");

        // Defaults applied when the corresponding optional parameter is absent.
        int numberOfPaths = numberOfPathsParameter != null ? numberOfPathsParameter.intValue() : 2000;
        int seed = seedParameter != null ? seedParameter.intValue() : 31415;
        int maxIterations = maxIterationsParameter != null ? maxIterationsParameter.intValue() : 400;
        double accuracy = accuracyParameter != null ? accuracyParameter.doubleValue() : 1E-7;
        final BrownianMotionInterface brownianMotion = brownianMotionParameter != null ? brownianMotionParameter : new BrownianMotion(getTimeDiscretization(), getNumberOfFactors(), numberOfPaths, seed);
        OptimizerFactoryInterface optimizerFactory = optimizerFactoryParameter != null ? optimizerFactoryParameter : new OptimizerFactoryLevenbergMarquardt(maxIterations, accuracy, numberOfThreads);

        // NOTE(review): the thread pool is deliberately disabled (executor == null),
        // so product valuations below run synchronously via FutureTask.run().
        int numberOfThreadsForProductValuation = 2 * Math.max(2, Runtime.getRuntime().availableProcessors());
        final ExecutorService executor = null; // Executors.newFixedThreadPool(numberOfThreadsForProductValuation);

        ObjectiveFunction calibrationError = new ObjectiveFunction() {
            // Calculate model values for given parameters
            @Override
            public void setValues(double[] parameters, double[] values) throws SolverException {
                // Build a candidate model from the trial parameter vector.
                AbstractLIBORCovarianceModelParametric calibrationCovarianceModel = AbstractLIBORCovarianceModelParametric.this.getCloneWithModifiedParameters(parameters);
                // Create a LIBOR market model with the new covariance structure.
                LIBORMarketModelInterface model = calibrationModel.getCloneWithModifiedCovarianceModel(calibrationCovarianceModel);
                ProcessEulerScheme process = new ProcessEulerScheme(brownianMotion);
                final LIBORModelMonteCarloSimulation liborMarketModelMonteCarloSimulation = new LIBORModelMonteCarloSimulation(model, process);
                // Value each calibration product (value minus target), either on the
                // executor or synchronously on this thread when executor is null.
                ArrayList<Future<RandomVariableInterface>> valueFutures = new ArrayList<Future<RandomVariableInterface>>(calibrationProducts.length);
                for (int calibrationProductIndex = 0; calibrationProductIndex < calibrationProducts.length; calibrationProductIndex++) {
                    final int workerCalibrationProductIndex = calibrationProductIndex;
                    Callable<RandomVariableInterface> worker = new Callable<RandomVariableInterface>() {
                        public RandomVariableInterface call() {
                            try {
                                return calibrationProducts[workerCalibrationProductIndex].getValue(0.0, liborMarketModelMonteCarloSimulation).sub(calibrationTargetValues[workerCalibrationProductIndex]);
                            } catch (CalculationException e) {
                                // We do not signal exceptions to keep the solver working and automatically exclude non-working calibration products.
                                return null;
                            } catch (Exception e) {
                                // We do not signal exceptions to keep the solver working and automatically exclude non-working calibration products.
                                return null;
                            }
                        }
                    };
                    if (executor != null) {
                        Future<RandomVariableInterface> valueFuture = executor.submit(worker);
                        valueFutures.add(calibrationProductIndex, valueFuture);
                    } else {
                        FutureTask<RandomVariableInterface> valueFutureTask = new FutureTask<RandomVariableInterface>(worker);
                        valueFutureTask.run();
                        valueFutures.add(calibrationProductIndex, valueFutureTask);
                    }
                }
                // Collect results; a failed product (null) contributes 0.0, i.e. is
                // effectively excluded from the calibration error.
                for (int calibrationProductIndex = 0; calibrationProductIndex < calibrationProducts.length; calibrationProductIndex++) {
                    try {
                        RandomVariableInterface value = valueFutures.get(calibrationProductIndex).get();
                        values[calibrationProductIndex] = value != null ? value.getAverage() : 0.0; ;
                    } catch (InterruptedException e) {
                        throw new SolverException(e);
                    } catch (ExecutionException e) {
                        throw new SolverException(e);
                    }
                }
            }
        };

        OptimizerInterface optimizer = optimizerFactory.getOptimizer(calibrationError, initialParameters, lowerBound, upperBound, parameterStep, zero);
        try {
            optimizer.run();
        } catch (SolverException e) {
            throw new CalculationException(e);
        } finally {
            // Dead code while executor is pinned to null above; kept for when the
            // commented-out thread pool is re-enabled.
            if (executor != null) {
                executor.shutdown();
            }
        }
        // Get covariance model corresponding to the best parameter set.
        double[] bestParameters = optimizer.getBestFitParameters();
        AbstractLIBORCovarianceModelParametric calibrationCovarianceModel = this.getCloneWithModifiedParameters(bestParameters);
        // Diagnostic output
        if (logger.isLoggable(Level.FINE)) {
            logger.fine("The solver required " + optimizer.getIterations() + " iterations. The best fit parameters are:");
            String logString = "Best parameters:";
            for (int i = 0; i < bestParameters.length; i++) {
                logString += "\tparameter[" + i + "]: " + bestParameters[i];
            }
            logger.fine(logString);
        }
        return calibrationCovarianceModel;
    }
}
public class DensityTree { /** * Sets min fitness .
* @ param minFitness the min fitness
* @ return the min fitness */
@ javax . annotation . Nonnull public com . simiacryptus . util . data . DensityTree setMinFitness ( double minFitness ) { } } | this . minFitness = minFitness ; return this ; |
public class SolrEventLogSubscriber { /** * Initializes the plugin using the specified JSON configuration
* @ param jsonFile JSON configuration file
* @ throws SubscriberException if there was an error in initialization */
@ Override public void init ( File jsonFile ) throws SubscriberException { } } | try { setConfig ( new JsonSimpleConfig ( jsonFile ) ) ; } catch ( IOException ioe ) { throw new SubscriberException ( ioe ) ; } |
public class ClassInfo { /** * Get the default parameter values for this annotation , if this is an annotation class .
* @ return A list of { @ link AnnotationParameterValue } objects for each of the default parameter values for this
* annotation , if this is an annotation class with default parameter values , otherwise the empty list . */
public AnnotationParameterValueList getAnnotationDefaultParameterValues ( ) { } } | if ( ! scanResult . scanSpec . enableAnnotationInfo ) { throw new IllegalArgumentException ( "Please call ClassGraph#enableAnnotationInfo() before #scan()" ) ; } if ( ! isAnnotation ) { throw new IllegalArgumentException ( "Class is not an annotation: " + getName ( ) ) ; } if ( annotationDefaultParamValues == null ) { return AnnotationParameterValueList . EMPTY_LIST ; } if ( ! annotationDefaultParamValuesHasBeenConvertedToPrimitive ) { annotationDefaultParamValues . convertWrapperArraysToPrimitiveArrays ( this ) ; annotationDefaultParamValuesHasBeenConvertedToPrimitive = true ; } return annotationDefaultParamValues ; |
public class ConfigFactory { /** * Like { @ link # load ( Config ) } but allows you to specify
* { @ link ConfigResolveOptions } .
* @ param config
* the application ' s portion of the configuration
* @ param resolveOptions
* options for resolving the assembled config stack
* @ return resolved configuration with overrides and fallbacks added */
public static Config load ( Config config , ConfigResolveOptions resolveOptions ) { } } | return load ( checkedContextClassLoader ( "load" ) , config , resolveOptions ) ; |
public class Histogram { /** * Returns a map of range - & gt ; event count . The elements of the stream are
* a mutable copy of the internal data . */
public Stream < RangeWithCount > stream ( ) { } } | return buckets_ . stream ( ) . map ( bucket -> new RangeWithCount ( bucket . getRange ( ) , bucket . getEvents ( ) ) ) ; |
public class GLFWInput { /** * Converts GLFW modifier key flags into PlayN modifier key flags . */
private int toModifierFlags ( int mods ) { } } | return modifierFlags ( ( mods & GLFW_MOD_ALT ) != 0 , ( mods & GLFW_MOD_CONTROL ) != 0 , ( mods & GLFW_MOD_SUPER ) != 0 , ( mods & GLFW_MOD_SHIFT ) != 0 ) ; |
public class DB { /** * Creates a taxonomy term .
* @ param taxonomy The taxonomy .
* @ param name The term name .
* @ param slug The term slug .
* @ param description The taxonomy term description .
* @ return The created term .
* @ throws SQLException on database error . */
public TaxonomyTerm createTaxonomyTerm ( final String taxonomy , final String name , final String slug , final String description ) throws SQLException { } } | Term term = createTerm ( name , slug ) ; Connection conn = null ; PreparedStatement stmt = null ; ResultSet rs = null ; Timer . Context ctx = metrics . createTaxonomyTermTimer . time ( ) ; try { conn = connectionSupplier . getConnection ( ) ; stmt = conn . prepareStatement ( insertTaxonomyTermSQL , Statement . RETURN_GENERATED_KEYS ) ; stmt . setLong ( 1 , term . id ) ; stmt . setString ( 2 , taxonomy ) ; stmt . setString ( 3 , Strings . nullToEmpty ( description ) ) ; stmt . executeUpdate ( ) ; rs = stmt . getGeneratedKeys ( ) ; if ( rs . next ( ) ) { return new TaxonomyTerm ( rs . getLong ( 1 ) , taxonomy , term , description ) ; } else { throw new SQLException ( "Problem creating taxonomy term (no generated id)" ) ; } } finally { ctx . stop ( ) ; closeQuietly ( conn , stmt , rs ) ; } |
public class DockerRuleBuilder { /** * Dynamic link .
* Define ( legacy ) container links ( equaivalent of command - line < code > - - link " targetContainerId : alias " < / code >
* where targetContainerId will be substituted after target container start ) .
* Legacy links works only on docker < code > bridge < / code > network .
* Unlike static link ( see { @ link # link ( String ) } ) it does not require assigning name to target container
* so it is especially convenient in setups where multiple concurrent test cases
* shares single docker server .
* Target container must be started first and < b > because of no guarantees of rule execution
* order in JUnit suggested solution is to take advantage of JUnit { @ link RuleChain } < / b > , for example :
* < pre >
* DockerRule db = DockerRule . builder ( )
* . imageName ( " busybox " )
* DockerRule web = DockerRule . builder ( )
* . imageName ( " busybox " )
* . link ( db , " db " )
* { @ literal @ } Rule
* public RuleChain containers = RuleChain . outerRule ( db ) . around ( web ) ;
* < / pre >
* @ param targetContainer Container link points to
* @ param alias Alias assinged to link in current container */
public DockerRuleBuilder link ( DockerRule targetContainer , String alias ) { } } | LinkNameValidator . validateContainerName ( alias ) ; dynamicLinks . add ( Pair . of ( targetContainer , alias ) ) ; return this ; |
public class MessageProcessorMatching {

    /**
     * Method retrieveNonSelectorConsumers.
     * Performs a search against the MatchSpace in order to retrieve the set of
     * consumers (registered without selectors) that match a fully qualified
     * topic expression. Matching local and remote consumers are ADDED to the
     * two caller-supplied sets (output parameters).
     *
     * @param topicSpace              the destination (topic space) to search under
     * @param discriminatorExpression the fully qualified topic expression
     * @param localConsumers          output set; local subscription matches are added here
     * @param remoteConsumers         output set; neighbour (remote) matches are added here
     * @throws SIDiscriminatorSyntaxException
     * @throws SIErrorException wrapping any MatchSpace failure (see catch blocks)
     */
    public void retrieveNonSelectorConsumers(DestinationHandler topicSpace, String discriminatorExpression, Set localConsumers, Set remoteConsumers) {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(tc, "retrieveNonSelectorConsumers", new Object[] { topicSpace, discriminatorExpression });
        // Get the uuid for the topicspace
        SIBUuid12 topicSpaceUuid = topicSpace.getBaseUuid();
        String topicSpaceStr = topicSpaceUuid.toString();
        String theTopic = null;
        try {
            // If fully qualified drive a dummy publication against the MatchSpace
            MatchSpaceKey msk = new DiscriminatorMatchSpaceKey(discriminatorExpression);
            // Combine the topicSpace and topic
            theTopic = buildSendTopicExpression(topicSpaceStr, discriminatorExpression);
            // Get a search results object to use (pooled to avoid allocation churn)
            MessageProcessorSearchResults searchResults = (MessageProcessorSearchResults) _messageProcessor.getSearchResultsObjectPool().remove();
            // Set up Results object to hold the results from the MatchSpace traversal
            searchResults.reset();
            // Set a reference to the destination into the search results
            // This is used to avoid ACL checks where a topicspace does not require access
            // checks (e.g admin flag set, temporary topicspace, etc)
            if (_isBusSecure) {
                searchResults.setTopicSpace(topicSpace);
            }
            // Set up an evaluation cache (need to keep one of these per thread. Newing up is expensive)
            EvalCache cache = _matching.createEvalCache();
            search(theTopic, // keyed on destination name
                    msk, cache, searchResults);
            // MatchSpace searching is complete, add any matching consumers to the
            // Sets
            Object allResults[] = searchResults.getResults(theTopic);
            localConsumers.addAll((Set) allResults[MessageProcessorMatchTarget.JS_SUBSCRIPTION_TYPE]);
            remoteConsumers.addAll((Set) allResults[MessageProcessorMatchTarget.JS_NEIGHBOUR_TYPE]);
            if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                // NOTE(review): the trace text is missing a space before "remote";
                // left unchanged here because it is runtime output.
                SibTr.debug(tc, "Found " + localConsumers.size() + " local consumers and " + remoteConsumers.size() + "remote consumers in MatchSpace search");
            }
            // Return search results object to cache
            _messageProcessor.getSearchResultsObjectPool().add(searchResults);
        } catch (BadMessageFormatMatchingException e) {
            // FFDC + trace + NLS error, then surface as an SIErrorException.
            FFDCFilter.processException(e, "com.ibm.ws.sib.processor.matching.MessageProcessorMatching.retrieveNonSelectorConsumers", "1:2439:1.117.1.11", this);
            SibTr.exception(tc, e);
            if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
                SibTr.exit(tc, "retrieveNonSelectorConsumers", e);
            SibTr.error(tc, "INTERNAL_MESSAGING_ERROR_CWSIP0002", new Object[] { "com.ibm.ws.sib.processor.matching.MessageProcessorMatching", "1:2450:1.117.1.11", e });
            throw new SIErrorException(nls.getFormattedMessage("INTERNAL_MESSAGING_ERROR_CWSIP0002", new Object[] { "com.ibm.ws.sib.processor.matching.MessageProcessorMatching", "1:2458:1.117.1.11", e }, null), e);
        } catch (MatchingException e) {
            // Same handling as above for generic MatchSpace failures.
            FFDCFilter.processException(e, "com.ibm.ws.sib.processor.matching.MessageProcessorMatching.retrieveNonSelectorConsumers", "1:2469:1.117.1.11", this);
            SibTr.exception(tc, e);
            if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
                SibTr.exit(tc, "retrieveNonSelectorConsumers", e);
            SibTr.error(tc, "INTERNAL_MESSAGING_ERROR_CWSIP0002", new Object[] { "com.ibm.ws.sib.processor.matching.MessageProcessorMatching", "1:2479:1.117.1.11", e });
            throw new SIErrorException(nls.getFormattedMessage("INTERNAL_MESSAGING_ERROR_CWSIP0002", new Object[] { "com.ibm.ws.sib.processor.matching.MessageProcessorMatching", "1:2486:1.117.1.11", e }, null), e);
        }
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(tc, "retrieveNonSelectorConsumers");
    }
}
public class ActivityChooserModel { /** * Sets the default activity . The default activity is set by adding a
* historical record with weight high enough that this activity will
* become the highest ranked . Such a strategy guarantees that the default
* will eventually change if not used . Also the weight of the record for
* setting a default is inflated with a constant amount to guarantee that
* it will stay as default for awhile .
* @ param index The index of the activity to set as default . */
public void setDefaultActivity ( int index ) { } } | synchronized ( mInstanceLock ) { ensureConsistentState ( ) ; ActivityResolveInfo newDefaultActivity = mActivities . get ( index ) ; ActivityResolveInfo oldDefaultActivity = mActivities . get ( 0 ) ; final float weight ; if ( oldDefaultActivity != null ) { // Add a record with weight enough to boost the chosen at the top .
weight = oldDefaultActivity . weight - newDefaultActivity . weight + DEFAULT_ACTIVITY_INFLATION ; } else { weight = DEFAULT_HISTORICAL_RECORD_WEIGHT ; } ComponentName defaultName = new ComponentName ( newDefaultActivity . resolveInfo . activityInfo . packageName , newDefaultActivity . resolveInfo . activityInfo . name ) ; HistoricalRecord historicalRecord = new HistoricalRecord ( defaultName , System . currentTimeMillis ( ) , weight ) ; addHisoricalRecord ( historicalRecord ) ; } |
public class JdbcMapperFactory { /** * Associate the specified Getter for the specified property .
* @ param key the property
* @ param getter the getter
* @ return the current factory */
public JdbcMapperFactory addCustomGetter ( String key , Getter < ResultSet , ? > getter ) { } } | return addColumnDefinition ( key , FieldMapperColumnDefinition . < JdbcColumnKey > customGetter ( getter ) ) ; |
public class MPIO { /** * Initialize the MP I / O component .
* @ param CEL TRM ' s implementation of the CommsErrorListener interface .
* @ param RM TRM ' s implementation of the RoutingManager interface . */
public void init ( CommsErrorListener CEL , RoutingManager RM ) { } } | if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "init" , new Object [ ] { CEL , RM } ) ; // Lock exclusively for start operations
mpioLockManager . lockExclusive ( ) ; _commsErrorListener = CEL ; _routingManager = RM ; synchronized ( _mpConnectionsByMEConnection ) { _mpConnectionsByMEUuid . clear ( ) ; _mpConnectionsByMEConnection . clear ( ) ; } // Create a new RemoteMessageReceiver if we don ' t already have one
// or re - initialise the existing one to refresh any cached data
// ( used when an ME is re - started without the Server being re - started )
if ( _remoteMessageReciever == null ) _remoteMessageReciever = new RemoteMessageReceiver ( _messageProcessor , this ) ; else _remoteMessageReciever . init ( ) ; started = true ; mpioLockManager . unlockExclusive ( ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "init" ) ; |
public class CDKMCS { /** * Determines if 2 bondA1 have 1 atom in common if second is atom query AtomContainer
* @ param atom1 first bondA1
* @ param bondB1 second bondA1
* @ return the symbol of the common atom or " " if
* the 2 bonds have no common atom */
private static boolean queryAdjacency ( IBond bondA1 , IBond bondB1 , IBond bondA2 , IBond bondB2 ) { } } | IAtom atom1 = null ; IAtom atom2 = null ; if ( bondA1 . contains ( bondB1 . getBegin ( ) ) ) { atom1 = bondB1 . getBegin ( ) ; } else if ( bondA1 . contains ( bondB1 . getEnd ( ) ) ) { atom1 = bondB1 . getEnd ( ) ; } if ( bondA2 . contains ( bondB2 . getBegin ( ) ) ) { atom2 = bondB2 . getBegin ( ) ; } else if ( bondA2 . contains ( bondB2 . getEnd ( ) ) ) { atom2 = bondB2 . getEnd ( ) ; } if ( atom1 != null && atom2 != null ) { // well , this looks fishy : the atom2 is not always atom IQueryAtom !
return ( ( IQueryAtom ) atom2 ) . matches ( atom1 ) ; } else { return atom1 == null && atom2 == null ; } |
public class DbxClientV1 { /** * Same as { @ link # getMetadataWithChildren } except instead of always returning a list of
* { @ link DbxEntry } objects , you specify a { @ link Collector } that processes the { @ link DbxEntry }
* objects one by one and aggregates them however you want .
* This allows your to process the { @ link DbxEntry } values as they arrive , instead of having to
* wait for the entire API call to finish before processing the first one . Be careful , though ,
* because the API call may fail in the middle ( after you ' ve already processed some entries ) .
* Make sure your code can handle that situation . For example , if you ' re inserting stuff into a
* database as they arrive , you might want do everything in a transaction and commit only if
* the entire call succeeds . */
public < C > DbxEntry . /* @ Nullable */
WithChildrenC < C > getMetadataWithChildrenC ( String path , boolean includeMediaInfo , final Collector < DbxEntry , ? extends C > collector ) throws DbxException { } } | return getMetadataWithChildrenBase ( path , includeMediaInfo , new DbxEntry . WithChildrenC . ReaderMaybeDeleted < C > ( collector ) ) ; |
public class KeybindComponent { /** * Creates a keybind component with content , and optional color and decorations .
* @ param keybind the keybind
* @ param color the color
* @ param decorations the decorations
* @ return the keybind component */
public static KeybindComponent of ( final @ NonNull String keybind , final @ Nullable TextColor color , final @ NonNull Set < TextDecoration > decorations ) { } } | return builder ( keybind ) . color ( color ) . decorations ( decorations , true ) . build ( ) ; |
public class IoUtil {

    /**
     * Converts an {@link InputStream} to a {@link String}, reading it line by
     * line. When {@code trim} is false each line is followed by a single
     * {@code \n} (including the last one).
     *
     * @param inputStream the {@link InputStream} to convert
     * @param trim        if true, whitespace is trimmed from each line and no
     *                    newlines are emitted
     * @return the resulting {@link String}
     * @throws IOException if reading fails
     */
    private static String getStringFromInputStream(InputStream inputStream, boolean trim) throws IOException {
        StringBuilder stringBuilder = new StringBuilder();
        // NOTE(review): no explicit charset is given, so the platform default
        // is used (as in the original) — confirm whether UTF-8 should be forced.
        // try-with-resources replaces the manual finally/closeSilently pattern
        // and guarantees the reader is closed even on error.
        try (BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(inputStream))) {
            String line;
            while ((line = bufferedReader.readLine()) != null) {
                if (trim) {
                    stringBuilder.append(line.trim());
                } else {
                    stringBuilder.append(line).append("\n");
                }
            }
        }
        return stringBuilder.toString();
    }
}
public class FactoryDescribePointAlgs { /** * todo remove filterBlur for all BRIEF change to radius , sigma , type */
public static < T extends ImageGray < T > > DescribePointBriefSO < T > briefso ( BinaryCompareDefinition_I32 definition , BlurFilter < T > filterBlur ) { } } | Class < T > imageType = filterBlur . getInputType ( ) . getImageClass ( ) ; InterpolatePixelS < T > interp = FactoryInterpolation . bilinearPixelS ( imageType , BorderType . EXTENDED ) ; return new DescribePointBriefSO < > ( definition , filterBlur , interp ) ; |
public class ssl_key { /** * < pre >
* Converts API response of bulk operation into object and returns the object array in case of get request .
* < / pre > */
protected base_resource [ ] get_nitro_bulk_response ( nitro_service service , String response ) throws Exception { } } | ssl_key_responses result = ( ssl_key_responses ) service . get_payload_formatter ( ) . string_to_resource ( ssl_key_responses . class , response ) ; if ( result . errorcode != 0 ) { if ( result . errorcode == SESSION_NOT_EXISTS ) service . clear_session ( ) ; throw new nitro_exception ( result . message , result . errorcode , ( base_response [ ] ) result . ssl_key_response_array ) ; } ssl_key [ ] result_ssl_key = new ssl_key [ result . ssl_key_response_array . length ] ; for ( int i = 0 ; i < result . ssl_key_response_array . length ; i ++ ) { result_ssl_key [ i ] = result . ssl_key_response_array [ i ] . ssl_key [ 0 ] ; } return result_ssl_key ; |
public class JobDetail { /** * Additional parameters passed to the job that replace parameter substitution placeholders or override any
* corresponding parameter defaults from the job definition .
* @ param parameters
* Additional parameters passed to the job that replace parameter substitution placeholders or override any
* corresponding parameter defaults from the job definition .
* @ return Returns a reference to this object so that method calls can be chained together . */
public JobDetail withParameters ( java . util . Map < String , String > parameters ) { } } | setParameters ( parameters ) ; return this ; |
public class PartitionManagerImpl { /** * Return the entries assigned for a node into a partition .
* The returned entries are represented by a Map < String , List < String > > where :
* - key is a tenantId
* - value is a List of triggersId assigned to the tenant
* @ param partition the partition used internally
* @ param node the node to filter
* @ return a map representing the entries for given node */
public Map < String , List < String > > getNodePartition ( Map < PartitionEntry , Integer > partition , Integer node ) { } } | Map < String , List < String > > nodePartition = new HashMap < > ( ) ; if ( partition != null ) { for ( Entry < PartitionEntry , Integer > entry : partition . entrySet ( ) ) { if ( entry . getValue ( ) . equals ( node ) ) { add ( nodePartition , entry . getKey ( ) ) ; } } } return nodePartition ; |
public class GetConsoleOutputRequest { /** * This method is intended for internal use only . Returns the marshaled request configured with additional
* parameters to enable operation dry - run . */
@ Override public Request < GetConsoleOutputRequest > getDryRunRequest ( ) { } } | Request < GetConsoleOutputRequest > request = new GetConsoleOutputRequestMarshaller ( ) . marshall ( this ) ; request . addParameter ( "DryRun" , Boolean . toString ( true ) ) ; return request ; |
public class IntegerMapper { /** * { @ inheritDoc } */
@ Override protected Integer doBase ( String name , Object value ) { } } | if ( value instanceof Number ) { return ( ( Number ) value ) . intValue ( ) ; } else if ( value instanceof Date ) { return SimpleDateSerializer . timeInMillisToDay ( ( ( Date ) value ) . getTime ( ) ) ; } else if ( value instanceof String ) { try { return Double . valueOf ( ( String ) value ) . intValue ( ) ; } catch ( NumberFormatException e ) { throw new IndexException ( "Field '{}' with value '{}' can not be parsed as integer" , name , value ) ; } } throw new IndexException ( "Field '{}' requires an integer, but found '{}'" , name , value ) ; |
public class DataTree { /** * this method sets up the path trie and sets up stats for quota nodes */
private void setupQuota ( ) { } } | String quotaPath = Quotas . quotaZookeeper ; DataNode node = getNode ( quotaPath ) ; if ( node == null ) { return ; } traverseNode ( quotaPath ) ; |
public class DescribeWorkspacesRequest { /** * The identifiers of the WorkSpaces . You cannot combine this parameter with any other filter .
* Because the < a > CreateWorkspaces < / a > operation is asynchronous , the identifier it returns is not immediately
* available . If you immediately call < a > DescribeWorkspaces < / a > with this identifier , no information is returned .
* @ param workspaceIds
* The identifiers of the WorkSpaces . You cannot combine this parameter with any other filter . < / p >
* Because the < a > CreateWorkspaces < / a > operation is asynchronous , the identifier it returns is not
* immediately available . If you immediately call < a > DescribeWorkspaces < / a > with this identifier , no
* information is returned . */
public void setWorkspaceIds ( java . util . Collection < String > workspaceIds ) { } } | if ( workspaceIds == null ) { this . workspaceIds = null ; return ; } this . workspaceIds = new com . amazonaws . internal . SdkInternalList < String > ( workspaceIds ) ; |
public class SummonerTeamService { /** * Invite a player to the target team
* @ param summonerId The id of the player
* @ param teamId The id of the team
* @ return The new team state */
public Team invitePlayer ( long summonerId , TeamId teamId ) { } } | return client . sendRpcAndWait ( SERVICE , "invitePlayer" , summonerId , teamId ) ; |
public class MAPParameterFactoryImpl { /** * ( non - Javadoc )
* @ see org . restcomm . protocols . ss7 . map . api . MAPParameterFactory # createUnstructuredSSNotifyRequestIndication ( byte ,
* org . restcomm . protocols . ss7 . map . api . primitives . USSDString ,
* org . restcomm . protocols . ss7 . map . api . primitives . AlertingPattern ,
* org . restcomm . protocols . ss7 . map . api . primitives . ISDNAddressString ) */
public UnstructuredSSNotifyRequest createUnstructuredSSNotifyRequestIndication ( CBSDataCodingScheme ussdDataCodingSch , USSDString ussdString , AlertingPattern alertingPattern , ISDNAddressString msisdnAddressString ) { } } | UnstructuredSSNotifyRequest request = new UnstructuredSSNotifyRequestImpl ( ussdDataCodingSch , ussdString , alertingPattern , msisdnAddressString ) ; return request ; |
public class BallView { /** * Animates this view ' s node back to its natural scale ( 1 , 1 ) by playing a single 400 ms , non - reversing scale transition . */
public void resetScale ( ) { } } | ScaleTransitionBuilder . create ( ) . duration ( Duration . millis ( 400 ) ) . node ( node ( ) ) . toX ( 1f ) . toY ( 1f ) . cycleCount ( 1 ) . autoReverse ( false ) . build ( ) . play ( ) ;
public class RoleAssignmentsInner { /** * Get the specified role assignment . This is the synchronous variant : it blocks on the
 * async service call and unwraps the response body .
 * @ param scope The scope of the role assignment .
 * @ param roleAssignmentName The name of the role assignment to get .
 * @ throws IllegalArgumentException thrown if parameters fail the validation
 * @ throws CloudException thrown if the request is rejected by server
 * @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
 * @ return the RoleAssignmentInner object if successful . */
public RoleAssignmentInner get ( String scope , String roleAssignmentName ) { } } | return getWithServiceResponseAsync ( scope , roleAssignmentName ) . toBlocking ( ) . single ( ) . body ( ) ;
public class AbstractSortFilterDataProvider { /** * { @ inheritDoc } */
@ Override public Iterator < ? extends T > iterator ( final long first , final long count ) { } } | List < T > ret = new ArrayList < > ( filter ( sort ( ) ) ) ; if ( ret . size ( ) > ( first + count ) ) { ret = ret . subList ( ( int ) first , ( int ) first + ( int ) count ) ; } else { ret = ret . subList ( ( int ) first , ret . size ( ) ) ; } return ret . iterator ( ) ; |
public class Utils {

    /**
     * Parse the given string and return a map of the whitespace-delimited tokens it
     * contains to the number of occurrences of each token. Only whitespace characters
     * (SP, CR, LF, TAB, FF, VT) are used to delimit tokens. Tokens are down-cased
     * before counting.
     *
     * @param string String to be tokenized.
     * @return Map of lower-cased tokens to occurrence counts.
     */
    public static Map<String, AtomicInteger> tokenize(String string) {
        Map<String, AtomicInteger> result = new HashMap<>();
        // "\\s+" collapses runs of whitespace; the original "\\s" split on a single
        // whitespace character and produced spurious empty tokens between delimiters.
        for (String token : string.split("\\s+")) {
            if (token.isEmpty()) {
                // split() still yields one leading empty token when the input
                // starts with whitespace (or is empty).
                continue;
            }
            String tokenDown = token.toLowerCase();
            AtomicInteger count = result.get(tokenDown);
            if (count == null) {
                result.put(tokenDown, new AtomicInteger(1));
            } else {
                count.incrementAndGet();
            }
        }
        return result;
    }
}
public class LBiObjLongPredicateBuilder { /** * One of ways of creating builder . This might be the only way ( considering all _ functional _ builders ) that might be utilize to specify generic params only once . */
@ Nonnull public static < T1 , T2 > LBiObjLongPredicateBuilder < T1 , T2 > biObjLongPredicate ( Consumer < LBiObjLongPredicate < T1 , T2 > > consumer ) { } } | return new LBiObjLongPredicateBuilder ( consumer ) ; |
public class CSL {

    /**
     * Checks if the given string contains the serialized XML representation of a
     * style, i.e. whether its first non-whitespace character is {@code '<'}.
     *
     * @param style the string to examine
     * @return true if the string looks like XML, false otherwise (including when it
     *         is empty or all whitespace)
     */
    private boolean isStyle(String style) {
        for (char c : style.toCharArray()) {
            if (Character.isWhitespace(c)) {
                continue;
            }
            // The first significant character decides: XML starts with '<'.
            return c == '<';
        }
        // Nothing but whitespace: not an XML style.
        return false;
    }
}
public class VoltNetwork { /** * Register a channel with the selector and create a Connection that will pass incoming events
 * to the provided handler . The channel is switched to non - blocking mode here , while the actual
 * selector registration runs as a queued task on the network thread ; this call blocks until that
 * task completes .
 * @ param channel the socket channel to register ( configured non - blocking with keep - alive )
 * @ param handler receives the incoming events for this connection
 * @ param interestOps initial selector interest ops for the channel
 * @ param dns policy deciding whether / how the remote hostname is resolved
 * @ param cipherService executor used for TLS work - presumably may be null , confirm with VoltPortFactory
 * @ param sslEngine TLS engine for the connection - presumably may be null for plaintext , confirm
 * @ return the registered Connection
 * @ throws IOException if the registration task fails for any reason ( the original cause is wrapped ) */
Connection registerChannel ( final SocketChannel channel , final InputHandler handler , final int interestOps , final ReverseDNSPolicy dns , final CipherExecutor cipherService , final SSLEngine sslEngine ) throws IOException { } } | synchronized ( channel . blockingLock ( ) ) { channel . configureBlocking ( false ) ; channel . socket ( ) . setKeepAlive ( true ) ; } Callable < Connection > registerTask = new Callable < Connection > ( ) { @ Override public Connection call ( ) throws Exception { final VoltPort port = VoltPortFactory . createVoltPort ( channel , VoltNetwork . this , handler , ( InetSocketAddress ) channel . socket ( ) . getRemoteSocketAddress ( ) , m_pool , cipherService , sslEngine ) ; port . registering ( ) ; /* * This means we are used by a client . No need to wait then , trigger
 * the reverse DNS lookup now . */
if ( dns != ReverseDNSPolicy . NONE ) { port . resolveHostname ( dns == ReverseDNSPolicy . SYNCHRONOUS ) ; } try { SelectionKey key = channel . register ( m_selector , interestOps , null ) ; port . setKey ( key ) ; port . registered ( ) ; // Fix a bug witnessed on the mini where the registration lock and the selector wakeup contained
// within was not enough to prevent the selector from returning the port after it was registered ,
// but before setKey was called . Suspect a bug in the selector . wakeup ( ) or register ( ) implementation
// on the mac .
// The null check in invokeCallbacks will catch the null attachment , continue , and do the work
// next time through the selection loop
key . attach ( port ) ; return port ; } finally { // Bookkeeping happens even if register ( ) throws , so the port set stays consistent .
m_ports . add ( port ) ; m_numPorts . incrementAndGet ( ) ; } } } ; FutureTask < Connection > ft = new FutureTask < Connection > ( registerTask ) ; m_tasks . offer ( ft ) ; m_selector . wakeup ( ) ; try { return ft . get ( ) ; } catch ( Exception e ) { throw new IOException ( e ) ; }
public class LinkProperties { /** * Create a { @ link LinkProperties } object based on the latest link click .
 * Reads the latest referring params from the Branch singleton and , when the session was started by
 * a Branch link click ( " + clicked_branch_link " is true ) , copies channel / feature / stage /
 * campaign / duration / tags plus every " $ " - prefixed control parameter into a new instance .
 * Any JSON access problem is deliberately swallowed : this is a best - effort accessor that returns
 * whatever was populated before the failure ( possibly null ) .
 * @ return A { @ link LinkProperties } object based on the latest link click or a null if there is no link click registered for this session */
public static LinkProperties getReferredLinkProperties ( ) { } } | LinkProperties linkProperties = null ; Branch branchInstance = Branch . getInstance ( ) ; if ( branchInstance != null && branchInstance . getLatestReferringParams ( ) != null ) { JSONObject latestParam = branchInstance . getLatestReferringParams ( ) ; try { if ( latestParam . has ( "+clicked_branch_link" ) && latestParam . getBoolean ( "+clicked_branch_link" ) ) { linkProperties = new LinkProperties ( ) ; if ( latestParam . has ( "~channel" ) ) { linkProperties . setChannel ( latestParam . getString ( "~channel" ) ) ; } if ( latestParam . has ( "~feature" ) ) { linkProperties . setFeature ( latestParam . getString ( "~feature" ) ) ; } if ( latestParam . has ( "~stage" ) ) { linkProperties . setStage ( latestParam . getString ( "~stage" ) ) ; } if ( latestParam . has ( "~campaign" ) ) { linkProperties . setCampaign ( latestParam . getString ( "~campaign" ) ) ; } if ( latestParam . has ( "~duration" ) ) { linkProperties . setDuration ( latestParam . getInt ( "~duration" ) ) ; } if ( latestParam . has ( "$match_duration" ) ) { linkProperties . setDuration ( latestParam . getInt ( "$match_duration" ) ) ; } if ( latestParam . has ( "~tags" ) ) { JSONArray tagsArray = latestParam . getJSONArray ( "~tags" ) ; for ( int i = 0 ; i < tagsArray . length ( ) ; i ++ ) { linkProperties . addTag ( tagsArray . getString ( i ) ) ; } } Iterator < String > keys = latestParam . keys ( ) ; while ( keys . hasNext ( ) ) { String key = keys . next ( ) ; if ( key . startsWith ( "$" ) ) { linkProperties . addControlParameter ( key , latestParam . getString ( key ) ) ; } } } } catch ( Exception ignore ) { // Best - effort : malformed params must not crash the caller .
} } return linkProperties ;
public class ViewGroupMvpViewStateDelegateImpl { /** * Generates the unique ( mosby internal ) viewState id and calls { @ link
* MvpDelegateCallback # createPresenter ( ) }
* to create a new presenter instance
* @ return The new created presenter instance */
private P createViewIdAndCreatePresenter ( ) { } } | P presenter = delegateCallback . createPresenter ( ) ; if ( presenter == null ) { throw new NullPointerException ( "Presenter returned from createPresenter() is null." ) ; } if ( keepPresenterDuringScreenOrientationChange ) { Context context = delegateCallback . getContext ( ) ; mosbyViewId = UUID . randomUUID ( ) . toString ( ) ; PresenterManager . putPresenter ( PresenterManager . getActivity ( context ) , mosbyViewId , presenter ) ; } return presenter ; |
public class LdapTemplate { /** * { @ inheritDoc } */
@ Override public < T > List < T > search ( Name base , String filter , SearchControls controls , ContextMapper < T > mapper , DirContextProcessor processor ) { } } | assureReturnObjFlagSet ( controls ) ; ContextMapperCallbackHandler < T > handler = new ContextMapperCallbackHandler < T > ( mapper ) ; search ( base , filter , controls , handler , processor ) ; return handler . getList ( ) ; |
public class MergePath { /** * Returns all the resources matching the path , collected across every path in the merge
 * root ' s path list . Each root path is asked for its resources and the results are
 * de - duplicated while preserving encounter order .
 * NOTE ( review ) : the lookup uses the instance field _pathname while the pathName parameter is
 * forwarded to the recursive getResources call - confirm this asymmetry is intentional .
 * @ param pathName the resource path forwarded to each sub - path ' s getResources
 * @ return the de - duplicated list of matching resources */
public ArrayList < PathImpl > getResources ( String pathName ) { } } | ArrayList < PathImpl > list = new ArrayList < PathImpl > ( ) ; String pathname = _pathname ; // XXX : why was this here ?
if ( pathname . startsWith ( "/" ) ) pathname = "." + pathname ; ArrayList < PathImpl > pathList = ( ( MergePath ) _root ) . _pathList ; for ( int i = 0 ; i < pathList . size ( ) ; i ++ ) { PathImpl path = pathList . get ( i ) ; path = path . lookup ( pathname ) ; ArrayList < PathImpl > subResources = path . getResources ( pathName ) ; for ( int j = 0 ; j < subResources . size ( ) ; j ++ ) { PathImpl newPath = subResources . get ( j ) ; if ( ! list . contains ( newPath ) ) list . add ( newPath ) ; } } return list ;
public class RobotiumUtils { /** * Filters all Views not within the given set .
* @ param classSet contains all classes that are ok to pass the filter
* @ param viewList the Iterable to filter form
* @ return an ArrayList with filtered views */
public static ArrayList < View > filterViewsToSet ( Class < View > classSet [ ] , Iterable < View > viewList ) { } } | ArrayList < View > filteredViews = new ArrayList < View > ( ) ; for ( View view : viewList ) { if ( view == null ) continue ; for ( Class < View > filter : classSet ) { if ( filter . isAssignableFrom ( view . getClass ( ) ) ) { filteredViews . add ( view ) ; break ; } } } return filteredViews ; |
public class TraceSpecification { /** * This method will create a new TraceElement for the supplied spec and add it to the
 * < code > updatedSpecList < / code > . It will only do this if it can find the right trace level for
 * the supplied < code > level < / code > and the enable / disable setting parsed cleanly . Parse
 * problems are accumulated on the instance field < code > ex < / code > as a chain of
 * TraceSpecificationException rather than thrown .
 * NOTE ( review ) : the original javadoc claimed a null list " will cause the list to be created " ,
 * but the code adds to it directly - a null argument would NPE ; confirm callers always pass a list .
 * @ param updatedSpecList The list to add this spec setting to
 * @ param specString The original spec string that will be used for logging messages
 * @ param clazz The class that this spec setting applies to
 * @ param level The level to set the logging to
 * @ param enableString The setting for this spec , must be either { @ link TrLevelConstants # TRACE _ ENABLED } or { @ link TrLevelConstants # TRACE _ DISABLED }
 * @ return true if a trace level lower than INFO was enabled ( e . g . FINE , FINER , debug , entry ) */
private boolean addSpecToList ( List < TraceElement > updatedSpecList , final String specString , final String clazz , final String level , final String enableString ) { } } | TraceSpecificationException tex = null ; boolean enable = true ; final boolean enableSetting ; final String fullString = clazz + "=" + level + "=" + enableString ; String setLower = enableString . trim ( ) . toLowerCase ( ) ; if ( setLower . equals ( TrLevelConstants . TRACE_ENABLED ) || setLower . equals ( TrLevelConstants . TRACE_ON ) ) { enableSetting = true ; } else if ( setLower . equals ( TrLevelConstants . TRACE_DISABLED ) || setLower . equals ( TrLevelConstants . TRACE_OFF ) ) { enableSetting = false ; enable = false ; } else { tex = new TraceSpecificationException ( "Unknown trace setting, must be either 'enabled' or 'disabled'" , "TRACE_STRING_BAD_ACTION" , enableString , fullString ) ; enableSetting = true ; tex . setPreviousException ( ex ) ; ex = tex ; } int found = - 1 ; int traceLevel = 0 ; int traceLevelCount = 0 ; for ( int i = 0 ; i < TrLevelConstants . traceLevels . length ; i ++ ) { final String [ ] traceLevelsRow = TrLevelConstants . traceLevels [ i ] ; for ( int j = 0 ; j < traceLevelsRow . length ; j ++ ) { if ( level . equalsIgnoreCase ( traceLevelsRow [ j ] ) ) { found = i ; traceLevel = traceLevelCount ; break ; } traceLevelCount ++ ; } if ( found >= 0 ) { break ; } } if ( found < 0 ) { tex = new TraceSpecificationException ( "Unknown trace level" , "TRACE_STRING_BAD_LEVEL" , level , fullString ) ; tex . setPreviousException ( ex ) ; ex = tex ; } else if ( tex == null ) { // Only add the specification string to the list if there was not an exception
TraceElement spec = new TraceElement ( clazz , found , traceLevel , enableSetting , fullString ) ; updatedSpecList . add ( spec ) ; // If found ( as a Logger level ) is < INFO , then we have some kind of
// detailed trace enabled for this trace spec .
return enable && found >= 0 && TrLevelConstants . levels [ found ] . intValue ( ) < Level . INFO . intValue ( ) ; } return false ;
public class Base58 { /** * Uses the checksum in the last 4 bytes of the decoded data to verify the
* rest are correct . The checksum is removed from the returned data .
* @ param input base58 encoded string
* @ return byte [ ] representation of input string if checksum matches */
public static byte [ ] decodeChecked ( String input ) { } } | byte tmp [ ] = decode ( input ) ; if ( tmp == null || tmp . length < 4 ) { return null ; } byte [ ] bytes = copyOfRange ( tmp , 0 , tmp . length - 4 ) ; byte [ ] checksum = copyOfRange ( tmp , tmp . length - 4 , tmp . length ) ; tmp = HashUtils . doubleSha256 ( bytes ) ; byte [ ] hash = copyOfRange ( tmp , 0 , 4 ) ; if ( ! Arrays . equals ( checksum , hash ) ) { return null ; } return bytes ; |
public class PendingItemAnimator { /** * Performs the move animation : cancels any running animation on the item view and , when the
 * item actually moved on an axis , animates its translation back to 0 over the move duration .
 * You do not need to override this in most cases because the default is pretty good . Any
 * listeners set on the returned animator will be overridden by the caller . * */
protected ViewPropertyAnimatorCompat animateMoveImpl ( final ViewHolder holder , int fromX , int fromY , int toX , int toY ) { } } | final View view = holder . itemView ; final int deltaX = toX - fromX ; final int deltaY = toY - fromY ; ViewCompat . animate ( view ) . cancel ( ) ; if ( deltaX != 0 ) { ViewCompat . animate ( view ) . translationX ( 0 ) ; } if ( deltaY != 0 ) { ViewCompat . animate ( view ) . translationY ( 0 ) ; } // TODO : make EndActions end listeners instead , since end actions aren ' t called when
// vpas are canceled ( and can ' t end them . why ? )
// need listener functionality in VPACompat for this . Ick .
return ViewCompat . animate ( view ) . setInterpolator ( null ) . setDuration ( getMoveDuration ( ) ) ;
public class ArrayFunctions { /** * Returned expression results in true if the array contains value . Convenience overload that
 * wraps the string expression via x ( ) and delegates to arrayContains ( Expression , Expression ) .
 * @ param expression the expression addressing the array
 * @ param value the value to look for
 * @ return the resulting containment expression */
public static Expression arrayContains ( String expression , Expression value ) { } } | return arrayContains ( x ( expression ) , value ) ;
public class RegionUtils { /** * Initializes the region metadata singleton from the given resource .
* @ param classLoader the class loader to use to load the resource
* @ param name the path to the resource
* @ throws SdkClientException on error */
@ Deprecated public static synchronized void initializeFromResource ( final ClassLoader classLoader , final String name ) { } } | try { regionMetadata = loadMetadataFromResource ( classLoader , name ) ; } catch ( IOException exception ) { throw new SdkClientException ( "Error parsing region metadata from resource " + name , exception ) ; } |
public class NFA { /** * Processes the next input event . If some of the computations reach a final state then the
 * resulting event sequences are returned . If computations time out and timeout handling is
 * activated , then the timed out event patterns are returned .
 * < p > If computations reach a stop state , the path forward is discarded and currently constructed path is returned
 * with the element that resulted in the stop state .
 * @ param sharedBufferAccessor the accessor to SharedBuffer object that we need to work upon while processing
 * @ param nfaState The NFAState object that we need to affect while processing
 * @ param event The current event to be processed or null if only pruning shall be done
 * @ param timestamp The timestamp of the current event
 * @ param afterMatchSkipStrategy The skip strategy to use after per match
 * @ param timerService gives access to processing time and time characteristic , needed for condition evaluation
 * @ return Tuple of the collection of matched patterns ( e . g . the result of computations which have
 * reached a final state ) and the collection of timed out patterns ( if timeout handling is
 * activated )
 * @ throws Exception Thrown if the system cannot access the state . */
public Collection < Map < String , List < T > > > process ( final SharedBufferAccessor < T > sharedBufferAccessor , final NFAState nfaState , final T event , final long timestamp , final AfterMatchSkipStrategy afterMatchSkipStrategy , final TimerService timerService ) throws Exception { } } | // Wrap the event so its shared - buffer registration is released even if doProcess throws .
try ( EventWrapper eventWrapper = new EventWrapper ( event , timestamp , sharedBufferAccessor ) ) { return doProcess ( sharedBufferAccessor , nfaState , eventWrapper , afterMatchSkipStrategy , timerService ) ; }
public class MSUImpl { /** * < ! - - begin - user - doc - - >
 * Sets the value of the feature identified by featureID . For the MSU__RG feature the existing
 * repeating - group list is cleared and replaced by the new collection ; all other features are
 * delegated to the superclass . EMF - generated code : regenerate rather than hand - edit .
 * < ! - - end - user - doc - - >
 * @ generated */
@ SuppressWarnings ( "unchecked" ) @ Override public void eSet ( int featureID , Object newValue ) { } } | switch ( featureID ) { case AfplibPackage . MSU__RG : getRg ( ) . clear ( ) ; getRg ( ) . addAll ( ( Collection < ? extends MSURG > ) newValue ) ; return ; } super . eSet ( featureID , newValue ) ;
public class LssClient { /** * Pause your app stream by app name and stream name . Validates that the request and its app /
 * stream fields are present , then issues a PUT with the " pause " marker parameter .
 * @ param request The request object containing all parameters for pausing app session .
 * @ return the response
 * @ throws IllegalArgumentException if the request , app or stream is null / empty ( via the check helpers ) */
public PauseAppStreamResponse pauseAppStream ( PauseAppStreamRequest request ) { } } | checkNotNull ( request , "The parameter request should NOT be null." ) ; checkStringNotEmpty ( request . getApp ( ) , "The parameter app should NOT be null or empty string." ) ; checkStringNotEmpty ( request . getStream ( ) , "The parameter stream should NOT be null or empty string." ) ; InternalRequest internalRequest = createRequest ( HttpMethodName . PUT , request , LIVE_APP , request . getApp ( ) , LIVE_SESSION , request . getStream ( ) ) ; internalRequest . addParameter ( PAUSE , null ) ; return invokeHttpClient ( internalRequest , PauseAppStreamResponse . class ) ;
public class BlobStoreUtils { /** * Download updated blobs from potential nimbodes */
public static boolean downloadUpdatedBlob ( Map conf , BlobStore blobStore , String key , Set < NimbusInfo > nimbusInfos ) throws TTransportException { } } | NimbusClient client ; ClientBlobStore remoteBlobStore ; boolean isSuccess = false ; LOG . debug ( "Download blob NimbusInfos {}" , nimbusInfos ) ; for ( NimbusInfo nimbusInfo : nimbusInfos ) { if ( isSuccess ) { break ; } try { client = new NimbusClient ( conf , nimbusInfo . getHost ( ) , nimbusInfo . getPort ( ) , null ) ; remoteBlobStore = new NimbusBlobStore ( ) ; remoteBlobStore . setClient ( conf , client ) ; isSuccess = updateBlob ( blobStore , key , remoteBlobStore . getBlob ( key ) ) ; } catch ( IOException exception ) { throw new RuntimeException ( exception ) ; } catch ( KeyNotFoundException knf ) { // Catching and logging KeyNotFoundException because , if
// there is a subsequent update and delete , the non - leader
// nimbodes might throw an exception .
LOG . info ( "KeyNotFoundException {}" , knf ) ; } catch ( Exception exp ) { // Logging an exception while client is connecting
LOG . error ( "Exception {}" , exp ) ; } } if ( ! isSuccess ) { LOG . error ( "Could not update the blob with key" + key ) ; } return isSuccess ; |
public class CacheClear { /** * FUTURE remove , only exist for code in Lucee archives using that function .
 * Delegates to _call with a null cache - connection name , i . e . operates on the default cache .
 * @ param pc the current page context
 * @ param strFilter filter selecting which cache entries to clear
 * @ return the result of the underlying _call - presumably the number of cleared entries , confirm against _call
 * @ throws PageException when the underlying clear operation fails */
public static double call ( PageContext pc , String strFilter ) throws PageException { } } | return _call ( pc , strFilter , null ) ;
public class ExecutorServiceUtils {

    /**
     * Creates a scheduled thread pool whose threads all have the daemon flag set.
     * This allows the program to quit without explicitly calling shutdown on the
     * pool.
     *
     * @param corePoolSize the number of threads to keep in the pool, even if they are idle
     * @return a newly created scheduled thread pool backed by daemon threads
     */
    public static ScheduledExecutorService newScheduledDaemonThreadPool(int corePoolSize) {
        final ThreadFactory delegate = Executors.defaultThreadFactory();
        // Decorate the default factory: identical threads, but marked daemon so
        // they never keep the JVM alive.
        ThreadFactory daemonFactory = new ThreadFactory() {
            @Override
            public Thread newThread(Runnable task) {
                Thread thread = delegate.newThread(task);
                thread.setDaemon(true);
                return thread;
            }
        };
        return Executors.newScheduledThreadPool(corePoolSize, daemonFactory);
    }
}
public class ObjectFactory { /** * Create an instance of { @ link JAXBElement } { @ code < } { @ link IdentifierType } { @ code > } }
 * for the " secretary " element in the http : / / www . ibm . com / websphere / wim namespace .
 * @ param value the identifier payload to wrap
 * @ return the wrapped JAXB element ( declared with no scope class ) */
@ XmlElementDecl ( namespace = "http://www.ibm.com/websphere/wim" , name = "secretary" ) public JAXBElement < IdentifierType > createSecretary ( IdentifierType value ) { } } | return new JAXBElement < IdentifierType > ( _Secretary_QNAME , IdentifierType . class , null , value ) ;
public class PropertiesComparisonRule { /** * Validates a given value against this rule .
* @ param path a dot notation path to the value .
* @ param schema a schema this rule is called from
* @ param value a value to be validated .
* @ param results a list with validation results to add new results . */
public void validate ( String path , Schema schema , Object value , List < ValidationResult > results ) { } } | String name = path != null ? path : "value" ; Object value1 = ObjectReader . getProperty ( value , _property1 ) ; Object value2 = ObjectReader . getProperty ( value , _property2 ) ; if ( ! ObjectComparator . compare ( value1 , _operation , value2 ) ) { results . add ( new ValidationResult ( path , ValidationResultType . Error , "PROPERTIES_NOT_MATCH" , name + " must have " + _property1 + " " + _operation + " " + _property2 , value2 , value1 ) ) ; } |
public class CmsEditorFrameset { /** * Returns the editor title . < p >
* @ return the editor title */
public String getParamEditorTitle ( ) { } } | if ( CmsStringUtil . isEmpty ( m_paramEditorTitle ) ) { return key ( Messages . GUI_EDITOR_TITLE_PREFIX_0 ) + " " + getParamResource ( ) ; } return m_paramEditorTitle ; |
public class KbTypeConflictException { /** * Converts a Throwable to a KbTypeConflictException . If the Throwable is a
* KbTypeConflictException , it will be passed through unmodified ; otherwise , it will be wrapped
* in a new KbTypeConflictException .
* @ param cause the Throwable to convert
* @ return a KbTypeConflictException */
public static KbTypeConflictException fromThrowable ( Throwable cause ) { } } | return ( cause instanceof KbTypeConflictException ) ? ( KbTypeConflictException ) cause : new KbTypeConflictException ( cause ) ; |
public class XMemberFeatureCallImpl { /** * < ! - - begin - user - doc - - >
 * Removes the inverse reference for the given feature : the member call target is unset via
 * basicSetMemberCallTarget , member call arguments are removed from their containment list , and
 * any other feature is delegated to the superclass . EMF - generated code : regenerate instead of
 * editing by hand .
 * < ! - - end - user - doc - - >
 * @ generated */
@ Override public NotificationChain eInverseRemove ( InternalEObject otherEnd , int featureID , NotificationChain msgs ) { } } | switch ( featureID ) { case XbasePackage . XMEMBER_FEATURE_CALL__MEMBER_CALL_TARGET : return basicSetMemberCallTarget ( null , msgs ) ; case XbasePackage . XMEMBER_FEATURE_CALL__MEMBER_CALL_ARGUMENTS : return ( ( InternalEList < ? > ) getMemberCallArguments ( ) ) . basicRemove ( otherEnd , msgs ) ; } return super . eInverseRemove ( otherEnd , featureID , msgs ) ;
public class ConfigObject { /** * Overrides the default getProperty implementation to create nested ConfigObject instances on demand
* for non - existent keys */
public Object getProperty ( String name ) { } } | if ( "configFile" . equals ( name ) ) return this . configFile ; if ( ! containsKey ( name ) ) { ConfigObject prop = new ConfigObject ( this . configFile ) ; put ( name , prop ) ; return prop ; } return get ( name ) ; |
public class Field { /** * Returns a builder for a Field object with given name and type .
 * @ param name the field name
 * @ param type the legacy SQL type of the field
 * @ param subFields nested fields - presumably only meaningful for record types , confirm with setType
 * @ return a new Builder preconfigured with the name and type */
public static Builder newBuilder ( String name , LegacySQLTypeName type , Field ... subFields ) { } } | return new Builder ( ) . setName ( name ) . setType ( type , subFields ) ;
public class AsciiTable { /** * Sets the HTML entity translator for all cells in the table .
* It will also remove any other translator set .
* Nothing will happen if the argument is null .
* @ param htmlElementTranslator translator
* @ return this to allow chaining */
public AsciiTable setHtmlElementTranslator ( HtmlElementTranslator htmlElementTranslator ) { } } | for ( AT_Row row : this . rows ) { if ( row . getType ( ) == TableRowType . CONTENT ) { row . setHtmlElementTranslator ( htmlElementTranslator ) ; } } return this ; |
public class Query { /** * Verifies if the passed expression is true for the JsonNode
* @ param expr
* Comparison expression to be evaluated
* @ return returns if the expression is true for the JsonNode */
public boolean is ( ComparisonExpression expr ) { } } | try { if ( expr != null ) { return expr . evaluate ( node ) ; } } catch ( MissingNodeException e ) { return false ; } return false ; |
public class Sentence { /** * Translates the part - of - speech label of every word according to the mapping table specified
 * by PartOfSpeechTagDictionary ; for compound words , the labels of the inner words are translated too .
 * @ return this sentence , for chaining */
public Sentence translateLabels ( ) { } } | for ( IWord word : wordList ) { word . setLabel ( PartOfSpeechTagDictionary . translate ( word . getLabel ( ) ) ) ; if ( word instanceof CompoundWord ) { for ( Word child : ( ( CompoundWord ) word ) . innerList ) { child . setLabel ( PartOfSpeechTagDictionary . translate ( child . getLabel ( ) ) ) ; } } } return this ;
public class CmsUserTable { /** * Initializes the user table : builds the context menu and the indexed container with one
 * property per TableProperty , configures column widths / selection / visible columns , fills the
 * container , and wires the click listener , cell style generator , generated columns ( status ,
 * last login , created ) and the tooltip generator .
 * @ param showAll boolean forwarded to fillContainer ; presumably whether users from other OUs are included - confirm there */
protected void init ( boolean showAll ) { } } | m_menu = new CmsContextMenu ( ) ; m_menu . setAsTableContextMenu ( this ) ; m_container = new IndexedContainer ( ) ; for ( TableProperty prop : TableProperty . values ( ) ) { m_container . addContainerProperty ( prop , prop . getType ( ) , prop . getDefault ( ) ) ; setColumnHeader ( prop , prop . getName ( ) ) ; } m_app . addUserContainerProperties ( m_container ) ; setContainerDataSource ( m_container ) ; setItemIconPropertyId ( TableProperty . Icon ) ; setRowHeaderMode ( RowHeaderMode . ICON_ONLY ) ; setColumnWidth ( null , 40 ) ; setColumnWidth ( TableProperty . STATUS , 100 ) ; setSelectable ( true ) ; setMultiSelect ( true ) ; setVisibleColumns ( TableProperty . Name , TableProperty . OU ) ; fillContainer ( showAll ) ;
// Right click ( or click outside a property ) opens the context menu ; left click on the name column opens the info dialog .
addItemClickListener ( new ItemClickListener ( ) { private static final long serialVersionUID = 4807195510202231174L ; @ SuppressWarnings ( "unchecked" ) public void itemClick ( ItemClickEvent event ) { changeValueIfNotMultiSelect ( event . getItemId ( ) ) ; if ( event . getButton ( ) . equals ( MouseButton . RIGHT ) || ( event . getPropertyId ( ) == null ) ) { Set < String > userIds = new HashSet < String > ( ) ; for ( CmsUser user : ( Set < CmsUser > ) getValue ( ) ) { userIds . add ( user . getId ( ) . getStringValue ( ) ) ; } m_menu . setEntries ( getMenuEntries ( ) , userIds ) ; m_menu . openForTable ( event , event . getItemId ( ) , event . getPropertyId ( ) , CmsUserTable . this ) ; } else if ( event . getButton ( ) . equals ( MouseButton . LEFT ) && TableProperty . Name . equals ( event . getPropertyId ( ) ) ) { CmsUser user = ( ( Set < CmsUser > ) getValue ( ) ) . iterator ( ) . next ( ) ; try { openInfoDialog ( user . getId ( ) ) ; } catch ( CmsException e ) { LOG . error ( "Error on opening user info dialog" , e ) ; } } } } ) ;
// Cell styles : colored status box , grey - out for other - OU / indirect users , hover style on the name column .
setCellStyleGenerator ( new CellStyleGenerator ( ) { private static final long serialVersionUID = 4685652851810828147L ; public String getStyle ( Table source , Object itemId , Object propertyId ) { if ( TableProperty . STATUS . equals ( propertyId ) ) { return getStatusStyleForItem ( source . getItem ( itemId ) , ( CmsUser ) itemId ) ; } String css = " " ; if ( ( ( Boolean ) ( source . getItem ( itemId ) . getItemProperty ( TableProperty . FROMOTHEROU ) . getValue ( ) ) ) . booleanValue ( ) ) { css += OpenCmsTheme . EXPIRED ; } if ( TableProperty . Name . equals ( propertyId ) ) { css += " " + OpenCmsTheme . HOVER_COLUMN ; } if ( ( ( Boolean ) source . getItem ( itemId ) . getItemProperty ( TableProperty . INDIRECT ) . getValue ( ) ) . booleanValue ( ) ) { return css + " " + OpenCmsTheme . TABLE_CELL_DISABLED ; } return css . length ( ) == 1 ? null : css ; } private String getStatusStyleForItem ( Item item , CmsUser user ) { if ( ( ( Boolean ) item . getItemProperty ( TableProperty . DISABLED ) . getValue ( ) ) . booleanValue ( ) ) { return OpenCmsTheme . TABLE_COLUMN_BOX_GRAY ; } if ( ( ( Boolean ) item . getItemProperty ( TableProperty . NEWUSER ) . getValue ( ) ) . booleanValue ( ) ) { return OpenCmsTheme . TABLE_COLUMN_BOX_BLUE ; } if ( OpenCms . getLoginManager ( ) . isUserTempDisabled ( user . getName ( ) ) ) { return OpenCmsTheme . TABLE_COLUMN_BOX_RED ; } if ( isUserPasswordReset ( user ) ) { return OpenCmsTheme . TABLE_COLUMN_BOX_ORANGE ; } return OpenCmsTheme . TABLE_COLUMN_BOX_GREEN ; } } ) ;
// Generated columns : rendered status box , formatted last - login and creation dates .
addGeneratedColumn ( TableProperty . STATUS , new ColumnGenerator ( ) { private static final long serialVersionUID = - 2144476865774782965L ; public Object generateCell ( Table source , Object itemId , Object columnId ) { return getStatus ( ( CmsUser ) itemId , ( ( Boolean ) source . getItem ( itemId ) . getItemProperty ( TableProperty . DISABLED ) . getValue ( ) ) . booleanValue ( ) , ( ( Boolean ) source . getItem ( itemId ) . getItemProperty ( TableProperty . NEWUSER ) . getValue ( ) ) . booleanValue ( ) ) ; } } ) ; addGeneratedColumn ( TableProperty . LastLogin , new ColumnGenerator ( ) { private static final long serialVersionUID = - 6781906011584975559L ; public Object generateCell ( Table source , Object itemId , Object columnId ) { long lastLogin = ( ( Long ) source . getItem ( itemId ) . getItemProperty ( TableProperty . LastLogin ) . getValue ( ) ) . longValue ( ) ; return lastLogin == 0L ? CmsVaadinUtils . getMessageText ( Messages . GUI_USERMANAGEMENT_USER_NEVER_LOGGED_IN_0 ) : CmsDateUtil . getDateTime ( new Date ( lastLogin ) , DateFormat . SHORT , A_CmsUI . get ( ) . getLocale ( ) ) ; } } ) ; addGeneratedColumn ( TableProperty . Created , new ColumnGenerator ( ) { private static final long serialVersionUID = - 6781906011584975559L ; public Object generateCell ( Table source , Object itemId , Object columnId ) { long created = ( ( Long ) source . getItem ( itemId ) . getItemProperty ( TableProperty . Created ) . getValue ( ) ) . longValue ( ) ; return created == 0L ? "" : CmsDateUtil . getDateTime ( new Date ( created ) , DateFormat . SHORT , A_CmsUI . get ( ) . getLocale ( ) ) ; } } ) ;
// Tooltip for the status column explains the colored box .
setItemDescriptionGenerator ( new ItemDescriptionGenerator ( ) { private static final long serialVersionUID = 7367011213487089661L ; public String generateDescription ( Component source , Object itemId , Object propertyId ) { if ( TableProperty . STATUS . equals ( propertyId ) ) { return getStatusHelp ( ( CmsUser ) itemId , ( ( Boolean ) ( ( Table ) source ) . getItem ( itemId ) . getItemProperty ( TableProperty . DISABLED ) . getValue ( ) ) . booleanValue ( ) , ( ( Boolean ) ( ( Table ) source ) . getItem ( itemId ) . getItemProperty ( TableProperty . NEWUSER ) . getValue ( ) ) . booleanValue ( ) ) ; } return null ; } } ) ; setVisibleColumns ( TableProperty . STATUS , TableProperty . Name , TableProperty . FullName , TableProperty . OU , TableProperty . LastLogin , TableProperty . Created ) ;
public class Table { /** * Determine if the passed column is part of a unique index that has only 1 element , the passed column . */
public boolean isUnique ( String columnName ) { } } | if ( columnName == null ) { return false ; } for ( IndexHolder ih : indexHoldersByName . values ( ) ) { if ( ih . isUnique ( ) && ih . getSize ( ) == 1 ) { if ( ih . getIndexes ( ) . iterator ( ) . next ( ) . getColumnName ( ) . equalsIgnoreCase ( columnName ) ) { return true ; } } } return false ; |
public class XMLValidatingParser { /** * Parses and validates an XML resource using the given schema references .
* @ param input
* The XML input to parse and validate . It must be either an
* InputStream or a Document object .
* @ param parserConfig
* An Element
* ( { http : / / www . occamlab . com / te / parsers } XMLValidatingParser )
* containing configuration info . If it is { @ code null } or empty
* validation will be performed by using location hints in the
* input document .
* @ param logger
* The PrintWriter to log all results to
* @ return { @ code null } If any non - ignorable errors or warnings occurred ;
* otherwise the resulting Document . */
Document parse ( Object input , Element parserConfig , PrintWriter logger ) throws Exception { } } | jlogger . finer ( "Received XML resource of type " + input . getClass ( ) . getName ( ) ) ; ArrayList < Object > schemas = new ArrayList < Object > ( ) ; ArrayList < Object > dtds = new ArrayList < Object > ( ) ; schemas . addAll ( this . schemaList ) ; dtds . addAll ( this . dtdList ) ; loadSchemaLists ( parserConfig , schemas , dtds ) ; Document resultDoc = null ; ErrorHandlerImpl errHandler = new ErrorHandlerImpl ( "Parsing" , logger ) ; if ( input instanceof InputStream ) { DocumentBuilderFactory dbf = nonValidatingDBF ; DocumentBuilder db = dbf . newDocumentBuilder ( ) ; db . setErrorHandler ( errHandler ) ; try ( InputStream xmlInput = ( InputStream ) input ) { resultDoc = db . parse ( xmlInput ) ; } catch ( Exception e ) { jlogger . log ( Level . INFO , "Error parsing InputStream" , e ) ; } } else if ( input instanceof Document ) { resultDoc = ( Document ) input ; } else { throw new IllegalArgumentException ( "XML input must be an InputStream or a Document object." ) ; } if ( null == resultDoc ) { throw new RuntimeException ( "Failed to parse input: " + input . getClass ( ) . getName ( ) ) ; } errHandler . setRole ( "Validation" ) ; if ( null == resultDoc . getDoctype ( ) && dtds . isEmpty ( ) ) { validateAgainstXMLSchemaList ( resultDoc , schemas , errHandler ) ; } else { validateAgainstDTDList ( resultDoc , dtds , errHandler ) ; } int error_count = errHandler . getErrorCount ( ) ; int warning_count = errHandler . getWarningCount ( ) ; if ( error_count > 0 || warning_count > 0 ) { String msg = "" ; if ( error_count > 0 ) { msg += error_count + " validation error" + ( error_count == 1 ? "" : "s" ) ; if ( warning_count > 0 ) msg += " and " ; } if ( warning_count > 0 ) { msg += warning_count + " warning" + ( warning_count == 1 ? "" : "s" ) ; } msg += " detected." ; logger . println ( msg ) ; } if ( error_count > 0 ) { String s = ( null != parserConfig ) ? 
parserConfig . getAttribute ( "ignoreErrors" ) : "false" ; if ( s . length ( ) == 0 || Boolean . parseBoolean ( s ) == false ) { resultDoc = null ; } } if ( warning_count > 0 ) { String s = ( null != parserConfig ) ? parserConfig . getAttribute ( "ignoreWarnings" ) : "true" ; if ( s . length ( ) > 0 && Boolean . parseBoolean ( s ) == false ) { resultDoc = null ; } } return resultDoc ; |
public class PacketCapturesInner { /** * Create and start a packet capture on the specified VM .
* @ param resourceGroupName The name of the resource group .
* @ param networkWatcherName The name of the network watcher .
* @ param packetCaptureName The name of the packet capture session .
* @ param parameters Parameters that define the create packet capture operation .
* @ param serviceCallback the async ServiceCallback to handle successful and failed responses .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the { @ link ServiceFuture } object */
public ServiceFuture < PacketCaptureResultInner > beginCreateAsync ( String resourceGroupName , String networkWatcherName , String packetCaptureName , PacketCaptureInner parameters , final ServiceCallback < PacketCaptureResultInner > serviceCallback ) { } } | return ServiceFuture . fromResponse ( beginCreateWithServiceResponseAsync ( resourceGroupName , networkWatcherName , packetCaptureName , parameters ) , serviceCallback ) ; |
public class ApiTokenStats { /** * In case of duplicate entries , we keep only the last updated element */
private void keepLastUpdatedUnique ( ) { } } | Map < String , SingleTokenStats > temp = new HashMap < > ( ) ; this . tokenStats . forEach ( candidate -> { SingleTokenStats current = temp . get ( candidate . tokenUuid ) ; if ( current == null ) { temp . put ( candidate . tokenUuid , candidate ) ; } else { int comparison = SingleTokenStats . COMP_BY_LAST_USE_THEN_COUNTER . compare ( current , candidate ) ; if ( comparison < 0 ) { // candidate was updated more recently ( or has a bigger counter in case of perfectly equivalent dates )
temp . put ( candidate . tokenUuid , candidate ) ; } } } ) ; this . tokenStats = new ArrayList < > ( temp . values ( ) ) ; |
public class SmallMap { /** * num ranges from 1 to numMappings */
private void removeMapping ( int num ) { } } | if ( num < numMappings ) { System . arraycopy ( mappings , num * 2 , mappings , ( num - 1 ) * 2 , ( numMappings - num ) * 2 ) ; } mappings [ numMappings * 2 - 1 ] = null ; mappings [ numMappings * 2 - 2 ] = null ; numMappings -- ; |
public class Configuration { /** * Sets up configuration for reloading classes .
* @ param path The directory that contains compiled classes . This will be
* monitored for changes . */
void configureClasses ( String path ) { } } | findClassesInClasspath ( ) ; if ( StringUtils . isNotBlank ( path ) ) { // If the path is set , set up class reloading :
configureClassesReloadable ( path ) ; } packagePrefix = getValue ( PACKAGE_PREFIX ) ; classesReloadable = classesUrl != null && classesInClasspath == null ; // Communicate :
showClassesConfiguration ( ) ; |
public class AtomicBitflags { /** * Atomically remove the given flags from the current set
* @ param flags to unset
* @ return the previous value */
public int unset ( final int flags ) { } } | for ( ; ; ) { int current = _flags . get ( ) ; int newValue = current & ~ flags ; if ( _flags . compareAndSet ( current , newValue ) ) { return current ; } } |
public class Packager { /** * Return a container which holds all the boxes in the argument
* @ param boxes list of boxes to fit in a container
* @ param containers list of containers
* @ param deadline the system time in milliseconds at which the search should be aborted
* @ return index of container if match , - 1 if not */
public Container pack ( List < BoxItem > boxes , List < Container > containers , long deadline ) { } } | return pack ( boxes , containers , deadLinePredicate ( deadline ) ) ; |
public class HttpChannelConfig { /** * Check the input configuration for the flag on whether to immediately
* extract header values during the parsing stage or not .
* @ param props */
private void parseDelayedExtract ( Map < Object , Object > props ) { } } | Object value = props . get ( HttpConfigConstants . PROPNAME_EXTRACT_VALUE ) ; if ( null != value ) { this . bExtractValue = convertBoolean ( value ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEventEnabled ( ) ) { Tr . event ( tc , "Config: header value extraction is " + shouldExtractValue ( ) ) ; } } |
public class AbstractProtoRealization { /** * < p > Cleanup any localisations of the destination that require it < / p > */
public boolean cleanupPremediatedItemStreams ( ) { } } | if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "cleanupPremediatedItemStreams" ) ; // true if all localisations have been cleaned up successfully
boolean allCleanedUp = true ; // Only clean up the post mediated itemstreams when all the pre mediated
// itemstreams have been succesfully cleaned up
if ( allCleanedUp ) { // Check each localisation in turn to see if it should be deleted .
synchronized ( _postMediatedItemStreamsRequiringCleanup ) { HashMap clonedLocalisations = ( HashMap ) _postMediatedItemStreamsRequiringCleanup . clone ( ) ; // Reallocate the messages from each localisations itemstream that is in
// to - be - deleted state .
Iterator i = clonedLocalisations . keySet ( ) . iterator ( ) ; while ( i . hasNext ( ) ) { SIBUuid8 uuid = ( SIBUuid8 ) i . next ( ) ; PtoPMessageItemStream ptoPMessageItemStream = ( PtoPMessageItemStream ) _postMediatedItemStreamsRequiringCleanup . get ( uuid ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) SibTr . debug ( tc , "Removing Localisation " + uuid + " for destination " + _baseDestinationHandler . getName ( ) + " : " + _baseDestinationHandler . getUuid ( ) ) ; boolean itemStreamCleanedUp = ptoPMessageItemStream . reallocateMsgs ( ) ; // we do not remove from xmit queues if it was never added
boolean removeFromXmitPoints = ! ptoPMessageItemStream . getDeleteRequiredAtReconstitute ( ) ; if ( itemStreamCleanedUp ) { _postMediatedItemStreamsRequiringCleanup . remove ( uuid ) ; // If the queuePoint is in the " live " queuePoints set , remove it from there too
if ( removeFromXmitPoints ) _localisationManager . removeXmitQueuePoint ( uuid ) ; } if ( allCleanedUp ) { allCleanedUp = itemStreamCleanedUp ; } } } } if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "cleanupPremediatedItemStreams" , new Boolean ( allCleanedUp ) ) ; return allCleanedUp ; |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.