signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class br_broker { /** * < pre > * Use this operation to modify Unified Repeater Instance . * < / pre > */ public static br_broker update ( nitro_service client , br_broker resource ) throws Exception { } }
resource . validate ( "modify" ) ; return ( ( br_broker [ ] ) resource . update_resource ( client ) ) [ 0 ] ;
public class ResourceFormatGeneratorService {
    /**
     * Return a generator for a mime type.
     *
     * @param mimeType the MIME type string
     * @return The first matching parser for the mime type
     * @throws UnsupportedFormatException if no generators are available for the MIME type
     */
    public ResourceFormatGenerator getGeneratorForMIMEType(final String mimeType)
            throws UnsupportedFormatException {
        // Strip any parameters (e.g. "; charset=utf-8") from the MIME string.
        final int paramStart = mimeType.indexOf(";");
        final String baseType = paramStart > 0 ? mimeType.substring(0, paramStart) : mimeType;

        if (!ResourceFormatParserService.validMimeType(baseType)) {
            throw new IllegalArgumentException("Invalid MIME type: " + mimeType);
        }

        for (final ResourceFormatGenerator candidate : listGenerators()) {
            if (null == candidate.getMIMETypes()) {
                continue;
            }
            // Exact match wins immediately.
            if (candidate.getMIMETypes().contains(baseType)) {
                return candidate;
            }
            // Otherwise try wildcard subtype matching for requests like "*/subtype".
            for (final String declared : candidate.getMIMETypes()) {
                if (ResourceFormatParserService.validMimeType(declared) && baseType.startsWith("*/")) {
                    final String wantedSubtype = baseType.substring(2);
                    final String declaredSubtype = declared.substring(declared.indexOf("/") + 1);
                    if (wantedSubtype.equals(declaredSubtype)) {
                        return candidate;
                    }
                }
            }
        }
        throw new UnsupportedFormatException("No provider available to parse MIME type: " + mimeType);
    }
}
public class MCWrapper { /** * clear the handle list */ public void clearHandleList ( ) { } }
final boolean isTracingEnabled = TraceComponent . isAnyTracingEnabled ( ) ; if ( isTracingEnabled && tc . isDebugEnabled ( ) ) { Tr . debug ( this , tc , "Clear the McWrapper handlelist for the following MCWrapper: " + this ) ; } // since we know we are only in this method on a destroy or clean up // of a MCWrapper , we can double check that all the handles that this MCWrapper // owns are removed from the handlelist on thread local storage before clearing the // handlelist in the MCWrapper class . I tried to be really careful to avoid NPEs // Liberty doesn ' t have real HandleList so don ' t need to remove anything mcwHandleList . clear ( ) ;
public class Logger { /** * Log . */ public void log ( Level level , String message ) { } }
if ( level . ordinal ( ) < getLevel ( ) ) return ; logMessage ( level , message , null ) ;
public class TaskClient { /** * Retrieve pending task identified by reference name for a workflow * @ param workflowId Workflow instance id * @ param taskReferenceName reference name of the task * @ return Returns the pending workflow task identified by the reference name */ public Task getPendingTaskForWorkflow ( String workflowId , String taskReferenceName ) { } }
Preconditions . checkArgument ( StringUtils . isNotBlank ( workflowId ) , "Workflow id cannot be blank" ) ; Preconditions . checkArgument ( StringUtils . isNotBlank ( taskReferenceName ) , "Task reference name cannot be blank" ) ; TaskServicePb . PendingTaskResponse response = stub . getPendingTaskForWorkflow ( TaskServicePb . PendingTaskRequest . newBuilder ( ) . setWorkflowId ( workflowId ) . setTaskRefName ( taskReferenceName ) . build ( ) ) ; return protoMapper . fromProto ( response . getTask ( ) ) ;
public class JobmanagerInfoServlet { /** * Writes Json with a list of currently archived jobs , sorted by time * @ param wrt * @ param jobs */ private void writeJsonForArchive ( PrintWriter wrt , List < RecentJobEvent > jobs ) { } }
wrt . write ( "[" ) ; // sort jobs by time Collections . sort ( jobs , new Comparator < RecentJobEvent > ( ) { @ Override public int compare ( RecentJobEvent o1 , RecentJobEvent o2 ) { if ( o1 . getTimestamp ( ) < o2 . getTimestamp ( ) ) { return 1 ; } else { return - 1 ; } } } ) ; // Loop Jobs for ( int i = 0 ; i < jobs . size ( ) ; i ++ ) { RecentJobEvent jobEvent = jobs . get ( i ) ; // Serialize job to json wrt . write ( "{" ) ; wrt . write ( "\"jobid\": \"" + jobEvent . getJobID ( ) + "\"," ) ; wrt . write ( "\"jobname\": \"" + jobEvent . getJobName ( ) + "\"," ) ; wrt . write ( "\"status\": \"" + jobEvent . getJobStatus ( ) + "\"," ) ; wrt . write ( "\"time\": " + jobEvent . getTimestamp ( ) ) ; wrt . write ( "}" ) ; // Write seperator between json objects if ( i != jobs . size ( ) - 1 ) { wrt . write ( "," ) ; } } wrt . write ( "]" ) ;
public class DefaultValidatorMessageMixin { /** * { @ inheritDoc } */ @ Override public String lookup ( String key , Object [ ] msgValues ) { } }
String msg = lookup ( key ) ; if ( msg != null ) { msg = MessageFormat . format ( msg , msgValues ) ; } return msg ;
public class Model { /** * Find model by id and load specific columns only . * < pre > * Example : * User user = User . dao . findByIdLoadColumns ( 123 , " name , age " ) ; * < / pre > * @ param idValue the id value of the model * @ param columns the specific columns to load */ public M findByIdLoadColumns ( Object idValue , String columns ) { } }
return findByIdLoadColumns ( new Object [ ] { idValue } , columns ) ;
public class ClusterSizeMonitor {
    /**
     * Returns a listener that completes when the minimum number of workers for the cluster has been met.
     * Note: caller should not add a listener using the direct executor, as this can delay the
     * notifications for other listeners.
     */
    public synchronized ListenableFuture<?> waitForMinimumWorkers() {
        // Enough workers are already active — nothing to wait for.
        if (currentCount >= executionMinCount) {
            return immediateFuture(null);
        }
        // Register a future that will be completed when the worker count reaches the minimum.
        SettableFuture<?> future = SettableFuture.create();
        futures.add(future);

        // if future does not finish in wait period, complete with an exception
        ScheduledFuture<?> timeoutTask = executor.schedule(() -> {
            // Re-synchronize on the monitor so currentCount is read consistently
            // with the state guarded by this object's lock.
            synchronized (this) {
                future.setException(new PrestoException(GENERIC_INSUFFICIENT_RESOURCES,
                        format("Insufficient active worker nodes. Waited %s for at least %s workers, but only %s workers are active",
                                executionMaxWait, executionMinCount, currentCount)));
            }
        }, executionMaxWait.toMillis(), MILLISECONDS);

        // remove future if finished (e.g., canceled, timed out)
        future.addListener(() -> {
            // Cancel the pending timeout and deregister regardless of how the future completed.
            timeoutTask.cancel(true);
            removeFuture(future);
        }, executor);

        return future;
    }
}
public class WebContainerRequestState { /** * / * ( non - Javadoc ) * @ see javax . servlet . ServletRequest # getAttribute ( java . lang . String ) */ public Object getAttribute ( String arg0 ) { } }
// 321485 if ( _attributes == null ) return null ; if ( com . ibm . ejs . ras . TraceComponent . isAnyTracingEnabled ( ) && logger . isLoggable ( Level . FINE ) ) { // 306998.15 logger . logp ( Level . FINE , CLASS_NAME , "getAttribute" , " name --> " + arg0 ) ; } Object obj = _attributes . get ( arg0 ) ; return obj ;
public class GVRAnimation { /** * Many animations can take multiple target types : for example , * { @ link GVRMaterialAnimation material animations } can work directly with * { @ link GVRMaterial } targets , but also ' know how ' to get a * { @ link GVRMaterial } from a { @ link GVRSceneObject } . They can , of course , * just expose multiple constructors , but that makes for a combinatorial * explosion when the other parameters also ' want ' to be overloaded . This * method allows them to just take a { @ link GVRHybridObject } and throw an * exception if they get a type they can ' t handle ; it also returns the * matched type ( which may not be equal to { @ code target . getClass ( ) } ) so * that calling code doesn ' t have to do { @ code instanceof } tests . * @ param target * A { @ link GVRHybridObject } instance * @ param supported * An array of supported types * @ return The element of { @ code supported } that matched * @ throws IllegalArgumentException * If { @ code target } is not an instance of any of the * { @ code supported } types */ protected static Class < ? > checkTarget ( GVRHybridObject target , Class < ? > ... supported ) { } }
for ( Class < ? > type : supported ) { if ( type . isInstance ( target ) ) { return type ; } } // else throw new IllegalArgumentException ( ) ;
public class Term { /** * Returns a list of all terms contained by and within this term . * @ return Non - null list of terms */ public List < Term > getAllTerms ( ) { } }
List < Term > ret = new ArrayList < Term > ( ) ; if ( terms != null ) { ret . addAll ( terms ) ; for ( final Term term : terms ) { ret . addAll ( term . getAllTerms ( ) ) ; } } return ret ;
public class JMTimeUtil {
    /**
     * Gets simple date format.
     *
     * @param dateFormat the date format
     * @param zoneId the zone id
     * @return the simple date format
     */
    public static SimpleDateFormat getSimpleDateFormat(String dateFormat, String zoneId) {
        // NOTE(review): SimpleDateFormat is not thread-safe; if the cached instance
        // in simpleDateFormatMap is shared across threads this is unsafe — confirm
        // callers are single-threaded or synchronize externally.
        // NOTE(review): newSimpleDateFormatBuilder.apply(...) is evaluated eagerly
        // here, so a new formatter appears to be built even on a cache hit — confirm
        // whether JMMap.getOrPutGetNew has a lazy (Supplier-based) overload.
        return JMMap.getOrPutGetNew(simpleDateFormatMap, buildSimpleDateFormatKey(dateFormat, zoneId), newSimpleDateFormatBuilder.apply(dateFormat, zoneId));
    }
}
public class CommerceShipmentPersistenceImpl { /** * Returns all the commerce shipments where groupId = & # 63 ; . * @ param groupId the group ID * @ return the matching commerce shipments */ @ Override public List < CommerceShipment > findByGroupId ( long groupId ) { } }
return findByGroupId ( groupId , QueryUtil . ALL_POS , QueryUtil . ALL_POS , null ) ;
public class InternationalFixedDate {
    /**
     * Obtains a {@code InternationalFixedDate} representing a date in the International fixed calendar
     * system from the proleptic-year and day-of-year fields.
     * This returns a {@code InternationalFixedDate} with the specified fields.
     * The day must be valid for the year, otherwise an exception will be thrown.
     *
     * @param prolepticYear the International fixed proleptic-year
     * @param dayOfYear the International fixed day-of-year, from 1 to 366
     * @return the date in International fixed calendar system, not null
     * @throws DateTimeException if the value of any field is out of range,
     *         or if the day-of-year is invalid for the year
     */
    static InternationalFixedDate ofYearDay(int prolepticYear, int dayOfYear) {
        YEAR_RANGE.checkValidValue(prolepticYear, ChronoField.YEAR_OF_ERA);
        ChronoField.DAY_OF_YEAR.checkValidValue(dayOfYear);
        boolean isLeapYear = INSTANCE.isLeapYear(prolepticYear);
        // Last day-of-year is 365, or 366 in a leap year.
        int lastDoy = (DAYS_IN_YEAR + (isLeapYear ? 1 : 0));
        if (dayOfYear > lastDoy) {
            // Only reachable for dayOfYear == 366 in a non-leap year, since the
            // field check above already capped dayOfYear at 366.
            throw new DateTimeException("Invalid date 'DayOfYear 366' as '" + prolepticYear + "' is not a leap year");
        }
        // The final day of every year is "Year Day", represented as month 13, day 29.
        if (dayOfYear == lastDoy) {
            return new InternationalFixedDate(prolepticYear, 13, 29);
        }
        // In leap years the leap day is represented as month 6, day 29.
        if (dayOfYear == LEAP_DAY_AS_DAY_OF_YEAR && isLeapYear) {
            return new InternationalFixedDate(prolepticYear, 6, 29);
        }
        // Ordinary days: convert to a zero-based day-of-year, skipping the leap
        // day if it has already passed, then divide into 28-day months.
        int doy0 = dayOfYear - 1;
        if (dayOfYear >= LEAP_DAY_AS_DAY_OF_YEAR && isLeapYear) {
            doy0--;
        }
        int month = (doy0 / DAYS_IN_MONTH) + 1;
        int day = (doy0 % DAYS_IN_MONTH) + 1;
        return new InternationalFixedDate(prolepticYear, month, day);
    }
}
public class PluginManager { /** * Loads a plugin by the specified plugin directory and put it into the specified holder . * @ param pluginDirPath the specified plugin directory * @ param holder the specified holder * @ return loaded plugin * @ throws Exception exception */ private AbstractPlugin load ( final String pluginDirPath , final Map < String , HashSet < AbstractPlugin > > holder ) throws Exception { } }
final Properties props = new Properties ( ) ; final ServletContext servletContext = AbstractServletListener . getServletContext ( ) ; String plugin = StringUtils . substringAfter ( pluginDirPath , "/plugins" ) ; plugin = plugin . replace ( "/" , "" ) ; final File file = Latkes . getWebFile ( "/plugins/" + plugin + "/plugin.properties" ) ; props . load ( new FileInputStream ( file ) ) ; final URL defaultClassesFileDirURL = servletContext . getResource ( "/plugins/" + plugin + "classes" ) ; URL classesFileDirURL = null ; try { classesFileDirURL = servletContext . getResource ( props . getProperty ( "classesDirPath" ) ) ; } catch ( final MalformedURLException e ) { LOGGER . log ( Level . ERROR , "Reads [" + props . getProperty ( "classesDirPath" ) + "] failed" , e ) ; } final URLClassLoader classLoader = new URLClassLoader ( new URL [ ] { defaultClassesFileDirURL , classesFileDirURL } , PluginManager . class . getClassLoader ( ) ) ; classLoaders . add ( classLoader ) ; String pluginClassName = props . getProperty ( Plugin . PLUGIN_CLASS ) ; if ( StringUtils . isBlank ( pluginClassName ) ) { pluginClassName = NotInteractivePlugin . class . getName ( ) ; } final String rendererId = props . getProperty ( Plugin . PLUGIN_RENDERER_ID ) ; if ( StringUtils . isBlank ( rendererId ) ) { LOGGER . log ( Level . WARN , "no renderer defined by this plugin[" + plugin + "],this plugin will be ignore!" ) ; return null ; } final Class < ? > pluginClass = classLoader . loadClass ( pluginClassName ) ; LOGGER . log ( Level . TRACE , "Loading plugin class[name={0}]" , pluginClassName ) ; final AbstractPlugin ret = ( AbstractPlugin ) pluginClass . newInstance ( ) ; ret . setRendererId ( rendererId ) ; setPluginProps ( plugin , ret , props ) ; registerEventListeners ( props , classLoader , ret ) ; register ( ret , holder ) ; ret . changeStatus ( ) ; return ret ;
public class DocumentBuilderImpl {
    /**
     * Implements the whole parsing of the XML document. The XML pull parser is
     * actually more of a tokenizer, and we are doing a classical recursive
     * descent parsing (the method invokes itself for XML elements). Our
     * approach to parsing does accept some illegal documents (more than one
     * root element, for example). The assumption is that the DOM implementation
     * throws the proper exceptions in these cases.
     *
     * @param parser The XML pull parser we're reading from.
     * @param document The document we're building.
     * @param node The node we're currently on (initially the document itself).
     * @param endToken The token that will end this recursive call. Either
     *        XmlPullParser.END_DOCUMENT or XmlPullParser.END_TAG.
     * @throws XmlPullParserException If a parsing error occurs.
     * @throws IOException If a general IO error occurs.
     */
    private void parse(KXmlParser parser, DocumentImpl document, Node node, int endToken) throws XmlPullParserException, IOException {
        int token = parser.getEventType();
        /*
         * The main parsing loop. The precondition is that we are already on the
         * token to be processed. This holds for each iteration of the loop, so
         * the inner statements have to ensure that (in particular the recursive
         * call).
         */
        while (token != endToken && token != XmlPullParser.END_DOCUMENT) {
            if (token == XmlPullParser.PROCESSING_INSTRUCTION) {
                /*
                 * Found a processing instructions. We need to split the token
                 * text at the first whitespace character.
                 */
                String text = parser.getText();
                int dot = text.indexOf(' ');
                String target = (dot != -1 ? text.substring(0, dot) : text);
                String data = (dot != -1 ? text.substring(dot + 1) : "");
                node.appendChild(document.createProcessingInstruction(target, data));
            } else if (token == XmlPullParser.DOCDECL) {
                // Found the document type declaration; capture name and IDs.
                String name = parser.getRootElementName();
                String publicId = parser.getPublicId();
                String systemId = parser.getSystemId();
                document.appendChild(new DocumentTypeImpl(document, name, publicId, systemId));
            } else if (token == XmlPullParser.COMMENT) {
                /*
                 * Found a comment. We simply take the token text, but we only
                 * create a node if the client wants to see comments at all.
                 */
                if (!ignoreComments) {
                    node.appendChild(document.createComment(parser.getText()));
                }
            } else if (token == XmlPullParser.IGNORABLE_WHITESPACE) {
                /*
                 * Found some ignorable whitespace. We only add it if the client
                 * wants to see whitespace. Whitespace before and after the
                 * document element is always ignored.
                 */
                if (!ignoreElementContentWhitespace && document != node) {
                    appendText(document, node, token, parser.getText());
                }
            } else if (token == XmlPullParser.TEXT || token == XmlPullParser.CDSECT) {
                /*
                 * Found a piece of text (possibly encoded as a CDATA section).
                 * That's the easiest case. We simply take it and create a new text node,
                 * or merge with an adjacent text node.
                 */
                appendText(document, node, token, parser.getText());
            } else if (token == XmlPullParser.ENTITY_REF) {
                /*
                 * Found an entity reference. If an entity resolver is
                 * installed, we replace it by text (if possible). Otherwise we
                 * add an entity reference node.
                 */
                String entity = parser.getName();
                if (entityResolver != null) {
                    // TODO Implement this...
                }
                String resolved = resolvePredefinedOrCharacterEntity(entity);
                if (resolved != null) {
                    appendText(document, node, token, resolved);
                } else {
                    node.appendChild(document.createEntityReference(entity));
                }
            } else if (token == XmlPullParser.START_TAG) {
                /*
                 * Found an element start tag. We create an element node with
                 * the proper info and attributes. We then invoke parse()
                 * recursively to handle the next level of nesting. When we
                 * return from this call, we check that we are on the proper
                 * element end tag. The whole handling differs somewhat
                 * depending on whether the parser is namespace-aware or not.
                 */
                if (namespaceAware) {
                    // Collect info for element node
                    String namespace = parser.getNamespace();
                    String name = parser.getName();
                    String prefix = parser.getPrefix();
                    if ("".equals(namespace)) {
                        namespace = null;
                    }
                    // Create element node and wire it correctly
                    Element element = document.createElementNS(namespace, name);
                    element.setPrefix(prefix);
                    node.appendChild(element);
                    for (int i = 0; i < parser.getAttributeCount(); i++) {
                        // Collect info for a single attribute node
                        String attrNamespace = parser.getAttributeNamespace(i);
                        String attrPrefix = parser.getAttributePrefix(i);
                        String attrName = parser.getAttributeName(i);
                        String attrValue = parser.getAttributeValue(i);
                        if ("".equals(attrNamespace)) {
                            attrNamespace = null;
                        }
                        // Create attribute node and wire it correctly
                        Attr attr = document.createAttributeNS(attrNamespace, attrName);
                        attr.setPrefix(attrPrefix);
                        attr.setValue(attrValue);
                        element.setAttributeNodeNS(attr);
                    }
                    // Recursive descent
                    token = parser.nextToken();
                    parse(parser, document, element, XmlPullParser.END_TAG);
                    // Expect the element's end tag here
                    parser.require(XmlPullParser.END_TAG, namespace, name);
                } else {
                    // Collect info for element node
                    String name = parser.getName();
                    // Create element node and wire it correctly
                    Element element = document.createElement(name);
                    node.appendChild(element);
                    for (int i = 0; i < parser.getAttributeCount(); i++) {
                        // Collect info for a single attribute node
                        String attrName = parser.getAttributeName(i);
                        String attrValue = parser.getAttributeValue(i);
                        // Create attribute node and wire it correctly
                        Attr attr = document.createAttribute(attrName);
                        attr.setValue(attrValue);
                        element.setAttributeNode(attr);
                    }
                    // Recursive descent
                    token = parser.nextToken();
                    parse(parser, document, element, XmlPullParser.END_TAG);
                    // Expect the element's end tag here
                    parser.require(XmlPullParser.END_TAG, "", name);
                }
            }
            // Advance to the next token; the loop precondition is re-established.
            token = parser.nextToken();
        }
    }
}
public class DefaultDatabus {
    /**
     * Implements peek() or poll() based on whether claimTtl is null or non-null.
     */
    private PollResult peekOrPoll(String subscription, @Nullable Duration claimTtl, int limit) {
        int remaining = limit;
        Map<Coordinate, EventList> rawEvents = ImmutableMap.of();
        Map<Coordinate, Item> uniqueItems = Maps.newHashMap();
        // claimTtl == null distinguishes peek from poll; a positive TTL makes the
        // poll repeatable (claims are held and the outer loop may iterate).
        boolean isPeek = claimTtl == null;
        boolean repeatable = !isPeek && claimTtl.toMillis() > 0;
        boolean eventsAvailableForNextPoll = false;
        boolean noMaxPollTimeOut = true;
        int itemsDiscarded = 0;
        Meter eventMeter = isPeek ? _peekedMeter : _polledMeter;

        // Reading raw events from the event store is a significantly faster operation than resolving the events into
        // databus poll events. This is because the former sequentially loads small event references while the latter
        // requires reading and resolving the effectively random objects associated with those references from the data
        // store.
        // To make the process more efficient this method first polls for "limit" raw events from the event store.
        // Then, up to the first 10 of those raw events are resolved synchronously with a time limit of MAX_POLL_TIME.
        // Any remaining raw events are resolved lazily as the event list is consumed by the caller. This makes the
        // return time for this method faster and more predictable while supporting polls for more events than can
        // be resolved within MAX_POLL_TIME. This is especially beneficial for REST clients which may otherwise time
        // out while waiting for "limit" events to be read and resolved.

        Stopwatch stopwatch = Stopwatch.createStarted(_ticker);
        int padding = 0;
        do {
            if (remaining == 0) {
                break;  // Don't need any more events.
            }

            // Query the databus event store. Consolidate multiple events that refer to the same item.
            ConsolidatingEventSink sink = new ConsolidatingEventSink(remaining + padding);
            boolean more = isPeek ? _eventStore.peek(subscription, sink) : _eventStore.poll(subscription, claimTtl, sink);
            rawEvents = sink.getEvents();

            if (rawEvents.isEmpty()) {
                // No events to be had.
                eventsAvailableForNextPoll = more;
                break;
            }

            // Resolve the raw events in batches of 10 until at least one response item is found for a maximum time of MAX_POLL_TIME.
            do {
                int batchItemsDiscarded = resolvePeekOrPollEvents(subscription, rawEvents, Math.min(10, remaining),
                        (coord, item) -> {
                            // Check whether we've already added this piece of content to the poll result. If so, consolidate
                            // the two together to reduce the amount of work a client must do. Note that the previous item
                            // would be from a previous batch of events and it's possible that we have read two different
                            // versions of the same item of content. This will prefer the most recent.
                            Item previousItem = uniqueItems.get(coord);
                            if (previousItem != null && previousItem.consolidateWith(item)) {
                                _consolidatedMeter.mark();
                            } else {
                                // We have found a new item of content to return!
                                uniqueItems.put(coord, item);
                            }
                        });

                remaining = limit - uniqueItems.size();
                itemsDiscarded += batchItemsDiscarded;
            } while (!rawEvents.isEmpty() && remaining > 0 && stopwatch.elapsed(TimeUnit.MILLISECONDS) < MAX_POLL_TIME.toMillis());

            // There are more events for the next poll if either the event store explicitly said so or if, due to padding,
            // we got more events than "limit", in which case we're likely to unclaim at last one.
            eventsAvailableForNextPoll = more || rawEvents.size() + uniqueItems.size() > limit;

            if (!more) {
                // There are no more events to be had, so exit now
                break;
            }

            // Note: Due to redundant/unknown events, it's possible that the 'events' list is empty even though, if we
            // tried again, we'd find more events. Try again a few times, but not for more than MAX_POLL_TIME so clients
            // don't timeout the request. This helps move through large amounts of redundant deltas relatively quickly
            // while also putting a bound on the total amount of work done by a single call to poll().
            padding = 10;
        } while (repeatable && (noMaxPollTimeOut = stopwatch.elapsed(TimeUnit.MILLISECONDS) < MAX_POLL_TIME.toMillis()));

        Iterator<Event> events;
        int approximateSize;

        if (uniqueItems.isEmpty()) {
            // Either there were no raw events or all events found were for redundant or unknown changes. It's possible
            // that eventually there will be more events, but to prevent a lengthy delay iterating the remaining events
            // quit now and return an empty result. The caller can always poll again to try to pick up any more events,
            // and if necessary an async drain will kick off a few lines down to assist in clearing the redundant update
            // wasteland.
            events = Iterators.emptyIterator();
            approximateSize = 0;

            // If there are still more unresolved events claimed then unclaim them now
            if (repeatable && !rawEvents.isEmpty()) {
                unclaim(subscription, rawEvents.values());
            }
        } else if (rawEvents.isEmpty()) {
            // All events have been resolved
            events = toEvents(uniqueItems.values()).iterator();
            approximateSize = uniqueItems.size();
            eventMeter.mark(approximateSize);
        } else {
            // Return an event list which contains the first events which were resolved synchronously plus the
            // remaining events from the peek or poll which will be resolved lazily in batches of 25.
            final Map<Coordinate, EventList> deferredRawEvents = Maps.newLinkedHashMap(rawEvents);
            final int initialDeferredLimit = remaining;

            Iterator<Event> deferredEvents = new AbstractIterator<Event>() {
                private Iterator<Event> currentBatch = Iterators.emptyIterator();
                private int remaining = initialDeferredLimit;

                @Override
                protected Event computeNext() {
                    Event next = null;
                    if (currentBatch.hasNext()) {
                        next = currentBatch.next();
                    } else if (!deferredRawEvents.isEmpty() && remaining > 0) {
                        // Resolve the next batch of events
                        try {
                            final List<Item> items = Lists.newArrayList();
                            do {
                                resolvePeekOrPollEvents(subscription, deferredRawEvents, Math.min(remaining, 25),
                                        (coord, item) -> {
                                            // Unlike with the original batch the deferred batch's events are always
                                            // already de-duplicated by coordinate, so there is no need to maintain
                                            // a coordinate-to-item uniqueness map.
                                            items.add(item);
                                        });
                                if (!items.isEmpty()) {
                                    remaining -= items.size();
                                    currentBatch = toEvents(items).iterator();
                                    next = currentBatch.next();
                                }
                            } while (next == null && !deferredRawEvents.isEmpty() && remaining > 0);
                        } catch (Exception e) {
                            // Don't fail; the caller has already received some events. Just cut the result stream short
                            // now and throw back any remaining events for a future poll.
                            _log.warn("Failed to load additional events during peek/poll for subscription {}", subscription, e);
                        }
                    }
                    if (next != null) {
                        return next;
                    }
                    if (!deferredRawEvents.isEmpty()) {
                        // If we padded the number of raw events it's possible there are more than the caller actually
                        // requested. Release the extra padded events now.
                        try {
                            unclaim(subscription, deferredRawEvents.values());
                        } catch (Exception e) {
                            // Don't fail, just log a warning. The claims will eventually time out on their own.
                            _log.warn("Failed to unclaim {} events from subscription {}", deferredRawEvents.size(), subscription, e);
                        }
                    }
                    // Update the metric for the actual number of events returned
                    eventMeter.mark(limit - remaining);
                    return endOfData();
                }
            };

            events = Iterators.concat(toEvents(uniqueItems.values()).iterator(), deferredEvents);
            approximateSize = uniqueItems.size() + deferredRawEvents.size();
        }

        // Try draining the queue asynchronously if there are still more events available and more redundant events were
        // discarded than resolved items found so far.
        // Doing this only in the poll case for now.
        if (repeatable && eventsAvailableForNextPoll && itemsDiscarded > uniqueItems.size()) {
            drainQueueAsync(subscription);
        }

        return new PollResult(events, approximateSize, eventsAvailableForNextPoll);
    }
}
public class EmailGlobalSettings { /** * Enable or disable javax . mail debugging . By default debugging is disabled . * @ param bDebug * < code > true < / code > to enabled debugging , < code > false < / code > to * disable it . */ @ SuppressFBWarnings ( "LG_LOST_LOGGER_DUE_TO_WEAK_REFERENCE" ) public static void enableJavaxMailDebugging ( final boolean bDebug ) { } }
java . util . logging . Logger . getLogger ( "com.sun.mail.smtp" ) . setLevel ( bDebug ? Level . FINEST : Level . INFO ) ; java . util . logging . Logger . getLogger ( "com.sun.mail.smtp.protocol" ) . setLevel ( bDebug ? Level . FINEST : Level . INFO ) ; SystemProperties . setPropertyValue ( "mail.socket.debug" , bDebug ) ; SystemProperties . setPropertyValue ( "java.security.debug" , bDebug ? "certpath" : null ) ; SystemProperties . setPropertyValue ( "javax.net.debug" , bDebug ? "trustmanager" : null ) ;
public class MatrixIO { /** * Creates a { @ code Matrix } from the data encoded as { @ link * Format # SVDLIBC _ DENSE _ TEXT } in provided file . * @ param matrix * @ param matrixType * @ return a matrix whose data was specified by the provided file */ private static Matrix readDenseSVDLIBCtext ( File matrix , Type matrixType , boolean transposeOnRead ) throws IOException { } }
BufferedReader br = new BufferedReader ( new FileReader ( matrix ) ) ; // Note that according to the formatting , spaces and new lines are // equivalent . Therefore , someone could just print all of the matrix // values on a single line . int rows = - 1 ; int cols = - 1 ; int valuesSeen = 0 ; // REMINDER : possibly use on disk if the matrix is too big Matrix m = null ; for ( String line = null ; ( line = br . readLine ( ) ) != null ; ) { String [ ] vals = line . split ( "\\s+" ) ; for ( int i = 0 ; i < vals . length ; ++ i ) { // rows is specified first if ( rows == - 1 ) { rows = Integer . parseInt ( vals [ i ] ) ; } // cols will be second else if ( cols == - 1 ) { cols = Integer . parseInt ( vals [ i ] ) ; // once both rows and cols have been assigned , create the // matrix m = ( transposeOnRead ) ? Matrices . create ( cols , rows , matrixType ) : Matrices . create ( rows , cols , matrixType ) ; MATRIX_IO_LOGGER . log ( Level . FINE , "created matrix of size {0} x {1}" , new Object [ ] { Integer . valueOf ( rows ) , Integer . valueOf ( cols ) } ) ; } else { int row = valuesSeen / cols ; int col = valuesSeen % cols ; // NOTE : SVDLIBC doesn ' t seem to capitalize its NaN values , // which causes Java ' s double parsing code to break . We // don ' t really expect this case to happen but it seems to // when passed the results of calling SVD on a degenerate // matrix , so we check so that the code " just works " . double val = ( vals [ i ] . equals ( "nan" ) ) ? Double . NaN : Double . parseDouble ( vals [ i ] ) ; if ( transposeOnRead ) m . set ( col , row , val ) ; else m . set ( row , col , val ) ; // increment the number of values seen to properly set the // next index of the matrix ++ valuesSeen ; } } } br . close ( ) ; return m ;
public class SerDeserializers {
    /**
     * parses and registers the classes
     */
    private static void registerFromClasspath(String beanName, String deserName, Map<Class<?>, SerDeserializer> map) throws Exception {
        // Resolve the bean class (must extend Bean) and its deserializer class by name.
        Class<? extends Bean> beanClass = Class.forName(beanName).asSubclass(Bean.class);
        Class<?> deserClass = Class.forName(deserName);
        Field field = null;
        SerDeserializer deser;
        try {
            // Preferred convention: a static DESERIALIZER field on the deserializer class.
            field = deserClass.getDeclaredField("DESERIALIZER");
            if (!Modifier.isStatic(field.getModifiers())) {
                throw new IllegalStateException("Field " + field + " must be static");
            }
            deser = SerDeserializer.class.cast(field.get(null));
        } catch (NoSuchFieldException ex) {
            // Fallback convention: a no-arg constructor.
            Constructor<?> cons = null;
            try {
                cons = deserClass.getConstructor();
                deser = SerDeserializer.class.cast(cons.newInstance());
            } catch (NoSuchMethodException ex2) {
                // Neither convention is satisfied — fail loudly.
                throw new IllegalStateException("Class " + deserClass.getName() + " must have field DESERIALIZER or a no-arg constructor");
            } catch (IllegalAccessException ex2) {
                // Constructor exists but is not accessible; force access and retry.
                cons.setAccessible(true);
                deser = SerDeserializer.class.cast(cons.newInstance());
            }
        } catch (IllegalAccessException ex) {
            // DESERIALIZER field exists but is not accessible; force access and retry.
            field.setAccessible(true);
            deser = SerDeserializer.class.cast(field.get(null));
        }
        map.put(beanClass, deser);
    }
}
public class ntpserver { /** * Use this API to delete ntpserver of given name . */ public static base_response delete ( nitro_service client , String serverip ) throws Exception { } }
ntpserver deleteresource = new ntpserver ( ) ; deleteresource . serverip = serverip ; return deleteresource . delete_resource ( client ) ;
public class GBSIterator { /** * Find the next key in the index . * < p > If _ eof is false the iterator has not reached the end of the * index and we can move forward in the normal way . But if _ eof is * true then the iterator has hit the end of the index and has lost * its position . In this case we have to search the index for a key * that is greater than the last one returned iff the index has * changed since the last call to next ( ) . If the index has not * changed then no keys have been added and the iterator is still * stuck at the end of the index . < / p > * < p > On return the information about the found entry ( if any ) is * stored in _ current1 . < / p > * @ param stack The stack to use to record the traversal . */ private void findNext ( DeleteStack stack ) { } }
if ( _last1 . key ( ) == null ) throw new RuntimeException ( "Help! In findNext(), _last1.key() == null" ) ; if ( ! _eof ) findNextBeforeEof ( stack ) ; else findNextAfterEof ( stack ) ;
public class WebSocketSerializer { /** * Deserialize the Response Json String . * @ param jsonStr * The Response Json String . * @ return * the ResponseDeserializer . * @ throws JsonParseException * @ throws JsonMappingException * @ throws IOException */ public static ResponseDeserializer getResponseDeserializer ( String jsonStr ) throws JsonParseException , JsonMappingException , IOException { } }
final ResponsePair p = mapper . readValue ( jsonStr , ResponsePair . class ) ; Response rr = null ; if ( p . getType ( ) != null ) { rr = ( Response ) mapper . readValue ( p . getResponse ( ) , p . getType ( ) ) ; } final Response resp = rr ; return new ResponseDeserializer ( ) { @ Override public ResponseHeader deserializerResponseHeader ( ) { return p . getResponseHeader ( ) ; } @ Override public Response deserializerResponse ( ) { return resp ; } } ;
public class BaseScreen {
    /**
     * Process the "Login" toolbar command.
     * <p>Re-runs the login sequence with null credentials (logging the current
     * user out), then — if a home/menu screen is configured — unwinds the
     * screen history and replaces this screen with the menu screen.
     * @return true if successful.
     */
    public boolean onLogout() {
        App application = this.getTask().getApplication();
        // Logging in with null user/password; a non-NORMAL_RETURN code aborts the logout.
        int iErrorCode = application.login(this.getTask(), null, null, this.getProperty(DBParams.DOMAIN));
        if (iErrorCode != DBConstants.NORMAL_RETURN) return false;
        // Prefer this screen's HOME property; fall back to the task's MENU property.
        String strMenu = this.getProperty(DBParams.HOME);
        if (strMenu == null) strMenu = this.getTask().getProperty(DBParams.MENU);
        if (strMenu != null) {
            BasePanel screenParent = this.getParentScreen();
            // Measure the history depth by popping (without restoring) until empty.
            int count = 0;
            while (screenParent.popHistory(1, false) != null) {
                count++;
            }
            count--; // Want to move back to the first one
            if (count > 0) popHistory(count, true); // Dump all browser history
            Map<String, Object> properties = this.getProperties();
            this.finalizeThisScreen(); // Validate current control, update record, get ready to close screen.
            if (!(this instanceof MenuScreen)) {
                // Not a menu screen: free this screen and create a fresh menu screen in its place.
                this.free();
                new MenuScreen(null, null, screenParent, null, ScreenConstants.MAINT_MODE, properties);
            } else
                // Already a menu screen: just navigate it to the target menu.
                this.doCommand("?" + DBParams.MENU + '=' + strMenu, this, ScreenConstants.USE_SAME_WINDOW | DBConstants.PUSH_TO_BROWSER);
        }
        return true; // Should always be successful
    }
}
public class DscCompilationJobsInner { /** * Creates the Dsc compilation job of the configuration . * @ param resourceGroupName Name of an Azure Resource group . * @ param automationAccountName The name of the automation account . * @ param compilationJobId The the DSC configuration Id . * @ param parameters The parameters supplied to the create compilation job operation . * @ param serviceCallback the async ServiceCallback to handle successful and failed responses . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the { @ link ServiceFuture } object */ public ServiceFuture < DscCompilationJobInner > createAsync ( String resourceGroupName , String automationAccountName , UUID compilationJobId , DscCompilationJobCreateParameters parameters , final ServiceCallback < DscCompilationJobInner > serviceCallback ) { } }
return ServiceFuture . fromResponse ( createWithServiceResponseAsync ( resourceGroupName , automationAccountName , compilationJobId , parameters ) , serviceCallback ) ;
public class Configuration { /** * Check if a UID was already used in any context * @ param category The category to check * @ param projectName The project name to check * @ param uid The UID to validate * @ return True if the UID is present for at least one version of a project in a category , false otherwise */ private boolean uidAlreadyUsed ( String category , String projectName , String uid ) { } }
if ( uid == null ) { return false ; } else { for ( File versionDirectory : getUidFilesForProject ( category , projectName ) ) { String uidRead = readUid ( new File ( versionDirectory , "latest" ) ) ; if ( uidRead != null && ! uidRead . isEmpty ( ) ) { if ( uidRead . equals ( uid ) ) { return true ; } } } return false ; }
public class AbsWorker { /** * null not run , false err , true ok */ public Obj work ( Obj cmd ) throws Exception { } }
stopping = false ; return status ( ) . with ( P_STATUS , STATUS_STARTED ) ;
public class MarkdownDoclet { /** * Process the class documentation . * @ param doc The class documentation . */ protected void processClass ( ClassDoc doc ) { } }
defaultProcess ( doc , true ) ; for ( MemberDoc member : doc . fields ( ) ) { processMember ( member ) ; } for ( MemberDoc member : doc . constructors ( ) ) { processMember ( member ) ; } for ( MemberDoc member : doc . methods ( ) ) { processMember ( member ) ; } if ( doc instanceof AnnotationTypeDoc ) { for ( MemberDoc member : ( ( AnnotationTypeDoc ) doc ) . elements ( ) ) { processMember ( member ) ; } }
public class NonBlockingBufferedWriter {
    /**
     * Writes a portion of an array of characters.
     * Ordinarily this method stores characters from the given array into this
     * stream's buffer, flushing the buffer to the underlying stream as needed. If
     * the requested length is at least as large as the buffer, however, then this
     * method will flush the buffer and write the characters directly to the
     * underlying stream. Thus redundant <code>BufferedWriter</code>s will not
     * copy data unnecessarily.
     * @param cbuf A character array
     * @param nOfs Offset from which to start reading characters
     * @param nLen Number of characters to write
     * @exception IOException If an I/O error occurs
     */
    @Override
    @SuppressFBWarnings("IL_INFINITE_LOOP")
    public void write(final char[] cbuf, final int nOfs, final int nLen) throws IOException {
        _ensureOpen();
        // Validates that (nOfs, nLen) lies within cbuf.
        ValueEnforcer.isArrayOfsLen(cbuf, nOfs, nLen);
        if (nLen == 0)
            return;
        if (nLen >= m_nChars) {
            /*
             * If the request length exceeds the size of the output buffer, flush the
             * buffer and then write the data directly. In this way buffered streams
             * will cascade harmlessly.
             */
            flushBuffer();
            m_aWriter.write(cbuf, nOfs, nLen);
        } else {
            // Copy into the internal buffer in chunks, flushing whenever it fills.
            int b = nOfs;
            final int t = nOfs + nLen;
            while (b < t) {
                // d = how many chars fit in the buffer right now, capped at what remains.
                final int d = Math.min(m_nChars - m_nNextChar, t - b);
                System.arraycopy(cbuf, b, m_aBuf, m_nNextChar, d);
                b += d;
                m_nNextChar += d;
                if (m_nNextChar >= m_nChars)
                    flushBuffer();
            }
        }
    }
}
public class IQRCode { /** * 将指定数据生成二维码图片的BufferedImage * @ param data 二维码数据 * @ param width 图片的宽 * @ param height 图片的高 * @ return 二维码图片对应的BufferedImage * @ throws IOException IO异常 */ public static BufferedImage createImg ( String data , int width , int height ) throws IOException { } }
logger . debug ( "生成二维码,要生成的图片的宽为{},高为{}" , width , height ) ; QRCode code = createQRCode ( data , width , height ) ; BufferedImage image = ImageIO . read ( new ByteArrayInputStream ( code . stream ( ) . toByteArray ( ) ) ) ; logger . debug ( "二维码生成成功" ) ; return image ;
public class ApiOvhHostingweb { /** * Install new database * REST : POST / hosting / web / { serviceName } / database * @ param type [ required ] Type you want for your database * @ param password [ required ] Database password * @ param user [ required ] Database user name . Must begin with your hosting login and must be in lower case * @ param quota [ required ] Quota assign to your database . Only for extraSql * @ param version [ required ] Version you want for your database following the type * @ param capabilitie [ required ] Type of your database * @ param serviceName [ required ] The internal name of your hosting */ public OvhTask serviceName_database_POST ( String serviceName , OvhDatabaseCapabilitiesTypeEnum capabilitie , String password , OvhExtraSqlQuotaEnum quota , OvhDatabaseTypeEnum type , String user , OvhVersionEnum version ) throws IOException { } }
String qPath = "/hosting/web/{serviceName}/database" ; StringBuilder sb = path ( qPath , serviceName ) ; HashMap < String , Object > o = new HashMap < String , Object > ( ) ; addBody ( o , "capabilitie" , capabilitie ) ; addBody ( o , "password" , password ) ; addBody ( o , "quota" , quota ) ; addBody ( o , "type" , type ) ; addBody ( o , "user" , user ) ; addBody ( o , "version" , version ) ; String resp = exec ( qPath , "POST" , sb . toString ( ) , o ) ; return convertTo ( resp , OvhTask . class ) ;
public class JumblrException { /** * Pull the message out of the response * @ param object the parsed response object */ private void extractMessage ( JsonObject object ) { } }
// Prefer to pull the message out of meta JsonObject meta = object . getAsJsonObject ( "meta" ) ; if ( meta != null ) { JsonPrimitive msg = meta . getAsJsonPrimitive ( "msg" ) ; if ( msg != null ) { this . message = msg . getAsString ( ) ; return ; } } // Fall back on error JsonPrimitive error = object . getAsJsonPrimitive ( "error" ) ; if ( error != null ) { this . message = error . getAsString ( ) ; return ; } // Otherwise set a default this . message = "Unknown Error" ;
public class PHS398FellowshipSupplementalV1_1Generator { /** * This method used to set data to ResearchTrainingPlan XMLObject from * DevelopmentProposal */ private ResearchTrainingPlan getResearchTrainingPlan ( ) { } }
ResearchTrainingPlan researchTrainingPlan = ResearchTrainingPlan . Factory . newInstance ( ) ; setHumanSubjectInvolvedAndVertebrateAnimalUsed ( researchTrainingPlan ) ; setQuestionnareAnswerForResearchTrainingPlan ( researchTrainingPlan ) ; setNarrativeDataForResearchTrainingPlan ( researchTrainingPlan ) ; return researchTrainingPlan ;
public class HalFormsConfiguration { /** * Translate a { @ link HalFormsConfiguration } into a { @ link HalConfiguration } . * @ return */ public HalConfiguration toHalConfiguration ( ) { } }
if ( this . getRenderSingleLinks ( ) == RenderSingleLinks . AS_SINGLE ) { return new HalConfiguration ( ) . withRenderSingleLinks ( HalConfiguration . RenderSingleLinks . AS_SINGLE ) ; } if ( this . getRenderSingleLinks ( ) == RenderSingleLinks . AS_ARRAY ) { return new HalConfiguration ( ) . withRenderSingleLinks ( HalConfiguration . RenderSingleLinks . AS_ARRAY ) ; } throw new IllegalStateException ( "Don't know how to translate " + this ) ;
public class CmsContainerpageHandler {
    /**
     * Opens the elements info dialog.<p>
     */
    public void openElementsInfo() {
        // Show the resource-info dialog for the current container page (and its
        // detail content, if any); when it closes, restore the selection mode.
        CmsResourceInfoDialog.load(
            CmsCoreProvider.get().getStructureId(),
            true,
            CmsContainerpageController.get().getData().getDetailId(),
            new HashMap<String, String>(),
            new CloseHandler<PopupPanel>() {

                public void onClose(CloseEvent<PopupPanel> event) {
                    deactivateCurrentButton();
                    activateSelection();
                }
            });
    }
}
public class VirtualMachineErrorTerminator { /** * Checks if the throwable or any of the causes in its cause chain are a * virtual machine error , and if so , terminates the virtual machine . If * neither the throwable nor any of its causes are a virtual machine error , * then does nothing . Useful in the body of a generic * < pre > catch ( Throwable t ) < / pre > block . * @ param t the topmost throwable . Can be null . */ public static void terminateVMIfMust ( Throwable t ) { } }
while ( t != null ) { if ( t instanceof VirtualMachineError ) { VirtualMachineErrorTerminator . terminateVM ( ( VirtualMachineError ) t ) ; } t = t . getCause ( ) ; }
public class ModelsImpl {
    /**
     * Gets information of a regex entity model.
     * @param appId The application ID.
     * @param versionId The version ID.
     * @param regexEntityId The regex entity model ID.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the RegexEntityExtractor object
     */
    public Observable<ServiceResponse<RegexEntityExtractor>> getRegexEntityEntityInfoWithServiceResponseAsync(UUID appId, String versionId, UUID regexEntityId) {
        // Fail fast on missing required parameters.
        if (this.client.endpoint() == null) {
            throw new IllegalArgumentException("Parameter this.client.endpoint() is required and cannot be null.");
        }
        if (appId == null) {
            throw new IllegalArgumentException("Parameter appId is required and cannot be null.");
        }
        if (versionId == null) {
            throw new IllegalArgumentException("Parameter versionId is required and cannot be null.");
        }
        if (regexEntityId == null) {
            throw new IllegalArgumentException("Parameter regexEntityId is required and cannot be null.");
        }
        // Substitute the {Endpoint} placeholder of the parameterized host
        // (autorest-generated host-substitution pattern).
        String parameterizedHost = Joiner.on(", ").join("{Endpoint}", this.client.endpoint());
        // Issue the REST call and unwrap the raw response into a typed ServiceResponse.
        return service.getRegexEntityEntityInfo(appId, versionId, regexEntityId, this.client.acceptLanguage(), parameterizedHost, this.client.userAgent()).flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<RegexEntityExtractor>>>() {
            @Override
            public Observable<ServiceResponse<RegexEntityExtractor>> call(Response<ResponseBody> response) {
                try {
                    ServiceResponse<RegexEntityExtractor> clientResponse = getRegexEntityEntityInfoDelegate(response);
                    return Observable.just(clientResponse);
                } catch (Throwable t) {
                    return Observable.error(t);
                }
            }
        });
    }
}
public class UtlInvBase { /** * < p > Finds ( if need ) line with same tax or creates one . < / p > * @ param < T > invoice type * @ param < TL > invoice tax line type * @ param pReqVars additional param * @ param pInv invoice * @ param pInvTxLns invoice tax lines * @ param pTax tax * @ param pNeedFind if need to find enabled * @ param pFctInvTxLn invoice tax line factory * @ return line * @ throws Exception if no need to find but line is found */ public final < T extends IInvoice , TL extends AInvTxLn < T > > TL findCreateTaxLine ( final Map < String , Object > pReqVars , final T pInv , final List < TL > pInvTxLns , final Tax pTax , final boolean pNeedFind , final IFactorySimple < TL > pFctInvTxLn ) throws Exception { } }
TL itl = null ; // find same line to add amount : for ( TL tl : pInvTxLns ) { if ( tl . getTax ( ) != null && tl . getTax ( ) . getItsId ( ) . equals ( pTax . getItsId ( ) ) ) { if ( ! pNeedFind ) { throw new Exception ( "Algorithm error!!!" ) ; } itl = tl ; break ; } } // find and enable disabled line : for ( TL tl : pInvTxLns ) { if ( tl . getTax ( ) == null ) { itl = tl ; itl . setTax ( pTax ) ; break ; } } if ( itl == null ) { itl = pFctInvTxLn . create ( pReqVars ) ; itl . setItsOwner ( pInv ) ; itl . setIsNew ( true ) ; itl . setTax ( pTax ) ; pInvTxLns . add ( itl ) ; } return itl ;
public class Tags { /** * 删除标签 * @ param id 标签ID */ public void delete ( int id ) { } }
String url = WxEndpoint . get ( "url.tag.delete" ) ; String json = JsonMapper . nonEmptyMapper ( ) . toJson ( new TagWrapper ( id ) ) ; logger . debug ( "delete tag: {}" , json ) ; wxClient . post ( url , json ) ;
public class DifferentialFunctionClassHolder { /** * Returns the missing tensorflow ops * @ return */ public Set < String > missingTensorflowOps ( ) { } }
Set < String > copy = new HashSet < > ( tensorflowOpDescriptors . keySet ( ) ) ; copy . removeAll ( tensorFlowNames . keySet ( ) ) ; return copy ;
public class WriterFactoryImpl {
    /**
     * {@inheritDoc}
     * <p>Creates a {@code FieldWriterImpl} bound to the given class writer and
     * the type element that writer documents.
     */
    @Override
    public FieldWriterImpl getFieldWriter(ClassWriter classWriter) {
        return new FieldWriterImpl((SubWriterHolderWriter) classWriter, classWriter.getTypeElement());
    }
}
public class CmsLockFilter {
    /**
     * Matches the given lock against this filter and the given path.<p>
     *
     * <p>Path criteria (children/parents) establish an initial match; every
     * subsequent criterion (project, owner, lockability, type) can only
     * narrow it. If the lock itself fails, its related lock is tried
     * recursively.</p>
     *
     * @param rootPath the path to match the lock against
     * @param lock the lock to match
     * @return <code>true</code> if the given lock matches
     */
    public boolean match(String rootPath, CmsLock lock) {
        boolean match = false;
        if (m_includeChildren) {
            // safe since rootPath always ends with slash if a folder
            match = lock.getResourceName().startsWith(rootPath);
        }
        if (!match && m_includeParents) {
            // since parents can only be folders, check it only for folders
            if (lock.getResourceName().endsWith("/")) {
                match = rootPath.startsWith(lock.getResourceName());
            }
        }
        // Each of the following checks only narrows an existing match.
        if (match && (m_projectId != null) && !m_projectId.isNullUUID() && (lock.getProjectId() != null)) {
            match = lock.getProjectId().equals(m_projectId);
        }
        if (match && (m_ownedByUserId != null) && !m_ownedByUserId.isNullUUID()) {
            match = lock.getUserId().equals(m_ownedByUserId);
        }
        if (match && (m_notOwnedByUserId != null) && !m_notOwnedByUserId.isNullUUID()) {
            match = !lock.getUserId().equals(m_notOwnedByUserId);
        }
        if (match && (m_lockableByUser != null)) {
            match = lock.isLockableBy(m_lockableByUser);
        }
        if (match && (m_notLockableByUser != null)) {
            match = !lock.isLockableBy(m_notLockableByUser);
        }
        if (match && !m_types.isEmpty()) {
            match = m_types.contains(lock.getType());
            // inherited locks pass the type check when parents are included
            match = match || (m_includeParents && lock.isInherited());
        }
        // check the related lock if available
        if (!match && !lock.getRelatedLock().isNullLock()) {
            match = match(rootPath, lock.getRelatedLock());
        }
        return match;
    }
}
public class FileBasedAtomHandler { /** * Return true if specified pathinfo represents URI of service doc . */ @ Override public boolean isAtomServiceURI ( final AtomRequest areq ) { } }
final String [ ] pathInfo = StringUtils . split ( areq . getPathInfo ( ) , "/" ) ; if ( pathInfo . length == 0 ) { return true ; } return false ;
public class AbstractBoundarySurfaceType {
    /**
     * Gets the value of the genericApplicationPropertyOfBoundarySurface property.
     *
     * This accessor method returns a reference to the live list,
     * not a snapshot. Therefore any modification you make to the
     * returned list will be present inside the JAXB object.
     * This is why there is not a <CODE>set</CODE> method for the
     * genericApplicationPropertyOfBoundarySurface property.
     *
     * For example, to add a new item, do as follows:
     * <pre>
     *    get_GenericApplicationPropertyOfBoundarySurface().add(newItem);
     * </pre>
     *
     * Objects of the following type(s) are allowed in the list
     * {@link JAXBElement}{@code <}{@link Object}{@code >}
     *
     * @return the live, lazily created backing list (never null)
     */
    public List<JAXBElement<Object>> get_GenericApplicationPropertyOfBoundarySurface() {
        // Lazily create the backing list on first access (standard JAXB live-list pattern).
        if (_GenericApplicationPropertyOfBoundarySurface == null) {
            _GenericApplicationPropertyOfBoundarySurface = new ArrayList<JAXBElement<Object>>();
        }
        return this._GenericApplicationPropertyOfBoundarySurface;
    }
}
public class MacEditorKit { /** * Invoked when the user attempts an invalid operation , such as pasting into * an uneditable < code > JTextField < / code > that has focus . The default * implementation beeps . Subclasses that wish different behavior should * override this and provide the additional feedback . * @ param component Component the error occured in , may be null indicating * the error condition is not directly associated with a * < code > Component < / code > . */ static void provideErrorFeedback ( Component component ) { } }
Toolkit toolkit = null ; if ( component != null ) { toolkit = component . getToolkit ( ) ; } else { toolkit = Toolkit . getDefaultToolkit ( ) ; } toolkit . beep ( ) ;
public class DestinationManager { /** * This method will check whether a destination is available . */ private boolean isAvailable ( BaseDestinationHandler destinationHandler , DestinationAvailability destinationAvailability ) { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "isAvailable" , new Object [ ] { destinationHandler , destinationAvailability } ) ; boolean sendAvailable = false ; boolean receiveAvailable = false ; if ( destinationAvailability == DestinationAvailability . SEND || destinationAvailability == DestinationAvailability . SEND_AND_RECEIVE ) { // Destination must be a non - mediated destination with a local queue point if ( destinationHandler . hasLocal ( ) ) { sendAvailable = true ; } if ( sendAvailable ) { if ( ( destinationHandler . definition . isSendAllowed ( ) ) && ( isLocalizationAvailable ( destinationHandler , destinationAvailability ) ) ) { sendAvailable = true ; } else { sendAvailable = false ; } } } if ( destinationAvailability == DestinationAvailability . RECEIVE || destinationAvailability == DestinationAvailability . SEND_AND_RECEIVE ) { // Destination must have a queue point on the ME to which the connection is connected if ( destinationHandler . hasLocal ( ) ) { receiveAvailable = true ; } if ( receiveAvailable ) { if ( ( destinationHandler . definition . isReceiveAllowed ( ) ) && ( isLocalizationAvailable ( destinationHandler , destinationAvailability ) ) ) { receiveAvailable = true ; } else { receiveAvailable = false ; } } } if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "isAvailable" , new Object [ ] { new Boolean ( sendAvailable || receiveAvailable ) } ) ; return ( sendAvailable || receiveAvailable ) ;
public class JniSocketImpl {
    /**
     * Closes the socket.
     *
     * <p>Idempotent: the atomic {@code _isClosed} gate guarantees only the
     * first caller performs the close work. The stream is closed before the
     * native descriptor is released.</p>
     *
     * @throws IOException if closing the underlying stream fails
     */
    @Override
    public void close() throws IOException {
        // Atomically claim the close; subsequent callers return immediately.
        if (_isClosed.getAndSet(true)) {
            return;
        }
        _ss = null;
        if (_stream != null) {
            _stream.close();
        }
        // XXX: can't be locked because of shutdown
        _nativeFd = -1;
        nativeClose(_socketFd);
    }
}
public class nd6 { /** * Use this API to fetch all the nd6 resources that are configured on netscaler . */ public static nd6 [ ] get ( nitro_service service ) throws Exception { } }
nd6 obj = new nd6 ( ) ; nd6 [ ] response = ( nd6 [ ] ) obj . get_resources ( service ) ; return response ;
public class OpenViduClient { /** * sender should look like ' username _ streamId ' */ public void unsubscribeFromVideo ( String sender ) throws IOException { } }
JsonObject params = new JsonObject ( ) ; params . addProperty ( UNSUBSCRIBEFROMVIDEO_SENDER_PARAM , sender ) ; client . sendRequest ( UNSUBSCRIBEFROMVIDEO_METHOD , params ) ;
public class StringUtilities {
    /**
     * Returns <CODE>true</CODE> if the given string matches the given regular expression.
     * @param str The string against which the expression is to be matched
     * @param expr The regular expression to match with the input string
     * @return An object giving the results of the search (or null if no match found)
     */
    public static Matcher getWildcardMatcher(String str, String expr) {
        // Delegates to the three-argument overload with the boolean flag false.
        // NOTE(review): the flag's meaning is defined by that overload
        // (presumably case sensitivity) — confirm there.
        return getWildcardMatcher(str, expr, false);
    }
}
public class InternalSARLParser {
    /**
     * ANTLR-generated entry rule (do not hand-edit the parsing logic).
     * InternalSARL.g:12497:1: entryRuleXEqualityExpression returns [EObject current = null] :
     * iv_ruleXEqualityExpression= ruleXEqualityExpression EOF ;
     */
    public final EObject entryRuleXEqualityExpression() throws RecognitionException {
        EObject current = null;

        EObject iv_ruleXEqualityExpression = null;

        try {
            // InternalSARL.g:12497:60: (iv_ruleXEqualityExpression= ruleXEqualityExpression EOF )
            // InternalSARL.g:12498:2: iv_ruleXEqualityExpression= ruleXEqualityExpression EOF
            {
                // Only build the parse-tree node when not speculatively backtracking.
                if (state.backtracking == 0) {
                    newCompositeNode(grammarAccess.getXEqualityExpressionRule());
                }
                pushFollow(FOLLOW_1);
                iv_ruleXEqualityExpression = ruleXEqualityExpression();

                state._fsp--;
                if (state.failed) return current;
                if (state.backtracking == 0) {
                    current = iv_ruleXEqualityExpression;
                }
                // The entry rule must consume the whole input, hence EOF.
                match(input, EOF, FOLLOW_2);
                if (state.failed) return current;
            }
        } catch (RecognitionException re) {
            recover(input, re);
            appendSkippedTokens();
        } finally {
        }
        return current;
    }
}
public class Configuration { /** * Given a xml config file specified by a path , return the corresponding json * object if it exists . */ public JSONObject getJsonConfig ( Path name ) throws IOException , JSONException { } }
String pathString = name . toUri ( ) . getPath ( ) ; String xml = new Path ( pathString ) . getName ( ) ; File jsonFile = new File ( pathString . replace ( xml , MATERIALIZEDJSON ) ) . getAbsoluteFile ( ) ; if ( jsonFile . exists ( ) ) { InputStream in = new BufferedInputStream ( new FileInputStream ( jsonFile ) ) ; if ( in != null ) { JSONObject json = instantiateJsonObject ( in ) ; // Try to load the xml entity inside the json blob . if ( json . has ( xmlToThrift ( xml ) ) ) { return json . getJSONObject ( xmlToThrift ( xml ) ) ; } } } return null ;
public class SVGIcon {
    /**
     * Method starts the rotate animation of the foreground icon.
     * @param fromAngle the rotation angle where the transition should start.
     * @param toAngle the rotation angle where the transition should end.
     * @param cycleCount the number of times the animation should be played (use Animation.INDEFINITE for endless).
     * @param duration the duration which one animation cycle should take.
     * @param interpolator defines the rotation value interpolation between {@code fromAngle} and {@code toAngle}.
     * @param autoReverse defines if the animation should be reversed at the end.
     */
    public void startForegroundIconRotateAnimation(final double fromAngle, final double toAngle, final int cycleCount, final double duration, final Interpolator interpolator, final boolean autoReverse) {
        // Cancel any rotation already in progress before starting a new one.
        stopForegroundIconRotateAnimation();
        foregroundRotateAnimation = Animations.createRotateTransition(foregroundIcon, fromAngle, toAngle, cycleCount, duration, interpolator, autoReverse);
        // Reset the icon to its neutral orientation once the animation finishes.
        foregroundRotateAnimation.setOnFinished(event -> foregroundIcon.setRotate(0));
        foregroundRotateAnimation.play();
    }
}
public class DTMException { /** * Initializes the < i > cause < / i > of this throwable to the specified value . * ( The cause is the throwable that caused this throwable to get thrown . ) * < p > This method can be called at most once . It is generally called from * within the constructor , or immediately after creating the * throwable . If this throwable was created * with { @ link # DTMException ( Throwable ) } or * { @ link # DTMException ( String , Throwable ) } , this method cannot be called * even once . * @ param cause the cause ( which is saved for later retrieval by the * { @ link # getCause ( ) } method ) . ( A < tt > null < / tt > value is * permitted , and indicates that the cause is nonexistent or * unknown . ) * @ return a reference to this < code > Throwable < / code > instance . * @ throws IllegalArgumentException if < code > cause < / code > is this * throwable . ( A throwable cannot * be its own cause . ) * @ throws IllegalStateException if this throwable was * created with { @ link # DTMException ( Throwable ) } or * { @ link # DTMException ( String , Throwable ) } , or this method has already * been called on this throwable . */ public synchronized Throwable initCause ( Throwable cause ) { } }
if ( ( this . containedException == null ) && ( cause != null ) ) { throw new IllegalStateException ( XMLMessages . createXMLMessage ( XMLErrorResources . ER_CANNOT_OVERWRITE_CAUSE , null ) ) ; // " Can ' t overwrite cause " ) ; } if ( cause == this ) { throw new IllegalArgumentException ( XMLMessages . createXMLMessage ( XMLErrorResources . ER_SELF_CAUSATION_NOT_PERMITTED , null ) ) ; // " Self - causation not permitted " ) ; } this . containedException = cause ; return this ;
public class JmolSymmetryScriptGeneratorH {
    /**
     * Returns a Jmol script that colors subunits to highlight the symmetry within a structure.
     * Different subunits should have a consistent color scheme or different shade of the same colors.
     * @return Jmol script
     */
    @Override
    public String colorBySymmetry() {
        // Layer lines group the subunit indices lying along the same helix line.
        List<List<Integer>> units = helixAxisAligner.getHelixLayers().getByLargestContacts().getLayerLines();
        units = orientLayerLines(units);
        QuatSymmetrySubunits subunits = helixAxisAligner.getSubunits();
        List<Integer> modelNumbers = subunits.getModelNumbers();
        List<String> chainIds = subunits.getChainIds();
        List<Integer> clusterIds = subunits.getClusterIds();
        int clusterCount = Collections.max(clusterIds) + 1;
        Map<Color4f, List<String>> colorMap = new HashMap<Color4f, List<String>>();
        // The longest layer line bounds the column-wise walk below.
        int maxLen = 0;
        for (List<Integer> unit : units) {
            maxLen = Math.max(maxLen, unit.size());
        }
        // Color4f[] colors = getSymmetryColors(permutation.size());
        Color4f[] colors = getSymmetryColors(subunits.getSubunitCount());
        int count = 0;
        // Walk the layer lines column-by-column, assigning successive base
        // colors and shading each by its cluster index.
        for (int i = 0; i < maxLen; i++) {
            for (int j = 0; j < units.size(); j++) {
                int m = units.get(j).size();
                if (i < m) {
                    int subunit = units.get(j).get(i);
                    int cluster = clusterIds.get(subunit);
                    // Shade factor in (0.3, 1.0]; later clusters get brighter shades.
                    float scale = 0.3f + 0.7f * (cluster + 1) / clusterCount;
                    Color4f c = new Color4f(colors[count]);
                    count++;
                    c.scale(scale);
                    // Group chain specifications by their final (shaded) color.
                    List<String> ids = colorMap.get(c);
                    if (ids == null) {
                        ids = new ArrayList<String>();
                        colorMap.put(c, ids);
                    }
                    String id = getChainSpecification(modelNumbers, chainIds, subunit);
                    ids.add(id);
                }
            }
        }
        String coloring = defaultColoring + getJmolColorScript(colorMap);
        return coloring;
    }
}
public class AOPool {
    /**
     * Shuts down the pool; exceptions during close will be logged as a warning and not thrown.
     */
    final public void close() {
        List<C> connsToClose;
        synchronized (poolLock) {
            // Prevent any new connections
            isClosed = true;
            // Find any connections that are available and open
            connsToClose = new ArrayList<>(availableConnections.size());
            for (PooledConnection<C> availableConnection : availableConnections) {
                synchronized (availableConnection) {
                    C conn = availableConnection.connection;
                    if (conn != null) {
                        // Detach the connection so it can no longer be handed out.
                        availableConnection.connection = null;
                        connsToClose.add(conn);
                    }
                }
            }
            // Wake any threads blocked waiting on the pool.
            poolLock.notifyAll();
        }
        // Close all of the connections (outside the pool lock).
        for (C conn : connsToClose) {
            try {
                close(conn);
            } catch (Exception err) {
                logger.log(Level.WARNING, null, err);
            }
        }
    }
}
public class DBPropertiesUpdate { /** * Insert a new localized Value . * @ param _ propertyid ID of the Property , the localized value is related to * @ param _ value Value of the Property * @ param _ language Language of the property */ private void insertNewLocal ( final long _propertyid , final String _value , final String _language ) { } }
try { final Insert insert = new Insert ( DBPropertiesUpdate . TYPE_PROPERTIES_LOCAL ) ; insert . add ( "Value" , _value ) ; insert . add ( "PropertyID" , _propertyid ) ; insert . add ( "LanguageID" , getLanguageId ( _language ) ) ; insert . executeWithoutAccessCheck ( ) ; insert . close ( ) ; } catch ( final EFapsException e ) { DBPropertiesUpdate . LOG . error ( "insertNewLocal(String)" , e ) ; }
public class FeatureStateBenchmarks {
    /**
     * create an in-memory state repository for our feature
     *
     * <p>Builds a {@code FeatureManager} backed by an in-memory repository,
     * registers it globally, and seeds three feature states: an enabled and a
     * disabled feature using the release-date activation strategy, plus one
     * feature disabled purely via its feature state.</p>
     */
    @Setup(Level.Trial)
    public void setupFeatureManager() {
        FeatureManager featureManager = new FeatureManagerBuilder()
            .featureEnums(ReleaseDateBenchmarkFeature.class)
            .stateRepository(new InMemoryStateRepository())
            .userProvider(new NoOpUserProvider())
            .build();
        // set up the toggle activation state
        StaticFeatureManagerProvider.setFeatureManager(featureManager);
        manager = featureManager;
        // Enabled feature gated on the release-date strategy.
        FeatureState releaseDateFeatureState = new FeatureState(ReleaseDateBenchmarkFeature.RELEASE_DATE_STRATEGY_ENABLED);
        releaseDateFeatureState.setEnabled(true);
        releaseDateFeatureState.setStrategyId(ReleaseDateActivationStrategy.ID);
        releaseDateFeatureState.setParameter(ReleaseDateActivationStrategy.PARAM_DATE, "2014-12-31");
        releaseDateFeatureState.setParameter(ReleaseDateActivationStrategy.PARAM_TIME, "12:45:00");
        // Same strategy/parameters, but the feature state itself is disabled.
        FeatureState disabledReleaseDate = new FeatureState(ReleaseDateBenchmarkFeature.RELEASE_DATE_STRATEGY_DISABLED);
        disabledReleaseDate.setEnabled(false);
        disabledReleaseDate.setStrategyId(ReleaseDateActivationStrategy.ID);
        disabledReleaseDate.setParameter(ReleaseDateActivationStrategy.PARAM_DATE, "2014-12-31");
        disabledReleaseDate.setParameter(ReleaseDateActivationStrategy.PARAM_TIME, "12:45:00");
        manager.setFeatureState(releaseDateFeatureState);
        manager.setFeatureState(new FeatureState(ReleaseDateBenchmarkFeature.DISABLED_BY_FEATURE_STATE, false));
        manager.setFeatureState(disabledReleaseDate);
    }
}
public class FormattableUtils { /** * Handles the common { @ code Formattable } operations of truncate - pad - append , * with no ellipsis on precision overflow , and padding width underflow with * spaces . * @ param seq the string to handle , not null * @ param formatter the destination formatter , not null * @ param flags the flags for formatting , see { @ code Formattable } * @ param width the width of the output , see { @ code Formattable } * @ param precision the precision of the output , see { @ code Formattable } * @ return the { @ code formatter } instance , not null */ @ GwtIncompatible ( "incompatible method" ) public static Formatter append ( final CharSequence seq , final Formatter formatter , final int flags , final int width , final int precision ) { } }
return append ( seq , formatter , flags , width , precision , ' ' , null ) ;
public class IOState { /** * WebSocket has successfully upgraded , but the end - user onOpen call hasn ' t run yet . * This is an intermediate state between the RFC ' s { @ link ConnectionState # CONNECTING } and { @ link ConnectionState # OPEN } */ public void onConnected ( ) { } }
ConnectionState event = null;
synchronized (this) {
    // Only a CONNECTING endpoint may advance to CONNECTED; any other state is a no-op.
    if (this.state != ConnectionState.CONNECTING) {
        LOG.debug("Unable to set to connected, not in CONNECTING state: {}", this.state);
        return;
    }
    this.state = ConnectionState.CONNECTED;
    inputAvailable = false; // cannot read (yet)
    outputAvailable = true; // write allowed
    event = this.state;
}
// Listeners are notified after the synchronized block releases the monitor.
notifyStateListeners(event);
public class Proxy { /** * Update the proxy status */ public void updateStatus ( ) { } }
// Resolve the configured proxy host; an unresolvable host means offline.
String hostaddr = null;
try {
    hostaddr = InetAddress.getByName(ip).getHostAddress();
} catch (UnknownHostException e) {
    online = false;
    latency = Long.MAX_VALUE;
    return;
}
int total = 0;
long totalPing = 0;
// test ping 4 times
int times = 4;
// The target address never changes between attempts, so build it once.
SocketAddress sockaddr = new InetSocketAddress(hostaddr, port);
while (total < times) {
    total++;
    long start = System.currentTimeMillis();
    try (Socket socket = new Socket()) {
        socket.connect(sockaddr, 1000);
    } catch (Exception e) {
        online = false;
        // Fix: also reset latency here so a failed probe does not leave a stale
        // value behind — consistent with the unresolvable-host path above.
        latency = Long.MAX_VALUE;
        return;
    }
    totalPing += (System.currentTimeMillis() - start);
}
// All probes succeeded: record the average connect time in milliseconds.
online = true;
latency = totalPing / total;
public class DateRangeChooser { /** * Load choices from the named property * @ param propertyName Name of the property containing the choices . Must be in the acceptable * format . If the property name is null , loads a default set of choices . */ public void loadChoices ( String propertyName ) { } }
// Drop any previously loaded choices before repopulating.
clear();
if (propertyName == null || propertyName.isEmpty()) {
    // No property given: install the built-in defaults. Entries use the
    // pipe-delimited spec format accepted by addChoice
    // (presumably "Label|end|start[|flag]" — TODO confirm field semantics).
    addChoice("All Dates", false);
    addChoice("Today|T|T", false);
    addChoice("Last Week|T|T-7", false);
    addChoice("Last Month|T|T-30|1", false);
    addChoice("Last Year|T|T-365", false);
    addChoice("Last Two Years|T|T-730", false);
} else {
    // Load each configured value from the named property as a choice spec.
    for (String value : PropertyUtil.getValues(propertyName, null)) {
        addChoice(value, false);
    }
}
// Re-validate the current selection against the refreshed choice list.
checkSelection(true);
public class JsonNodeClaim { /** * Helper method to extract a Claim from the given JsonNode tree . * @ param claimName the Claim to search for . * @ param tree the JsonNode tree to search the Claim in . * @ return a valid non - null Claim . */ static Claim extractClaim ( String claimName , Map < String , JsonNode > tree , ObjectReader objectReader ) { } }
JsonNode node = tree . get ( claimName ) ; return claimFromNode ( node , objectReader ) ;
public class JdbcTemp { /** * same as queryMultiObject ( Collection queryParams , String sqlquery ) but the * result is a block result , the result ' size is the value of count , and the * result ' s start poiny is the value of start . * @ param queryParams * @ param sqlquery * @ param start * @ param count * @ return * @ throws Exception */ public List queryMultiObject ( Collection queryParams , String sqlquery , int start , int count ) throws Exception { } }
Debug.logVerbose("[JdonFramework]--> enter queryMultiObject from:" + start + " size:" + count, module);
Connection c = null;
PreparedStatement ps = null;
ResultSet rs = null;
List items = new ArrayList(count);
try {
    c = dataSource.getConnection();
    DbUtil.testConnection(c);
    // Scroll-insensitive result set so we can jump to the requested offset.
    ps = c.prepareStatement(sqlquery, ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY);
    Debug.logVerbose(sqlquery, module);
    jdbcUtil.setQueryParams(queryParams, ps);
    rs = ps.executeQuery();
    if (DbUtil.supportsFetchSize)
        rs.setFetchSize(count);
    // absolute() is 1-based, so row (start + 1) is the first row of the block.
    if (start >= 0 && rs.absolute(start + 1)) {
        do {
            // NOTE(review): items is reassigned each iteration — this only yields
            // the full block if jdbcUtil.extract accumulates internally; verify.
            items = jdbcUtil.extract(rs);
        } while ((rs.next()) && (--count > 0));
    }
} catch (SQLException se) {
    // Fix: preserve the original exception as the cause instead of discarding it.
    throw new SQLException("SQLException: " + se.getMessage(), se);
} catch (Exception ex) {
    Debug.logError(ex, module);
    throw new Exception(ex);
} finally {
    // Close resources in reverse order of acquisition, swallowing close failures.
    if (rs != null)
        try {
            rs.close();
        } catch (SQLException quiet) {
        }
    if (ps != null)
        try {
            ps.close();
        } catch (SQLException quiet) {
        }
    if (c != null)
        try {
            c.close();
        } catch (SQLException quiet) {
        }
}
return items;
public class JaxRsClientFactory { /** * Register many features at once . Mostly a convenience for DI environments . */ public synchronized JaxRsClientFactory addFeatureMap ( SetMultimap < JaxRsFeatureGroup , Feature > map ) { } }
// Delegate to the Map-based overload via Guava's live Map view of the multimap.
return addFeatureMap(Multimaps.asMap(map));
public class ArrayUtil { /** * 数组中是否包含元素 * @ param < T > 数组元素类型 * @ param array 数组 * @ param value 被检查的元素 * @ return 是否包含 */ public static < T > boolean contains ( T [ ] array , T value ) { } }
// Present iff indexOf finds the value, i.e. returns an index above INDEX_NOT_FOUND.
return indexOf(array, value) > INDEX_NOT_FOUND;
public class BNFHeadersImpl { /** * Utility method to parse the header name from the input buffer . * @ param buff * @ return boolean ( false means it needs more data , true otherwise ) * @ throws MalformedMessageException */ private boolean parseHeaderName ( WsByteBuffer buff ) throws MalformedMessageException { } }
// If we're at the start of a token, skip leading whitespace; otherwise leave it
// alone — we might be mid-token (e.g. inside "Mozilla/5.0 (Win").
if (null == this.parsedToken) {
    if (!skipWhiteSpace(buff)) {
        // Buffer exhausted while skipping whitespace: need more data.
        return false;
    }
}
int start = findCurrentBufferPosition(buff);
int cachestart = this.bytePosition;
TokenCodes rc = findHeaderLength(buff);
if (TokenCodes.TOKEN_RC_MOREDATA.equals(rc)) {
    // Ran out of data mid-name: stash the partial token and ask for more.
    saveParsedToken(buff, start, false, LOG_FULL);
    return false;
}
// The name may sit entirely in the byte cache; otherwise it must be
// extracted from the buffer into parsedToken.
byte[] data;
int length = this.parsedTokenLength;
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
    Tr.debug(tc, "length=" + length + " pos=" + this.bytePosition + ", cachestart=" + cachestart + ", start=" + start + ", trailingWhitespace=" + this.foundTrailingWhitespace);
}
// PI13987 - Added the first argument to the if statement
if (!this.foundTrailingWhitespace && null == this.parsedToken && length < this.bytePosition) {
    // Entire name is already in the byte cache — read it from there.
    if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
        // PI13987 - Modified the message being printed as we now print the same thing above
        Tr.debug(tc, "Using bytecache");
    }
    data = this.byteCache;
    start = cachestart;
} else {
    if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
        // PI13987
        Tr.debug(tc, "Using bytebuffer");
    }
    // Copy the complete token out of the buffer and work from that copy.
    saveParsedToken(buff, start, true, LOG_FULL);
    data = this.parsedToken;
    start = 0;
    length = data.length;
}
// The full header name is available: resolve it to a HeaderElement.
this.currentElem = getElement(findKey(data, start, length));
// Propagate CRLF bookkeeping to the new element when header-change tracking is on.
if (HeaderStorage.NOTSET != this.headerChangeLimit) {
    this.currentElem.updateLastCRLFInfo(this.lastCRLFBufferIndex, this.lastCRLFPosition, this.lastCRLFisCR);
}
// Reset parser state: the next tokens belong to the header value.
this.stateOfParsing = PARSING_VALUE;
this.parsedToken = null;
this.parsedTokenLength = 0;
this.foundTrailingWhitespace = false; // PI13987
return true;
public class VisitorHelper { /** * Returns the value of the given field on the given object . * @ param object * the object whose field is to be retrieved . * @ param field * the field being retrieved . * @ return * the value of the field . * @ throws VisitorException * if an error occurs while evaluating the node ' s value . */ public static Object getValue ( Object object , Field field ) throws VisitorException { } }
// Tracks whether we temporarily widened the field's accessibility and must restore it.
boolean reprotect = false;
if (object == null) {
    return null;
}
try {
    // Open up non-public fields for reflective reads, remembering to re-protect later.
    if (!field.isAccessible()) {
        field.setAccessible(true);
        reprotect = true;
    }
    Object value = field.get(object);
    logger.trace("field '{}' has value '{}'", field.getName(), value);
    return value;
} catch (IllegalArgumentException e) {
    logger.error("Trying to access field '{}' on invalid object of class '{}'", field.getName(), object.getClass().getSimpleName());
    throw new VisitorException("Trying to access field on invalid object", e);
} catch (IllegalAccessException e) {
    logger.error("Illegal access to class '{}'", object.getClass().getSimpleName());
    throw new VisitorException("Illegal access to class", e);
} finally {
    // Restore the original accessibility regardless of outcome.
    if (reprotect) {
        field.setAccessible(false);
    }
}
public class Sql { /** * Factory for the PreparedQueryCommand command pattern object allows subclass to supply implementations * of the command class . * @ param sql statement to be executed , including optional parameter placeholders ( ? ) * @ param queryParams List of parameter values corresponding to parameter placeholders * @ return a command - invoke its execute ( ) and closeResource ( ) methods * @ see # createQueryCommand ( String ) */ protected AbstractQueryCommand createPreparedQueryCommand ( String sql , List < Object > queryParams ) { } }
// Factory hook: subclasses may override to supply a different command implementation.
return new PreparedQueryCommand(sql, queryParams);
public class DivTag { /** * override doTag method */ @ Override public void doTag ( ) throws JspException , IOException { } }
// A div tag is only meaningful inside an enclosing layout tag.
LayoutTag layoutTag = (LayoutTag) findAncestorWithClass(this, LayoutTag.class);
if (layoutTag == null) {
    throw new IllegalStateException("div tag should be used in a layout tag inside.");
}
// Evaluate the tag body and register it with the layout under this div's name.
layoutTag.putDiv(name, TagUtils.toString(getJspBody()));
logger.debug("div#{} put to layout#{}", name, layoutTag.getName());
public class KieServerInstanceManager { /** * helper methods */ protected List < Container > callRemoteKieServerOperation ( ServerTemplate serverTemplate , ContainerSpec containerSpec , RemoteKieServerOperation operation ) { } }
// Collect one Container result per reachable server instance.
List<Container> containers = new ArrayList<>();
// Nothing to do without server instances or a container spec.
if (serverTemplate.getServerInstanceKeys() == null || serverTemplate.getServerInstanceKeys().isEmpty() || containerSpec == null) {
    return containers;
}
for (ServerInstanceKey instanceUrl : serverTemplate.getServerInstanceKeys()) {
    // Describe the container as it should exist on this instance.
    Container container = new Container();
    container.setContainerSpecId(containerSpec.getId());
    container.setServerTemplateId(serverTemplate.getId());
    container.setServerInstanceId(instanceUrl.getServerInstanceId());
    container.setUrl(instanceUrl.getUrl() + "/containers/" + containerSpec.getId());
    container.setStatus(containerSpec.getStatus());
    try {
        final KieServicesClient client = getClient(instanceUrl.getUrl());
        operation.doOperation(client, container);
        containers.add(container);
    } catch (Exception e) {
        // Best-effort: skip unreachable instances, but keep the failure
        // details in the debug log instead of discarding the exception.
        logger.debug("Unable to connect to {}", instanceUrl, e);
    }
}
return containers;
public class ProtoTruthMessageDifferencer { /** * Compare the two non - null messages , and return a detailed comparison report . */ DiffResult diffMessages ( Message actual , Message expected ) { } }
checkNotNull(actual);
checkNotNull(expected);
// Descriptors are singletons per message type, so reference equality is the
// correct way to require that both messages are of the same proto type.
checkArgument(actual.getDescriptorForType() == expected.getDescriptorForType(), "The actual [%s] and expected [%s] message descriptors do not match.", actual.getDescriptorForType(), expected.getDescriptorForType());
// Delegate to the recursive diff starting from the root field-scope config.
return diffMessages(actual, expected, rootConfig);
public class LTieFltConsumerBuilder { /** * Adds full new case for the argument that are of specific classes ( matched by instanceOf , null is a wildcard ) . */ @ Nonnull public < V extends T > LTieFltConsumerBuilder < T > aCase ( Class < V > argC1 , LTieFltConsumer < V > function ) { } }
// Guard matches when the first argument is an instance of argC1; a null class is a wildcard.
PartialCase.The pc = partialCaseFactoryMethod((a1, a2, a3) -> (argC1 == null || argC1.isInstance(a1)));
// Attach the handler to run when the guard matches.
pc.evaluate(function);
return self();
public class AttackSummaryMarshaller { /** * Marshall the given parameter object . */ public void marshall ( AttackSummary attackSummary , ProtocolMarshaller protocolMarshaller ) { } }
// Null input is a caller error, not a marshalling failure.
if (attackSummary == null) {
    throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
    // Emit each field through its protocol binding.
    protocolMarshaller.marshall(attackSummary.getAttackId(), ATTACKID_BINDING);
    protocolMarshaller.marshall(attackSummary.getResourceArn(), RESOURCEARN_BINDING);
    protocolMarshaller.marshall(attackSummary.getStartTime(), STARTTIME_BINDING);
    protocolMarshaller.marshall(attackSummary.getEndTime(), ENDTIME_BINDING);
    protocolMarshaller.marshall(attackSummary.getAttackVectors(), ATTACKVECTORS_BINDING);
} catch (Exception e) {
    // Wrap any marshalling failure, preserving the cause.
    throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
}
public class DataBufferFactoryWrapperConfiguration { /** * Returns a new { @ link DataBufferFactoryWrapper } for { @ link ArmeriaWebServer } and * { @ link ArmeriaClientHttpConnector } . */ @ Bean @ ConditionalOnMissingBean ( DataBufferFactoryWrapper . class ) public DataBufferFactoryWrapper < ? > armeriaBufferFactory ( Optional < DataBufferFactory > dataBufferFactory ) { } }
if ( dataBufferFactory . isPresent ( ) ) { return new DataBufferFactoryWrapper < > ( dataBufferFactory . get ( ) ) ; } return DataBufferFactoryWrapper . DEFAULT ;
public class LNDImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ public void setSOLid ( Integer newSOLid ) { } }
// EMF-generated setter: record the old value, update, then fire a SET
// notification so adapters observe the change.
Integer oldSOLid = soLid;
soLid = newSOLid;
if (eNotificationRequired())
    eNotify(new ENotificationImpl(this, Notification.SET, AfplibPackage.LND__SO_LID, oldSOLid, soLid));
public class ObjectGraphBuilder { /** * Sets the current ReferenceResolver . < br > * It will assign DefaultReferenceResolver if null . < br > * It accepts a ReferenceResolver instance , a String or a Closure . */ public void setReferenceResolver ( final Object referenceResolver ) { } }
if (referenceResolver instanceof ReferenceResolver) {
    // Already a resolver: use it as-is.
    this.referenceResolver = (ReferenceResolver) referenceResolver;
} else if (referenceResolver instanceof String) {
    // A String becomes a constant resolver that ignores the node name.
    this.referenceResolver = new ReferenceResolver() {
        public String getReferenceFor(String nodeName) {
            return (String) referenceResolver;
        }
    };
} else if (referenceResolver instanceof Closure) {
    // A Closure is adapted: it is called with the node name and its
    // delegate is set to this builder.
    final ObjectGraphBuilder self = this;
    this.referenceResolver = new ReferenceResolver() {
        public String getReferenceFor(String nodeName) {
            Closure cls = (Closure) referenceResolver;
            cls.setDelegate(self);
            return (String) cls.call(new Object[] { nodeName });
        }
    };
} else {
    // Anything else (including null) falls back to the default resolver.
    this.referenceResolver = new DefaultReferenceResolver();
}
public class AWSDatabaseMigrationServiceClient { /** * Deletes the specified replication instance . * < note > * You must delete any migration tasks that are associated with the replication instance before you can delete it . * < / note > * @ param deleteReplicationInstanceRequest * @ return Result of the DeleteReplicationInstance operation returned by the service . * @ throws InvalidResourceStateException * The resource is in a state that prevents it from being used for database migration . * @ throws ResourceNotFoundException * The resource could not be found . * @ sample AWSDatabaseMigrationService . DeleteReplicationInstance * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / dms - 2016-01-01 / DeleteReplicationInstance " target = " _ top " > AWS * API Documentation < / a > */ @ Override public DeleteReplicationInstanceResult deleteReplicationInstance ( DeleteReplicationInstanceRequest request ) { } }
// Standard SDK client flow: apply pre-execution hooks, then dispatch.
request = beforeClientExecution(request);
return executeDeleteReplicationInstance(request);
public class SequenceModelResource { /** * Tag the current sentence . * @ param tokens * the current sentence * @ return the array of span sequences */ public Span [ ] seqToSpans ( final String [ ] tokens ) { } }
final Span [ ] origSpans = this . sequenceLabeler . tag ( tokens ) ; final Span [ ] seqSpans = SequenceLabelerME . dropOverlappingSpans ( origSpans ) ; return seqSpans ;
public class TldVarianceFilter { /** * Performs variance test at the specified rectangle * @ return true if it passes and false if not */ public boolean checkVariance ( ImageRectangle r ) { } }
// The rectangle passes when its pixel variance is at least the lower threshold.
double sigma2 = computeVariance(r.x0, r.y0, r.x1, r.y1);
return sigma2 >= thresholdLower;
public class SnorocketOWLReasoner { /** * Returns the identifier for an { @ link OWLClass } . If the owl class is * anonymous then it returns null . * @ param oc * @ return */ private Object getId ( OWLClass oc ) { } }
// Anonymous classes have no stable identifier.
if (oc.isAnonymous())
    return null;
String id = oc.toStringID();
// owl:Thing and owl:Nothing map to the reasoner's TOP and BOTTOM concepts;
// both the bracketed ("<iri>") and bare IRI forms are accepted.
if (id.equals("<" + OWLImporter.THING_IRI + ">") || id.equals(OWLImporter.THING_IRI)) {
    return NamedConcept.TOP;
} else if (id.equals("<" + OWLImporter.NOTHING_IRI + ">") || id.equals(OWLImporter.NOTHING_IRI)) {
    return NamedConcept.BOTTOM;
} else {
    // Any other class is identified by its IRI string.
    return id;
}
public class TableFactoryService { /** * Filters for factories with matching context . * @ return all matching factories */ private static < T > List < TableFactory > filterByContext ( Class < T > factoryClass , Map < String , String > properties , List < TableFactory > foundFactories , List < TableFactory > classFactories ) { } }
List<TableFactory> matchingFactories = classFactories.stream().filter(factory -> {
    Map<String, String> requestedContext = normalizeContext(factory);
    // Work on a copy so the factory's own context is untouched.
    Map<String, String> plainContext = new HashMap<>(requestedContext);
    // we remove the version for now until we have the first backwards compatibility case
    // with the version we can provide mappings in case the format changes
    plainContext.remove(CONNECTOR_PROPERTY_VERSION);
    plainContext.remove(FORMAT_PROPERTY_VERSION);
    plainContext.remove(METADATA_PROPERTY_VERSION);
    plainContext.remove(STATISTICS_PROPERTY_VERSION);
    plainContext.remove(CATALOG_PROPERTY_VERSION);
    // A factory matches only if every remaining context key is present in the
    // requested properties with exactly the same value.
    return plainContext.keySet().stream().allMatch(e -> properties.containsKey(e) && properties.get(e).equals(plainContext.get(e)));
}).collect(Collectors.toList());
if (matchingFactories.isEmpty()) {
    throw new NoMatchingTableFactoryException("No context matches.", factoryClass, foundFactories, properties);
}
return matchingFactories;
public class CmsSetupXmlHelper { /** * Returns the value in the given xpath of the given xml file . < p > * @ param xmlFilename the xml config file ( could be relative to the base path ) * @ param xPath the xpath to read ( should select a single node or attribute ) * @ return the value in the given xpath of the given xml file , or < code > null < / code > if no matching node * @ throws CmsXmlException if something goes wrong while reading */ public String getValue ( String xmlFilename , String xPath ) throws CmsXmlException { } }
// Parse (or fetch the cached) document for the file, then evaluate the xpath on it.
return getValue(getDocument(xmlFilename), xPath);
public class ElementsExceptionsFactory { /** * Constructs and initializes a new { @ link NoSuchFileException } with the given { @ link Throwable cause } * and { @ link String message } formatted with the given { @ link Object [ ] arguments } . * @ param cause { @ link Throwable } identified as the reason this { @ link NoSuchFileException } was thrown . * @ param message { @ link String } describing the { @ link NoSuchFileException exception } . * @ param args { @ link Object [ ] arguments } used to replace format placeholders in the { @ link String message } . * @ return a new { @ link NoSuchFileException } with the given { @ link Throwable cause } and { @ link String message } . * @ see org . cp . elements . io . NoSuchFileException */ public static NoSuchFileException newNoSuchFileException ( Throwable cause , String message , Object ... args ) { } }
// Format the message with the supplied arguments and preserve the cause.
return new NoSuchFileException(format(message, args), cause);
public class Sql { /** * A variant of { @ link # eachRow ( String , java . util . List , groovy . lang . Closure , int , int , groovy . lang . Closure ) } * allowing the named parameters to be supplied in a map . * @ param sql the sql statement * @ param map a map containing the named parameters * @ param offset the 1 - based offset for the first row to be processed * @ param maxRows the maximum number of rows to be processed * @ param metaClosure called for meta data ( only once after sql execution ) * @ param rowClosure called for each row with a GroovyResultSet * @ throws SQLException if a database access error occurs * @ since 1.8.7 */ public void eachRow ( String sql , Map map , Closure metaClosure , int offset , int maxRows , Closure rowClosure ) throws SQLException { } }
// Adapt the named-parameter map to the List-based overload.
eachRow(sql, singletonList(map), metaClosure, offset, maxRows, rowClosure);
public class DeleteFunctionRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( DeleteFunctionRequest deleteFunctionRequest , ProtocolMarshaller protocolMarshaller ) { } }
// Null input is a caller error, not a marshalling failure.
if (deleteFunctionRequest == null) {
    throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
    // Emit each request field through its protocol binding.
    protocolMarshaller.marshall(deleteFunctionRequest.getApiId(), APIID_BINDING);
    protocolMarshaller.marshall(deleteFunctionRequest.getFunctionId(), FUNCTIONID_BINDING);
} catch (Exception e) {
    // Wrap any marshalling failure, preserving the cause.
    throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
}
public class Cache { /** * Return the cache as JSON . * @ param pretty make it pretty * @ return the string as JSON */ public String asJSON ( boolean pretty ) { } }
JSONSerializer ser = new JSONSerializer ( ) ; Gson gson = new Gson ( ) ; Gson prettyJson = new GsonBuilder ( ) . setPrettyPrinting ( ) . create ( ) ; JsonArray cacheArray = new JsonArray ( ) ; String cacheAsString ; for ( ServiceWrapper s : _cache ) { String json = ser . marshalService ( s ) ; JsonObject jsonService = gson . fromJson ( json , JsonObject . class ) ; cacheArray . add ( jsonService ) ; } if ( pretty ) { cacheAsString = prettyJson . toJson ( cacheArray ) ; } else { cacheAsString = gson . toJson ( cacheArray ) ; } return cacheAsString ;
public class WebhookMarshaller { /** * Marshall the given parameter object . */ public void marshall ( Webhook webhook , ProtocolMarshaller protocolMarshaller ) { } }
// Null input is a caller error, not a marshalling failure.
if (webhook == null) {
    throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
    // Emit each webhook field through its protocol binding.
    protocolMarshaller.marshall(webhook.getUrl(), URL_BINDING);
    protocolMarshaller.marshall(webhook.getPayloadUrl(), PAYLOADURL_BINDING);
    protocolMarshaller.marshall(webhook.getSecret(), SECRET_BINDING);
    protocolMarshaller.marshall(webhook.getBranchFilter(), BRANCHFILTER_BINDING);
    protocolMarshaller.marshall(webhook.getFilterGroups(), FILTERGROUPS_BINDING);
    protocolMarshaller.marshall(webhook.getLastModifiedSecret(), LASTMODIFIEDSECRET_BINDING);
} catch (Exception e) {
    // Wrap any marshalling failure, preserving the cause.
    throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
}
public class ResourcePoolsBuilder { /** * Add or replace the { @ link ResourcePool } of { @ link ResourceType } in the returned builder . * @ param resourcePool the non - { @ code null } resource pool to add / replace * @ return a new builder with the added pool */ public ResourcePoolsBuilder withReplacing ( ResourcePool resourcePool ) { } }
// Copy-on-write: the builder is immutable, so put the pool (keyed by its
// resource type, replacing any existing pool of that type) into a fresh map
// and return a new builder around it.
Map<ResourceType<?>, ResourcePool> newPools = new HashMap<>(resourcePools);
newPools.put(resourcePool.getType(), resourcePool);
return new ResourcePoolsBuilder(newPools);
public class JobsInner { /** * Gets a list of currently existing nodes which were used for the Job execution . The returned information contains the node ID , its public IP and SSH port . * @ param resourceGroupName Name of the resource group to which the resource belongs . * @ param workspaceName The name of the workspace . Workspace names can only contain a combination of alphanumeric characters along with dash ( - ) and underscore ( _ ) . The name must be from 1 through 64 characters long . * @ param experimentName The name of the experiment . Experiment names can only contain a combination of alphanumeric characters along with dash ( - ) and underscore ( _ ) . The name must be from 1 through 64 characters long . * @ param jobName The name of the job within the specified resource group . Job names can only contain a combination of alphanumeric characters along with dash ( - ) and underscore ( _ ) . The name must be from 1 through 64 characters long . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ throws CloudException thrown if the request is rejected by server * @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent * @ return the PagedList & lt ; RemoteLoginInformationInner & gt ; object if successful . */ public PagedList < RemoteLoginInformationInner > listRemoteLoginInformation ( final String resourceGroupName , final String workspaceName , final String experimentName , final String jobName ) { } }
// Fetch the first page synchronously.
ServiceResponse<Page<RemoteLoginInformationInner>> response = listRemoteLoginInformationSinglePageAsync(resourceGroupName, workspaceName, experimentName, jobName).toBlocking().single();
// Wrap it in a PagedList that lazily (and blockingly) fetches subsequent pages.
return new PagedList<RemoteLoginInformationInner>(response.body()) {
    @Override
    public Page<RemoteLoginInformationInner> nextPage(String nextPageLink) {
        return listRemoteLoginInformationNextSinglePageAsync(nextPageLink).toBlocking().single().body();
    }
};
public class Ifc4FactoryImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ public String convertIfcTaskDurationEnumToString ( EDataType eDataType , Object instanceValue ) { } }
// Generated EMF converter: a null instance converts to null,
// any other value to its string form.
if (instanceValue == null) {
    return null;
}
return instanceValue.toString();
public class DruidQuerySignature { /** * Get existing or create new ( if not { @ link DruidQuerySignature # isAggregateSignature } ) { @ link VirtualColumn } for a given * { @ link DruidExpression } */ @ Nullable public VirtualColumn getOrCreateVirtualColumnForExpression ( PlannerContext plannerContext , DruidExpression expression , SqlTypeName typeName ) { } }
// Create a virtual column on first sight of this expression; aggregate
// signatures are read-only and never mint new columns.
if (!isAggregateSignature && !virtualColumnsByExpression.containsKey(expression.getExpression())) {
    // Names are generated from a prefix plus a monotonically increasing counter.
    final String virtualColumnName = virtualColumnPrefix + virtualColumnCounter++;
    final VirtualColumn virtualColumn = expression.toVirtualColumn(virtualColumnName, Calcites.getValueTypeForSqlTypeName(typeName), plannerContext.getExprMacroTable());
    // Index the column both by expression text and by generated name.
    virtualColumnsByExpression.put(expression.getExpression(), virtualColumn);
    virtualColumnsByName.put(virtualColumnName, virtualColumn);
}
// May return null for an aggregate signature with no pre-existing column.
return virtualColumnsByExpression.get(expression.getExpression());
public class EnvironmentClassLoader { /** * Returns the listeners . */ protected ArrayList < EnvLoaderListener > getEnvironmentListeners ( ) { } }
// Snapshot of the registered listeners; always non-null, possibly empty.
ArrayList<EnvLoaderListener> listeners = new ArrayList<EnvLoaderListener>();
// Read the field once so the null check and the iteration see the same list.
ArrayList<EnvLoaderListener> envListeners = _listeners;
if (envListeners == null) {
    return listeners;
}
synchronized (envListeners) {
    for (int i = 0; i < envListeners.size(); i++) {
        EnvLoaderListener listener = envListeners.get(i);
        if (listener != null) {
            listeners.add(listener);
        } else {
            // Compact out entries that have been nulled elsewhere.
            envListeners.remove(i);
            i--;
        }
    }
}
return listeners;
public class CPDAvailabilityEstimatePersistenceImpl { /** * Removes the cpd availability estimate where uuid = & # 63 ; and groupId = & # 63 ; from the database . * @ param uuid the uuid * @ param groupId the group ID * @ return the cpd availability estimate that was removed */ @ Override public CPDAvailabilityEstimate removeByUUID_G ( String uuid , long groupId ) throws NoSuchCPDAvailabilityEstimateException { } }
// findByUUID_G throws NoSuchCPDAvailabilityEstimateException when no row matches.
CPDAvailabilityEstimate cpdAvailabilityEstimate = findByUUID_G(uuid, groupId);
return remove(cpdAvailabilityEstimate);
public class CocoQuery { /** * Open a confirm dialog with title and message * @ param title * @ param message */ public void confirm ( final int title , final int message , final DialogInterface . OnClickListener onClickListener ) { } }
final AlertDialog.Builder builder = new AlertDialog.Builder(getContext());
builder.setTitle(title).setIcon(android.R.drawable.ic_dialog_info).setMessage(message);
// NOTE(review): both OK and Cancel forward to the same listener — callers must
// inspect `which` to distinguish the buttons.
builder.setPositiveButton(android.R.string.ok, new DialogInterface.OnClickListener() {
    @Override
    public void onClick(final DialogInterface dialog, final int which) {
        if (onClickListener != null) {
            onClickListener.onClick(dialog, which);
        }
    }
});
builder.setNegativeButton(android.R.string.cancel, new DialogInterface.OnClickListener() {
    @Override
    public void onClick(final DialogInterface dialog, final int which) {
        if (onClickListener != null) {
            onClickListener.onClick(dialog, which);
        }
    }
});
builder.show();
public class PrcEntitiesPage { /** * < p > Process entity request . < / p > * @ param pAddParam additional param * @ param pRequestData Request Data * @ throws Exception - an exception */ @ Override public final void process ( final Map < String , Object > pAddParam , final IRequestData pRequestData ) throws Exception { } }
// Delegate the whole page retrieval to the entities-page service.
this.srvEntitiesPage.retrievePage(pAddParam, pRequestData);
public class DRL5Expressions { /** * src / main / resources / org / drools / compiler / lang / DRL5Expressions . g : 200:1 : annotationElementValuePair [ AnnotationDescr descr ] : key = ID EQUALS _ ASSIGN val = annotationValue ; */ public final void annotationElementValuePair ( AnnotationDescr descr ) throws RecognitionException { } }
// ANTLR-generated rule body: matches `key=ID EQUALS_ASSIGN val=annotationValue`.
Token key = null;
ParserRuleReturnScope val = null;
try {
    // src/main/resources/org/drools/compiler/lang/DRL5Expressions.g:201:5 : key= ID EQUALS_ASSIGN val= annotationValue
    {
        key = (Token) match(input, ID, FOLLOW_ID_in_annotationElementValuePair1032);
        if (state.failed)
            return;
        match(input, EQUALS_ASSIGN, FOLLOW_EQUALS_ASSIGN_in_annotationElementValuePair1034);
        if (state.failed)
            return;
        pushFollow(FOLLOW_annotationValue_in_annotationElementValuePair1038);
        val = annotationValue();
        state._fsp--;
        if (state.failed)
            return;
        // Only populate the descriptor when not backtracking and descr building is on.
        if (state.backtracking == 0) {
            if (buildDescr) {
                descr.setKeyValue((key != null ? key.getText() : null), (val != null ? input.toString(val.start, val.stop) : null));
            }
        }
    }
} catch (RecognitionException re) {
    throw re;
} finally {
    // do for sure before leaving
}
public class TracezZPageHandler { /** * Emits the internal html for a single { @ link SpanData } . */ @ SuppressWarnings ( "deprecation" ) private static void emitSingleSpan ( Formatter formatter , SpanData span ) { } }
// Render one span as HTML: a header row (start time, elapsed time, trace/span
// ids), then its events in chronological order, then status and attributes.
Calendar calendar = Calendar . getInstance ( ) ; calendar . setTimeInMillis ( TimeUnit . SECONDS . toMillis ( span . getStartTimestamp ( ) . getSeconds ( ) ) ) ;
// Sub-second (microsecond) part of the start timestamp, printed separately
// because Calendar only carries millisecond precision.
long microsField = TimeUnit . NANOSECONDS . toMicros ( span . getStartTimestamp ( ) . getNanos ( ) ) ;
// Elapsed seconds for finished spans; 13 blanks keeps columns aligned for
// still-running spans (no end timestamp yet).
String elapsedSecondsStr = span . getEndTimestamp ( ) != null ? String . format ( "%13.6f" , durationToNanos ( span . getEndTimestamp ( ) . subtractTimestamp ( span . getStartTimestamp ( ) ) ) * 1.0e-9 ) : String . format ( "%13s" , " " ) ;
SpanContext spanContext = span . getContext ( ) ;
// Header row. Trace id is colored by the sampling decision; a missing parent
// is rendered as SpanId.INVALID. Ids are lowercase base16.
formatter . format ( "<b>%04d/%02d/%02d-%02d:%02d:%02d.%06d %s TraceId: <b style=\"color:%s;\">%s</b> " + "SpanId: %s ParentSpanId: %s</b>%n" , calendar . get ( Calendar . YEAR ) , calendar . get ( Calendar . MONTH ) + 1 , calendar . get ( Calendar . DAY_OF_MONTH ) , calendar . get ( Calendar . HOUR_OF_DAY ) , calendar . get ( Calendar . MINUTE ) , calendar . get ( Calendar . SECOND ) , microsField , elapsedSecondsStr , spanContext . getTraceOptions ( ) . isSampled ( ) ? SAMPLED_TRACE_ID_COLOR : NOT_SAMPLED_TRACE_ID_COLOR , BaseEncoding . base16 ( ) . lowerCase ( ) . encode ( spanContext . getTraceId ( ) . getBytes ( ) ) , BaseEncoding . base16 ( ) . lowerCase ( ) . encode ( spanContext . getSpanId ( ) . getBytes ( ) ) , BaseEncoding . base16 ( ) . lowerCase ( ) . encode ( span . getParentSpanId ( ) == null ? SpanId . INVALID . getBytes ( ) : span . getParentSpanId ( ) . getBytes ( ) ) ) ;
int lastEntryDayOfYear = calendar . get ( Calendar . DAY_OF_YEAR ) ;
Timestamp lastTimestampNanos = span . getStartTimestamp ( ) ;
// Merge annotations and (deprecated) network events into one list sorted by
// timestamp so they interleave chronologically.
TimedEvents < Annotation > annotations = span . getAnnotations ( ) ; TimedEvents < io . opencensus . trace . NetworkEvent > networkEvents = span . getNetworkEvents ( ) ; List < TimedEvent < ? > > timedEvents = new ArrayList < TimedEvent < ? > > ( annotations . getEvents ( ) ) ; timedEvents . addAll ( networkEvents . getEvents ( ) ) ; Collections . sort ( timedEvents , new TimedEventComparator ( ) ) ;
for ( TimedEvent < ? > event : timedEvents ) {
// Special printing so that durations smaller than one second
// are left padded with blanks instead of '0' characters.
// E.g.,
//   Number     Printout
//   0.000534   .   534
//   1.000534   1.000534
long deltaMicros = TimeUnit . NANOSECONDS . toMicros ( durationToNanos ( event . getTimestamp ( ) . subtractTimestamp ( lastTimestampNanos ) ) ) ; String deltaString ; if ( deltaMicros >= 1000000 ) { deltaString = String . format ( "%.6f" , ( deltaMicros / 1000000.0 ) ) ; } else { deltaString = String . format ( ".%6d" , deltaMicros ) ; }
// Absolute wall-clock time of this event (millisecond-precision Calendar,
// plus the separately printed microsecond field).
calendar . setTimeInMillis ( TimeUnit . SECONDS . toMillis ( event . getTimestamp ( ) . getSeconds ( ) ) + TimeUnit . NANOSECONDS . toMillis ( event . getTimestamp ( ) . getNanos ( ) ) ) ; microsField = TimeUnit . NANOSECONDS . toMicros ( event . getTimestamp ( ) . getNanos ( ) ) ;
// Print the date prefix only when the day changed since the previous row;
// otherwise pad with 11 blanks to keep columns aligned.
int dayOfYear = calendar . get ( Calendar . DAY_OF_YEAR ) ; if ( dayOfYear == lastEntryDayOfYear ) { formatter . format ( "%11s" , "" ) ; } else { formatter . format ( "%04d/%02d/%02d-" , calendar . get ( Calendar . YEAR ) , calendar . get ( Calendar . MONTH ) + 1 , calendar . get ( Calendar . DAY_OF_MONTH ) ) ; lastEntryDayOfYear = dayOfYear ; }
// Event row: time-of-day, delta since the previous event, and the
// HTML-escaped rendering of the annotation or network event.
formatter . format ( "%02d:%02d:%02d.%06d %13s ... %s%n" , calendar . get ( Calendar . HOUR_OF_DAY ) , calendar . get ( Calendar . MINUTE ) , calendar . get ( Calendar . SECOND ) , microsField , deltaString , htmlEscaper ( ) . escape ( event . getEvent ( ) instanceof Annotation ? renderAnnotation ( ( Annotation ) event . getEvent ( ) ) : renderNetworkEvents ( ( io . opencensus . trace . NetworkEvent ) castNonNull ( event . getEvent ( ) ) ) ) ) ; lastTimestampNanos = event . getTimestamp ( ) ;
}
// Trailer rows: status (only when set) and the span's attributes.
Status status = span . getStatus ( ) ; if ( status != null ) { formatter . format ( "%44s %s%n" , "" , htmlEscaper ( ) . escape ( renderStatus ( status ) ) ) ; }
formatter . format ( "%44s %s%n" , "" , htmlEscaper ( ) . escape ( renderAttributes ( span . getAttributes ( ) . getAttributeMap ( ) ) ) ) ;
public class Level { /** * Parse a level name string into a Level . * The argument string may consist of either a level name * or an integer value . * For example : * < ul > * < li > " SEVERE " * < li > " 1000" * < / ul > * @ param name string to be parsed * @ throws NullPointerException if the name is null * @ throws IllegalArgumentException if the value is not valid . * Valid values are integers between < CODE > Integer . MIN _ VALUE < / CODE > * and < CODE > Integer . MAX _ VALUE < / CODE > , and all known level names . * Known names are the levels defined by this class ( e . g . , < CODE > FINE < / CODE > , * < CODE > FINER < / CODE > , < CODE > FINEST < / CODE > ) , or created by this class with * appropriate package access , or new levels defined or created * by subclasses . * @ return The parsed value . Passing an integer that corresponds to a known name * ( e . g . , 700 ) will return the associated name ( e . g . , < CODE > CONFIG < / CODE > ) . * Passing an integer that does not ( e . g . , 1 ) will return a new level name * initialized to that value . */ public static synchronized Level parse ( String name ) throws IllegalArgumentException { } }
// Check that name is not null . name . length ( ) ; KnownLevel level ; // Look for a known Level with the given non - localized name . level = KnownLevel . findByName ( name ) ; if ( level != null ) { return level . levelObject ; } // J2ObjC modified : Check for known level names before testing for Integer and throwing // exception if it isn ' t . // Finally , look for a known level with the given localized name , // in the current default locale . // This is relatively expensive , but not excessively so . level = KnownLevel . findByLocalizedName ( name ) ; if ( level != null ) { return level . levelObject ; } // Now , check if the given name is an integer . If so , // first look for a Level with the given value and then // if necessary create one . try { int x = Integer . parseInt ( name ) ; level = KnownLevel . findByValue ( x ) ; if ( level == null ) { // add new Level Level levelObject = new Level ( name , x ) ; level = KnownLevel . findByValue ( x ) ; } return level . levelObject ; } catch ( NumberFormatException ex ) { // Not an integer . // Drop through . } // OK , we ' ve tried everything and failed throw new IllegalArgumentException ( "Bad level \"" + name + "\"" ) ;