signature stringlengths 43 39.1k | implementation stringlengths 0 450k |
|---|---|
public class InfinispanCacheManager {
    /**
     * This method returns a default cache.
     *
     * <p>The shared {@code defaultCacheManager} is created lazily on first use; the
     * method is {@code synchronized} so concurrent first calls cannot create two managers.
     *
     * @param cacheName The cache name
     * @return The default cache
     */
    public static synchronized <K, V> Cache<K, V> getDefaultCache(String cacheName) {
        if (defaultCacheManager == null) {
            defaultCacheManager = new DefaultCacheManager();
        }
        return defaultCacheManager.getCache(cacheName);
    }
}
public class CollUtil { /** * 循环遍历 { @ link Enumeration } , 使用 { @ link Consumer } 接受遍历的每条数据 , 并针对每条数据做处理
* @ param < T > 集合元素类型
* @ param enumeration { @ link Enumeration }
* @ param consumer { @ link Consumer } 遍历的每条数据处理器 */
public static < T > void forEach ( Enumeration < T > enumeration , Consumer < T > consumer ) { } } | int index = 0 ; while ( enumeration . hasMoreElements ( ) ) { consumer . accept ( enumeration . nextElement ( ) , index ) ; index ++ ; } |
public class IotHubResourcesInner {
    /**
     * Add a consumer group to an Event Hub-compatible endpoint in an IoT hub.
     *
     * <p>Blocking convenience wrapper around the async service call: waits for the
     * single response and unwraps its body.
     *
     * @param resourceGroupName The name of the resource group that contains the IoT hub.
     * @param resourceName The name of the IoT hub.
     * @param eventHubEndpointName The name of the Event Hub-compatible endpoint in the IoT hub.
     * @param name The name of the consumer group to add.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws ErrorDetailsException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the EventHubConsumerGroupInfoInner object if successful.
     */
    public EventHubConsumerGroupInfoInner createEventHubConsumerGroup(String resourceGroupName, String resourceName, String eventHubEndpointName, String name) {
        return createEventHubConsumerGroupWithServiceResponseAsync(resourceGroupName, resourceName, eventHubEndpointName, name).toBlocking().single().body();
    }
}
public class Duration {
    /**
     * Parses an ISO-8601-style period string (e.g. {@code "P1Y2M3DT4H"}) into a
     * {@code Duration} restricted to the given unit type.
     *
     * @param period the period text, optionally prefixed with '-' for a negative duration
     * @param type unit class restricting which components are legal
     *             (calendar-only, clock-only, week-based, or the general supertype)
     * @return the parsed duration
     * @throws ParseException if the text is empty, missing the 'P'/'T' markers,
     *         contains components illegal for {@code type}, or ends prematurely
     */
    private static <U extends IsoUnit> Duration<U> parsePeriod(String period, Class<U> type) throws ParseException {
        int index = 0;
        boolean negative = false;
        if (period.length() == 0) {
            throw new ParseException("Empty period string.", index);
        } else if (period.charAt(0) == '-') {
            // Leading minus makes the whole duration negative.
            negative = true;
            index = 1;
        }
        try {
            // Mandatory 'P' designator.
            if (period.charAt(index) != 'P') {
                throw new ParseException("Format symbol \'P\' expected: " + period, index);
            } else {
                index++;
            }
            List<Item<U>> items = new ArrayList<>();
            // 'T' separates the date part from the time part; absent => calendrical only.
            int sep = period.indexOf('T', index);
            boolean calendrical = (sep == -1);
            // Map the requested unit class to an internal type id.
            int typeID = SUPER_TYPE;
            if (type == CalendarUnit.class) {
                typeID = CALENDAR_TYPE;
            } else if (type == ClockUnit.class) {
                typeID = CLOCK_TYPE;
            } else if (type == IsoDateUnit.class) {
                typeID = WEEK_BASED_TYPE;
            }
            if (calendrical) {
                // A clock-only duration must contain a time part.
                if (typeID == CLOCK_TYPE) {
                    throw new ParseException("Format symbol \'T\' expected: " + period, index);
                } else {
                    parse(period, index, period.length(), ((typeID == SUPER_TYPE) ? CALENDAR_TYPE : typeID), items);
                }
            } else if ((typeID == CALENDAR_TYPE) || (typeID == WEEK_BASED_TYPE)) {
                // Date-only types may not carry a 'T' time section.
                throw new ParseException("Unexpected time component found: " + period, sep);
            } else {
                boolean alternative = false;
                if (sep > index) {
                    // There is a date part before 'T'; illegal for clock-only type.
                    if (typeID == CLOCK_TYPE) {
                        throw new ParseException("Unexpected date component found: " + period, index);
                    } else {
                        alternative = parse(period.substring(0, sep), index, sep, CALENDAR_TYPE, items);
                    }
                }
                // The date part may select the "alternative" representation for the time part.
                if (alternative) {
                    parseAlt(period, sep + 1, period.length(), false, items);
                } else {
                    parse(period, sep + 1, period.length(), CLOCK_TYPE, items);
                }
            }
            return new Duration<>(items, negative);
        } catch (IndexOutOfBoundsException ex) {
            // Translate index overruns into a ParseException, preserving the cause.
            ParseException pe = new ParseException("Unexpected termination of period string: " + period, index);
            pe.initCause(ex);
            throw pe;
        }
    }
}
public class ImageModerationsImpl {
    /**
     * Fuzzily match an image against one of your custom Image Lists.
     *
     * <p>Returns ID and tags of the matching image. Note: Refresh Index must be run
     * on the corresponding Image List before additions and removals are reflected
     * in the response.
     *
     * <p>Blocking wrapper around the async service call: waits for the single
     * response and unwraps its body.
     *
     * @param contentType The content type.
     * @param imageUrl The image url.
     * @param matchUrlInputOptionalParameter the object representing the optional parameters to be set before calling this API
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws APIErrorException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the MatchResponse object if successful.
     */
    public MatchResponse matchUrlInput(String contentType, BodyModelModel imageUrl, MatchUrlInputOptionalParameter matchUrlInputOptionalParameter) {
        return matchUrlInputWithServiceResponseAsync(contentType, imageUrl, matchUrlInputOptionalParameter).toBlocking().single().body();
    }
}
public class FactorReader {
    /**
     * Retrieve the next page from the Twilio API.
     *
     * @param page current page
     * @param client TwilioRestClient with which to make the request
     * @return Next Page
     */
    @Override
    public Page<Factor> nextPage(final Page<Factor> page, final TwilioRestClient client) {
        // Build a GET request from the next-page URL advertised by the current page,
        // scoped to the Authy domain and the client's configured region.
        Request request = new Request(HttpMethod.GET, page.getNextPageUrl(Domains.AUTHY.toString(), client.getRegion()));
        return pageForRequest(client, request);
    }
}
public class Calcites { /** * The inverse of { @ link # jodaToCalciteTimestamp ( DateTime , DateTimeZone ) } .
* @ param timestamp Calcite style timestamp
* @ param timeZone session time zone
* @ return joda timestamp , with time zone set to the session time zone */
public static DateTime calciteTimestampToJoda ( final long timestamp , final DateTimeZone timeZone ) { } } | return new DateTime ( timestamp , DateTimeZone . UTC ) . withZoneRetainFields ( timeZone ) ; |
public class Versions {
    /**
     * Create a version range from a string representation.
     * For example:
     * <ul>
     * <li><code>1.0</code> Version 1.0</li>
     * <li><code>[1.0,2.0)</code> Versions 1.0 (included) to 2.0 (not included)</li>
     * <li><code>[1.0,2.0]</code> Versions 1.0 to 2.0 (both included)</li>
     * <li><code>[1.5,)</code> Versions 1.5 and higher</li>
     * <li><code>(,1.0],[1.2,)</code> Versions up to 1.0 (included) and 1.2 or higher</li>
     * </ul>
     *
     * @param intersection string representation of a version or version range
     * @return a new {@link MultipleVersionRange} object that represents the specification
     * @throws VersionException if a range is unbounded, ranges overlap, or bracketed
     *         and bare versions are mixed
     */
    public static MultipleVersionRange parseMultipleVersionRange(String intersection) throws VersionException {
        Assert.notNull(intersection, "Version range must not be null.");
        List<VersionRange> ranges = new ArrayList<VersionRange>();
        String process = intersection;
        Version upperBound = null;
        Version lowerBound = null;
        // Consume fully-qualified bracketed segments (e.g. "[1.0,2.0)") from the front.
        while (process.startsWith("[") || process.startsWith("(")) {
            int index1 = process.indexOf(")");
            int index2 = process.indexOf("]");
            // Pick whichever closing bracket occurs first.
            int index = index2;
            if (index2 < 0 || index1 < index2) {
                if (index1 >= 0) {
                    index = index1;
                }
            }
            if (index < 0) {
                throw new VersionException("Unbounded range: " + intersection);
            }
            VersionRange range = parseVersionRange(process.substring(0, index + 1));
            if (lowerBound == null) {
                lowerBound = range.getMin();
            }
            // Each subsequent range must start at or after the previous range's maximum.
            if (upperBound != null) {
                if (range.getMin() == null || range.getMin().compareTo(upperBound) < 0) {
                    throw new VersionException("Ranges overlap: " + intersection);
                }
            }
            ranges.add(range);
            upperBound = range.getMax();
            process = process.substring(index + 1).trim();
            // Skip the separating comma between bracketed segments.
            if (process.length() > 0 && process.startsWith(",")) {
                process = process.substring(1).trim();
            }
        }
        if (process.length() > 0) {
            // Leftover text after bracketed segments is only legal when there were none.
            if (ranges.size() > 0) {
                throw new VersionException("Only fully-qualified sets allowed in multiple version range scenario: " + intersection);
            }
            if (process.contains(",")) {
                String[] split = process.split(",");
                for (String version : split) {
                    if (version.startsWith("[") || version.startsWith("("))
                        ranges.add(parseVersionRange(version));
                    else
                        ranges.add(new SingleVersionRange(SingleVersion.valueOf(version)));
                }
            } else {
                // Single bare version, e.g. "1.0".
                ranges.add(new SingleVersionRange(SingleVersion.valueOf(process)));
            }
        }
        return new MultipleVersionRange(ranges);
    }
}
public class Flowables { /** * Simple { @ link Flowable } implementation to emit a range of BigInteger values .
* @ param startValue first value to emit in range
* @ param endValue final value to emit in range
* @ param ascending direction to iterate through range
* @ return a { @ link Flowable } instance to emit this range of values */
public static Flowable < BigInteger > range ( final BigInteger startValue , final BigInteger endValue , final boolean ascending ) { } } | if ( startValue . compareTo ( BigInteger . ZERO ) == - 1 ) { throw new IllegalArgumentException ( "Negative start index cannot be used" ) ; } else if ( startValue . compareTo ( endValue ) > 0 ) { throw new IllegalArgumentException ( "Negative start index cannot be greater then end index" ) ; } if ( ascending ) { return Flowable . create ( subscriber -> { for ( BigInteger i = startValue ; i . compareTo ( endValue ) < 1 && ! subscriber . isCancelled ( ) ; i = i . add ( BigInteger . ONE ) ) { subscriber . onNext ( i ) ; } if ( ! subscriber . isCancelled ( ) ) { subscriber . onComplete ( ) ; } } , BackpressureStrategy . BUFFER ) ; } else { return Flowable . create ( subscriber -> { for ( BigInteger i = endValue ; i . compareTo ( startValue ) > - 1 && ! subscriber . isCancelled ( ) ; i = i . subtract ( BigInteger . ONE ) ) { subscriber . onNext ( i ) ; } if ( ! subscriber . isCancelled ( ) ) { subscriber . onComplete ( ) ; } } , BackpressureStrategy . BUFFER ) ; } |
public class AuthUtils {
    /**
     * Get the Subject after authentication.
     * <li>If Security is enabled, fetch the Subject from SIBSecurity Service</li>
     * <li>If Security is disabled, create a new Java Subject</li>
     *
     * @param subject the incoming subject (currently only relevant to the
     *        commented-out secure-login path below)
     * @return a new empty Subject when security is disabled; {@code null} when
     *         security is enabled (see note below)
     */
    public Subject getSubject(Subject subject) {
        Subject result = null;
        if (isSecurityEnabled) {
            /*
             * Check if the SIBSecurity Service is loaded. If it is not loaded,
             * load SIBSecurity Service. Invoke the Login method of the
             * SIBSecurity Service to get the Subject.
             *
             * Existing Code:
             * String userName = getAuthorisationUtils().getUserName(subject);
             * SibLoginFactory loginFactory = SibLoginFactory.getInstance();
             * SibLogin myLogin = loginFactory.createNewSibLogin();
             * subject = myLogin.login(theBus, subject);
             */
            // NOTE(review): the login call above is commented out, so this branch
            // currently leaves result == null when security is enabled — confirm
            // callers tolerate a null return here.
        } else {
            /* Create a JAVA Subject and return it, anyways it does not matter */
            result = new Subject();
        }
        return result;
    }
}
public class AbstractMarshaller { /** * Template method for handling { @ code StreamSource } s .
* < p > This implementation defers to { @ code unmarshalInputStream } or { @ code unmarshalReader } .
* @ param streamSource the { @ code StreamSource }
* @ return the object graph
* @ throws IOException if an I / O exception occurs
* @ throws XmlMappingException if the given source cannot be mapped to an object */
protected Object unmarshalStreamSource ( StreamSource streamSource ) throws XmlMappingException , IOException { } } | if ( streamSource . getInputStream ( ) != null ) { return unmarshalInputStream ( streamSource . getInputStream ( ) ) ; } else if ( streamSource . getReader ( ) != null ) { return unmarshalReader ( streamSource . getReader ( ) ) ; } else { throw new IllegalArgumentException ( "StreamSource contains neither InputStream nor Reader" ) ; } |
public class _ValueReferenceResolver {
    /** ##### Standard delegating implementations ##### */
    /**
     * Delegates type resolution for the (base, property) pair to the wrapped resolver.
     *
     * @param ctx the EL context
     * @param base the base object
     * @param property the property to resolve on the base
     * @return the type reported by the delegate resolver
     */
    public Class<?> getType(final ELContext ctx, final Object base, final Object property) {
        return resolver.getType(ctx, base, property);
    }
}
public class EpanetWrapper {
    /**
     * Retrieves the value of a particular analysis option.
     *
     * @param optionCode The {@link OptionParameterCodes}.
     * @return the current value of the requested option
     * @throws EpanetException if the underlying EPANET call reports an error code
     */
    public float ENgetoption(OptionParameterCodes optionCode) throws EpanetException {
        // The native-style API writes its result into a one-element output array.
        float[] optionValue = new float[1];
        int error = epanet.ENgetoption(optionCode.getCode(), optionValue);
        // Translates a non-zero error code into an EpanetException.
        checkError(error);
        return optionValue[0];
    }
}
public class InChIGeneratorFactory {
    /**
     * Gets InChI generator for CDK IAtomContainer.
     *
     * @param container AtomContainer to generate InChI for.
     * @param options String of options for InChI generation.
     * @return the InChI generator object
     * @throws CDKException if the generator cannot be instantiated
     */
    public InChIGenerator getInChIGenerator(IAtomContainer container, String options) throws CDKException {
        // The factory-level ignoreAromaticBonds setting is passed through to the generator.
        return (new InChIGenerator(container, options, ignoreAromaticBonds));
    }
}
public class PublicationData {
    /**
     * Returns a publication string that is a base-32 encoded value that is meant to be
     * published to print media as human readable text.
     *
     * @return a base-32 encoded publication string
     */
    public String getPublicationString() {
        byte[] imprint = publicationHash.getImprint();
        // Layout: [8-byte publication time, in seconds since epoch][hash imprint],
        // with a CRC32 appended before base-32 encoding.
        byte[] data = new byte[imprint.length + 8];
        System.arraycopy(Util.toByteArray(publicationTime.getTime() / 1000), 0, data, 0, 8);
        System.arraycopy(imprint, 0, data, 8, imprint.length);
        return Base32.encodeWithDashes(Util.addCrc32(data));
    }
}
public class CveDB {
    /**
     * Creates a prepared statement from the given key. The SQL is stored in a
     * properties file and the key is used to lookup the specific query.
     *
     * @param key the key to select the prepared statement from the properties file
     * @return the prepared statement, or {@code null} when the optional
     *         MERGE_PROPERTY statement is absent from the bundle (see note below)
     * @throws DatabaseException thrown if there is an error generating the
     *         prepared statement
     */
    private PreparedStatement prepareStatement(PreparedStatementCveDb key) throws DatabaseException {
        PreparedStatement preparedStatement = null;
        try {
            final String statementString = statementBundle.getString(key.name());
            if (key == INSERT_VULNERABILITY || key == INSERT_CPE) {
                // Inserts need the auto-generated primary keys back from the driver.
                preparedStatement = connection.prepareStatement(statementString, Statement.RETURN_GENERATED_KEYS);
            } else {
                preparedStatement = connection.prepareStatement(statementString);
            }
        } catch (SQLException ex) {
            throw new DatabaseException(ex);
        } catch (MissingResourceException ex) {
            // NOTE(review): a missing MERGE_PROPERTY key is deliberately tolerated —
            // the method then returns null, so callers must handle a null statement.
            if (!ex.getMessage().contains("key MERGE_PROPERTY")) {
                throw new DatabaseException(ex);
            }
        }
        return preparedStatement;
    }
}
public class GBSIterator {
    /**
     * Return the next element in the collection.
     *
     * <p>Advances {@code _current1} from the position remembered in {@code _last1};
     * on the first call (no remembered key) it positions on the first element.
     * Returns {@code null} and sets {@code _eof} when iteration is exhausted.
     */
    public Object next() {
        _current1.reset();
        if (_last1.key() == null)
            // First call: locate the first element.
            findFirst(_dstack);
        else {
            findNext(_dstack);
            // No further element: mark end-of-iteration.
            if (_current1.key() == null)
                _eof = true;
        }
        // Remember the current position so the next call can advance from it.
        if (_current1.key() != null)
            _last1.setLocation(_current1);
        return _current1.key();
    }
}
public class Properties {
    /**
     * Emits an XML document representing all of the properties contained
     * in this table.
     *
     * <p>An invocation of this method of the form <tt>props.storeToXML(os,
     * comment)</tt> behaves in exactly the same way as the invocation
     * <tt>props.storeToXML(os, comment, "UTF-8");</tt>.
     *
     * @param os the output stream on which to emit the XML document.
     * @param comment a description of the property list, or <code>null</code>
     *        if no comment is desired.
     * @throws IOException if writing to the specified output stream
     *         results in an <tt>IOException</tt>.
     * @throws NullPointerException if <code>os</code> is null.
     * @throws ClassCastException if this <code>Properties</code> object
     *         contains any keys or values that are not <code>Strings</code>.
     * @see #loadFromXML(InputStream)
     * @since 1.5
     */
    public synchronized void storeToXML(OutputStream os, String comment) throws IOException {
        // Fail fast on a null stream, as documented.
        if (os == null)
            throw new NullPointerException();
        // Delegate to the three-argument variant with the documented default encoding.
        storeToXML(os, comment, "UTF-8");
    }
}
public class AppServiceEnvironmentsInner {
    /**
     * Reboot all machines in an App Service Environment.
     *
     * @param resourceGroupName Name of the resource group to which the resource belongs.
     * @param name Name of the App Service Environment.
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<Void> rebootAsync(String resourceGroupName, String name, final ServiceCallback<Void> serviceCallback) {
        // Adapt the service-response observable to a ServiceFuture, wiring the callback.
        return ServiceFuture.fromResponse(rebootWithServiceResponseAsync(resourceGroupName, name), serviceCallback);
    }
}
public class AbstractLogger {
    /**
     * Logs the provided data at the info level.
     *
     * @param message The message parts (may be null)
     * @param throwable The error (may be null)
     */
    public void logInfo(Object[] message, Throwable throwable) {
        // Delegate to the level-generic log method with the INFO level.
        this.log(LogLevel.INFO, message, throwable);
    }
}
public class OSHelper { /** * Runs a new OS process
* @ param toRun OS console command to run
* @ return the process object
* @ throws OSHelperException */
public static Process runCommand ( List < String > toRun ) throws OSHelperException { } } | Process proc ; ProcessBuilder procBuilder = new ProcessBuilder ( ) ; procBuilder . command ( toRun ) ; try { proc = procBuilder . start ( ) ; } catch ( IOException ex ) { throw new OSHelperException ( "Received an IOException when running command:\n" + toRun + "\n" + ex . getMessage ( ) , ex ) ; } return proc ; |
public class AbstractPrimitiveBindTransform { /** * / * ( non - Javadoc )
* @ see com . abubusoft . kripton . processor . bind . transform . BindTransform # generateParseOnJackson ( com . abubusoft . kripton . processor . bind . BindTypeContext , com . squareup . javapoet . MethodSpec . Builder , java . lang . String , com . squareup . javapoet . TypeName , java . lang . String , com . abubusoft . kripton . processor . bind . model . BindProperty ) */
@ Override public void generateParseOnJackson ( BindTypeContext context , MethodSpec . Builder methodBuilder , String parserName , TypeName beanClass , String beanName , BindProperty property ) { } } | if ( nullable && property . isNullable ( ) ) { methodBuilder . beginControlFlow ( "if ($L.currentToken()!=$T.VALUE_NULL)" , parserName , JsonToken . class ) ; } if ( property . hasTypeAdapter ( ) ) { // there ' s an type adapter
methodBuilder . addCode ( "// using type adapter $L\n" , property . typeAdapter . adapterClazz ) ; // no adapter is present
if ( CharacterBindTransform . CHAR_CAST_CONST . equals ( XML_CAST_TYPE ) ) { methodBuilder . addStatement ( setter ( beanClass , beanName , property , PRE_TYPE_ADAPTER_TO_JAVA + "Character.valueOf((char)$L.$L())+POST_TYPE_ADAPTER" ) , TypeAdapterUtils . class , TypeUtility . typeName ( property . typeAdapter . adapterClazz ) , parserName , JSON_PARSER_METHOD ) ; } else { methodBuilder . addStatement ( setter ( beanClass , beanName , property , PRE_TYPE_ADAPTER_TO_JAVA + "$L.$L()" + POST_TYPE_ADAPTER ) , TypeAdapterUtils . class , TypeUtility . typeName ( property . typeAdapter . adapterClazz ) , parserName , JSON_PARSER_METHOD ) ; } } else { // no adapter is present
if ( CharacterBindTransform . CHAR_CAST_CONST . equals ( XML_CAST_TYPE ) ) { methodBuilder . addStatement ( setter ( beanClass , beanName , property , "Character.valueOf((char)$L.$L())" ) , parserName , JSON_PARSER_METHOD ) ; } else { methodBuilder . addStatement ( setter ( beanClass , beanName , property , "$L.$L()" ) , parserName , JSON_PARSER_METHOD ) ; } } if ( nullable && property . isNullable ( ) ) { methodBuilder . endControlFlow ( ) ; } |
public class TreeLayout { /** * dumpTree */
private void dumpTree ( PrintStream output , TreeNode node , int indent , DumpConfiguration dumpConfiguration ) { } } | StringBuilder sb = new StringBuilder ( ) ; for ( int i = 0 ; i < indent ; i ++ ) { sb . append ( dumpConfiguration . indent ) ; } if ( dumpConfiguration . includeObjectToString ) { sb . append ( "[" ) ; sb . append ( node . getClass ( ) . getName ( ) + "@" + Integer . toHexString ( node . hashCode ( ) ) ) ; if ( node . hashCode ( ) != System . identityHashCode ( node ) ) { sb . append ( "/identityHashCode:" ) ; sb . append ( Integer . toHexString ( System . identityHashCode ( node ) ) ) ; } sb . append ( "]" ) ; } sb . append ( StringUtil . quote ( node != null ? node . toString ( ) : null ) ) ; if ( dumpConfiguration . includeNodeSize ) { sb . append ( " (size: " ) ; sb . append ( getNodeWidth ( node ) ) ; sb . append ( "x" ) ; sb . append ( getNodeHeight ( node ) ) ; sb . append ( ")" ) ; } output . println ( sb . toString ( ) ) ; for ( TreeNode n : tree . getChildren ( node ) ) { dumpTree ( output , n , indent + 1 , dumpConfiguration ) ; } |
public class ChannelUtils {
    /**
     * Load and possibly start the provided configuration information.
     *
     * @param config raw configuration to parse
     * @param start whether loaded chains/groups should also be started
     * @param restart whether already-running chains should be stopped and reloaded first
     * @return Map&lt;String, List&lt;String&gt;&gt; of created artifact names, keyed by
     *         "factory", "channel", "chain", "group" and "endpoint"
     * @see ChannelUtils#loadConfig(Dictionary)
     * @see ChannelUtils#startConfig(Dictionary, boolean)
     */
    private static synchronized Map<String, List<String>> load(Map<String, Object> config, boolean start, boolean restart) {
        final boolean bTrace = TraceComponent.isAnyTracingEnabled();
        if (bTrace && tc.isDebugEnabled()) {
            Tr.debug(tc, "Loading CHFW config from " + config);
        }
        Map<String, Map<String, String[]>> parsed = extractConfig(config);
        // if a restart is set, then stop and remove existing chains
        // that may be running already
        if (restart) {
            unloadChains(parsed.get("chains").keySet().iterator());
        }
        // handle any factory config first
        List<String> createdFactories = loadFactories(parsed.get("factories"));
        // load any endpoints
        List<String> createdEndpoints = loadEndPoints(parsed.get("endpoints"));
        // now load any channels
        List<String> createdChannels = loadChannels(parsed.get("channels"));
        // now load any chains
        List<String> createdChains = loadChains(parsed.get("chains"), start, restart);
        // now load the chain group definitions
        List<String> createdGroups = loadGroups(parsed.get("groups"), start, restart);
        // Collect everything created into a single result map for the caller.
        Map<String, List<String>> rc = new HashMap<String, List<String>>();
        rc.put("factory", createdFactories);
        rc.put("channel", createdChannels);
        rc.put("chain", createdChains);
        rc.put("group", createdGroups);
        rc.put("endpoint", createdEndpoints);
        return rc;
    }
}
public class HtmlTool {
    /**
     * Outputs the list of partition root elements to HTML.
     *
     * @param elements the elements to serialize (never cut mid-element)
     * @return the combined outer HTML of the given elements; empty string for none
     */
    private static String outerHtml(List<Element> elements) {
        switch (elements.size()) {
            case 0:
                return "";
            case 1:
                return elements.get(0).outerHtml();
            default: {
                // more than one element
                // wrap into <div> which we will remove afterwards
                Element root = new Element(Tag.valueOf("div"), "");
                for (Element elem : elements) {
                    root.appendChild(elem);
                }
                // html() serializes only the children, dropping the wrapper <div> itself.
                return root.html();
            }
        }
    }
}
public class StreamSource {
    /**
     * Construct a Pushable StreamSource using the provided QueueFactory as a push mechanism.
     *
     * @see QueueFactories for Factory creation options and various backpressure strategies
     * <pre>
     * {@code
     * PushableStream<Integer> pushable = StreamSource.of(QueueFactories.boundedQueue(10))
     *                                                .stream();
     * pushable.getInput().offer(10);
     * Stream<Integer> stream = pushable.getStream();
     * stream.forEach(System.out::println);
     * // print 10
     * pushable.getInput().offer(20);
     * // print 20
     * pushable.getInput().close();
     * }
     * </pre>
     *
     * @param q QueueFactory used to create the Adapter to back the pushable StreamSource
     * @return Pushable StreamSource
     */
    public static StreamSource of(final QueueFactory<?> q) {
        Objects.requireNonNull(q);
        // Anonymous subclass whose queue-creation hook defers to the supplied factory.
        return new StreamSource() {
            @SuppressWarnings("unchecked")
            @Override
            <T> Queue<T> createQueue() {
                return (Queue<T>) q.build();
            }
        };
    }
}
public class FileColumn { /** * Returns the cell type for the given value .
* @ param value The cell type value
* @ return The code for the cell type */
private short getCellType ( String value ) { } } | short ret = STRING_TYPE ; if ( value . equals ( "number" ) ) ret = NUMBER_TYPE ; else if ( value . equals ( "datetime" ) ) ret = DATETIME_TYPE ; else if ( value . equals ( "boolean" ) ) ret = BOOLEAN_TYPE ; return ret ; |
public class QueryParametersLazyList {
    /**
     * Returns array of cached elements (inc. statement as first element).
     *
     * @return array of cached elements
     */
    public Object[] toArrayCached() {
        Object[] readCacheArray = generatedCacheMap.values().toArray();
        Object[] writeCacheArray = resultSetCacheMap.values().toArray();
        // Concatenate: generated-cache entries first, then result-set cache entries.
        Object[] combinedCacheArray = new Object[readCacheArray.length + writeCacheArray.length];
        System.arraycopy(readCacheArray, 0, combinedCacheArray, 0, readCacheArray.length);
        System.arraycopy(writeCacheArray, 0, combinedCacheArray, readCacheArray.length, writeCacheArray.length);
        return combinedCacheArray;
    }
}
public class ForeignDestination { /** * / * ( non - Javadoc )
* @ see com . ibm . ws . sib . processor . runtime . SIMPMessageHandlerControllable # getState ( ) */
public String getState ( ) { } } | if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "getState" ) ; String state = messageProcessor . getDestinationManager ( ) . getDestinationIndex ( ) . getState ( foreignDest ) . toString ( ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "getState" ) ; return state ; |
public class CreateWSDL20 { /** * AddProcessForWSDL Method . */
public void addProcessForWSDL ( String strVersion , Object typeObject , MessageProcessInfo recMessageProcessInfo , OperationType type ) { } } | if ( type == OperationType . BINDING_OPERATIONS ) this . addBindingOperationType ( strVersion , ( BindingType ) typeObject , recMessageProcessInfo ) ; else if ( type == OperationType . INTERFACE_OPERATIONS ) this . addInterfaceOperationType ( strVersion , ( InterfaceType ) typeObject , recMessageProcessInfo ) ; else if ( type == OperationType . TYPES_OPERATIONS ) this . addTypeType ( strVersion , ( TypesType ) typeObject , recMessageProcessInfo ) ; |
public class ConfigManager { /** * Removes all the entries currently in neverDeletePaths
* and add the new ones specified */
void reloadWhitelist ( ) throws IOException { } } | // read the entire whitelist into memory outside the
// FSNamessytem lock .
LinkedList < String > paths = new LinkedList < String > ( ) ; FileInputStream fstream = new FileInputStream ( whitelistFile ) ; DataInputStream in = new DataInputStream ( fstream ) ; BufferedReader br = new BufferedReader ( new InputStreamReader ( in ) ) ; int count = 0 ; while ( true ) { String str = br . readLine ( ) ; if ( str == null ) { break ; // end of file
} str = str . trim ( ) ; // remove all whitespace from start and end
if ( str . startsWith ( "#" ) ) { continue ; // ignore lines with starting with #
} paths . add ( str ) ; LOG . info ( "Whitelisted directory [" + count + "] " + str ) ; count ++ ; } in . close ( ) ; // acquire the writelock and insert newly read entries into
// the Namenode ' s configuration .
namesys . writeLock ( ) ; try { namesys . neverDeletePaths . clear ( ) ; for ( String s : paths ) { namesys . neverDeletePaths . add ( s ) ; } } finally { namesys . writeUnlock ( ) ; } |
public class UserAttrs { /** * Set user - defined - attribute
* @ param path
* @ param attribute user : attribute name . user : can be omitted .
* @ param value
* @ param options
* @ throws IOException */
public static final void setLongAttribute ( Path path , String attribute , long value , LinkOption ... options ) throws IOException { } } | attribute = attribute . startsWith ( "user:" ) ? attribute : "user:" + attribute ; Files . setAttribute ( path , attribute , Primitives . writeLong ( value ) , options ) ; |
public class DcpControl {
    /**
     * Store/Override a control parameter.
     *
     * @param name the name of the control parameter.
     * @param value the stringified version what it should be set to.
     * @return the {@link DcpControl} instance for chainability.
     */
    public DcpControl put(final Names name, final String value) {
        // Provide a default NOOP interval because the client needs
        // to know the interval in order to detect dead connections.
        if (name == Names.ENABLE_NOOP && get(Names.SET_NOOP_INTERVAL) == null) {
            put(Names.SET_NOOP_INTERVAL, Integer.toString(DEFAULT_NOOP_INTERVAL_SECONDS));
        }
        values.put(name.value(), value);
        return this;
    }
}
public class TtlBucketList {
    /**
     * Inserts an inode to the appropriate bucket where its ttl end time lies in the
     * bucket's interval; if no appropriate bucket exists, a new bucket will be created to
     * contain this inode. If the ttl value is {@link Constants#NO_TTL}, the inode won't be
     * inserted into any bucket and nothing will happen.
     *
     * @param inode the inode to be inserted
     */
    public void insert(Inode inode) {
        if (inode.getTtl() == Constants.NO_TTL) {
            return;
        }
        TtlBucket bucket;
        // Retry loop: another thread may concurrently add the bucket we need.
        while (true) {
            bucket = getBucketContaining(inode);
            if (bucket != null) {
                break;
            }
            long ttlEndTimeMs = inode.getCreationTimeMs() + inode.getTtl();
            // No bucket contains the inode, so a new bucket should be added with an appropriate interval
            // start. Assume the list of buckets have continuous intervals, and the first interval starts
            // at 0, then ttlEndTimeMs should be in number (ttlEndTimeMs / interval) interval, so the
            // start time of this interval should be (ttlEndTimeMs / interval) * interval.
            long interval = TtlBucket.getTtlIntervalMs();
            bucket = new TtlBucket(interval == 0 ? ttlEndTimeMs : ttlEndTimeMs / interval * interval);
            if (mBucketList.add(bucket)) {
                break;
            }
            // If we reach here, it means the same bucket has been concurrently inserted by another
            // thread.
        }
        // TODO(zhouyufa): Consider the concurrent situation that the bucket is expired and processed by
        // the InodeTtlChecker, then adding the inode into the bucket is meaningless since the bucket
        // will not be accessed again. (c.f. ALLUXIO-2821)
        bucket.addInode(inode);
    }
}
public class JsonReader {
    /**
     * Throws a new IO exception with the given message and a context snippet
     * with this reader's content.
     *
     * @param message description of the syntax problem
     * @return never returns normally; declared with a return type so callers can
     *         write {@code throw syntaxError(...)} for flow analysis
     * @throws IOException always, as a {@code MalformedJsonException}
     */
    private IOException syntaxError(String message) throws IOException {
        throw new MalformedJsonException(message + " at line " + getLineNumber() + " column " + getColumnNumber() + " path " + getPath());
    }
}
public class Util { /** * Send back no entity body response and close the connection . This function is mostly used
* when we send back error messages .
* @ param ctx connection
* @ param status response status
* @ param httpVersion of the response
* @ param serverName server name */
public static void sendAndCloseNoEntityBodyResp ( ChannelHandlerContext ctx , HttpResponseStatus status , HttpVersion httpVersion , String serverName ) { } } | HttpResponse outboundResponse = new DefaultHttpResponse ( httpVersion , status ) ; outboundResponse . headers ( ) . set ( HttpHeaderNames . CONTENT_LENGTH , 0 ) ; outboundResponse . headers ( ) . set ( HttpHeaderNames . CONNECTION . toString ( ) , Constants . CONNECTION_CLOSE ) ; outboundResponse . headers ( ) . set ( HttpHeaderNames . SERVER . toString ( ) , serverName ) ; ChannelFuture outboundRespFuture = ctx . channel ( ) . writeAndFlush ( outboundResponse ) ; outboundRespFuture . addListener ( ( ChannelFutureListener ) channelFuture -> LOG . warn ( "Failed to send {}" , status . reasonPhrase ( ) ) ) ; ctx . channel ( ) . close ( ) ; |
public class FedoraSessionImpl { /** * Get the internal JCR session from an existing FedoraSession
* @ param session the FedoraSession
* @ return the JCR session */
public static Session getJcrSession ( final FedoraSession session ) { } } | if ( session instanceof FedoraSessionImpl ) { return ( ( FedoraSessionImpl ) session ) . getJcrSession ( ) ; } throw new ClassCastException ( "FedoraSession is not a " + FedoraSessionImpl . class . getCanonicalName ( ) ) ; |
public class AbstractUpsertPlugin { /** * add update xml element to mapper . xml for upsert
* @ param document The generated xml mapper dom
* @ param introspectedTable The metadata for database table */
protected void addSingleUpsertToSqlMap ( Document document , IntrospectedTable introspectedTable ) { } } | XmlElement update = new XmlElement ( "update" ) ; update . addAttribute ( new Attribute ( "id" , UPSERT ) ) ; update . addAttribute ( new Attribute ( "parameterType" , "map" ) ) ; generateSqlMapContent ( introspectedTable , update ) ; document . getRootElement ( ) . addElement ( update ) ; |
public class TransactionTagSupport { /** * Setter method for the transaction isolation level . */
public void setIsolation ( String iso ) throws JspTagException { } } | if ( TRANSACTION_READ_COMMITTED . equals ( iso ) ) { isolation = Connection . TRANSACTION_READ_COMMITTED ; } else if ( TRANSACTION_READ_UNCOMMITTED . equals ( iso ) ) { isolation = Connection . TRANSACTION_READ_UNCOMMITTED ; } else if ( TRANSACTION_REPEATABLE_READ . equals ( iso ) ) { isolation = Connection . TRANSACTION_REPEATABLE_READ ; } else if ( TRANSACTION_SERIALIZABLE . equals ( iso ) ) { isolation = Connection . TRANSACTION_SERIALIZABLE ; } else { throw new JspTagException ( Resources . getMessage ( "TRANSACTION_INVALID_ISOLATION" ) ) ; } |
public class ReconfigurableClient { /** * Associates a message processor with this instance .
* The message processor cannot be started before . This method will start it .
* This method must be invoked only once .
* @ param messageProcessor the message processor */
public void associateMessageProcessor ( AbstractMessageProcessor < T > messageProcessor ) { } } | if ( this . messageProcessor != null ) throw new IllegalArgumentException ( "The message processor was already defined." ) ; this . messageProcessor = messageProcessor ; configureMessageProcessor ( messageProcessor ) ; this . messageProcessor . start ( ) ; |
public class Utils { /** * Return array of class members whose documentation is to be generated .
* If the member is deprecated do not include such a member in the
* returned array .
* @ param members Array of members to choose from .
* @ return List List of eligible members for whom
* documentation is getting generated . */
public List < ProgramElementDoc > excludeDeprecatedMembersAsList ( ProgramElementDoc [ ] members ) { } } | List < ProgramElementDoc > list = new ArrayList < > ( ) ; for ( ProgramElementDoc member : members ) { if ( member . tags ( "deprecated" ) . length == 0 ) { list . add ( member ) ; } } Collections . sort ( list ) ; return list ; |
public class MediaSource { /** * Get property name containing the cropping parameters
* @ param mediaRequest Media request
* @ param mediaHandlerConfig Media handler config ( can be null , but should not be null )
* @ return Property name */
@ SuppressWarnings ( "null" ) protected final @ NotNull String getMediaCropProperty ( @ NotNull MediaRequest mediaRequest , @ Nullable MediaHandlerConfig mediaHandlerConfig ) { } } | String cropProperty = mediaRequest . getCropProperty ( ) ; if ( StringUtils . isEmpty ( cropProperty ) ) { if ( mediaHandlerConfig != null ) { cropProperty = mediaHandlerConfig . getMediaCropProperty ( ) ; } else { cropProperty = MediaNameConstants . PN_MEDIA_CROP ; } } return cropProperty ; |
public class SqlBuilderHelper { /** * Generate log for where conditions .
* < h2 > pre conditions < / h2 >
* required variable are :
* < ul >
* < li > _ sqlWhereParams < / li >
* < / ul >
* < h2 > post conditions < / h2 >
* created variables are :
* < ul >
* < li > _ whereParamCounter < / li >
* < / ul >
* @ param method
* the method
* @ param methodBuilder
* the method builder */
public static void generateLogForWhereParameters ( SQLiteModelMethod method , MethodSpec . Builder methodBuilder ) { } } | // manage log for where parameters
if ( method . getParent ( ) . getParent ( ) . generateLog ) { methodBuilder . addCode ( "\n// log for where parameters -- BEGIN\n" ) ; methodBuilder . addStatement ( "int _whereParamCounter=0" ) ; // methodBuilder . beginControlFlow ( " for ( String _ whereParamItem :
// _ sqlWhereParams ) " ) ;
methodBuilder . beginControlFlow ( "for (String _whereParamItem: _contentValues.whereArgs())" ) ; methodBuilder . addStatement ( "$T.info(\"==> param%s: '%s'\",(_whereParamCounter++), $T.checkSize(_whereParamItem))" , Logger . class , StringUtils . class ) ; methodBuilder . endControlFlow ( ) ; methodBuilder . addCode ( "// log for where parameters -- END\n" ) ; } |
public class FunctionInformationMap { /** * < code > repeated group Entry = 1 { . . . } < / code > */
public com . google . javascript . jscomp . FunctionInformationMap . Entry getEntry ( int index ) { } } | return entry_ . get ( index ) ; |
public class AnalysisContext { /** * Return whether or not the given class is an application class .
* @ param className
* name of a class
* @ return true if the class is an application class , false if not an
* application class or if the class cannot be located */
public boolean isApplicationClass ( @ DottedClassName String className ) { } } | // try {
// JavaClass javaClass = lookupClass ( className ) ;
// return isApplicationClass ( javaClass ) ;
// } catch ( ClassNotFoundException e ) {
// AnalysisContext . reportMissingClass ( e ) ;
// return false ;
ClassDescriptor classDesc = DescriptorFactory . createClassDescriptorFromDottedClassName ( className ) ; return getSubtypes2 ( ) . isApplicationClass ( classDesc ) ; |
public class DataSourcesResource { /** * / * When this method is removed , a new method needs to be introduced corresponding to
* the end point " DELETE / druid / coordinator / v1 / datasources / { dataSourceName } " ( with no query parameters ) .
* Ultimately we want to have no method with kill parameter -
* DELETE ` { dataSourceName } ` will be used to disable datasource and
* DELETE ` { dataSourceName } / intervals / { interval } ` will be used to nuke segments */
@ DELETE @ Deprecated @ Path ( "/{dataSourceName}" ) @ ResourceFilters ( DatasourceResourceFilter . class ) @ Produces ( MediaType . APPLICATION_JSON ) public Response deleteDataSource ( @ PathParam ( "dataSourceName" ) final String dataSourceName , @ QueryParam ( "kill" ) final String kill , @ QueryParam ( "interval" ) final String interval ) { } } | if ( indexingServiceClient == null ) { return Response . ok ( ImmutableMap . of ( "error" , "no indexing service found" ) ) . build ( ) ; } if ( kill != null && Boolean . valueOf ( kill ) ) { try { indexingServiceClient . killSegments ( dataSourceName , Intervals . of ( interval ) ) ; } catch ( IllegalArgumentException e ) { return Response . status ( Response . Status . BAD_REQUEST ) . entity ( ImmutableMap . of ( "error" , "Exception occurred. Probably the interval is invalid" , "message" , e . toString ( ) ) ) . build ( ) ; } catch ( Exception e ) { return Response . serverError ( ) . entity ( ImmutableMap . of ( "error" , "Exception occurred. Are you sure you have an indexing service?" , "message" , e . toString ( ) ) ) . build ( ) ; } } else { if ( ! databaseSegmentManager . removeDataSource ( dataSourceName ) ) { return Response . noContent ( ) . build ( ) ; } } return Response . ok ( ) . build ( ) ; |
public class CorporationApi { /** * Get corporation members Return the current member list of a corporation ,
* the token & # 39 ; s character need to be a member of the corporation . - - -
* This route is cached for up to 3600 seconds SSO Scope :
* esi - corporations . read _ corporation _ membership . v1
* @ param corporationId
* An EVE corporation ID ( required )
* @ param datasource
* The server name you would like data from ( optional , default to
* tranquility )
* @ param ifNoneMatch
* ETag from a previous request . A 304 will be returned if this
* matches the current ETag ( optional )
* @ param token
* Access token to use if unable to set a header ( optional )
* @ return List & lt ; Integer & gt ;
* @ throws ApiException
* If fail to call the API , e . g . server error or cannot
* deserialize the response body */
public List < Integer > getCorporationsCorporationIdMembers ( Integer corporationId , String datasource , String ifNoneMatch , String token ) throws ApiException { } } | ApiResponse < List < Integer > > resp = getCorporationsCorporationIdMembersWithHttpInfo ( corporationId , datasource , ifNoneMatch , token ) ; return resp . getData ( ) ; |
public class FloatMapper { /** * { @ inheritDoc } */
@ Override public SortField sortField ( String name , boolean reverse ) { } } | return new SortedNumericSortField ( name , Type . FLOAT , reverse ) ; |
public class BasePrefetcher { /** * Build the Criteria using multiple ORs
* @ param ids collection of identities
* @ param fields
* @ return Criteria */
private Criteria buildPrefetchCriteriaMultipleKeys ( Collection ids , FieldDescriptor fields [ ] ) { } } | Criteria crit = new Criteria ( ) ; Iterator iter = ids . iterator ( ) ; Object [ ] val ; Identity id ; while ( iter . hasNext ( ) ) { Criteria c = new Criteria ( ) ; id = ( Identity ) iter . next ( ) ; val = id . getPrimaryKeyValues ( ) ; for ( int i = 0 ; i < val . length ; i ++ ) { if ( val [ i ] == null ) { c . addIsNull ( fields [ i ] . getAttributeName ( ) ) ; } else { c . addEqualTo ( fields [ i ] . getAttributeName ( ) , val [ i ] ) ; } } crit . addOrCriteria ( c ) ; } return crit ; |
public class ServerBuilder { /** * Decorates and binds the specified { @ link ServiceWithPathMappings } at multiple { @ link PathMapping } s
* of the default { @ link VirtualHost } .
* @ param serviceWithPathMappings the { @ link ServiceWithPathMappings } .
* @ param decorators the decorator functions , which will be applied in the order specified .
* @ throws IllegalStateException if the default { @ link VirtualHost } has been set via
* { @ link # defaultVirtualHost ( VirtualHost ) } already */
public ServerBuilder service ( ServiceWithPathMappings < HttpRequest , HttpResponse > serviceWithPathMappings , Iterable < Function < ? super Service < HttpRequest , HttpResponse > , ? extends Service < HttpRequest , HttpResponse > > > decorators ) { } } | defaultVirtualHostBuilderUpdated ( ) ; defaultVirtualHostBuilder . service ( serviceWithPathMappings , decorators ) ; return this ; |
public class Timing { /** * Print elapsed time ( without stopping timer ) .
* @ param str Additional prefix string to be printed
* @ param writer PrintWriter on which to write output
* @ return Number of milliseconds elapsed */
public long report ( String str , PrintWriter writer ) { } } | long elapsed = this . report ( ) ; writer . println ( str + " Time elapsed: " + ( elapsed ) + " ms" ) ; return elapsed ; |
public class Hours { /** * Compares this amount to the specified { @ code Hours } .
* The comparison is based on the total length of the amounts .
* It is " consistent with equals " , as defined by { @ link Comparable } .
* @ param otherAmount the other amount , not null
* @ return the comparator value , negative if less , positive if greater */
@ Override public int compareTo ( Hours otherAmount ) { } } | int thisValue = this . hours ; int otherValue = otherAmount . hours ; return Integer . compare ( thisValue , otherValue ) ; |
public class DOMUtils { /** * Serialise the provided source to a pretty - printed String with the default indent settings
* @ param source the input Source */
public static String pretty ( final Source source ) { } } | StreamResult result = new StreamResult ( new StringWriter ( ) ) ; pretty ( source , result ) ; return result . getWriter ( ) . toString ( ) ; |
public class NfsFsInfoResponse { /** * ( non - Javadoc )
* @ see com . emc . ecs . nfsclient . nfs . NfsResponseBase # unmarshalling ( com . emc . ecs .
* nfsclient . rpc . Xdr ) */
public void unmarshalling ( Xdr xdr ) throws RpcException { } } | super . unmarshalling ( xdr ) ; unmarshallingAttributes ( xdr ) ; if ( stateIsOk ( ) ) { _fsInfo = new NfsFsInfo ( xdr ) ; } |
public class PreambleUtil { /** * Checks Memory for capacity to hold the preamble and returns the first 8 bytes .
* @ param mem the given Memory
* @ return the first 8 bytes of preamble as a long . */
static long checkPreambleSize ( final Memory mem ) { } } | final long cap = mem . getCapacity ( ) ; if ( cap < 8 ) { throwNotBigEnough ( cap , 8 ) ; } final long pre0 = mem . getLong ( 0 ) ; final int preLongs = ( int ) ( pre0 & 0X3FL ) ; // lower 6 bits
final int required = Math . max ( preLongs << 3 , 8 ) ; if ( cap < required ) { throwNotBigEnough ( cap , required ) ; } return pre0 ; |
public class LicenseConfigurationAssociationMarshaller { /** * Marshall the given parameter object . */
public void marshall ( LicenseConfigurationAssociation licenseConfigurationAssociation , ProtocolMarshaller protocolMarshaller ) { } } | if ( licenseConfigurationAssociation == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( licenseConfigurationAssociation . getResourceArn ( ) , RESOURCEARN_BINDING ) ; protocolMarshaller . marshall ( licenseConfigurationAssociation . getResourceType ( ) , RESOURCETYPE_BINDING ) ; protocolMarshaller . marshall ( licenseConfigurationAssociation . getResourceOwnerId ( ) , RESOURCEOWNERID_BINDING ) ; protocolMarshaller . marshall ( licenseConfigurationAssociation . getAssociationTime ( ) , ASSOCIATIONTIME_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; } |
public class ComputeNodesImpl { /** * Disables task scheduling on the specified compute node .
* You can disable task scheduling on a node only if its current scheduling state is enabled .
* @ param poolId The ID of the pool that contains the compute node .
* @ param nodeId The ID of the compute node on which you want to disable task scheduling .
* @ param nodeDisableSchedulingOption What to do with currently running tasks when disabling task scheduling on the compute node . The default value is requeue . Possible values include : ' requeue ' , ' terminate ' , ' taskCompletion '
* @ param computeNodeDisableSchedulingOptions Additional parameters for the operation
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the { @ link ServiceResponseWithHeaders } object if successful . */
public Observable < Void > disableSchedulingAsync ( String poolId , String nodeId , DisableComputeNodeSchedulingOption nodeDisableSchedulingOption , ComputeNodeDisableSchedulingOptions computeNodeDisableSchedulingOptions ) { } } | return disableSchedulingWithServiceResponseAsync ( poolId , nodeId , nodeDisableSchedulingOption , computeNodeDisableSchedulingOptions ) . map ( new Func1 < ServiceResponseWithHeaders < Void , ComputeNodeDisableSchedulingHeaders > , Void > ( ) { @ Override public Void call ( ServiceResponseWithHeaders < Void , ComputeNodeDisableSchedulingHeaders > response ) { return response . body ( ) ; } } ) ; |
public class Sets { /** * Returns the elements of a { @ code NavigableSet } , { @ code unfiltered } , that
* satisfy a predicate . The returned set is a live view of { @ code unfiltered } ;
* changes to one affect the other .
* < p > The resulting set ' s iterator does not support { @ code remove ( ) } , but all
* other set methods are supported . When given an element that doesn ' t satisfy
* the predicate , the set ' s { @ code add ( ) } and { @ code addAll ( ) } methods throw
* an { @ link IllegalArgumentException } . When methods such as
* { @ code removeAll ( ) } and { @ code clear ( ) } are called on the filtered set ,
* only elements that satisfy the filter will be removed from the underlying
* set .
* < p > The returned set isn ' t threadsafe or serializable , even if
* { @ code unfiltered } is .
* < p > Many of the filtered set ' s methods , such as { @ code size ( ) } , iterate across
* every element in the underlying set and determine which elements satisfy
* the filter . When a live view is < i > not < / i > needed , it may be faster to copy
* { @ code Iterables . filter ( unfiltered , predicate ) } and use the copy .
* < p > < b > Warning : < / b > { @ code predicate } must be < i > consistent with equals < / i > ,
* as documented at { @ link Predicate # apply } . Do not provide a predicate such as
* { @ code Predicates . instanceOf ( ArrayList . class ) } , which is inconsistent with
* equals . ( See { @ link Iterables # filter ( Iterable , Class ) } for related
* functionality . )
* @ since 14.0 */
@ GwtIncompatible ( "NavigableSet" ) @ SuppressWarnings ( "unchecked" ) public static < E > NavigableSet < E > filter ( NavigableSet < E > unfiltered , Predicate < ? super E > predicate ) { } } | if ( unfiltered instanceof FilteredSet ) { // Support clear ( ) , removeAll ( ) , and retainAll ( ) when filtering a filtered
// collection .
FilteredSet < E > filtered = ( FilteredSet < E > ) unfiltered ; Predicate < E > combinedPredicate = Predicates . < E > and ( filtered . predicate , predicate ) ; return new FilteredNavigableSet < E > ( ( NavigableSet < E > ) filtered . unfiltered , combinedPredicate ) ; } return new FilteredNavigableSet < E > ( checkNotNull ( unfiltered ) , checkNotNull ( predicate ) ) ; |
public class AbstractMacro { /** * Allows sub classes to set the default macro category . This method only has an effect if the internal
* { @ link MacroDescriptor } is of type { @ link AbstractMacroDescriptor } .
* @ param defaultCategory the default macro category to be set . */
protected void setDefaultCategory ( String defaultCategory ) { } } | // If setDefaultCategory ( ) method is invoked before macro initialization , this will make sure the macro will
// have correct default category after initialization .
this . defaultCategory = defaultCategory ; // In case if setDefaultCategory ( ) is invoked after macro initialization . Only works if the internal
// MacroDescriptor is of type AbstractMacroDescriptor .
if ( getDescriptor ( ) instanceof AbstractMacroDescriptor ) { ( ( AbstractMacroDescriptor ) getDescriptor ( ) ) . setDefaultCategory ( defaultCategory ) ; } |
public class JDateChooser { /** * Sets a valid date range for selectable dates . If max is before min , the
* default range with no limitation is set .
* @ param min
* the minimum selectable date or null ( then the minimum date is
* set to 01\01\0001)
* @ param max
* the maximum selectable date or null ( then the maximum date is
* set to 01\01\9999) */
public void setSelectableDateRange ( Date min , Date max ) { } } | jcalendar . setSelectableDateRange ( min , max ) ; dateEditor . setSelectableDateRange ( jcalendar . getMinSelectableDate ( ) , jcalendar . getMaxSelectableDate ( ) ) ; |
public class TaskStatistics { /** * Get Long key value */
public long getLongValue ( Enum key ) { } } | if ( this . _task . get ( key ) == null ) { return ( long ) 0 ; } else { return Long . parseLong ( this . _task . get ( key ) ) ; } |
public class InjectorBuilderImpl { /** * @ Override
* public < T , X > BindingBuilder < T > function ( Function < X , T > function )
* clearManager ( ) ;
* Objects . requireNonNull ( function ) ;
* BindingBuilderImpl < T > binding = new BindingBuilderImpl < > ( this , function ) ;
* _ bindings . add ( binding ) ;
* return binding ; */
@ Override public < T , U > BindingBuilder < T > provider ( Key < U > parent , Method m ) { } } | throw new UnsupportedOperationException ( getClass ( ) . getName ( ) ) ; |
public class Cli { /** * Parse and execute the appropriate command based on the args .
* The general flow looks like this :
* 1 . Parse a set of global options ( eg host / port for the admin server )
* 2 . Parse out the command name
* 3 . Pass the global options and any left over parameters to a command handler */
public void parseAndExecuteCommand ( ) { } } | CommandLineParser parser = new DefaultParser ( ) ; try { CommandLine parsedOpts = parser . parse ( this . options , this . args , true ) ; GlobalOptions globalOptions = createGlobalOptions ( parsedOpts ) ; // Fetch the command and fail if there is ambiguity
String [ ] remainingArgs = parsedOpts . getArgs ( ) ; if ( remainingArgs . length == 0 ) { printHelpAndExit ( "Command not specified!" ) ; } String commandName = remainingArgs [ 0 ] . toLowerCase ( ) ; remainingArgs = remainingArgs . length > 1 ? Arrays . copyOfRange ( remainingArgs , 1 , remainingArgs . length ) : new String [ ] { } ; Command command = commandList . get ( commandName ) ; if ( command == null ) { System . out . println ( "Command " + commandName + " not known." ) ; printHelpAndExit ( ) ; } else { command . execute ( globalOptions , remainingArgs ) ; } } catch ( ParseException e ) { printHelpAndExit ( "Ran into an error parsing args." ) ; } |
public class DefinitelyDerefedParamsDriver { /** * Write model jar file with nullability model at DEFAULT _ ASTUBX _ LOCATION
* @ param outPath Path of output model jar file . */
private static void writeModelJAR ( String outPath ) throws IOException { } } | Preconditions . checkArgument ( outPath . endsWith ( MODEL_JAR_SUFFIX ) , "invalid model file path! " + outPath ) ; ZipOutputStream zos = new ZipOutputStream ( new FileOutputStream ( outPath ) ) ; if ( ! map_result . isEmpty ( ) ) { ZipEntry entry = new ZipEntry ( DEFAULT_ASTUBX_LOCATION ) ; // Set the modification / creation time to 0 to ensure that this jars always have the same
// checksum
entry . setTime ( 0 ) ; entry . setCreationTime ( FileTime . fromMillis ( 0 ) ) ; zos . putNextEntry ( entry ) ; writeModel ( new DataOutputStream ( zos ) ) ; zos . closeEntry ( ) ; } zos . close ( ) ; LOG ( VERBOSE , "Info" , "wrote model to: " + outPath ) ; |
public class ListenerFactory { /** * Instantiates the listener of given type and caches it .
* @ param listenerClass
* the listener type
* @ return the instantiated object */
private Object loadListener ( Class < ? > listenerClass ) { } } | synchronized ( listenerClass ) { Object listener = listeners . get ( listenerClass ) ; if ( listener == null ) { listener = IntrospectionUtils . instantiateObject ( listenerClass ) ; listeners . put ( listenerClass , listener ) ; } return listener ; } |
public class InternalXtextParser { /** * InternalXtext . g : 1883:1 : ruleNegation returns [ EObject current = null ] : ( this _ Atom _ 0 = ruleAtom | ( ( ) otherlv _ 2 = ' ! ' ( ( lv _ value _ 3_0 = ruleNegation ) ) ) ) ; */
public final EObject ruleNegation ( ) throws RecognitionException { } } | EObject current = null ; Token otherlv_2 = null ; EObject this_Atom_0 = null ; EObject lv_value_3_0 = null ; enterRule ( ) ; try { // InternalXtext . g : 1889:2 : ( ( this _ Atom _ 0 = ruleAtom | ( ( ) otherlv _ 2 = ' ! ' ( ( lv _ value _ 3_0 = ruleNegation ) ) ) ) )
// InternalXtext . g : 1890:2 : ( this _ Atom _ 0 = ruleAtom | ( ( ) otherlv _ 2 = ' ! ' ( ( lv _ value _ 3_0 = ruleNegation ) ) ) )
{ // InternalXtext . g : 1890:2 : ( this _ Atom _ 0 = ruleAtom | ( ( ) otherlv _ 2 = ' ! ' ( ( lv _ value _ 3_0 = ruleNegation ) ) ) )
int alt46 = 2 ; int LA46_0 = input . LA ( 1 ) ; if ( ( LA46_0 == RULE_ID || LA46_0 == 15 || ( LA46_0 >= 39 && LA46_0 <= 40 ) ) ) { alt46 = 1 ; } else if ( ( LA46_0 == 41 ) ) { alt46 = 2 ; } else { NoViableAltException nvae = new NoViableAltException ( "" , 46 , 0 , input ) ; throw nvae ; } switch ( alt46 ) { case 1 : // InternalXtext . g : 1891:3 : this _ Atom _ 0 = ruleAtom
{ newCompositeNode ( grammarAccess . getNegationAccess ( ) . getAtomParserRuleCall_0 ( ) ) ; pushFollow ( FollowSets000 . FOLLOW_2 ) ; this_Atom_0 = ruleAtom ( ) ; state . _fsp -- ; current = this_Atom_0 ; afterParserOrEnumRuleCall ( ) ; } break ; case 2 : // InternalXtext . g : 1900:3 : ( ( ) otherlv _ 2 = ' ! ' ( ( lv _ value _ 3_0 = ruleNegation ) ) )
{ // InternalXtext . g : 1900:3 : ( ( ) otherlv _ 2 = ' ! ' ( ( lv _ value _ 3_0 = ruleNegation ) ) )
// InternalXtext . g : 1901:4 : ( ) otherlv _ 2 = ' ! ' ( ( lv _ value _ 3_0 = ruleNegation ) )
{ // InternalXtext . g : 1901:4 : ( )
// InternalXtext . g : 1902:5:
{ current = forceCreateModelElement ( grammarAccess . getNegationAccess ( ) . getNegationAction_1_0 ( ) , current ) ; } otherlv_2 = ( Token ) match ( input , 41 , FollowSets000 . FOLLOW_26 ) ; newLeafNode ( otherlv_2 , grammarAccess . getNegationAccess ( ) . getExclamationMarkKeyword_1_1 ( ) ) ; // InternalXtext . g : 1912:4 : ( ( lv _ value _ 3_0 = ruleNegation ) )
// InternalXtext . g : 1913:5 : ( lv _ value _ 3_0 = ruleNegation )
{ // InternalXtext . g : 1913:5 : ( lv _ value _ 3_0 = ruleNegation )
// InternalXtext . g : 1914:6 : lv _ value _ 3_0 = ruleNegation
{ newCompositeNode ( grammarAccess . getNegationAccess ( ) . getValueNegationParserRuleCall_1_2_0 ( ) ) ; pushFollow ( FollowSets000 . FOLLOW_2 ) ; lv_value_3_0 = ruleNegation ( ) ; state . _fsp -- ; if ( current == null ) { current = createModelElementForParent ( grammarAccess . getNegationRule ( ) ) ; } set ( current , "value" , lv_value_3_0 , "org.eclipse.xtext.Xtext.Negation" ) ; afterParserOrEnumRuleCall ( ) ; } } } } break ; } } leaveRule ( ) ; } catch ( RecognitionException re ) { recover ( input , re ) ; appendSkippedTokens ( ) ; } finally { } return current ; |
public class CodepointMatcher { /** * Returns a copy of the input string with all Unicode codepoints matching this matcher replaced
* with { @ code replacementCharacter } . */
public final String replaceAll ( String s , char replacementCharacter ) { } } | final StringBuilder sb = new StringBuilder ( ) ; for ( int offset = 0 ; offset < s . length ( ) ; ) { final int codePoint = s . codePointAt ( offset ) ; if ( matches ( codePoint ) ) { sb . append ( replacementCharacter ) ; } else { sb . appendCodePoint ( codePoint ) ; } offset += Character . charCount ( codePoint ) ; } return sb . toString ( ) ; |
public class ActionImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
public void setType ( TypeRef newType ) { } } | if ( newType != type ) { NotificationChain msgs = null ; if ( type != null ) msgs = ( ( InternalEObject ) type ) . eInverseRemove ( this , EOPPOSITE_FEATURE_BASE - XtextPackage . ACTION__TYPE , null , msgs ) ; if ( newType != null ) msgs = ( ( InternalEObject ) newType ) . eInverseAdd ( this , EOPPOSITE_FEATURE_BASE - XtextPackage . ACTION__TYPE , null , msgs ) ; msgs = basicSetType ( newType , msgs ) ; if ( msgs != null ) msgs . dispatch ( ) ; } else if ( eNotificationRequired ( ) ) eNotify ( new ENotificationImpl ( this , Notification . SET , XtextPackage . ACTION__TYPE , newType , newType ) ) ; |
public class ConstraintMessage { /** * Given a set of constraint violations and a Jersey { @ link Invocable } where the constraint
* occurred , determine the HTTP Status code for the response . A return value violation is an
* internal server error , an invalid request body is unprocessable entity , and any params that
* are invalid means a bad request */
public static < T extends ConstraintViolation < ? > > int determineStatus ( Set < T > violations , Invocable invocable ) { } } | if ( violations . size ( ) > 0 ) { final ConstraintViolation < ? > violation = violations . iterator ( ) . next ( ) ; for ( Path . Node node : violation . getPropertyPath ( ) ) { switch ( node . getKind ( ) ) { case RETURN_VALUE : return 500 ; case PARAMETER : // Now determine if the parameter is the request entity
final int index = node . as ( Path . ParameterNode . class ) . getParameterIndex ( ) ; final Parameter parameter = invocable . getParameters ( ) . get ( index ) ; return parameter . getSource ( ) . equals ( Parameter . Source . UNKNOWN ) ? 422 : 400 ; default : continue ; } } } // This shouldn ' t hit , but if it does , we ' ll return a unprocessable entity
return 422 ; |
public class PropertiesLoaderUtils { /** * Load all properties from the specified class path resource
* ( in ISO - 8859-1 encoding ) , using the given class loader .
* < p > Merges properties if more than one resource of the same name
* found in the class path . < / p >
* @ param resourceName the name of the class path resource
* @ param classLoader the class loader
* @ return the Properties instance
* @ throws IOException if loading failed */
public static Properties loadProperties ( String resourceName , ClassLoader classLoader ) throws IOException { } } | Properties props = cache . get ( resourceName ) ; if ( props == null ) { props = new Properties ( ) ; fillProperties ( props , resourceName , classLoader ) ; Properties existing = cache . putIfAbsent ( resourceName , props ) ; if ( existing != null ) { props = existing ; } } return props ; |
public class CanalFilterSupport { /** * 构建filter 表达式 */
public static String makeFilterExpression ( Pipeline pipeline ) { } } | List < DataMediaPair > dataMediaPairs = pipeline . getPairs ( ) ; if ( dataMediaPairs . isEmpty ( ) ) { throw new SelectException ( "ERROR ## the pair is empty,the pipeline id = " + pipeline . getId ( ) ) ; } Set < String > mediaNames = new HashSet < String > ( ) ; for ( DataMediaPair dataMediaPair : dataMediaPairs ) { DataMedia . ModeValue namespaceMode = dataMediaPair . getSource ( ) . getNamespaceMode ( ) ; DataMedia . ModeValue nameMode = dataMediaPair . getSource ( ) . getNameMode ( ) ; if ( namespaceMode . getMode ( ) . isSingle ( ) ) { buildFilter ( mediaNames , namespaceMode . getSingleValue ( ) , nameMode , false ) ; } else if ( namespaceMode . getMode ( ) . isMulti ( ) ) { for ( String namespace : namespaceMode . getMultiValue ( ) ) { buildFilter ( mediaNames , namespace , nameMode , false ) ; } } else if ( namespaceMode . getMode ( ) . isWildCard ( ) ) { buildFilter ( mediaNames , namespaceMode . getSingleValue ( ) , nameMode , true ) ; } } StringBuilder result = new StringBuilder ( ) ; Iterator < String > iter = mediaNames . iterator ( ) ; int i = - 1 ; while ( iter . hasNext ( ) ) { i ++ ; if ( i == 0 ) { result . append ( iter . next ( ) ) ; } else { result . append ( "," ) . append ( iter . next ( ) ) ; } } String markTable = pipeline . getParameters ( ) . getSystemSchema ( ) + "." + pipeline . getParameters ( ) . getSystemMarkTable ( ) ; String bufferTable = pipeline . getParameters ( ) . getSystemSchema ( ) + "." + pipeline . getParameters ( ) . getSystemBufferTable ( ) ; String dualTable = pipeline . getParameters ( ) . getSystemSchema ( ) + "." + pipeline . getParameters ( ) . getSystemDualTable ( ) ; if ( ! mediaNames . contains ( markTable ) ) { result . append ( "," ) . append ( markTable ) ; } if ( ! mediaNames . contains ( bufferTable ) ) { result . append ( "," ) . append ( bufferTable ) ; } if ( ! mediaNames . contains ( dualTable ) ) { result . append ( "," ) . 
append ( dualTable ) ; } // String otterTable = pipeline . getParameters ( ) . getSystemSchema ( ) +
// if ( ! mediaNames . contains ( otterTable ) ) {
// result . append ( " , " ) . append ( otterTable ) ;
return result . toString ( ) ; |
public class IfcTableRowImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
@ SuppressWarnings ( "unchecked" ) public EList < IfcValue > getRowCells ( ) { } } | return ( EList < IfcValue > ) eGet ( Ifc2x3tc1Package . Literals . IFC_TABLE_ROW__ROW_CELLS , true ) ; |
public class RecentCaseCommunications { /** * The five most recent communications associated with the case .
* @ param communications
* The five most recent communications associated with the case . */
public void setCommunications ( java . util . Collection < Communication > communications ) { } } | if ( communications == null ) { this . communications = null ; return ; } this . communications = new com . amazonaws . internal . SdkInternalList < Communication > ( communications ) ; |
public class PickActivity { /** * Check if the edit text is valid or not .
* @ param editText field to check .
* @ return true if the edit text isn ' t empty */
private boolean checkField ( EditText editText ) { } } | boolean valid = true ; if ( TextUtils . isEmpty ( editText . getText ( ) ) ) { editText . startAnimation ( mWiggle ) ; editText . requestFocus ( ) ; valid = false ; } return valid ; |
public class PathWrapperDeserializer { /** * Credits to : http : / / stackoverflow . com / a / 24012023/194609 */
private static Map < String , Object > toMap ( ObjectMapper objectMapper , JsonNode object ) { } } | return objectMapper . convertValue ( object , new TypeReference < Map < String , Object > > ( ) { } ) ; |
public class Dataset { /** * Method to convert this dataset to RVFDataset using L1 - normalized TF - IDF features
* @ return RVFDataset */
public RVFDataset < L , F > getL1NormalizedTFIDFDataset ( ) { } } | RVFDataset < L , F > rvfDataset = new RVFDataset < L , F > ( this . size ( ) , this . featureIndex , this . labelIndex ) ; Counter < F > featureDocCounts = getFeatureCounter ( ) ; for ( int i = 0 ; i < this . size ( ) ; i ++ ) { Datum < L , F > datum = this . getDatum ( i ) ; RVFDatum < L , F > rvfDatum = getL1NormalizedTFIDFDatum ( datum , featureDocCounts ) ; rvfDataset . add ( rvfDatum ) ; } return rvfDataset ; |
public class JMWordSplitter { /** * Split as list list .
* @ param splitPattern the split pattern
* @ param text the text
* @ return the list */
public static List < String > splitAsList ( Pattern splitPattern , String text ) { } } | return splitAsStream ( splitPattern , text ) . collect ( toList ( ) ) ; |
public class Transform { /** * Build Java code to represent a cast to the given type .
* This will be an argument of the form " ( pkg . Cls1 ) " or " ( pkg . Cls2 [ ] ) " or " ( primtype ) "
* @ param builder the builder in which to build the argument
* @ param cls the type for the argument */
protected static void buildClassCast ( StringBuilder builder , Class cls ) { } } | builder . append ( '(' ) ; buildClass ( builder , cls ) ; builder . append ( ')' ) ; |
public class ListUtil { /** * 返回包装后同步的List , 所有方法都会被synchronized原语同步 .
* 用于CopyOnWriteArrayList与 ArrayDequeue均不符合的场景 */
public static < T > List < T > synchronizedList ( List < T > list ) { } } | return Collections . synchronizedList ( list ) ; |
public class WebSocketPacket { /** * Message decoding (RFC 6455 base framing) < br >
 * 0 1 2 3
 * 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
 * | F | R | R | R | opcode | M | Payload len | Extended payload length |
 * | I | S | S | S | ( 4 ) | A | ( 7 ) | ( 16/64 ) |
 * | N | V | V | V | | S | | ( if payload len = = 126/127 ) |
 * | | 1 | 2 | 3 | | K | | |
 * | Extended payload length continued , if payload len = = 127 |
 * | | Masking - key , if MASK set to 1 |
 * | Masking - key ( continued ) | Payload Data |
 * : Payload Data continued :
 * | Payload Data continued |
 * @ param buffer
 * @ param exbuffers
 * @ return NONE means the buffer does not yet hold enough bytes ; this means decoding completed or partially completed ; null means a decoding error ; */
WebSocketPacket decode ( final Logger logger , final WebSocketRunner runner , final WebSocket webSocket , final int wsmaxbody , final AbstractMap . SimpleEntry < String , byte [ ] > halfBytes , final ByteBuffer buffer ) { } } | // begin decoding
final boolean debug = false ; // debug switch
// Fewer than 2 bytes cannot even hold the fixed header; stash them in halfBytes for the next read.
if ( debug ) logger . log ( Level . FINEST , "read websocket message's length = " + buffer . remaining ( ) ) ; if ( ! buffer . hasRemaining ( ) ) return NONE ; if ( buffer . remaining ( ) < 2 ) { byte [ ] bs = new byte [ buffer . remaining ( ) ] ; buffer . get ( bs ) ; halfBytes . setValue ( bs ) ; return NONE ; } final byte opcode = buffer . get ( ) ; // first byte: FIN flag + opcode
this . last = ( opcode & 0b1000_0000 ) != 0 ; this . type = FrameType . valueOf ( opcode & 0xF ) ; if ( type == FrameType . CLOSE ) { if ( debug ) logger . log ( Level . FINEST , " receive close command from websocket client" ) ; } if ( type == null ) { logger . log ( Level . SEVERE , " receive unknown frametype(opcode=" + ( opcode & 0xF ) + ") from websocket client" ) ; } final boolean checkrsv = false ; // RSV validation intentionally disabled for now
if ( checkrsv && ( opcode & 0b0111_0000 ) != 0 ) { if ( debug ) logger . log ( Level . FINE , "rsv1 rsv2 rsv3 must be 0, but not (" + opcode + ")" ) ; return null ; // rsv1 rsv2 rsv3 must be 0
} // 0x00 continuation frame
// 0x01 text frame
// 0x02 binary frame
// 0x03-07 reserved for future non-control frames
// 0x8 connection close
// 0x9 ping
// 0xA pong
// 0x0B-0F reserved for future control frames
final boolean control = ( opcode & 0b0000_1000 ) != 0 ; // is this a control frame
final byte crcode = buffer . get ( ) ; // second byte: MASK flag + 7-bit payload length code
byte lengthCode = crcode ; final boolean masked = ( lengthCode & 0x80 ) == 0x80 ; if ( masked ) lengthCode ^= 0x80 ; // strip the MASK bit
// Check whether the remaining buffer holds the rest of the header (extended length + masking key);
// if not, re-prefix the two header bytes and stash everything in halfBytes for the next read.
// NOTE(review): for lengthCode == 0x7F this reserves 4 bytes, but the 127 branch below reads 8
// via getLong() — verify against the half-frame handling upstream.
int minBufferLength = ( ( lengthCode <= 0x7D ) ? 0 : ( lengthCode == 0x7E ? 2 : 4 ) ) + ( masked ? 4 : 0 ) ; if ( buffer . remaining ( ) < minBufferLength ) { byte [ ] bs = new byte [ 2 + buffer . remaining ( ) ] ; bs [ 0 ] = opcode ; bs [ 1 ] = crcode ; buffer . get ( bs , 2 , buffer . remaining ( ) ) ; halfBytes . setValue ( bs ) ; return NONE ; } int length ; if ( lengthCode <= 0x7D ) { // code <= 125: the code itself is the payload length
length = lengthCode ; } else { if ( control ) { if ( debug ) logger . log ( Level . FINE , " receive control command from websocket client" ) ; return null ; } if ( lengthCode == 0x7E ) { // 0x7E = 126: 16-bit extended length follows
length = ( int ) buffer . getChar ( ) ; } else if ( lengthCode == 0x7F ) { // 0x7F = 127: 64-bit extended length follows (truncated to int)
length = ( int ) buffer . getLong ( ) ; } else { length = buffer . getInt ( ) ; } } if ( length > wsmaxbody && wsmaxbody > 0 ) { logger . log ( Level . WARNING , "message length (" + length + ") too big, must less " + wsmaxbody + "" ) ; return null ; } this . receiveLength = length ; if ( debug ) logger . finest ( "this.receiveLength: " + length + ", code=" + lengthCode + ", last=" + last ) ; if ( masked ) { final byte [ ] masks = new byte [ 4 ] ; buffer . get ( masks ) ; this . receiveMasker = new ConvertMask ( ) { private int index = 0 ; @ Override public byte unmask ( byte value ) { return ( byte ) ( value ^ masks [ index ++ % 4 ] ) ; } } ; } if ( buffer . remaining ( ) >= this . receiveLength ) { // enough bytes available: parse the payload now
this . parseReceiveMessage ( logger , runner , webSocket , buffer ) ; this . receiveCount = this . receiveLength ; } else { this . receiveCount = buffer . remaining ( ) ; this . receiveBuffers = buffer . hasRemaining ( ) ? new ByteBuffer [ ] { buffer } : null ; } return this ; |
public class AuditFinding { /** * The list of related resources .
* < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use
* { @ link # setRelatedResources ( java . util . Collection ) } or { @ link # withRelatedResources ( java . util . Collection ) } if you
* want to override the existing values .
* @ param relatedResources
* The list of related resources .
* @ return Returns a reference to this object so that method calls can be chained together . */
public AuditFinding withRelatedResources ( RelatedResource ... relatedResources ) { } } | if ( this . relatedResources == null ) { setRelatedResources ( new java . util . ArrayList < RelatedResource > ( relatedResources . length ) ) ; } for ( RelatedResource ele : relatedResources ) { this . relatedResources . add ( ele ) ; } return this ; |
public class SwiftAPIClient { /** * Accepts any object name .
* If object name of the form
* a / b / c / gil . data / part - r - 00000-48ae3461-203f - 4dd3 - b141 - a45426e2d26c
* - attempt _ 20160317132a _ wrong _ 0000 _ m _ 00000_1 . csv
* Then a / b / c / gil . data is returned .
* Code testing that attempt _ 20160317132a _ wrong _ 0000 _ m _ 00000_1 is valid
* task id identifier
* @ param objectName
* @ return unified object name */
private String extractUnifiedObjectName ( String objectName ) { } } | Path p = new Path ( objectName ) ; int attemptIndex = objectName . indexOf ( HADOOP_ATTEMPT ) ; int dotIndex = objectName . lastIndexOf ( '.' ) ; if ( attemptIndex >= 0 && dotIndex > attemptIndex ) { String attempt = objectName . substring ( attemptIndex , dotIndex ) ; try { TaskAttemptID . forName ( attempt ) ; return p . getParent ( ) . toString ( ) ; } catch ( IllegalArgumentException e ) { return objectName ; } } else if ( objectName . indexOf ( HADOOP_SUCCESS ) > 0 ) { return p . getParent ( ) . toString ( ) ; } return objectName ; |
public class JmsConnectionFactoryImpl { /** * default persistent mapping is RELIABLE _ PERSISTENT , null persistent mapping is
* not valid . */
@ Override public void setPersistentMapping ( String persistentMapping ) throws JMSException { } } | if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( this , tc , "setPersistentMapping" , persistentMapping ) ; if ( persistentMapping == null ) { throw ( JMSException ) JmsErrorUtils . newThrowable ( JMSException . class , "INVALID_VALUE_CWSIA0261" , new Object [ ] { "persistentMapping" , null } , tc ) ; } if ( persistentMapping . equals ( ApiJmsConstants . MAPPING_BEST_EFFORT_NONPERSISTENT ) || persistentMapping . equals ( ApiJmsConstants . MAPPING_EXPRESS_NONPERSISTENT ) || persistentMapping . equals ( ApiJmsConstants . MAPPING_RELIABLE_NONPERSISTENT ) || persistentMapping . equals ( ApiJmsConstants . MAPPING_RELIABLE_PERSISTENT ) || persistentMapping . equals ( ApiJmsConstants . MAPPING_ASSURED_PERSISTENT ) || persistentMapping . equals ( ApiJmsConstants . MAPPING_NONE ) ) { jcaManagedConnectionFactory . setPersistentMapping ( persistentMapping ) ; } else if ( persistentMapping . equals ( ApiJmsConstants . MAPPING_AS_SIB_DESTINATION ) ) { jcaManagedConnectionFactory . setPersistentMapping ( ApiJmsConstants . MAPPING_NONE ) ; } else { throw ( JMSException ) JmsErrorUtils . newThrowable ( JMSException . class , "INVALID_VALUE_CWSIA0261" , new Object [ ] { "persistentMapping" , persistentMapping } , tc ) ; } if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( this , tc , "setPersistentMapping" ) ; |
public class Options { /** * Looks up key in the options .
* If key is an Option , its default value will be returned if the key
* isn ' t found .
* @ param key
* @ return */
public Object get ( Object key ) { } } | Object val = super . get ( key ) ; if ( val == null && key instanceof Option ) { val = ( ( Option ) key ) . defaultValue ; } return val ; |
public class CircularList { /** * Remove an element from this list . This causes the inner list to be swapped out
* @ param element */
public synchronized void removeElement ( T element ) { } } | List < T > origList = ref . get ( ) . list ; boolean isPresent = origList . contains ( element ) ; if ( ! isPresent ) { return ; } List < T > newList = new ArrayList < T > ( origList ) ; newList . remove ( element ) ; swapWithList ( newList ) ; |
public class FnBoolean { /** * Determines whether the result of executing the
* specified function on the target object is null or not .
* @ return false if the function result is null , true if not . */
public static final < X > Function < X , Boolean > isNotNullBy ( final IFunction < X , ? > by ) { } } | return FnFunc . chain ( by , FnObject . isNotNull ( ) ) ; |
public class JSONSerializer { /** * Creates a JSONObject , JSONArray or a JSONNull from a JSONString .
* @ throws JSONException if the string is not a valid JSON string */
private static JSON toJSON ( JSONString string , JsonConfig jsonConfig ) { } } | return toJSON ( string . toJSONString ( ) , jsonConfig ) ; |
public class authenticationauthnprofile { /** * Use this API to fetch authenticationauthnprofile resource of given name . */
public static authenticationauthnprofile get ( nitro_service service , String name ) throws Exception { } } | authenticationauthnprofile obj = new authenticationauthnprofile ( ) ; obj . set_name ( name ) ; authenticationauthnprofile response = ( authenticationauthnprofile ) obj . get_resource ( service ) ; return response ; |
public class OptionGroup { /** * / * ( non - Javadoc )
* @ see org . overture . ide . internal . core . resources . IOptionGroup # getAttribute ( java . lang . String , boolean ) */
public synchronized boolean getAttribute ( String key , boolean defaultValue ) { } } | if ( attributes . containsKey ( key ) ) { Object obj = attributes . get ( key ) ; if ( obj instanceof Boolean ) { return ( Boolean ) obj ; } } return defaultValue ; |
public class ConsumerMonitoring { /** * Method deregisterConsumerSetMonitor
* Deregisters a previously registered callback .
* @ param callback
* @ throws SINotPossibleInCurrentConfigurationException */
public void deregisterConsumerSetMonitor ( ConnectionImpl connection , ConsumerSetChangeCallback callback ) throws SINotPossibleInCurrentConfigurationException { } } | if ( tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "deregisterConsumerSetMonitor" , new Object [ ] { connection , callback } ) ; // Call the registrar to do the work
_consumerMonitorRegistrar . deregisterMonitor ( connection , callback ) ; if ( tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "deregisterConsumerSetMonitor" ) ; |
public class CPDefinitionPersistenceImpl { /** * Returns the first cp definition in the ordered set where CPTaxCategoryId = & # 63 ; .
* @ param CPTaxCategoryId the cp tax category ID
* @ param orderByComparator the comparator to order the set by ( optionally < code > null < / code > )
* @ return the first matching cp definition
* @ throws NoSuchCPDefinitionException if a matching cp definition could not be found */
@ Override public CPDefinition findByCPTaxCategoryId_First ( long CPTaxCategoryId , OrderByComparator < CPDefinition > orderByComparator ) throws NoSuchCPDefinitionException { } } | CPDefinition cpDefinition = fetchByCPTaxCategoryId_First ( CPTaxCategoryId , orderByComparator ) ; if ( cpDefinition != null ) { return cpDefinition ; } StringBundler msg = new StringBundler ( 4 ) ; msg . append ( _NO_SUCH_ENTITY_WITH_KEY ) ; msg . append ( "CPTaxCategoryId=" ) ; msg . append ( CPTaxCategoryId ) ; msg . append ( "}" ) ; throw new NoSuchCPDefinitionException ( msg . toString ( ) ) ; |
public class ClusterRuleFactory { /** * Performs clustering .
* @ param ts the input time series .
* @ param grammarRules the grammar .
* @ param thresholdLength a parameter .
* @ param thresholdCom another parameter .
* @ param fractionTopDist yet another parameter .
* @ return pruned ruleset . */
public static ArrayList < SameLengthMotifs > performPruning ( double [ ] ts , GrammarRules grammarRules , double thresholdLength , double thresholdCom , double fractionTopDist ) { } } | RuleOrganizer ro = new RuleOrganizer ( ) ; ArrayList < SameLengthMotifs > allClassifiedMotifs = ro . classifyMotifs ( thresholdLength , grammarRules ) ; allClassifiedMotifs = ro . removeOverlappingInSimiliar ( allClassifiedMotifs , grammarRules , ts , thresholdCom ) ; ArrayList < SameLengthMotifs > newAllClassifiedMotifs = ro . refinePatternsByClustering ( grammarRules , ts , allClassifiedMotifs , fractionTopDist ) ; return newAllClassifiedMotifs ; |
public class HttpSender { /** * Sets the maximum number of retries of an unsuccessful request caused by I / O errors .
* The default number of retries is 3.
* @ param retries the number of retries
* @ throws IllegalArgumentException if { @ code retries } is negative .
* @ since 2.4.0 */
public void setMaxRetriesOnIOError ( int retries ) { } } | if ( retries < 0 ) { throw new IllegalArgumentException ( "Parameter retries must be greater or equal to zero." ) ; } HttpMethodRetryHandler retryHandler = new DefaultHttpMethodRetryHandler ( retries , false ) ; client . getParams ( ) . setParameter ( HttpMethodParams . RETRY_HANDLER , retryHandler ) ; clientViaProxy . getParams ( ) . setParameter ( HttpMethodParams . RETRY_HANDLER , retryHandler ) ; |
public class InstancedConfiguration { /** * Checks that tiered locality configuration is consistent .
* @ throws IllegalStateException if invalid tiered locality configuration is encountered */
private void checkTieredLocality ( ) { } } | // Check that any custom tiers set by alluxio . locality . { custom _ tier } = value are also defined in
// the tier ordering defined by alluxio . locality . order .
Set < String > tiers = Sets . newHashSet ( getList ( PropertyKey . LOCALITY_ORDER , "," ) ) ; Set < PropertyKey > predefinedKeys = new HashSet < > ( PropertyKey . defaultKeys ( ) ) ; for ( PropertyKey key : mProperties . keySet ( ) ) { if ( predefinedKeys . contains ( key ) ) { // Skip non - templated keys .
continue ; } Matcher matcher = Template . LOCALITY_TIER . match ( key . toString ( ) ) ; if ( matcher . matches ( ) && matcher . group ( 1 ) != null ) { String tierName = matcher . group ( 1 ) ; if ( ! tiers . contains ( tierName ) ) { throw new IllegalStateException ( String . format ( "Tier %s is configured by %s, but does not exist in the tier list %s " + "configured by %s" , tierName , key , tiers , PropertyKey . LOCALITY_ORDER ) ) ; } } } |
public class SearchPortletController { /** * Accepts a map ( tab name , List of tuple search results ) and returns a prioritized list of
* results for the ajax autocomplete feature . The computing impact of moving the list items
* around should be fairly small since there are not too many search results ( unless we get a
* lot of search providers , in which case the results could be put into the appropriate format
* in the Event SEARCH _ RESULTS _ QNAME _ STRING handling ) .
* < p > Note that the method ( as well as the SearchResults Event handler ) do not impose a
* consistent ordering on results . The results are ordered by priority , but within a particular
* priority the same search may have results ordered differently based upon when the
* SearchResults Event handler receives the search results event list . Also if a search result
* is in multiple category types , even within the same priority , the search result will show up
* multiple times . Currently all results are in a single category so it is not worth adding
* extra complexity to handle a situation that is not present .
* < p > This method also cleans up and trims down the amount of data shipped so the feature is
* more responsive , especially on mobile networks .
* @ param resultsMap
* @ return */
private List < AutocompleteResultsModel > collateResultsForAutoCompleteResponse ( ConcurrentMap < String , List < Tuple < SearchResult , String > > > resultsMap , int maxTextLength ) { } } | SortedMap < Integer , List < AutocompleteResultsModel > > prioritizedResultsMap = getCleanedAndSortedMapResults ( resultsMap , maxTextLength ) ; // Consolidate the results into a single , ordered list of max entries .
List < AutocompleteResultsModel > results = new ArrayList < > ( ) ; for ( List < AutocompleteResultsModel > items : prioritizedResultsMap . values ( ) ) { results . addAll ( items ) ; if ( results . size ( ) >= maxAutocompleteSearchResults ) { break ; } } return results . subList ( 0 , results . size ( ) > maxAutocompleteSearchResults ? maxAutocompleteSearchResults : results . size ( ) ) ; |
public class CmsDefaultXmlContentHandler { /** * Initializes the forbidden template contexts . < p >
* @ param root the root XML element
* @ param contentDefinition the content definition */
protected void initTemplates ( Element root , CmsXmlContentDefinition contentDefinition ) { } } | String strEnabledByDefault = root . attributeValue ( ATTR_ENABLED_BY_DEFAULT ) ; m_allowedTemplates . setDefaultMembership ( safeParseBoolean ( strEnabledByDefault , true ) ) ; List < Node > elements = root . selectNodes ( APPINFO_TEMPLATE ) ; for ( Node elem : elements ) { boolean enabled = safeParseBoolean ( ( ( Element ) elem ) . attributeValue ( ATTR_ENABLED ) , true ) ; String templateName = elem . getText ( ) . trim ( ) ; m_allowedTemplates . setContains ( templateName , enabled ) ; } m_allowedTemplates . freeze ( ) ; |
public class DRL5Expressions { /** * ANTLR-generated rule. src / main / resources / org / drools / compiler / lang / DRL5Expressions . g : 159:1 : dummy2 : relationalExpression EOF ; */
public final void dummy2 ( ) throws RecognitionException { } } | try { // Generated matcher: parse one relationalExpression, then require EOF.
// src / main / resources / org / drools / compiler / lang / DRL5Expressions . g : 160:5 : ( relationalExpression EOF )
// src / main / resources / org / drools / compiler / lang / DRL5Expressions . g : 160:8 : relationalExpression EOF
// state.failed checks implement ANTLR backtracking: bail out silently on failure.
{ pushFollow ( FOLLOW_relationalExpression_in_dummy2747 ) ; relationalExpression ( ) ; state . _fsp -- ; if ( state . failed ) return ; match ( input , EOF , FOLLOW_EOF_in_dummy2749 ) ; if ( state . failed ) return ; } } catch ( RecognitionException re ) { throw re ; } finally { // do for sure before leaving
} |
public class SmoothedChart { /** * * * * * * Internal Methods * * * * * */
@ Override protected void layoutPlotChildren ( ) { } } | super . layoutPlotChildren ( ) ; double height = getLayoutBounds ( ) . getHeight ( ) ; getData ( ) . forEach ( series -> { final Path [ ] paths = getPaths ( series ) ; if ( null == paths ) { return ; } if ( isSmoothed ( ) ) { smooth ( paths [ 1 ] . getElements ( ) , paths [ 0 ] . getElements ( ) , height ) ; } paths [ 0 ] . setVisible ( ChartType . AREA == getChartType ( ) ) ; paths [ 0 ] . setManaged ( ChartType . AREA == getChartType ( ) ) ; } ) ; |
public class AbstractMySQLQuery { /** * For SQL _ BIG _ RESULT , MySQL directly uses disk - based temporary tables if needed , and prefers
* sorting to using a temporary table with a key on the GROUP BY elements .
* @ return the current object */
@ WithBridgeMethods ( value = MySQLQuery . class , castRequired = true ) public C bigResult ( ) { } } | return addFlag ( Position . AFTER_SELECT , SQL_BIG_RESULT ) ; |
public class ErrorLoggerManager { /** * Gets a list of all of the logs that are managed .
* @ return A List of all the log messages ordered by their timestamp . */
public List < LogMessage > getLogs ( ) { } } | final ArrayList < LogMessage > messages = new ArrayList < LogMessage > ( ) ; for ( final String logName : logs . keySet ( ) ) { messages . addAll ( logs . get ( logName ) . getLogMessages ( ) ) ; } Collections . sort ( messages , new LogMessageComparator ( ) ) ; return messages ; |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.