signature stringlengths 43 39.1k | implementation stringlengths 0 450k |
|---|---|
public class Aggregations { /** * Returns an aggregation to find the { @ link java . math . BigInteger } minimum
* of all supplied values . < br / >
* This aggregation is similar to : < pre > SELECT MIN ( value ) FROM x < / pre >
* @ param < Key > the input key type
* @ param < Value > the supplied value type
* @ return the minimum value over all supplied values */
public static < Key , Value > Aggregation < Key , BigInteger , BigInteger > bigIntegerMin ( ) { } } | return new AggregationAdapter ( new BigIntegerMinAggregation < Key , Value > ( ) ) ; |
public class UniversalProjectReader { /** * XER files can contain multiple projects when there are cross - project dependencies .
* As the UniversalProjectReader is designed just to read a single project , we need
* to select one project from those available in the XER file .
* The original project selected for export by the user will have its " export flag "
* set to true . We ' ll return the first project we find where the export flag is
* set to true , otherwise we ' ll just return the first project we find in the file .
* @ param stream schedule data
* @ return ProjectFile instance */
private ProjectFile handleXerFile ( InputStream stream ) throws Exception { } } | PrimaveraXERFileReader reader = new PrimaveraXERFileReader ( ) ; reader . setCharset ( m_charset ) ; List < ProjectFile > projects = reader . readAll ( stream ) ; ProjectFile project = null ; for ( ProjectFile file : projects ) { if ( file . getProjectProperties ( ) . getExportFlag ( ) ) { project = file ; break ; } } if ( project == null && ! projects . isEmpty ( ) ) { project = projects . get ( 0 ) ; } return project ; |
public class StackdriverExportUtils {

    /**
     * Converts an OpenCensus {@code Metric} into a list of Stackdriver {@code TimeSeries},
     * one per OpenCensus time series, all sharing the metric kind, value type and
     * monitored resource derived from the metric descriptor.
     *
     * @param metric the OpenCensus metric to convert
     * @param monitoredResource the Stackdriver resource attached to every produced series
     * @param domain metric-name domain used when building the Stackdriver metric
     * @param projectId project used for exemplar attachment caching
     * @param constantLabels labels added to every produced series
     * @return the converted list of Stackdriver time series
     */
    static List<TimeSeries> createTimeSeriesList(io.opencensus.metrics.export.Metric metric, MonitoredResource monitoredResource, String domain, String projectId, Map<LabelKey, LabelValue> constantLabels) {
        List<TimeSeries> timeSeriesList = Lists.newArrayList();
        io.opencensus.metrics.export.MetricDescriptor metricDescriptor = metric.getMetricDescriptor();
        // Remember the project for exemplar conversion elsewhere in this class
        // (static cache — presumably single-project exporters; confirm if multi-project).
        if (!projectId.equals(cachedProjectIdForExemplar)) {
            cachedProjectIdForExemplar = projectId;
        }
        // Shared fields for all TimeSeries generated from the same Metric
        TimeSeries.Builder shared = TimeSeries.newBuilder();
        shared.setMetricKind(createMetricKind(metricDescriptor.getType()));
        shared.setResource(monitoredResource);
        shared.setValueType(createValueType(metricDescriptor.getType()));
        // Each entry in timeSeriesList will be converted into an independent TimeSeries object
        for (io.opencensus.metrics.export.TimeSeries timeSeries : metric.getTimeSeriesList()) {
            // TODO(mayurkale): Consider using setPoints instead of builder clone and addPoints.
            TimeSeries.Builder builder = shared.clone();
            builder.setMetric(createMetric(metricDescriptor, timeSeries.getLabelValues(), domain, constantLabels));
            io.opencensus.common.Timestamp startTimeStamp = timeSeries.getStartTimestamp();
            for (io.opencensus.metrics.export.Point point : timeSeries.getPoints()) {
                builder.addPoints(createPoint(point, startTimeStamp));
            }
            timeSeriesList.add(builder.build());
        }
        return timeSeriesList;
    }
}
public class MBeanBase { /** * Deregisters this bean on platform MBeanServer . */
public void deregister ( ) { } } | if ( objectName == null ) { return ; } try { MBeanServer mbs = ManagementFactory . getPlatformMBeanServer ( ) ; if ( mbs . isRegistered ( objectName ) ) { mbs . unregisterMBean ( objectName ) ; } objectName = null ; } catch ( Exception e ) { throw new RuntimeException ( e ) ; } |
public class MongoUtils {

    /**
     * Creates a new collection with the given options and returns a handle to it.
     *
     * @param db the database in which to create the collection
     * @param collectionName the name of the collection to create
     * @param options creation options (capped size, validation, etc.)
     * @return a {@code MongoCollection} handle for the newly created collection
     */
    public static MongoCollection<Document> createCollection(MongoDatabase db, String collectionName, CreateCollectionOptions options) {
        db.createCollection(collectionName, options);
        // Fetch the handle after creation; getCollection alone would not apply the options.
        return db.getCollection(collectionName);
    }
}
public class CmsSingleFileUploadDialog { /** * Parses the upload response of the server and decides what to do . < p >
* @ param results a JSON Object */
@ SuppressWarnings ( "null" ) public void parseResponse ( String results ) { } } | cancelUpdateProgress ( ) ; stopLoadingAnimation ( ) ; if ( ( ! m_canceled ) && CmsStringUtil . isNotEmptyOrWhitespaceOnly ( results ) ) { JSONObject jsonObject = JSONParser . parseStrict ( results ) . isObject ( ) ; boolean success = jsonObject . get ( I_CmsUploadConstants . KEY_SUCCESS ) . isBoolean ( ) . booleanValue ( ) ; // If the upload is done so fast that we did not receive any progress information , then
// the content length is unknown . For that reason take the request size to show how
// much bytes were uploaded .
double size = jsonObject . get ( I_CmsUploadConstants . KEY_REQUEST_SIZE ) . isNumber ( ) . doubleValue ( ) ; long requestSize = new Double ( size ) . longValue ( ) ; if ( m_contentLength == 0 ) { m_contentLength = requestSize ; } if ( success ) { m_mainPanel . displayDialogInfo ( Messages . get ( ) . key ( Messages . GUI_UPLOAD_INFO_FINISHING_0 ) , false ) ; m_progressInfo . finish ( ) ; JSONValue uploadedFilesVal = jsonObject . get ( I_CmsUploadConstants . KEY_UPLOADED_FILE_NAMES ) ; if ( uploadedFilesVal != null ) { JSONArray uploadedFilesArray = uploadedFilesVal . isArray ( ) ; if ( uploadedFilesArray != null ) { List < String > uploadedFiles = new ArrayList < String > ( ) ; if ( uploadedFilesArray != null ) { for ( int i = 0 ; i < uploadedFilesArray . size ( ) ; i ++ ) { JSONString entry = uploadedFilesArray . get ( i ) . isString ( ) ; if ( entry != null ) { uploadedFiles . add ( entry . stringValue ( ) ) ; } } } if ( m_context != null ) { m_context . onUploadFinished ( uploadedFiles ) ; } } } closeOnSuccess ( ) ; } else { String message = jsonObject . get ( I_CmsUploadConstants . KEY_MESSAGE ) . isString ( ) . stringValue ( ) ; String stacktrace = jsonObject . get ( I_CmsUploadConstants . KEY_STACKTRACE ) . isString ( ) . stringValue ( ) ; showErrorReport ( message , stacktrace ) ; } } |
public class RegularExpression {

    /**
     * Checks whether the <var>target</var> text <strong>contains</strong> this
     * pattern or not.
     *
     * @param target the text to search
     * @param match a Match instance for storing the matching result; may be null
     * @return true if the target contains this pattern, false otherwise
     */
    public boolean matches(CharacterIterator target, Match match) {
        int start = target.getBeginIndex();
        int end = target.getEndIndex();
        // Lazily compile the pattern and create the shared context (thread-safe init).
        synchronized (this) {
            if (this.operations == null)
                this.prepare();
            if (this.context == null)
                this.context = new Context();
        }
        // Reuse the shared context when free, otherwise allocate a private one.
        Context con = null;
        synchronized (this.context) {
            con = this.context.inuse ? new Context() : this.context;
            con.reset(target, start, end, this.numberOfClosures);
        }
        if (match != null) {
            match.setNumberOfGroups(this.nofparen);
            match.setSource(target);
        } else if (this.hasBackReferences) {
            // Back references need a Match to record groups even if the caller passed none.
            match = new Match();
            match.setNumberOfGroups(this.nofparen);
            // Need not call setSource() because a caller cannot access this match instance.
        }
        con.match = match;
        // XML Schema mode requires the whole input to match, anchored at the start.
        if (RegularExpression.isSet(this.options, XMLSCHEMA_MODE)) {
            int matchEnd = this.match(con, this.operations, con.start, 1, this.options);
            if (matchEnd == con.limit) {
                if (con.match != null) {
                    con.match.setBeginning(0, con.start);
                    con.match.setEnd(0, matchEnd);
                }
                con.setInUse(false);
                return true;
            }
            return false;
        }
        /*
         * The pattern has only a fixed string. The engine uses Boyer-Moore.
         */
        if (this.fixedStringOnly) {
            int o = this.fixedStringTable.matches(target, con.start, con.limit);
            if (o >= 0) {
                if (con.match != null) {
                    con.match.setBeginning(0, o);
                    con.match.setEnd(0, o + this.fixedString.length());
                }
                con.setInUse(false);
                return true;
            }
            con.setInUse(false);
            return false;
        }
        /*
         * The pattern contains a fixed string. The engine checks with
         * Boyer-Moore whether the text contains the fixed string or not. If
         * not, it returns false without running the full engine.
         */
        if (this.fixedString != null) {
            int o = this.fixedStringTable.matches(target, con.start, con.limit);
            if (o < 0) {
                con.setInUse(false);
                return false;
            }
        }
        // No start position past this limit can fit a match of minimal length.
        int limit = con.limit - this.minlength;
        int matchStart;
        int matchEnd = -1;
        /*
         * Checks whether the expression starts with ".*".
         */
        if (this.operations != null && this.operations.type == Op.CLOSURE && this.operations.getChild().type == Op.DOT) {
            if (isSet(this.options, SINGLE_LINE)) {
                // ".*" in single-line mode always matches from the very start.
                matchStart = con.start;
                matchEnd = this.match(con, this.operations, con.start, 1, this.options);
            } else {
                // Multi-line: only try to match at the beginning of each line.
                boolean previousIsEOL = true;
                for (matchStart = con.start; matchStart <= limit; matchStart++) {
                    int ch = target.setIndex(matchStart);
                    if (isEOLChar(ch)) {
                        previousIsEOL = true;
                    } else {
                        if (previousIsEOL) {
                            if (0 <= (matchEnd = this.match(con, this.operations, matchStart, 1, this.options)))
                                break;
                        }
                        previousIsEOL = false;
                    }
                }
            }
        }
        /*
         * Optimization against the first character: skip start positions whose
         * first character cannot begin a match.
         */
        else if (this.firstChar != null) {
            RangeToken range = this.firstChar;
            for (matchStart = con.start; matchStart <= limit; matchStart++) {
                int ch = target.setIndex(matchStart);
                // Combine a surrogate pair into a single code point before testing the range.
                if (REUtil.isHighSurrogate(ch) && matchStart + 1 < con.limit) {
                    ch = REUtil.composeFromSurrogates(ch, target.setIndex(matchStart + 1));
                }
                if (!range.match(ch)) {
                    continue;
                }
                if (0 <= (matchEnd = this.match(con, this.operations, matchStart, 1, this.options))) {
                    break;
                }
            }
        }
        /*
         * Straightforward matching: try every start position.
         */
        else {
            for (matchStart = con.start; matchStart <= limit; matchStart++) {
                if (0 <= (matchEnd = this.match(con, this.operations, matchStart, 1, this.options)))
                    break;
            }
        }
        if (matchEnd >= 0) {
            if (con.match != null) {
                con.match.setBeginning(0, matchStart);
                con.match.setEnd(0, matchEnd);
            }
            con.setInUse(false);
            return true;
        } else {
            con.setInUse(false);
            return false;
        }
    }
}
public class TemplateParser { /** * Get the type of an expression from the cast at the beginning . ( int ) 12 - > 12 of type int ( int )
* 15 + 5 - > 15 + 5 of type int ( float ) ( 12 + 3 ) - > 12 + 3 of type float ( ( int ) 12 ) + 3 - > ( ( int )
* 12 ) + 3 of type Any ( ( JsArray ) myArray ) . getAt ( 0 ) - > ( ( JsArray ) myArray ) . getAt ( 0 ) of type Any
* @ param expression The expression to process
* @ return The modified expression ( where cast has been removed if necessary ) */
private Expression getTypeFromCast ( Expression expression ) { } } | if ( expression instanceof BinaryExpr ) { Expression mostLeft = getLeftmostExpression ( expression ) ; if ( mostLeft instanceof CastExpr ) { CastExpr castExpr = ( CastExpr ) mostLeft ; currentExpressionReturnType = stringTypeToTypeName ( castExpr . getType ( ) . toString ( ) ) ; BinaryExpr parent = ( BinaryExpr ) mostLeft . getParentNode ( ) . get ( ) ; parent . setLeft ( castExpr . getExpression ( ) ) ; } } else if ( expression instanceof CastExpr ) { CastExpr castExpr = ( CastExpr ) expression ; currentExpressionReturnType = stringTypeToTypeName ( castExpr . getType ( ) . toString ( ) ) ; expression = castExpr . getExpression ( ) ; } return expression ; |
public class ListApplicationVersionsResult { /** * An array of version summaries for the application .
* @ param versions
* An array of version summaries for the application . */
public void setVersions ( java . util . Collection < VersionSummary > versions ) { } } | if ( versions == null ) { this . versions = null ; return ; } this . versions = new java . util . ArrayList < VersionSummary > ( versions ) ; |
public class PairManager { /** * Get a pair - positionList table
* It ' s a hash map which the key is the hashed pair , and the value is list of positions
* That means the table stores the positions which have the same hashed pair
* @ param fingerprintfingerprint bytes
* @ return pair - positionList HashMap */
public HashMap < Integer , List < Integer > > getPair_PositionList_Table ( byte [ ] fingerprint ) { } } | List < int [ ] > pairPositionList = getPairPositionList ( fingerprint ) ; // table to store pair : pos , pos , pos , . . . ; pair2 : pos , pos , pos , . . . .
HashMap < Integer , List < Integer > > pair_positionList_table = new HashMap < > ( ) ; // get all pair _ positions from list , use a table to collect the data group by pair hashcode
for ( int [ ] pair_position : pairPositionList ) { // System . out . println ( pair _ position [ 0 ] + " , " + pair _ position [ 1 ] ) ;
// group by pair - hashcode , i . e . : < pair , List < position > >
if ( pair_positionList_table . containsKey ( pair_position [ 0 ] ) ) { pair_positionList_table . get ( pair_position [ 0 ] ) . add ( pair_position [ 1 ] ) ; } else { List < Integer > positionList = new LinkedList < > ( ) ; positionList . add ( pair_position [ 1 ] ) ; pair_positionList_table . put ( pair_position [ 0 ] , positionList ) ; } // end group by pair - hashcode , i . e . : < pair , List < position > >
} // end get all pair _ positions from list , use a table to collect the data group by pair hashcode
return pair_positionList_table ; |
public class SqlFileFixture { /** * Execute the SQL file ' s content .
* @ param unitDaoFactory the { @ link net . cpollet . jixture . dao . UnitDaoFactory } to use . */
@ Override public void load ( UnitDaoFactory unitDaoFactory ) { } } | BufferedReader reader = new BufferedReader ( new InputStreamReader ( fileInputStream ) ) ; BufferedReaderIterator iterator = new BufferedReaderIterator ( reader ) ; StringBuilder currentQuery = new StringBuilder ( ) ; while ( iterator . hasNext ( ) ) { String line = iterator . next ( ) . trim ( ) ; if ( ! isCommentOrEmptyLine ( line ) ) { currentQuery . append ( line ) . append ( " " ) ; } if ( isEndOfQuery ( line ) ) { unitDaoFactory . getUnitDao ( ) . execute ( currentQuery . toString ( ) ) ; currentQuery = new StringBuilder ( ) ; } } |
public class GenericLogicDiscoverer {

    /**
     * Generic implementation for finding all the Services or Operations that have SOME of the given types as inputs or outputs.
     *
     * @param entityType the MSM URI of the type of entity we are looking for. Only supports Service and Operation.
     * @param relationship the MSM URI of the relationship we are looking for. Only supports hasInput, hasOutput and modelReference.
     * @param types the input/output types (modelReferences, that is) we are looking for
     * @param matchType the minimum concept match type to accept (Exact, Plugin or Subsume)
     * @return a Map mapping operation/services URIs to MatchResults.
     */
    private Map<URI, MatchResult> findSome(URI entityType, URI relationship, Set<URI> types, MatchType matchType) {
        // Ensure that we have been given correct parameters
        if (types == null || types.isEmpty() || (!entityType.toASCIIString().equals(MSM.Service.getURI()) && !entityType.toASCIIString().equals(MSM.Operation.getURI())) || (!relationship.toASCIIString().equals(MSM.hasInput.getURI()) && !relationship.toASCIIString().equals(MSM.hasOutput.getURI()) && !relationship.toASCIIString().equals(SAWSDL.modelReference.getURI()))) {
            return ImmutableMap.of();
        }
        // Expand the input types to get all that match enough to be consumed
        // The structure is: <OriginalType, MatchingType, MatchResult>
        Table<URI, URI, MatchResult> expandedTypes;
        if (matchType.equals(LogicConceptMatchType.Subsume)) {
            // Subsume: accept anything matched at most Subsume, plus exact matches.
            expandedTypes = HashBasedTable.create();
            for (URI type : types) {
                expandedTypes.putAll(this.conceptMatcher.listMatchesAtMostOfType(ImmutableSet.of(type), LogicConceptMatchType.Subsume));
                expandedTypes.putAll(this.conceptMatcher.listMatchesOfType(ImmutableSet.of(type), LogicConceptMatchType.Exact));
            }
        } else if (matchType.equals(LogicConceptMatchType.Plugin)) {
            // Plugin: anything matched at least Plugin.
            expandedTypes = this.conceptMatcher.listMatchesAtLeastOfType(types, LogicConceptMatchType.Plugin);
        } else {
            // Default (Exact): only exact concept matches.
            expandedTypes = HashBasedTable.create();
            for (URI type : types) {
                expandedTypes.putAll(this.conceptMatcher.listMatchesOfType(ImmutableSet.of(type), LogicConceptMatchType.Exact));
            }
        }
        // Track all the results in a multimap to push the details up the stack
        Multimap<URI, MatchResult> result = ArrayListMultimap.create();
        // Find all the entities with modelReferences to the expanded types
        // The column view is the one with all the possible matches since a class will always match itself
        Map<URI, Map<URI, MatchResult>> columnMap = expandedTypes.columnMap();
        for (URI type : columnMap.keySet()) {
            Set<URI> entities = ImmutableSet.of();
            if (relationship.toASCIIString().equals(SAWSDL.modelReference.getURI())) {
                entities = listEntitesWithModelReference(entityType, type);
            } else if (relationship.toASCIIString().equals(MSM.hasInput.getURI()) || relationship.toASCIIString().equals(MSM.hasOutput.getURI())) {
                entities = listEntitiesWithType(entityType, relationship, type);
            }
            for (URI entity : entities) {
                result.putAll(entity, columnMap.get(type).values());
            }
        }
        // Merge the results into a single map using Union
        return Maps.transformValues(result.asMap(), MatchResultsMerger.UNION);
    }
}
public class RESTReflect { /** * Finds query parameters based on { @ link javax . ws . rs . QueryParam } annotation .
* @ param baseParam the base parameter URI
* @ param method the method to scan
* @ return the map of parameter URI by parameter name */
static Map < String , String > findQueryParams ( String baseParam , Method method ) { } } | Map < String , String > hrefVars = new HashMap < > ( ) ; Annotation [ ] [ ] parameterAnnotations = method . getParameterAnnotations ( ) ; for ( int i = 0 ; i < parameterAnnotations . length ; i ++ ) { Annotation [ ] paramAnnotations = parameterAnnotations [ i ] ; for ( Annotation paramAnnotation : paramAnnotations ) { Class < ? > parameterClass = method . getParameterTypes ( ) [ i ] ; hrefVars . putAll ( findQueryParamOnParameter ( baseParam , parameterClass , paramAnnotation ) ) ; } } return hrefVars ; |
public class PrefMapSize {

    /**
     * Applies the stored size preference by (re)creating the map's tile grid with
     * the configured tile dimensions and map dimensions in tiles.
     *
     * @param map the map tile to configure
     */
    @Override
    public void apply(MapTile map) {
        map.create(tileWidth, tileHeight, widthInTile, heightInTile);
    }
}
public class TopicAdminClientSnippets { /** * Example of listing topics . */
public ListTopicsPagedResponse listTopics ( ) throws Exception { } } | // [ START pubsub _ list _ topics ]
try ( TopicAdminClient topicAdminClient = TopicAdminClient . create ( ) ) { ListTopicsRequest listTopicsRequest = ListTopicsRequest . newBuilder ( ) . setProject ( ProjectName . format ( projectId ) ) . build ( ) ; ListTopicsPagedResponse response = topicAdminClient . listTopics ( listTopicsRequest ) ; Iterable < Topic > topics = response . iterateAll ( ) ; for ( Topic topic : topics ) { // do something with the topic
} return response ; } // [ END pubsub _ list _ topics ] |
public class HandshakeInitializationPacket {

    /**
     * Parses a MySQL handshake initialization packet into this object's fields.
     * Wire layout:
     * <pre>
     * Bytes                       Name
     * 1                           protocol_version
     * n (Null-Terminated String)  server_version
     * 4                           thread_id
     * 8                           scramble_buff
     * 1                           (filler) always 0x00
     * 2                           server_capabilities
     * 1                           server_language
     * 2                           server_status
     * 13                          (filler) always 0x00 ...
     * 13                          rest of scramble_buff (4.1)
     * </pre>
     *
     * @param data the raw packet payload, starting at protocol_version
     */
    public void fromBytes(byte[] data) {
        int index = 0;
        // 1. read protocol_version
        protocolVersion = data[index];
        index++;
        // 2. read server_version (null-terminated)
        byte[] serverVersionBytes = ByteHelper.readNullTerminatedBytes(data, index);
        // NOTE(review): uses the platform default charset — presumably the version
        // string is ASCII; confirm before specifying a charset.
        serverVersion = new String(serverVersionBytes);
        index += (serverVersionBytes.length + 1);
        // 3. read thread_id
        threadId = ByteHelper.readUnsignedIntLittleEndian(data, index);
        index += 4;
        // 4. read scramble_buff (first 8 bytes of the auth seed)
        seed = ByteHelper.readFixedLengthBytes(data, index, 8);
        index += 8;
        index += 1; // 1 byte (filler) always 0x00
        // 5. read server_capabilities (lower 16 bits)
        this.serverCapabilities = ByteHelper.readUnsignedShortLittleEndian(data, index);
        index += 2;
        if (data.length > index) {
            // 6. read server_language
            this.serverCharsetNumber = data[index];
            index++;
            // 7. read server_status
            this.serverStatus = ByteHelper.readUnsignedShortLittleEndian(data, index);
            index += 2;
            // 8. read upper capability flags and combine with the lower 16 bits
            int capabilityFlags2 = ByteHelper.readUnsignedShortLittleEndian(data, index);
            index += 2;
            int capabilities = (capabilityFlags2 << 16) | this.serverCapabilities;
            // int authPluginDataLen = -1;
            // if ((capabilities & Capability.CLIENT_PLUGIN_AUTH) != 0) {
            //     authPluginDataLen = data[index];
            index += 1; // auth plugin data length byte (currently skipped, see above)
            index += 10; // reserved filler bytes
            // 9. read rest of scramble_buff
            if ((capabilities & Capability.CLIENT_SECURE_CONNECTION) != 0) {
                // int len = Math.max(13, authPluginDataLen - 8);
                // this.authPluginDataPart2 = buffer.readFixedLengthString(len); // scramble2
                // The protocol reserves the last 13 bytes for the remaining scramble,
                // but the final byte is actually 0x00 and must not be included in the
                // scramble, so only 12 bytes are read here.
                this.restOfScrambleBuff = ByteHelper.readFixedLengthBytes(data, index, 12);
            }
            index += 12 + 1;
            // 10. read the auth plugin name when the server announces plugin auth
            if ((capabilities & Capability.CLIENT_PLUGIN_AUTH) != 0) {
                this.authPluginName = ByteHelper.readNullTerminatedBytes(data, index);
            }
            // end read
        }
    }
}
public class Matrix4d { /** * / * ( non - Javadoc )
* @ see org . joml . Matrix4dc # transpose ( org . joml . Matrix4d ) */
public Matrix4d transpose ( Matrix4d dest ) { } } | if ( ( properties & PROPERTY_IDENTITY ) != 0 ) return dest . identity ( ) ; return transposeGeneric ( dest ) ; |
public class AdGroupFeedPage { /** * Returns an iterator over this page ' s { @ code entries } that :
* < ul >
* < li > Will not be { @ code null } . < / li >
* < li > Will not support { @ link java . util . Iterator # remove ( ) } . < / li >
* < / ul >
* @ return a non - null iterator . */
@ Override public java . util . Iterator < com . google . api . ads . adwords . axis . v201809 . cm . AdGroupFeed > iterator ( ) { } } | if ( entries == null ) { return java . util . Collections . < com . google . api . ads . adwords . axis . v201809 . cm . AdGroupFeed > emptyIterator ( ) ; } return java . util . Arrays . < com . google . api . ads . adwords . axis . v201809 . cm . AdGroupFeed > asList ( entries ) . iterator ( ) ; |
public class ISUPMessageFactoryImpl { /** * ( non - Javadoc )
* @ see org . restcomm . protocols . ss7 . isup . ISUPMessageFactory # createCGB ( int cic ) */
public CircuitGroupBlockingMessage createCGB ( int cic ) { } } | CircuitGroupBlockingMessage msg = createCGB ( ) ; CircuitIdentificationCode code = this . parameterFactory . createCircuitIdentificationCode ( ) ; code . setCIC ( cic ) ; msg . setCircuitIdentificationCode ( code ) ; return msg ; |
public class DateUtils { /** * Given a string that may represent a date or range of dates , or date time or range of date times ,
* attempt to extract a standard date from that string .
* @ param verbatimEventDate a string containing a verbatim event date .
* @ param yearsBeforeSuspect Dates that parse to a year prior to this year are marked as suspect .
* @ return a map with keys resultState for the nature of the match and result for the resulting date .
* @ deprecated
* @ see DateUtils # extractDateFromVerbatimER ( String , int , Boolean ) replacement method . */
public static Map < String , String > extractDateFromVerbatim ( String verbatimEventDate , int yearsBeforeSuspect ) { } } | Map result = new HashMap < String , String > ( ) ; EventResult eresult = extractDateFromVerbatimER ( verbatimEventDate , yearsBeforeSuspect , null ) ; if ( eresult != null ) { if ( ! eresult . getResultState ( ) . equals ( EventResult . EventQCResultState . NOT_RUN ) ) { result . put ( "resultState" , eresult . getResultState ( ) . toString ( ) . toLowerCase ( ) ) ; result . put ( "result" , eresult . getResult ( ) ) ; } } return result ; |
public class JDBCStorageConnection {

    /**
     * {@inheritDoc}
     *
     * NOTE(review): the {@code pattern} filter argument is ignored by this
     * implementation — it delegates to the unfiltered overload and returns all
     * child nodes. Confirm callers expect the filter to be advisory here.
     */
    public List<NodeData> getChildNodesData(NodeData parent, List<QPathEntryFilter> pattern) throws RepositoryException, IllegalStateException {
        // return all child nodes by default
        return getChildNodesData(parent);
    }
}
public class PreprocessorContext { /** * Check that there is a global variable with such name .
* @ param variableName a name to be checked , can be null
* @ return false if there is not such variable or it is null , true if such global or special variable exists */
public boolean isGlobalVariable ( @ Nullable final String variableName ) { } } | boolean result = false ; if ( variableName != null ) { final String normalized = PreprocessorUtils . normalizeVariableName ( variableName ) ; result = this . globalVarTable . containsKey ( normalized ) || mapVariableNameToSpecialVarProcessor . containsKey ( normalized ) ; } return result ; |
public class VelocityEngineFactory {

    /**
     * Initialize a Velocity resource loader for the given VelocityEngine:
     * either a standard Velocity FileResourceLoader or a SpringResourceLoader.
     * <p>Called by {@code createVelocityEngine()}.
     *
     * @param velocityEngine the VelocityEngine to configure
     * @param resourceLoaderPath the path to load Velocity resources from
     *        (may be a comma-delimited list of paths)
     * @see org.apache.velocity.runtime.resource.loader.FileResourceLoader
     * @see SpringResourceLoader
     * @see #initSpringResourceLoader
     * @see #createVelocityEngine()
     */
    protected void initVelocityResourceLoader(VelocityEngine velocityEngine, String resourceLoaderPath) {
        if (isPreferFileSystemAccess()) {
            // Try to load via the file system, fall back to SpringResourceLoader
            // (for hot detection of template changes, if possible).
            try {
                StringBuilder resolvedPath = new StringBuilder();
                String[] paths = StringUtils.commaDelimitedListToStringArray(resourceLoaderPath);
                for (int i = 0; i < paths.length; i++) {
                    String path = paths[i];
                    Resource resource = getResourceLoader().getResource(path);
                    File file = resource.getFile(); // will fail if not resolvable in the file system
                    if (logger.isDebugEnabled()) {
                        logger.debug("Resource loader path [" + path + "] resolved to file [" + file.getAbsolutePath() + "]");
                    }
                    resolvedPath.append(file.getAbsolutePath());
                    if (i < paths.length - 1) {
                        resolvedPath.append(',');
                    }
                }
                // Configure Velocity's file loader with caching on the resolved absolute paths.
                velocityEngine.setProperty(RuntimeConstants.RESOURCE_LOADER, "file");
                velocityEngine.setProperty(RuntimeConstants.FILE_RESOURCE_LOADER_CACHE, "true");
                velocityEngine.setProperty(RuntimeConstants.FILE_RESOURCE_LOADER_PATH, resolvedPath.toString());
            } catch (IOException ex) {
                // Any path that cannot be resolved to a java.io.File aborts the whole
                // file-system route; fall back for all paths, not just the failing one.
                if (logger.isDebugEnabled()) {
                    logger.debug("Cannot resolve resource loader path [" + resourceLoaderPath + "] to [java.io.File]: using SpringResourceLoader", ex);
                }
                initSpringResourceLoader(velocityEngine, resourceLoaderPath);
            }
        } else {
            // Always load via SpringResourceLoader
            // (without hot detection of template changes).
            if (logger.isDebugEnabled()) {
                logger.debug("File system access not preferred: using SpringResourceLoader");
            }
            initSpringResourceLoader(velocityEngine, resourceLoaderPath);
        }
    }
}
public class Composite { /** * / * initialize pwr928 encoding table */
private void init928 ( ) { } } | int i , j , v ; int [ ] cw = new int [ 7 ] ; cw [ 6 ] = 1 ; for ( i = 5 ; i >= 0 ; i -- ) { cw [ i ] = 0 ; } for ( i = 0 ; i < 7 ; i ++ ) { pwr928 [ 0 ] [ i ] = cw [ i ] ; } for ( j = 1 ; j < 69 ; j ++ ) { for ( v = 0 , i = 6 ; i >= 1 ; i -- ) { v = ( 2 * cw [ i ] ) + ( v / 928 ) ; pwr928 [ j ] [ i ] = cw [ i ] = v % 928 ; } pwr928 [ j ] [ 0 ] = cw [ 0 ] = ( 2 * cw [ 0 ] ) + ( v / 928 ) ; } |
public class GenericUtils {

    /**
     * Given a wsbb[], we're adding a byte[] value to the last buffer. If
     * that buffer fills up, then we will allocate a new one, expand the
     * wsbb[] and keep going until the entire byte[] is added.
     * Returns the new wsbb[] (expanded if needed).
     *
     * @param inBuffers the current buffer array; the last buffer receives data first
     * @param value the bytes to append
     * @param bnfObj provides buffer allocation and the outgoing buffer size
     * @return WsByteBuffer[] — the (possibly expanded) buffer array
     */
    static private WsByteBuffer[] putByteArrayKnownOverflow(WsByteBuffer[] inBuffers, byte[] value, BNFHeadersImpl bnfObj) {
        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
            Tr.debug(tc, "Known buffer overflow in put.");
        }
        WsByteBuffer[] buffers = inBuffers;
        boolean bDone = false;
        int remaining = value.length;
        int offset = 0; // current offset into the value
        // Start filling at the last buffer in the array.
        WsByteBuffer buffer = buffers[buffers.length - 1];
        int avail = buffer.capacity() - buffer.position();
        while (!bDone) {
            // if the available space is enough for the rest of the data,
            // add it and we're done
            if (remaining <= avail) {
                buffer.put(value, offset, remaining);
                bDone = true;
            }
            // if it's not, then we need to put what we can and then
            // expand the buffer[]
            else {
                buffer.put(value, offset, avail);
                buffer.flip(); // full buffer is flipped, ready for downstream reads
                offset += avail;
                remaining -= avail;
                // allocate a new buffer and expand the array
                buffer = bnfObj.allocateBuffer(bnfObj.getOutgoingBufferSize());
                buffers = WsByteBufferUtils.expandBufferArray(buffers, buffer);
                avail = buffer.capacity();
            }
        }
        return buffers;
    }
}
public class Key { /** * Returns the translation corresponding to the given locale code .
* @ param locale locale code
* @ return the translation
* @ throws java . lang . IllegalArgumentException if the locale is null or empty
* or contains other characters than letters and " - " . */
public Translation getTranslation ( String locale ) { } } | LocaleCodeSpecification . assertCode ( locale ) ; return this . getTranslations ( ) . get ( locale ) ; |
public class TimeBasedKeys { /** * Get the next key for the current time in UTC .
* @ return a long that is determined by the current time in UTC and a unique counter value for the current time . */
public long nextKey ( ) { } } | // Note that per Oracle the currentTimeMillis is the current number of seconds past the epoch
// in UTC ( not in local time ) . Therefore , processes with exactly synchronized clocks will
// always get the same value regardless of their timezone . . .
final long timestamp = System . currentTimeMillis ( ) ; final int increment = counterFor ( timestamp ) ; if ( increment <= maximumCounterValue ) { return ( timestamp << counterBits ) + increment ; } // The counter is surprisingly too high , so try again ( repeatedly ) until we get to the next millisecond . . .
return this . nextKey ( ) ; |
public class DefaultInterval { /** * ( non - Javadoc )
* @ see org . virginia . pbhs . parameters . IInterval # getMaximumDuration ( ) */
@ Override public Long getMaximumLength ( ) { } } | calculator ( ) ; if ( simple ) { return v [ 3 ] - v [ 0 ] ; } else { Weight maxDur = cn . getMaximumDuration ( ) ; return maxDur . isInfinity ( ) ? null : maxDur . value ( ) ; } |
public class BProgram { /** * Registers a BThread into the program . If the program started , the BThread
* will take part in the current bstep .
* @ param bt the BThread to be registered . */
public void registerBThread ( BThreadSyncSnapshot bt ) { } } | recentlyRegisteredBthreads . add ( bt ) ; addBThreadCallback . ifPresent ( cb -> cb . bthreadAdded ( this , bt ) ) ; |
public class Instrumentation {

    /**
     * <code>optional string app_name_setter = 5;</code>
     * <pre>
     * name of function (&lt;string&gt;);
     * used to inform the harness about the app name
     * </pre>
     *
     * Standard protobuf lazy-conversion accessor: the field may be stored either as a
     * String or as a ByteString; on first bytes-access the String is converted to UTF-8
     * bytes and cached back into the field.
     */
    public com.google.protobuf.ByteString getAppNameSetterBytes() {
        java.lang.Object ref = appNameSetter_;
        if (ref instanceof java.lang.String) {
            com.google.protobuf.ByteString b =
                    com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
            // Cache the byte form so subsequent calls skip the conversion.
            appNameSetter_ = b;
            return b;
        } else {
            return (com.google.protobuf.ByteString) ref;
        }
    }
}
public class ProgressEstimate { /** * Gets the estimated time remaining in milliseconds based upon the total number of work units , the start time , and how many units have been done
* so far .
* This should not be called before any work units have been done . */
public long getTimeRemainingInMillis ( ) { } } | long batchTime = System . currentTimeMillis ( ) - startTime ; double timePerIteration = ( double ) batchTime / ( double ) worked . get ( ) ; return ( long ) ( timePerIteration * ( total - worked . get ( ) ) ) ; |
public class LicenseTypeImpl { /** * { @ inheritDoc } */
public LicenseType copy ( ) { } } | return new LicenseTypeImpl ( CopyUtil . cloneList ( description ) , licenseRequired , CopyUtil . cloneString ( id ) , CopyUtil . cloneString ( licReqId ) ) ; |
public class GpsdHelper { /** * Process the TPVObject , all params required .
* @ param tpv The time - position - velocity object to process
* @ param processor Processor that handles the exchange .
* @ param endpoint GpsdEndpoint receiving the exchange . */
public static void consumeTPVObject ( TPVObject tpv , Processor processor , GpsdEndpoint endpoint , ExceptionHandler exceptionHandler ) { } } | // Simplify logging when https : / / github . com / taimos / GPSd4Java / pull / 19 is merged into upstream .
LOG . debug ( "About to consume TPV object {},{} from {}" , tpv . getLatitude ( ) , tpv . getLongitude ( ) , endpoint ) ; Validate . notNull ( tpv ) ; Validate . notNull ( processor ) ; Validate . notNull ( endpoint ) ; GpsCoordinates coordinates = gpsCoordinates ( new Date ( new Double ( tpv . getTimestamp ( ) ) . longValue ( ) ) , tpv . getLatitude ( ) , tpv . getLongitude ( ) ) ; Exchange exchange = anExchange ( endpoint . getCamelContext ( ) ) . withPattern ( OutOnly ) . withHeader ( TPV_HEADER , tpv ) . withBody ( coordinates ) . build ( ) ; try { processor . process ( exchange ) ; } catch ( Exception e ) { exceptionHandler . handleException ( e ) ; } |
public class XExtensionConverter { /** * / * ( non - Javadoc )
* @ see com . thoughtworks . xstream . converters . Converter # unmarshal ( com . thoughtworks . xstream . io . HierarchicalStreamReader , com . thoughtworks . xstream . converters . UnmarshallingContext ) */
public Object unmarshal ( HierarchicalStreamReader reader , UnmarshallingContext context ) { } } | URI uri = URI . create ( reader . getAttribute ( "uri" ) ) ; return XExtensionManager . instance ( ) . getByUri ( uri ) ; |
public class PropKit {

    /**
     * Returns the {@link Prop} for the given properties file, loading it on first use.
     * Example:<br>
     * PropKit.use("config.txt", "UTF-8");<br>
     * PropKit.use("other_config.txt", "UTF-8");<br><br>
     * String userName = PropKit.get("userName");<br>
     * String password = PropKit.get("password");<br><br>
     * userName = PropKit.use("other_config.txt").get("userName");<br>
     * password = PropKit.use("other_config.txt").get("password");<br><br>
     * PropKit.use("com/jfinal/config_in_sub_directory_of_classpath.txt");
     *
     * @param fileName the properties file's name in classpath or a sub directory of classpath
     * @param encoding the file encoding
     */
    public static Prop use(String fileName, String encoding) {
        Prop result = map.get(fileName);
        if (result == null) {
            // Double-checked locking: synchronize only on the slow path where the
            // file has not been loaded yet, then re-read under the lock.
            synchronized (PropKit.class) {
                result = map.get(fileName);
                if (result == null) {
                    result = new Prop(fileName, encoding);
                    map.put(fileName, result);
                    // The first file ever loaded becomes the implicit default prop.
                    if (PropKit.prop == null) {
                        PropKit.prop = result;
                    }
                }
            }
        }
        return result;
    }
}
public class ClientWmsRasterLayerStore {

    /**
     * Method based on WmsTileServiceImpl in GWT2 client.
     *
     * Computes the raster tiles covering the given bounds; degenerate (zero width or
     * height) bounds yield an empty list.
     */
    private List<org.geomajas.layer.tile.RasterTile> calculateTilesForBounds(Bbox bounds) {
        List<org.geomajas.layer.tile.RasterTile> tiles =
                new ArrayList<org.geomajas.layer.tile.RasterTile>();
        if (bounds.getHeight() == 0 || bounds.getWidth() == 0) {
            return tiles;
        }
        // NOTE(review): as written this always returns an empty list — the actual tile
        // computation from WmsTileServiceImpl appears to be missing here; confirm
        // against the GWT2 client implementation.
        return tiles;
    }
}
public class ResourceAssignment {

    /**
     * Splits timephased work segments in line with cost rates. Note that this is an
     * approximation - where a rate changes during a working day, the second rate is
     * used for the whole day.
     *
     * @param table cost rate table
     * @param calendar calendar used by this assignment
     * @param work timephased work segment
     * @param rateIndex rate applicable at the start of the timephased work segment
     * @return list of segments which replace the one supplied by the caller
     */
    private List<TimephasedWork> splitWork(CostRateTable table, ProjectCalendar calendar, TimephasedWork work, int rateIndex) {
        List<TimephasedWork> result = new LinkedList<TimephasedWork>();
        // Reset the running total; per-day amounts are retained on each split segment.
        work.setTotalAmount(Duration.getInstance(0, work.getAmountPerDay().getUnits()));
        while (true) {
            CostRateTableEntry rate = table.get(rateIndex);
            Date splitDate = rate.getEndDate();
            if (splitDate.getTime() >= work.getFinish().getTime()) {
                // Current rate covers the remainder of the segment: no further splits.
                result.add(work);
                break;
            }
            // Close the current segment at the last working instant before the rate change...
            Date currentPeriodEnd = calendar.getPreviousWorkFinish(splitDate);
            TimephasedWork currentPeriod = new TimephasedWork(work);
            currentPeriod.setFinish(currentPeriodEnd);
            result.add(currentPeriod);
            // ...and continue the remainder from the next working instant under the next rate.
            Date nextPeriodStart = calendar.getNextWorkStart(splitDate);
            work.setStart(nextPeriodStart);
            ++rateIndex;
        }
        return result;
    }
}
public class SingleContextSource { /** * Construct a SingleContextSource and execute the LdapOperationsCallback using the created instance .
* This makes sure the same connection will be used for all operations inside the LdapOperationsCallback ,
* which is particularly useful when working with e . g . Paged Results as these typically require the exact
* same connection to be used for all requests involving the same cookie . .
* The SingleContextSource instance will be properly disposed of once the operation has been completed .
* @ param contextSource The target ContextSource to retrieve a DirContext from
* @ param callback the callback to perform the Ldap operations
* @ param useReadOnly if < code > true < / code > , use the { @ link org . springframework . ldap . core . ContextSource # getReadOnlyContext ( ) }
* method on the target ContextSource to get the actual DirContext instance , if < code > false < / code > ,
* use { @ link org . springframework . ldap . core . ContextSource # getReadWriteContext ( ) } .
* @ param ignorePartialResultException Used for populating this property on the created LdapTemplate instance .
* @ param ignoreNameNotFoundException Used for populating this property on the created LdapTemplate instance .
* @ return the result returned from the callback .
* @ since 2.0 */
public static < T > T doWithSingleContext ( ContextSource contextSource , LdapOperationsCallback < T > callback , boolean useReadOnly , boolean ignorePartialResultException , boolean ignoreNameNotFoundException ) { } } | SingleContextSource singleContextSource ; if ( useReadOnly ) { singleContextSource = new SingleContextSource ( contextSource . getReadOnlyContext ( ) ) ; } else { singleContextSource = new SingleContextSource ( contextSource . getReadWriteContext ( ) ) ; } LdapTemplate ldapTemplate = new LdapTemplate ( singleContextSource ) ; ldapTemplate . setIgnorePartialResultException ( ignorePartialResultException ) ; ldapTemplate . setIgnoreNameNotFoundException ( ignoreNameNotFoundException ) ; try { return callback . doWithLdapOperations ( ldapTemplate ) ; } finally { singleContextSource . destroy ( ) ; } |
public class RuntimeExceptionsFactory { /** * Constructs and initializes a new { @ link UnsupportedOperationException } with the given { @ link Throwable cause }
* and { @ link String message } formatted with the given { @ link Object [ ] arguments } .
* @ param cause { @ link Throwable } identified as the reason this { @ link UnsupportedOperationException } was thrown .
* @ param message { @ link String } describing the { @ link UnsupportedOperationException exception } .
* @ param args { @ link Object [ ] arguments } used to replace format placeholders in the { @ link String message } .
* @ return a new { @ link UnsupportedOperationException } with the given { @ link Throwable cause }
* and { @ link String message } .
* @ see java . lang . UnsupportedOperationException */
public static UnsupportedOperationException newUnsupportedOperationException ( Throwable cause , String message , Object ... args ) { } } | return new UnsupportedOperationException ( format ( message , args ) , cause ) ; |
public class LinkedList {

    /**
     * Inserts element e before non-null Node succ.
     */
    void linkBefore(E e, Node<E> succ) {
        // assert succ != null;
        final Node<E> pred = succ.prev;
        // New node sits between pred and succ.
        final Node<E> newNode = new Node<>(pred, e, succ);
        succ.prev = newNode;
        if (pred == null)
            first = newNode;  // inserting at the head of the list
        else
            pred.next = newNode;
        size++;
        modCount++;  // structural modification: invalidates live iterators
    }
}
public class ObjectAnimator {

    /**
     * This function is called immediately before processing the first animation frame
     * of an animation. If there is a nonzero <code>startDelay</code>, the function is
     * called after that delay ends. It takes care of the final initialization steps for
     * the animation, including setting mEvaluator if the user has not set it up, and
     * the setter/getter methods if the user did not supply them.
     * <p>Overriders of this method should call the superclass method to cause internal
     * mechanisms to be set up correctly.</p>
     */
    @Override
    void initAnimation() {
        if (!mInitialized) {
            // mValueType may change due to setter/getter setup; do this before calling
            // super.init(), which uses mValueType to set up the default type evaluator.
            if ((mProperty == null) && AnimatorProxy.NEEDS_PROXY && (mTarget instanceof View)
                    && PROXY_PROPERTIES.containsKey(mPropertyName)) {
                // Pre-Honeycomb: route the named property through the proxy implementation.
                setProperty(PROXY_PROPERTIES.get(mPropertyName));
            }
            int numValues = mValues.length;
            for (int i = 0; i < numValues; ++i) {
                mValues[i].setupSetterAndGetter(mTarget);
            }
            super.initAnimation();
        }
    }
}
public class BeamToCDK { /** * Create a new CDK { @ link IAtom } from the Beam Atom . If the element is
* unknown ( i . e . ' * ' ) then an pseudo atom is created .
* @ param atom an Atom from the Beam Graph
* @ return the CDK atom to have it ' s properties set */
IAtom newCDKAtom ( Atom atom ) { } } | Element element = atom . element ( ) ; boolean unknown = element == Element . Unknown ; if ( unknown ) { IPseudoAtom pseudoAtom = builder . newInstance ( IPseudoAtom . class , element . symbol ( ) ) ; pseudoAtom . setSymbol ( element . symbol ( ) ) ; pseudoAtom . setLabel ( atom . label ( ) ) ; return pseudoAtom ; } return createAtom ( element ) ; |
public class UnderReplicatedBlocks {

    /**
     * Removes a block from an under-replication queue, given a priority.
     *
     * First checks the dedicated RAID queue for corrupt RAID-stored blocks; otherwise
     * tries the queue at the given priority, and finally falls back to scanning every
     * other priority queue in case the block was filed under a different level.
     *
     * @return true if the block was found and removed from some queue
     */
    synchronized boolean remove(BlockInfo blockInfo, int priLevel) {
        INodeFile fileINode = blockInfo.getINode();
        // Corrupt blocks belonging to RAID storage live in a separate raid queue.
        if (priLevel == QUEUE_WITH_CORRUPT_BLOCKS && fileINode != null
                && fileINode.getStorageType().equals(StorageType.RAID_STORAGE)) {
            RaidCodec codec = ((INodeRaidStorage) fileINode.getStorage()).getCodec();
            return raidQueue.remove(blockInfo, codec);
        }
        if (priLevel >= 0 && priLevel < LEVEL && priorityQueues.get(priLevel).remove(blockInfo)) {
            if (NameNode.stateChangeLog.isDebugEnabled()) {
                NameNode.stateChangeLog.debug("BLOCK* NameSystem.UnderReplicationBlock.remove: "
                        + "Removing block " + blockInfo + " from priority queue " + priLevel);
            }
            return true;
        } else {
            // Not found at the expected priority: the block's priority may have changed,
            // so scan all other levels.
            for (int i = 0; i < LEVEL; i++) {
                if (i != priLevel && priorityQueues.get(i).remove(blockInfo)) {
                    if (NameNode.stateChangeLog.isDebugEnabled()) {
                        NameNode.stateChangeLog.debug("BLOCK* NameSystem.UnderReplicationBlock.remove: "
                                + "Removing block " + blockInfo + " from priority queue " + i);
                    }
                    return true;
                }
            }
        }
        return false;
    }
}
public class ColumnVector { /** * Gets boolean type values from [ rowId , rowId + count ) . The return values for the null slots
* are undefined and can be anything . */
public boolean [ ] getBooleans ( int rowId , int count ) { } } | boolean [ ] res = new boolean [ count ] ; for ( int i = 0 ; i < count ; i ++ ) { res [ i ] = getBoolean ( rowId + i ) ; } return res ; |
public class TaskDao {

    /**
     * Updates the persisted state of a task. TaskManager use only.
     *
     * @param id the numeric task id as a string; a non-numeric value causes a
     *           {@link NumberFormatException}
     * @param newState the state value to store
     */
    public void setTaskState(String id, String newState) {
        db.update(UPDATE_SQL2, newState, Integer.parseInt(id));
    }
}
public class ConfigurationOption {

    /**
     * Constructs a {@link ConfigurationOptionBuilder} whose value is a {@link List} of
     * {@link URL}s, defaulting to an empty list. (The previous Javadoc incorrectly
     * described the value type as {@link String}.)
     *
     * @return a {@link ConfigurationOptionBuilder} whose value is of type {@code List<URL>}
     */
    public static ConfigurationOptionBuilder<List<URL>> urlsOption() {
        return new ConfigurationOptionBuilder<List<URL>>(
                new ListValueConverter<URL>(UrlValueConverter.INSTANCE), List.class)
                .defaultValue(Collections.<URL>emptyList());
    }
}
public class Refunds { /** * 通过微信订单号查询退款
* @ param transactionId 微信订单号
* @ return 退款查询对象 , 或抛WepayException */
public RefundQueryResponse queryByTransactionId ( String transactionId ) { } } | Map < String , String > queryParams = buildQueryParams ( WepayField . TRANSACTION_ID , transactionId ) ; Map < String , Object > respData = doPost ( QUERY , queryParams ) ; return renderQueryResp ( respData ) ; |
public class Vacuum { /** * Get the vacuums current timezone .
* @ return The current timezone .
* @ throws CommandExecutionException When there has been a error during the communication or the response was invalid . */
public TimeZone getTimezone ( ) throws CommandExecutionException { } } | JSONArray resp = sendToArray ( "get_timezone" ) ; String zone = resp . optString ( 0 , null ) ; if ( zone == null ) throw new CommandExecutionException ( CommandExecutionException . Error . INVALID_RESPONSE ) ; return TimeZone . getTimeZone ( zone ) ; |
public class WeightRandom { /** * 增加对象权重
* @ param weightObj 权重对象
* @ return this */
public WeightRandom < T > add ( WeightObj < T > weightObj ) { } } | if ( null != weightObj ) { final double weight = weightObj . getWeight ( ) ; if ( weightObj . getWeight ( ) > 0 ) { double lastWeight = ( this . weightMap . size ( ) == 0 ) ? 0 : this . weightMap . lastKey ( ) ; this . weightMap . put ( weight + lastWeight , weightObj . getObj ( ) ) ; // 权重累加
} } return this ; |
public class UpdateChecker {

    /**
     * Checks if a new release has been published on the website. This does not compare
     * the current app version to the release version on the website, just checks if
     * something happened on the website. This ensures that updates that were ignored by
     * the user do not show up again. Assumes that the artifact has a jar-packaging.
     *
     * @param repoBaseURL the base url of the maven repo to use
     * @param mavenGroupID the maven groupId of the artifact to update
     * @param mavenArtifactID the maven artifactId of the artifact to update
     * @param mavenClassifier the maven classifier of the artifact to update
     * @return {@code true} if a new release is available and the user did not ignore it
     */
    public static UpdateInfo isUpdateAvailable(URL repoBaseURL, String mavenGroupID, String mavenArtifactID, String mavenClassifier) {
        // Delegate to the overload with an explicit packaging type, defaulting to "jar".
        return isUpdateAvailable(repoBaseURL, mavenGroupID, mavenArtifactID, mavenClassifier, "jar");
    }
}
public class BaseMessageHeader {

    /**
     * Copies the named value from the properties map into the name/value matrix.
     * (The previous Javadoc described parameters that do not match this signature.)
     *
     * @param properties source map of values keyed by name; if null, the matrix is
     *                   returned unchanged
     * @param strName name of the entry to copy
     * @param mxProperties existing matrix (or null to start a new one)
     * @return the matrix with the name/value pair added
     */
    public final Object[][] addNameValue(Map<String, Object> properties, String strName, Object[][] mxProperties) {
        if (properties != null)
            // Delegate to the (matrix, name, value) overload with the looked-up value.
            mxProperties = this.addNameValue(mxProperties, strName, properties.get(strName));
        return mxProperties;
    }
}
public class ClassicCounter { /** * Returns the Counter over Strings specified by this String .
* The String is often the whole contents of a file .
* The file can include comments if each line of comment starts with
* a hash ( # ) symbol , and does not contain any TAB characters .
* Otherwise , the format is one entry per line . Each line must contain
* precisely one tab separating a key and a value , giving a format of :
* < blockquote >
* StringKey \ tdoubleValue \ n
* < / blockquote >
* @ param s String representation of a Counter , where entries are one per
* line such that each line is either a comment ( begins with # )
* or key \ t value
* @ return The Counter with String keys */
public static ClassicCounter < String > valueOfIgnoreComments ( String s ) { } } | ClassicCounter < String > result = new ClassicCounter < String > ( ) ; String [ ] lines = s . split ( "\n" ) ; for ( String line : lines ) { String [ ] fields = line . split ( "\t" ) ; if ( fields . length != 2 ) { if ( line . startsWith ( "#" ) ) { continue ; } else { throw new RuntimeException ( "Got unsplittable line: \"" + line + '\"' ) ; } } result . setCount ( fields [ 0 ] , Double . parseDouble ( fields [ 1 ] ) ) ; } return result ; |
public class LogViewer { /** * Lists directories containing HPEL repositories . It is called
* when repository is not specified explicitly in the arguments
* @ return List of files which can be used as repositories */
protected File [ ] listRepositoryChoices ( ) { } } | // check current location
String currentDir = System . getProperty ( "log.repository.root" ) ; if ( currentDir == null ) currentDir = System . getProperty ( "user.dir" ) ; File logDir = new File ( currentDir ) ; if ( logDir . isDirectory ( ) ) { File [ ] result = RepositoryReaderImpl . listRepositories ( logDir ) ; if ( result . length == 0 && ( RepositoryReaderImpl . containsLogFiles ( logDir ) || tailInterval > 0 ) ) { return new File [ ] { logDir } ; } else { return result ; } } else { return new File [ ] { } ; } |
public class RecurringData { /** * Calculate start dates for a monthly recurrence .
* @ param calendar current date
* @ param frequency frequency
* @ param dates array of start dates */
private void getMonthlyDates ( Calendar calendar , int frequency , List < Date > dates ) { } } | if ( m_relative ) { getMonthlyRelativeDates ( calendar , frequency , dates ) ; } else { getMonthlyAbsoluteDates ( calendar , frequency , dates ) ; } |
public class Percent { /** * Parse percent from given string value and return it as { @ link Number } instance . Given string < code > value < / code >
* should have a numeric part and percent sign and should respect this formatter locale . Returns null if
* < code > value < / code > argument is null or empty .
* @ param value percent value .
* @ return percent as { @ link Number } instance , possible null if < code > value < / code > argument is null or empty .
* @ throws ParseException if < code > value < / code > is not formated as percent . */
@ Override public Object parse ( String value ) throws ParseException { } } | if ( value == null || value . isEmpty ( ) ) { return null ; } return numberFormat . parse ( value ) ; |
public class SeqPattern {

    /**
     * Advances this level's counter and renders the next sequence string.
     *
     * @return the pattern with each parent level's "{n}" placeholder replaced by that
     *         level's current sequence text, and this level's placeholder replaced by
     *         the newly incremented value
     */
    public String next() {
        seq++;  // consume the next counter value for this level
        String text = pattern;
        // Substitute each parent level's placeholder with its current sequence text.
        // (getSytle is the project API's spelling.)
        for (final Integer paramLevel : params) {
            text = Strings.replace(text, "{" + paramLevel + "}",
                    generator.getSytle(paramLevel.intValue()).curSeqText());
        }
        // Finally substitute this level's own placeholder with the new value.
        return Strings.replace(text, "{" + level + "}", seqNumStyle.build(seq));
    }
}
public class RecordTarget {

    /**
     * Writes the batch by calling the {@link #write(Record)} method for each record in
     * the batch. If a call to {@link #write(Record)} throws an
     * {@link OnRecordErrorException}, the error handling is done based on the stage's
     * 'on record error' configuration: discarded, sent to error, or stopping the
     * pipeline. An empty batch is delegated to {@code emptyBatch()}.
     *
     * @param batch the batch of records to write
     * @throws StageException if the <code>Target</code> had an error while writing records
     */
    @Override
    public void write(Batch batch) throws StageException {
        Iterator<Record> it = batch.getRecords();
        if (it.hasNext()) {
            while (it.hasNext()) {
                Record record = it.next();
                try {
                    write(record);
                } catch (OnRecordErrorException ex) {
                    // Per-record errors are routed per the stage configuration.
                    switch (getContext().getOnErrorRecord()) {
                        case DISCARD:
                            break;  // deliberately drop the failing record
                        case TO_ERROR:
                            getContext().toError(record, ex);
                            break;
                        case STOP_PIPELINE:
                            throw ex;
                        default:
                            // Unknown policy value: defensive failure.
                            throw new IllegalStateException(Utils.format(
                                    "It should never happen. OnError '{}'",
                                    getContext().getOnErrorRecord(), ex));
                    }
                }
            }
        } else {
            emptyBatch();
        }
    }
}
public class TaskTracker { /** * Get the average time in milliseconds to refill a free reduce slot . This
* average is calculated on a rotating buffer .
* @ return Average time in milliseconds to refill a free reduce slot . Return - 1
* if the value is not valid ( hasn ' t actually refilled anything ) */
int getAveReduceSlotRefillMsecs ( ) { } } | synchronized ( reduceSlotRefillMsecsQueue ) { if ( reduceSlotRefillMsecsQueue . isEmpty ( ) ) { return - 1 ; } int totalReduceSlotRefillMsecs = 0 ; for ( int refillMsecs : reduceSlotRefillMsecsQueue ) { totalReduceSlotRefillMsecs += refillMsecs ; } return totalReduceSlotRefillMsecs / reduceSlotRefillMsecsQueue . size ( ) ; } |
public class ModelBuilderTemplateHandler {

    /**
     * Records the template-start event for later model building (parsing only — no
     * processing happens here).
     */
    @Override
    public void handleTemplateStart(final ITemplateStart templateStart) {
        this.events.add(TemplateStart.asEngineTemplateStart(templateStart));
        // The engine event we might have created is not forwarded — the original event
        // is passed on, which keeps cache creation transparent to the handler chain.
        super.handleTemplateStart(templateStart);
    }
}
public class Utils {

    /**
     * Shifts feature locations according to the new sequence coordinates after
     * {@code deletedBeginNs} leading 'n' bases were removed, and removes features (or
     * individual locations) that now fall entirely inside the deleted leading region or
     * beyond the new sequence end. Gap features are collected and removed at the end;
     * non-gap locations are removed in place.
     *
     * @param entry the entry whose features are adjusted (null returns null)
     * @param deletedBeginNs number of deleted 'n's at the beginning of the sequence
     * @return list of validation messages describing the fixes applied
     */
    public static ArrayList<ValidationMessage> shiftAndRemoveFeature(Entry entry, int deletedBeginNs) {
        List<Location> locationsList = null;
        ArrayList<Feature> gapFeatures = new ArrayList();
        ArrayList<ValidationMessage> validationMessages = new ArrayList();
        // NOTE(review): the two variables below are written but never read afterwards.
        long featureNewBeginLocation = 0;
        long featureNewEndLocation = 0;
        if (entry == null) {
            return null;
        }
        List<Feature> features = entry.getFeatures();
        for (int i = 0; i < features.size(); i++) {
            Feature feature = features.get(i);
            locationsList = feature.getLocations().getLocations();
            boolean invalidFeature = false;
            // New sequence length after the leading 'n's were removed.
            long newSequenceLength = entry.getSequence().getLength();
            for (int j = 0; j < feature.getLocations().getLocations().size(); j++) {
                // "position" marks a single-base location (begin == end).
                boolean position = false;
                Location location = feature.getLocations().getLocations().get(j);
                // Check that feature locations are within the new sequence range and
                // shift them according to the new coordinates.
                if (location.getBeginPosition() == location.getEndPosition()) {
                    position = true;
                }
                if (location.getBeginPosition() <= deletedBeginNs
                        && location.getEndPosition() <= deletedBeginNs) {
                    // Location lies entirely inside the deleted leading region.
                    if (position) {
                        location.setBeginPosition(location.getBeginPosition() - deletedBeginNs);
                    } else {
                        location.setBeginPosition(location.getBeginPosition() - deletedBeginNs);
                        location.setEndPosition(location.getEndPosition() - deletedBeginNs);
                    }
                    if (feature.getName().equals(Feature.GAP_FEATURE_NAME)) {
                        validationMessages.add(ValidationMessage.message(Severity.FIX, UTILS_2,
                                location.getBeginPosition().toString(),
                                location.getEndPosition().toString()));
                        gapFeatures.add(feature);  // gap features removed in bulk below
                        invalidFeature = true;
                    } else {
                        // Drop just this location; step j back so the shifted list is
                        // re-examined at the same index.
                        feature.getLocations().removeLocation(location);
                        j = j - 1;
                        ValidationMessage<Origin> message = ValidationMessage.message(
                                Severity.FIX, UTILS_4, feature.getName(),
                                location.getBeginPosition().toString(),
                                location.getEndPosition().toString());
                        message.getOrigins().add(feature.getOrigin());
                        validationMessages.add(message);
                        invalidFeature = true;
                    }
                } else if (location.getBeginPosition() <= deletedBeginNs
                        && location.getEndPosition() > deletedBeginNs) {
                    // Location straddles the deleted region: clamp its start to 1.
                    location.setBeginPosition((long) 1);
                    location.setEndPosition(location.getEndPosition() - deletedBeginNs);
                    if (location.getEndPosition() > newSequenceLength) {
                        location.setEndPosition(newSequenceLength);
                    }
                } else if (location.getBeginPosition() > deletedBeginNs
                        && location.getEndPosition() > deletedBeginNs) {
                    // Location entirely past the deleted region: shift both ends.
                    if (position) {
                        location.setBeginPosition(location.getBeginPosition() - deletedBeginNs);
                    } else {
                        location.setBeginPosition(location.getBeginPosition() - deletedBeginNs);
                        location.setEndPosition(location.getEndPosition() - deletedBeginNs);
                    }
                    if (location.getBeginPosition() > newSequenceLength
                            && location.getEndPosition() > newSequenceLength) {
                        // After shifting, the location falls past the new sequence end.
                        if (feature.getName().equals(Feature.GAP_FEATURE_NAME)) {
                            gapFeatures.add(feature);
                            ValidationMessage<Origin> message = ValidationMessage.message(
                                    Severity.FIX, UTILS_2,
                                    location.getBeginPosition().toString(),
                                    location.getEndPosition().toString());
                            message.getOrigins().add(feature.getOrigin());
                            validationMessages.add(message);
                            invalidFeature = true;
                        } else {
                            feature.getLocations().removeLocation(location);
                            j = j - 1;
                            ValidationMessage<Origin> message = ValidationMessage.message(
                                    Severity.FIX, UTILS_4, feature.getName(),
                                    location.getBeginPosition().toString(),
                                    location.getEndPosition().toString());
                            message.getOrigins().add(feature.getOrigin());
                            validationMessages.add(message);
                            invalidFeature = true;
                        }
                    }
                    if (location.getBeginPosition() <= newSequenceLength
                            && location.getEndPosition() > newSequenceLength) {
                        // Truncate locations overhanging the new sequence end.
                        location.setEndPosition(newSequenceLength);
                    }
                }
            }
            if (feature.getLocations().getLocations().isEmpty()) {
                // All locations were removed: drop the whole feature and step i back.
                ValidationMessage<Origin> message = ValidationMessage.message(
                        Severity.FIX, UTILS_5, feature.getName(), feature.getName());
                message.getOrigins().add(feature.getOrigin());
                validationMessages.add(message);
                removeFeatureQualifiers(feature);
                entry.removeFeature(feature);
                i = i - 1;
            }
            locationsList = feature.getLocations().getLocations();
            for (Location location : locationsList) {
                featureNewBeginLocation = location.getBeginPosition();
                featureNewEndLocation = location.getEndPosition();
            }
            // Qualifier location shifting.
            // NOTE(review): the three locals below are declared but unused.
            Long newBeginLocation, newEndLocation = null;
            Location newLocation = null;
            if (!invalidFeature) {
                for (Qualifier qualifier : feature.getQualifiers()) {
                    // (A large commented-out block handling TRANSL_EXCEPT and ANTICODON
                    // qualifiers was removed here — it was dead code; only the
                    // rpt_unit_range and tag_peptide qualifiers are shifted.)
                    if (qualifier.getName().equals(qualifier.RPT_UNIT_RANGE_QUALIFIER_NAME)) {
                        Rpt_Unit_RangeQualifier rptUnitRangequalifier =
                                new Rpt_Unit_RangeQualifier(qualifier.getValue());
                        if (shiftLocationQualifier(rptUnitRangequalifier, deletedBeginNs, feature) != null) {
                            validationMessages.add(
                                    shiftLocationQualifier(rptUnitRangequalifier, deletedBeginNs, feature));
                        }
                    } else if (qualifier.getName().equals(qualifier.TAG_PEPTIDE_QUALIFIER_NAME)) {
                        Tag_PeptideQualifier tagPeptidequalifier =
                                new Tag_PeptideQualifier(qualifier.getValue());
                        if (shiftLocationQualifier(tagPeptidequalifier, deletedBeginNs, feature) != null) {
                            validationMessages.add(
                                    shiftLocationQualifier(tagPeptidequalifier, deletedBeginNs, feature));
                        }
                    }
                }
            }
        }
        // Remove the collected gap features in one pass.
        if (gapFeatures.size() > 0) {
            for (Feature feature : gapFeatures) {
                entry.removeFeature(feature);
            }
        }
        return validationMessages;
    }
}
public class AuthzFacadeImpl {

    /**
     * Streams certificate info for the given id to the HTTP response.
     *
     * @see com.att.authz.facade.AuthzFacade#getCertInfoByID(com.att.authz.env.AuthzTrans, javax.servlet.http.HttpServletRequest, javax.servlet.http.HttpServletResponse, java.lang.String)
     */
    @Override
    public Result<Void> getCertInfoByID(AuthzTrans trans, HttpServletRequest req, HttpServletResponse resp, String id) {
        TimeTaken tt = trans.start(GET_CERT_BY_ID, Env.SUB | Env.ALWAYS);
        try {
            Result<CERTS> rci = service.getCertInfoByID(trans, req, id);
            switch (rci.status) {
                case OK:
                    if (Question.willSpecialLog(trans, trans.user())) {
                        // Special-log mode: serialize once so the payload can also be
                        // captured in the encrypted trace.
                        RosettaData<CERTS> data = certsDF.newData(trans).load(rci.value);
                        Question.logEncryptTrace(trans, data.asString());
                        data.to(resp.getOutputStream());
                    } else {
                        // Normal path: stream straight to the response.
                        certsDF.direct(trans, rci.value, resp.getOutputStream());
                    }
                    setContentType(resp, certsDF.getOutType());
                    return Result.ok();
                default:
                    return Result.err(rci);
            }
        } catch (Exception e) {
            trans.error().log(e, IN, GET_CERT_BY_ID);
            return Result.err(e);
        } finally {
            tt.done();  // always close the timing span
        }
    }
}
public class AsyncAppender {

    /**
     * Process the logging events. This is called by the batcher.
     *
     * @param loggingEvents the logging events to be written to the underlying appender
     */
    private void processLoggingEvents(List<LoggingEvent> loggingEvents) {
        // Lazy initialization of the appender. This is needed because the original
        // appender's configuration may become available only after the complete log4j
        // initialization, so poll (with sleeps) until it can be resolved.
        while (appenders.getAllAppenders() == null) {
            if ((batcher == null) || (batcher.isPaused())) {
                try {
                    Thread.sleep(SLEEP_TIME_MS);
                } catch (InterruptedException ignore) {
                }
                continue;
            }
            org.apache.log4j.Logger asyncLogger =
                    LoggerCache.getInstance().getOrCreateLogger(LOGGER_ASYNC_APPENDER);
            Appender originalAppender = asyncLogger.getAppender(originalAppenderName);
            if (originalAppender == null) {
                // Underlying appender not configured yet; wait and retry.
                try {
                    Thread.sleep(SLEEP_TIME_MS);
                } catch (InterruptedException ignore) {
                }
                continue;
            }
            appenders.addAppender(originalAppender);
        }
        // First take the overflown summary events and put them back into the queue.
        for (Iterator<Entry<String, LogSummary>> iter = logSummaryMap.entrySet().iterator(); iter.hasNext();) {
            Entry<String, LogSummary> mapEntry = (Entry<String, LogSummary>) iter.next();
            // If queue space is not available, stop immediately.
            if (batcher.isSpaceAvailable()) {
                LogSummary logSummary = mapEntry.getValue();
                LoggingEvent event = logSummary.createEvent();
                // Enqueue the summary event and, on success, drop it from the map.
                if (batcher.process(event)) {
                    iter.remove();
                } else {
                    break;
                }
            } else {
                break;
            }
        }
        // Finally forward the batched events to the underlying appender(s).
        for (LoggingEvent event : loggingEvents) {
            appenders.appendLoopOnAppenders(event);
        }
    }
}
public class DynamoDBTableMapper {
    /**
     * Saves the object into DynamoDB with the condition that the hash key
     * (and, if applicable, the range key) does not already exist.
     *
     * @param object the object to create
     * @throws ConditionalCheckFailedException if an item with the same key(s) already exists
     * @see com.amazonaws.services.dynamodbv2.datamodeling.DynamoDBMapper#save
     * @see com.amazonaws.services.dynamodbv2.datamodeling.DynamoDBSaveExpression
     * @see com.amazonaws.services.dynamodbv2.model.ExpectedAttributeValue
     */
    public void saveIfNotExists(T object) throws ConditionalCheckFailedException {
        final DynamoDBSaveExpression saveExpression = new DynamoDBSaveExpression();
        // Require every key attribute to be absent, turning the save into a
        // conditional put-if-not-exists.
        for (final DynamoDBMapperFieldModel<T, Object> key : model.keys()) {
            saveExpression.withExpectedEntry(key.name(),
                new ExpectedAttributeValue().withExists(false));
        }
        mapper.<T>save(object, saveExpression);
    }
}
public class Datapoint {
    /**
     * Saves this datapoint in XML format to the supplied XML writer.
     *
     * @param w an XML writer
     * @throws KNXMLException on error saving this datapoint
     */
    public void save(final XmlWriter w) throws KNXMLException {
        /* XML layout:
           <datapoint stateBased=[true|false] name=string mainNumber=int dptID=string
                      priority=string>
               knxAddress
           </datapoint> */
        w.writeStartElement(TAG_DATAPOINT);
        w.writeAttribute(ATTR_STATEBASED, Boolean.toString(stateBased));
        w.writeAttribute(ATTR_NAME, name);
        w.writeAttribute(ATTR_MAINNUMBER, Integer.toString(mainNo));
        // dptId may be unset; serialize as empty string rather than "null".
        w.writeAttribute(ATTR_DPTID, dptId == null ? "" : dptId);
        w.writeAttribute(ATTR_PRIORITY, priority.toString());
        // Element content: the KNX address, then subclass-specific data.
        main.save(w);
        doSave(w);
        w.writeEndElement();
    }
}
public class HashUtil {
    /**
     * Computes the PJW (Peter J. Weinberger) hash of a string.
     * <p>
     * Characters are folded in 4 bits at a time; whenever the top 4 bits of the
     * accumulator become non-zero they are folded back into the low bits and
     * cleared. The final value is masked to a non-negative int.
     *
     * @param str the string to hash
     * @return the non-negative PJW hash value of {@code str}
     */
    public static int pjwHash(String str) {
        final int totalBits = 32;
        final int threeQuarterBits = (totalBits * 3) / 4; // 24
        final int eighthBits = totalBits / 8;             // 4
        final int highMask = 0xFFFFFFFF << (totalBits - eighthBits); // top 4 bits
        int result = 0;
        for (char c : str.toCharArray()) {
            result = (result << eighthBits) + c;
            final int overflow = result & highMask;
            if (overflow != 0) {
                result = (result ^ (overflow >> threeQuarterBits)) & (~highMask);
            }
        }
        // Clear the sign bit so the hash is always non-negative.
        return result & 0x7FFFFFFF;
    }
}
public class UsernameAndPasswordLoginModule {
    /**
     * {@inheritDoc}
     * <p>
     * Authenticates the user/password obtained from the callback handler against
     * the user registry. Abstains (returns {@code false}) when another module has
     * already processed the login or when the credentials are missing/blank.
     * Registry-specific failures are translated into JAAS exception types.
     */
    @Override
    @FFDCIgnore({ AuthenticationException.class, IllegalArgumentException.class, WSLoginFailedException.class })
    public boolean login() throws LoginException {
        if (isAlreadyProcessed()) {
            if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                Tr.debug(tc, "Already processed by other login module, abstaining.");
            }
            return false;
        }
        try {
            Callback[] callbacks = getRequiredCallbacks(callbackHandler);
            String user = ((NameCallback) callbacks[0]).getName();
            char[] passwordChars = ((PasswordCallback) callbacks[1]).getPassword();
            // If we have insufficient data, abstain.
            if (user == null || passwordChars == null) {
                return false;
            }
            if (user.trim().isEmpty()) {
                return false;
            }
            // Mark processed only once we actually attempt authentication.
            setAlreadyProcessed();
            userRegistry = getUserRegistry();
            urAuthenticatedId = userRegistry.checkPassword(user, String.valueOf(passwordChars));
            if (urAuthenticatedId != null) {
                username = getSecurityName(user, urAuthenticatedId);
                setUpTemporarySubject();
                updateSharedState();
                return true;
            } else {
                Tr.audit(tc, "JAAS_AUTHENTICATION_FAILED_BADUSERPWD", user);
                throw new AuthenticationException(TraceNLS.getFormattedMessage(this.getClass(), TraceConstants.MESSAGE_BUNDLE, "JAAS_AUTHENTICATION_FAILED_BADUSERPWD", new Object[] { user }, "CWWKS1100A: Authentication failed for the userid {0}. A bad userid and/or password was specified."));
            }
        } catch (com.ibm.ws.security.registry.PasswordExpiredException e) {
            throw new PasswordExpiredException(e.getLocalizedMessage(), e);
        } catch (com.ibm.ws.security.registry.UserRevokedException e) {
            throw new UserRevokedException(e.getLocalizedMessage(), e);
        } catch (AuthenticationException e) {
            // NO FFDC: AuthenticationExceptions are expected (bad userid/password is pretty normal)
            throw e; // no need to wrap
        } catch (IllegalArgumentException e) {
            // NO FFDC: This is normal when user and/or password are blank/null
            throw new AuthenticationException(e.getLocalizedMessage(), e);
        } catch (WSLoginFailedException e) {
            // NO FFDC: This is normal when user and/or password are blank/null
            throw new AuthenticationException(e.getLocalizedMessage(), e);
        } catch (LoginException e) {
            throw new AuthenticationException(e.getLocalizedMessage(), e);
        } catch (Exception e) {
            // This is not normal: FFDC will be instrumented
            throw new AuthenticationException(e.getLocalizedMessage(), e);
        }
    }
}
public class ExecutionEngineJNI { /** * Store a large temp table block to disk .
* @ param siteId The site id of the block to store to disk
* @ param blockCounter The serial number of the block to store to disk
* @ param block A directly - allocated ByteBuffer of the block
* @ return true if operation succeeded , false otherwise */
public boolean storeLargeTempTableBlock ( long siteId , long blockCounter , ByteBuffer block ) { } } | LargeBlockTask task = LargeBlockTask . getStoreTask ( new BlockId ( siteId , blockCounter ) , block ) ; return executeLargeBlockTaskSynchronously ( task ) ; |
public class YarnContainerManager {
    /**
     * NM Callback: NM reports stop of a container.
     * Forwards a DONE resource status to REEF, but only for containers this
     * manager still tracks (unknown containers are silently ignored).
     *
     * @param containerId ID of the container that stopped
     */
    @Override
    public void onContainerStopped(final ContainerId containerId) {
        final boolean hasContainer = this.containers.hasContainer(containerId.toString());
        if (hasContainer) {
            this.reefEventHandlers.onResourceStatus(
                ResourceStatusEventImpl.newBuilder()
                    .setIdentifier(containerId.toString())
                    .setState(State.DONE)
                    .build());
        }
    }
}
public class NumericSelector {
    /**
     * Replaces the selector's labels and refreshes the backing adapter.
     * The new list must have exactly as many entries as the current one.
     * TODO: Add setValues as well, or iterate on API.
     *
     * @param labels the replacement labels, same size as the current label list
     * @throws IllegalArgumentException if the sizes differ
     */
    public void setLabels(List<String> labels) {
        if (labels.size() != this.labels.size()) {
            throw new IllegalArgumentException("You need to provide " + this.labels.size() + " labels.");
        }
        this.labels = labels;
        // NOTE(review): unchecked cast — assumes the adapter was set as an
        // ArrayAdapter<String>; confirm against the initialization site.
        ((ArrayAdapter<String>) getAdapter()).notifyDataSetChanged();
    }
}
public class Settings { /** * Get a property by name
* @ param key the property name */
public String getStringProperty ( String key , String defaultValue , boolean replaceSystemProperties ) { } } | String value = settings . getProperty ( key , defaultValue ) ; if ( replaceSystemProperties ) value = SystemTools . replaceSystemProperties ( value ) ; return value != null ? value . trim ( ) : null ; |
public class dnssrvrec {
    /**
     * Use this API to fetch dnssrvrec resources.
     * Each element of the input array is serialized into request arguments and
     * fetched individually from the NITRO service.
     *
     * @param service the NITRO service to query
     * @param obj     the resources whose full state should be fetched
     * @return an array with one fetched resource per input element, or
     *         {@code null} if {@code obj} is null or empty
     * @throws Exception on service/communication errors
     */
    public static dnssrvrec[] get(nitro_service service, dnssrvrec obj[]) throws Exception {
        if (obj != null && obj.length > 0) {
            dnssrvrec response[] = new dnssrvrec[obj.length];
            for (int i = 0; i < obj.length; i++) {
                options option = new options();
                option.set_args(nitro_util.object_to_string_withoutquotes(obj[i]));
                response[i] = (dnssrvrec) obj[i].get_resource(service, option);
            }
            return response;
        }
        return null;
    }
}
public class LaconicExplanationGenerator { public void foundExplanation ( ExplanationGenerator < E > explanationGenerator , Explanation < E > explanation , Set < Explanation < E > > allFoundExplanations ) { } } | notifyLaconicExplanationGeneratorProgressMonitor ( explanation ) ; |
public class ConfigUtils {
    /**
     * Reads configuration from a classpath resource obtained through the current
     * thread's <em>context</em> class loader ({@link Thread#getContextClassLoader()}).
     * (Note: not {@code ClassLoader#getSystemResourceAsStream}, which the previous
     * documentation incorrectly referenced.)
     *
     * @param resource the resource to be read
     * @return a {@link java.util.Properties} object read from the specified resource
     * @throws IllegalArgumentException when the configuration file could not be found
     *         or another I/O error occurs
     */
    public static Properties configFromResource(final String resource) {
        final Properties config = new Properties();
        // try-with-resources: the original implementation leaked the stream on
        // both the success and the failure path.
        try (InputStream input = Thread.currentThread().getContextClassLoader().getResourceAsStream(resource)) {
            if (input == null) {
                // A non-existent resource yields null rather than an exception,
                // so fail loudly here instead.
                throw new IllegalArgumentException("configuration file '" + resource + "' not found on classpath");
            }
            config.load(input);
        } catch (final IOException e) {
            throw new IllegalArgumentException("reading configuration from '" + resource + "' failed", e);
        }
        return config;
    }
}
public class Types {
    /**
     * Does t have the same bounds for quantified variables as s?
     * Walks both type-variable lists in parallel; each upper bound of s is first
     * rewritten (via subst) in terms of t's variables so the comparison is done
     * in a common frame of reference.
     *
     * @param t the first polymorphic type
     * @param s the second polymorphic type
     * @return true iff both lists have the same length and pairwise-equal bounds
     */
    public boolean hasSameBounds(ForAll t, ForAll s) {
        List<Type> l1 = t.tvars;
        List<Type> l2 = s.tvars;
        while (l1.nonEmpty() && l2.nonEmpty()
                && isSameType(l1.head.getUpperBound(), subst(l2.head.getUpperBound(), s.tvars, t.tvars))) {
            l1 = l1.tail;
            l2 = l2.tail;
        }
        // Both lists must be exhausted simultaneously; a mismatching bound or a
        // length difference leaves one list non-empty.
        return l1.isEmpty() && l2.isEmpty();
    }
}
public class InstructionGroupPreparer {
    /**
     * Creates FieldNodes for all xLoad instructions in the group, one field per
     * distinct variable index, and normalizes each load's var index so that
     * groups differing only in variable numbering map to the same group class.
     *
     * @param group the instruction group whose loads are turned into fields
     */
    private void extractFields(InstructionGroup group) {
        List<FieldNode> fields = group.getFields();
        for (InstructionGraphNode node : group.getNodes()) {
            if (node.isXLoad()) {
                VarInsnNode insn = (VarInsnNode) node.getInstruction();
                // check whether we already have a field for the var with this index
                int index;
                for (index = 0; index < fields.size(); index++) {
                    if (fields.get(index).access == insn.var) break;
                }
                // if we don't, create a new field for the var
                if (index == fields.size()) {
                    // CAUTION, HACK!: for brevity we reuse the access field and the value field of the FieldNode
                    // for keeping track of the original var index as well as the FieldNode's Type (respectively)
                    // so we need to make sure that we correct for this when the field is actually written
                    Type type = node.getResultValue().getType();
                    fields.add(new FieldNode(insn.var, "field$" + index, type.getDescriptor(), null, type));
                }
                // normalize the instruction so instruction groups that are identical except for the variable
                // indexes are still mapped to the same group class (name)
                insn.var = index;
            }
        }
    }
}
public class BucketTimeSeries {
    /**
     * Determines the number of buckets needed to cover the given time span.
     *
     * @param diffInSeconds the time span, in seconds
     * @return the number of buckets covering the span, rounded up
     */
    public int getBucketSize(final long diffInSeconds) {
        // convert one bucket's configured duration into seconds
        // NOTE(review): if the configured bucket size is below one second this
        // conversion truncates to 0 and the division below degenerates — confirm
        // the configuration forbids sub-second buckets.
        final long secondsPerBucket = TimeUnit.SECONDS.convert(config.getBucketSize(), config.getTimeUnit());
        // Round up so a partially covered bucket still counts.
        return (int) Math.ceil((double) diffInSeconds / secondsPerBucket);
    }
}
public class AfplibFactoryImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
public String convertObjectContainerPresentationSpaceSizePDFSizeToString ( EDataType eDataType , Object instanceValue ) { } } | return instanceValue == null ? null : instanceValue . toString ( ) ; |
public class Main {
    /**
     * Calculates the smallest possible value of a periodically occurring function.
     *
     * @param A a parameter of the function
     * @param B another parameter of the function
     * @param N yet another parameter of the function
     * @return the smallest possible value this function can provide, given the input parameters
     *
     * Examples:
     *   min_periodic_value(10, 20, 30) == 15
     *   min_periodic_value(1, 2, 1)    == 0
     *   min_periodic_value(11, 10, 9)  == 9
     */
    public static int min_periodic_value(int A, int B, int N) {
        // Pick the larger of (B - 1) and N, then take the floor of A*x/B.
        final int x = (B - 1) > N ? (B - 1) : N;
        return (A * x) / B;
    }

    public static void main(String[] args) {
        Main.min_periodic_value(3, 2, 1);
    }
}
public class Level {
    /**
     * Custom deserialization of Level: restores the int level, the syslog
     * equivalent, and the level name in that order.
     *
     * @param s serialization stream
     * @throws IOException if an IO exception occurs
     * @throws ClassNotFoundException if a class is not found
     */
    private void readObject(final ObjectInputStream s) throws IOException, ClassNotFoundException {
        s.defaultReadObject();
        level = s.readInt();
        syslogEquivalent = s.readInt();
        levelStr = s.readUTF();
        // Defensive: readUTF is not expected to return null, but guard anyway
        // so levelStr is never null after deserialization.
        if (levelStr == null) {
            levelStr = "";
        }
    }
}
public class ThreadSettingsApi {
    /**
     * Adds a single domain to the list of domains that need to be
     * "white listed", by posting an ADD whitelist request to the
     * Messenger Platform thread settings endpoint.
     *
     * @param domain the domain that needs to be "white listed"
     * @see <a href="https://developers.facebook.com/docs/messenger-platform/thread-settings/domain-whitelisting">
     *      Facebook's Messenger Platform Domain Whitelisting Thread Settings Documentation</a>
     */
    public static void addWhiteListDomain(String domain) {
        WhitelistDomainRequest request = new WhitelistDomainRequest();
        request.addWhiteListedDomain(domain);
        request.setDomainActionType(DomainActionType.ADD);
        // Network side effect: POSTs the thread setting to Facebook.
        FbBotMillNetworkController.postThreadSetting(request);
    }
}
public class CmsReport {
    /**
     * Returns the style sheets for the report.<p>
     * Tries to read the stylesheet from the VFS; if that fails or yields an
     * empty file, a hard-coded default style block is generated instead.
     *
     * @param cms the current user's context
     * @return the style sheets for the report
     */
    public static String generateCssStyle(CmsObject cms) {
        StringBuffer result = new StringBuffer(128);
        result.append("<style type='text/css'>\n");
        String contents = "";
        try {
            contents = new String(cms.readFile(CmsWorkplace.VFS_PATH_COMMONS + "style/report.css").getContents(), OpenCms.getSystemInfo().getDefaultEncoding());
        } catch (Exception e) {
            // ignore - fall back to the default styles below
        }
        if (CmsStringUtil.isEmpty(contents)) {
            // css file not found, create default styles
            result.append("body { box-sizing: border-box; -moz-box-sizing: border-box; padding: 2px; margin: 0; color: /*begin-color WindowText*/#000000/*end-color*/; background-color: /*begin-color Window*/#ffffff/*end-color*/; font-family: Verdana, Arial, Helvetica, sans-serif; font-size: 11px; }\n");
            result.append("div.main { box-sizing: border-box; -moz-box-sizing: border-box; color: /*begin-color WindowText*/#000000/*end-color*/; white-space: nowrap; }\n");
            result.append("span.head { color: #000099; font-weight: bold; }\n");
            result.append("span.note { color: #666666; }\n");
            result.append("span.ok { color: #009900; }\n");
            result.append("span.warn { color: #990000; padding-left: 40px; }\n");
            result.append("span.err { color: #990000; font-weight: bold; padding-left: 40px; }\n");
            result.append("span.throw { color: #990000; font-weight: bold; }\n");
            result.append("span.link1 { color: #666666; }\n");
            result.append("span.link2 { color: #666666; padding-left: 40px; }\n");
            // NOTE(review): "span.link2" is declared twice; the second rule
            // overrides the first's color. Possibly meant "span.link3" — confirm
            // against the report markup before changing the emitted CSS.
            result.append("span.link2 { color: #990000; }\n");
        } else {
            result.append(contents);
        }
        result.append("</style>\n");
        return result.toString();
    }
}
public class XMLChecker {
    /**
     * Checks if the specified string matches the <em>Name</em> production. See:
     * <a href="http://www.w3.org/TR/REC-xml#NT-Name">Definition of Name</a>.
     * Delegates to the char-array overload over the full string.
     *
     * @param s the character string to check, cannot be <code>null</code>
     * @throws NullPointerException if <code>s == null</code> (raised by the
     *         <code>toCharArray()</code> dereference)
     * @throws InvalidXMLException if the specified character string does not
     *         match the <em>Name</em> production
     */
    public static final void checkName(String s) throws NullPointerException, InvalidXMLException {
        checkName(s.toCharArray(), 0, s.length());
    }
}
public class AbstractGISGridSet {
    /**
     * Updates the stored component type with the type of the new array element:
     * the class is widened to the most specific common supertype of the current
     * component class and the new element's class.
     *
     * @param newElement the element for which the known top type in this array
     *        must be eventually updated
     */
    @SuppressWarnings("unchecked")
    protected final void updateComponentType(P newElement) {
        final Class<? extends P> lclazz = (Class<? extends P>) newElement.getClass();
        // ReflectionUtil.getCommonType computes the common ancestor of both types.
        this.clazz = (Class<? extends P>) ReflectionUtil.getCommonType(this.clazz, lclazz);
    }
}
public class DataXceiver {
    /**
     * Reads a block from the disk and then sends it to a destination, subject to
     * the balancer's thread quota and throttler. Metrics are updated on success;
     * a trailing byte is written to signal clean resource release.
     *
     * @param in the stream to read the request header from
     * @param versionAndOpcode protocol version and opcode of the request
     * @throws IOException on read/send failure
     */
    private void copyBlock(DataInputStream in, VersionAndOpcode versionAndOpcode) throws IOException {
        // Read in the header
        CopyBlockHeader copyBlockHeader = new CopyBlockHeader(versionAndOpcode);
        copyBlockHeader.readFields(in);
        long startTime = System.currentTimeMillis();
        int namespaceId = copyBlockHeader.getNamespaceId();
        long blockId = copyBlockHeader.getBlockId();
        long genStamp = copyBlockHeader.getGenStamp();
        Block block = new Block(blockId, 0, genStamp);
        if (!dataXceiverServer.balanceThrottler.acquire()) {
            // not able to start: balancing thread quota exceeded, refuse the copy
            LOG.info("Not able to copy block " + blockId + " to " + s.getRemoteSocketAddress() + " because threads quota is exceeded.");
            return;
        }
        BlockSender blockSender = null;
        DataOutputStream reply = null;
        boolean isOpSuccess = true;
        updateCurrentThreadName("Copying block " + block);
        try {
            // check if the block exists or not (BlockSender construction fails otherwise)
            blockSender = new BlockSender(namespaceId, block, 0, -1, false, false, false, false, versionAndOpcode.getDataTransferVersion() >= DataTransferProtocol.PACKET_INCLUDE_VERSION_VERSION, true, datanode, null);
            // set up response stream
            OutputStream baseStream = NetUtils.getOutputStream(s, datanode.socketWriteTimeout);
            reply = new DataOutputStream(new BufferedOutputStream(baseStream, SMALL_BUFFER_SIZE));
            // send block content to the target, rate-limited by the balance throttler
            long read = blockSender.sendBlock(reply, baseStream, dataXceiverServer.balanceThrottler);
            long readDuration = System.currentTimeMillis() - startTime;
            datanode.myMetrics.bytesReadLatency.inc(readDuration);
            datanode.myMetrics.bytesRead.inc((int) read);
            if (read > KB_RIGHT_SHIFT_MIN) {
                datanode.myMetrics.bytesReadRate.inc((int) (read >> KB_RIGHT_SHIFT_BITS), readDuration);
            }
            datanode.myMetrics.blocksRead.inc();
            LOG.info("Copied block " + block + " to " + s.getRemoteSocketAddress());
        } catch (IOException ioe) {
            isOpSuccess = false;
            throw ioe;
        } finally {
            // Always release the throttler slot acquired above.
            dataXceiverServer.balanceThrottler.release();
            if (isOpSuccess) {
                try {
                    // send one last byte to indicate that the resource is cleaned.
                    reply.writeChar('d');
                } catch (IOException ignored) {
                }
            }
            IOUtils.closeStream(reply);
            IOUtils.closeStream(blockSender);
        }
    }
}
public class Ifc2x3tc1PackageImpl {
    /**
     * Returns the IfcWindowStyleConstructionEnum meta-object, lazily resolved
     * from the registered EMF package (classifier index 931).
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public EEnum getIfcWindowStyleConstructionEnum() {
        if (ifcWindowStyleConstructionEnumEEnum == null) {
            ifcWindowStyleConstructionEnumEEnum = (EEnum) EPackage.Registry.INSTANCE.getEPackage(Ifc2x3tc1Package.eNS_URI).getEClassifiers().get(931);
        }
        return ifcWindowStyleConstructionEnumEEnum;
    }
}
public class IRFactory {
    /**
     * Creates a loop node. The code generator will later call
     * createWhile | createDoWhile | createFor | createForIn
     * to finish loop generation.
     *
     * @param loopLabel optional label jump node to attach to the loop, may be null
     * @param lineno source line number for the new scope node
     * @return the new LOOP scope node
     */
    private Scope createLoopNode(Node loopLabel, int lineno) {
        Scope result = createScopeNode(Token.LOOP, lineno);
        if (loopLabel != null) {
            // Link the label to the loop so labeled break/continue resolve to it.
            ((Jump) loopLabel).setLoop(result);
        }
        return result;
    }
}
public class FastMathCalc {
    /**
     * Compute split[0], split[1] such that their sum is equal to d,
     * and split[0] has its 30 least significant bits as zero.
     * <p>
     * CAUTION: the exact floating-point evaluation order of the expressions
     * below is what makes the split exact — do not "simplify" them.
     *
     * @param d number to split
     * @param split placeholder where to place the result
     */
    private static void split(final double d, final double split[]) {
        if (d < 8e298 && d > -8e298) {
            // Normal range: (d + d*2^30) - d*2^30 rounds away the low 30 bits.
            final double a = d * HEX_40000000;
            split[0] = (d + a) - a;
            split[1] = d - split[0];
        } else {
            // Near-overflow range: scale down by 2^-30 first to avoid overflow,
            // then scale the high part back up.
            final double a = d * 9.31322574615478515625E-10;
            split[0] = (d + a - d) * HEX_40000000;
            split[1] = d - split[0];
        }
    }
}
public class DeviceAttributeDAODefaultImpl { public void insert ( final double argIn ) { } } | attributeValue_5 . data_format = AttrDataFormat . FMT_UNKNOWN ; attributeValue_5 . w_dim . dim_x = 1 ; attributeValue_5 . w_dim . dim_y = 0 ; insert ( new double [ ] { argIn } ) ; |
public class BridgeActivity {
    /**
     * Requests runtime permissions by launching the BridgeActivity with a
     * permission-type request intent.
     *
     * @param source      the caller's context/activity wrapper used to start the activity
     * @param permissions the permissions to request
     */
    static void requestPermission(Source source, String[] permissions) {
        Intent intent = new Intent(source.getContext(), BridgeActivity.class);
        intent.putExtra(KEY_TYPE, BridgeRequest.TYPE_PERMISSION);
        intent.putExtra(KEY_PERMISSIONS, permissions);
        source.startActivity(intent);
    }
}
public class ClusteringKeyMapper {
    /**
     * Returns the common clustering keys of the specified column family:
     * static cells are skipped, and consecutive cells belonging to the same
     * CQL3 row contribute only one clustering key.
     *
     * @param columnFamily a storage engine {@link ColumnFamily}
     * @return the sorted clustering keys of the specified column family
     */
    public final List<CellName> clusteringKeys(ColumnFamily columnFamily) {
        List<CellName> clusteringKeys = new ArrayList<>();
        CellName lastClusteringKey = null;
        for (Cell cell : columnFamily) {
            CellName cellName = cell.name();
            if (!isStatic(cellName)) {
                CellName clusteringKey = extractClusteringKey(cellName);
                // Deduplicate runs of cells from the same CQL3 row.
                if (lastClusteringKey == null || !lastClusteringKey.isSameCQL3RowAs(cellNameType, clusteringKey)) {
                    lastClusteringKey = clusteringKey;
                    clusteringKeys.add(clusteringKey);
                }
            }
        }
        return sort(clusteringKeys);
    }
}
public class CodeSwitcher {
    /**
     * Command-line entry point. Parses switch arguments (+name / -name),
     * --basedir= and --pathlist= options, and bare paths; then processes the
     * collected files, printing the known switches if none were given.
     *
     * @param a command-line arguments
     */
    public static void main(String[] a) {
        CodeSwitcher s = new CodeSwitcher();
        if (a.length == 0) {
            showUsage();
            return;
        }
        File listFile = null;
        File baseDir = null;
        // First pass: classify every argument.
        for (int i = 0; i < a.length; i++) {
            String p = a[i];
            if (p.startsWith("+")) {
                s.vSwitchOn.addElement(p.substring(1));
            } else if (p.startsWith("--basedir=")) {
                baseDir = new File(p.substring("--basedir=".length()));
            } else if (p.startsWith("--pathlist=")) {
                listFile = new File(p.substring("--pathlist=".length()));
            } else if (p.startsWith("-")) {
                s.vSwitchOff.addElement(p.substring(1));
            } else {
                s.addDir(p);
            }
        }
        // --basedir only has meaning relative to a path list.
        if (baseDir != null) {
            if (listFile == null) {
                System.err.println("--basedir= setting ignored, since only used for list files");
            } else {
                if (!baseDir.isDirectory()) {
                    System.err.println("Skipping listfile since basedir '" + baseDir.getAbsolutePath() + "' is not a directory");
                    listFile = null;
                }
            }
        }
        // Read the path list file, ignoring blank lines and '#' comments.
        if (listFile != null) {
            try {
                BufferedReader br = new BufferedReader(new FileReader(listFile));
                String st, p;
                int hashIndex;
                File f;
                while ((st = br.readLine()) != null) {
                    hashIndex = st.indexOf('#');
                    p = ((hashIndex > -1) ? st.substring(0, hashIndex) : st).trim();
                    if (p.length() < 1) {
                        continue;
                    }
                    f = (baseDir == null) ? (new File(p)) : (new File(baseDir, p));
                    if (f.isFile()) {
                        s.addDir(f);
                    } else {
                        System.err.println("Skipping non-file '" + p.trim() + "'");
                    }
                }
            } catch (Exception e) {
                System.err.println("Failed to read pathlist file '" + listFile.getAbsolutePath() + "'");
            }
        }
        if (s.size() < 1) {
            printError("No path specified, or no specified paths qualify");
            showUsage();
        }
        s.process();
        // With no switches requested, just report which switches exist.
        if (s.vSwitchOff.size() == 0 && s.vSwitchOn.size() == 0) {
            s.printSwitches();
        }
    }
}
public class BaseUpdateableClassifier {
    /**
     * Performs training on an updateable classifier by going over the whole
     * data set in random order, one observation at a time, multiple times.
     *
     * @param dataSet the data set to train from
     * @param toTrain the classifier to train
     * @param epochs  the number of passes through the data set
     * @throws IllegalArgumentException if {@code epochs} is less than 1
     */
    public static void trainEpochs(ClassificationDataSet dataSet, UpdateableClassifier toTrain, int epochs) {
        if (epochs < 1)
            throw new IllegalArgumentException("epochs must be positive");
        toTrain.setUp(dataSet.getCategories(), dataSet.getNumNumericalVars(), dataSet.getPredicting());
        // Index list is shuffled once per epoch to randomize presentation order.
        IntList randomOrder = new IntList(dataSet.size());
        ListUtils.addRange(randomOrder, 0, dataSet.size(), 1);
        for (int epoch = 0; epoch < epochs; epoch++) {
            Collections.shuffle(randomOrder);
            for (int i : randomOrder)
                toTrain.update(dataSet.getDataPoint(i), dataSet.getWeight(i), dataSet.getDataPointCategory(i));
        }
    }
}
public class RoleDefinitionsInner {
    /**
     * Gets all role definitions that are applicable at the scope and above.
     *
     * @param nextPageLink the NextLink from the previous successful call to the List operation
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the PagedList&lt;RoleDefinitionInner&gt; object
     */
    public Observable<Page<RoleDefinitionInner>> listNextAsync(final String nextPageLink) {
        // Unwrap the service response, exposing only the page body to callers.
        return listNextWithServiceResponseAsync(nextPageLink).map(new Func1<ServiceResponse<Page<RoleDefinitionInner>>, Page<RoleDefinitionInner>>() {
            @Override
            public Page<RoleDefinitionInner> call(ServiceResponse<Page<RoleDefinitionInner>> response) {
                return response.body();
            }
        });
    }
}
public class AbstractClient {
    /**
     * Qualifies a metric name with a user tag when authentication is enabled
     * and a login user is available; otherwise (or on I/O failure looking up
     * the user) returns the metric name unchanged.
     * TODO(calvin): General tag logic should be in getMetricName.
     *
     * @param metricName the base metric name
     * @return the metric name, possibly tagged with the current user
     */
    private String getQualifiedMetricName(String metricName) {
        try {
            if (SecurityUtils.isAuthenticationEnabled(mContext.getConf()) && LoginUser.get(mContext.getConf()) != null) {
                return Metric.getMetricNameWithTags(metricName, CommonMetrics.TAG_USER, LoginUser.get(mContext.getConf()).getName());
            } else {
                return metricName;
            }
        } catch (IOException e) {
            // Best-effort: fall back to the untagged name rather than failing.
            return metricName;
        }
    }
}
public class QueryResultRowIterator {
    /**
     * {@inheritDoc}
     * Returns the next row, fetching it via {@link #hasNext()} if the caller
     * did not call {@code hasNext()} first.
     *
     * @see javax.jcr.query.RowIterator#nextRow()
     * @throws NoSuchElementException if there are no more rows
     */
    @Override
    public Row nextRow() {
        if (nextRow == null) {
            // Didn't call 'hasNext()' — let it prefetch the next row for us.
            if (!hasNext()) {
                throw new NoSuchElementException();
            }
        }
        assert nextRow != null;
        Row result = nextRow;
        // Consume the prefetched row and advance the position counter.
        nextRow = null;
        position++;
        return result;
    }
}
public class DisqueClient {
    /**
     * Opens a new connection to a Disque server using the supplied {@link DisqueURI}
     * and the supplied {@link RedisCodec codec} to encode/decode keys. Command
     * timeouts are applied from the given {@link DisqueURI#getTimeout()} settings.
     *
     * @param codec use this codec to encode/decode keys and values, must not be {@literal null}
     * @param disqueURI the Disque server to connect to, must not be {@literal null}
     * @param socketAddressSupplierFactory factory for {@link SocketAddress} for connecting
     *        to Disque based on multiple connection points
     * @param <K> key type
     * @param <V> value type
     * @return a new connection
     */
    public <K, V> DisqueConnection<K, V> connect(RedisCodec<K, V> codec, DisqueURI disqueURI, SocketAddressSupplierFactory socketAddressSupplierFactory) {
        // Validate inputs before handing off to the internal connect.
        assertNotNull(disqueURI);
        checkValidDisqueURI(disqueURI);
        return connect0(codec, disqueURI, socketAddressSupplierFactory, disqueURI.getTimeout(), disqueURI.getUnit());
    }
}
public class ApiOvhEmaildomain {
    /**
     * Creates a new mailbox on the server.
     * REST: POST /email/domain/{domain}/account
     *
     * @param domain      [required] name of your domain name
     * @param accountName [required] account name
     * @param description [required] description of the account
     * @param password    [required] account password
     * @param size        [required] account size in bytes (default: 500000)
     *                    (possible values: /email/domain/{domain}/allowedAccountSize)
     * @return the task describing the mailbox creation
     * @throws IOException on transport or serialization errors
     */
    public OvhTaskPop domain_account_POST(String domain, String accountName, String description, String password, Long size) throws IOException {
        String qPath = "/email/domain/{domain}/account";
        StringBuilder sb = path(qPath, domain);
        HashMap<String, Object> o = new HashMap<String, Object>();
        addBody(o, "accountName", accountName);
        addBody(o, "description", description);
        addBody(o, "password", password);
        addBody(o, "size", size);
        String resp = exec(qPath, "POST", sb.toString(), o);
        return convertTo(resp, OvhTaskPop.class);
    }
}
public class AbstractPrintQuery { /** * Adds the msg phrase .
* @ param _ selectBldr the select bldr
* @ param _ msgPhrase phrase to add
* @ return this PrintQuery
* @ throws EFapsException on error */
public AbstractPrintQuery addMsgPhrase ( final SelectBuilder _selectBldr , final MsgPhrase ... _msgPhrase ) throws EFapsException { } } | final String baseSel ; if ( _selectBldr == null ) { baseSel = "" ; } else { baseSel = _selectBldr . toString ( ) + "." ; } for ( final MsgPhrase phrase : _msgPhrase ) { for ( final String selectStmt : phrase . getArguments ( ) ) { addSelect ( baseSel + selectStmt ) ; } } return this ; |
public class TopicsInner {
    /**
     * Lists all the topics under an Azure subscription, wrapping the raw list
     * response into a single {@code Page}.
     *
     * @return the observable to the List&lt;TopicInner&gt; object
     */
    public Observable<Page<TopicInner>> listAsync() {
        return listWithServiceResponseAsync().map(new Func1<ServiceResponse<List<TopicInner>>, Page<TopicInner>>() {
            @Override
            public Page<TopicInner> call(ServiceResponse<List<TopicInner>> response) {
                // The service returns a flat list; adapt it to the paged contract.
                PageImpl<TopicInner> page = new PageImpl<>();
                page.setItems(response.body());
                return page;
            }
        });
    }
}
public class DynamicFactory {
    /**
     * Creates a constructor key for use in accessing constructors. The key
     * combines the interface and the constructor's parameter types in a single
     * instance which can then be used in a map.
     *
     * @param intf the interface to build the key for
     * @param constructor the constructor value which this key will be linked to
     * @return a constructor key which can be used to look up a constructor
     */
    private static ConstructorKey key(Class<?> intf, Constructor<?> constructor) {
        // Delegate to the parameter-types overload.
        return key(intf, constructor.getParameterTypes());
    }
}
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.