signature stringlengths 43 39.1k | implementation stringlengths 0 450k |
|---|---|
public class TemplateAnalysis { /** * consider moving this to SoyTreeUtils or some similar place . */
private static StaticAnalysisResult isListExpressionEmpty ( ForNode node ) { } } | Optional < RangeArgs > rangeArgs = RangeArgs . createFromNode ( node ) ; if ( rangeArgs . isPresent ( ) ) { return isRangeExpressionEmpty ( rangeArgs . get ( ) ) ; } ExprNode expr = node . getExpr ( ) . getRoot ( ) ; if ( expr instanceof ListLiteralNode ) { return ( ( ListLiteralNode ) expr ) . numChildren ( ) > 0 ? StaticAnalysisResult . FALSE : StaticAnalysisResult . TRUE ; } return StaticAnalysisResult . UNKNOWN ; |
public class DateUtils { /** * Get data from data string using the given pattern and the default date
* format symbols for the default locale .
* @ param dateString date string to be handled .
* @ param pattern pattern to be formated .
* @ return a new Date object by given date string and pattern .
* @ throws DateException */
public static Date getDateFromString ( final String dateString , final String pattern ) { } } | try { SimpleDateFormat df = buildDateFormat ( pattern ) ; return df . parse ( dateString ) ; } catch ( ParseException e ) { throw new DateException ( String . format ( "Could not parse %s with pattern %s." , dateString , pattern ) , e ) ; } |
public class HTTPBatchClientConnectionInterceptor {
    /**
     * Executes communication with the remote host and wraps the response.
     *
     * @param httpRequest the prepared HTTP request to execute
     * @param client the HTTP client used to send the request; closed here after use
     * @return an IntuitMessage populated from the HTTP response
     * @throws FMSException on I/O failure (protocol errors are wrapped in ConfigurationException)
     */
    private IntuitMessage executeHttpRequest(HttpRequestBase httpRequest, CloseableHttpClient client) throws FMSException {
        CloseableHttpResponse httpResponse = null;
        IntuitMessage intuitMessage = new IntuitMessage();
        try {
            // prepare HttpHost object; port -1 means the scheme's default port is used
            HttpHost target = new HttpHost(httpRequest.getURI().getHost(), -1, httpRequest.getURI().getScheme());
            httpResponse = client.execute(target, httpRequest);
            LOG.debug("Connection status : " + httpResponse.getStatusLine());
            // set the response elements before close the connection
            setResponseElements(intuitMessage, httpResponse);
            return intuitMessage;
        } catch (ClientProtocolException e) {
            throw new ConfigurationException("Error in Http Protocol definition", e);
        } catch (IOException e) {
            throw new FMSException(e);
        } finally {
            // Best-effort cleanup: close the response first, then the client.
            // NOTE(review): closing the caller-supplied client here prevents its reuse — confirm intended.
            if (httpResponse != null) {
                try {
                    httpResponse.close();
                } catch (IOException e) {
                    LOG.warn("Unable to close CloseableHttpResponse .", e);
                }
            }
            if (client != null) {
                try {
                    client.close();
                } catch (Exception e) {
                    LOG.warn("Unable to close CloseableHttpClient connection.", e);
                }
            }
        }
    }
}
public class MsgPlaceholderNode { /** * Returns whether this node and the given other node are the same , such that they should be
* represented by the same placeholder . */
@ Override public boolean shouldUseSameVarNameAs ( MsgSubstUnitNode other ) { } } | return ( other instanceof MsgPlaceholderNode ) && this . initialNodeKind == ( ( MsgPlaceholderNode ) other ) . initialNodeKind && this . samenessKey . equals ( ( ( MsgPlaceholderNode ) other ) . samenessKey ) ; |
public class FsCrawlerUtil { /** * We check if we can index the content or skip it
* @ param content Content to parse
* @ param filters regular expressions that all needs to match if we want to index . If empty
* we consider it always matches . */
public static boolean isIndexable ( String content , List < String > filters ) { } } | if ( isNullOrEmpty ( content ) ) { logger . trace ( "Null or empty content always matches." ) ; return true ; } if ( filters == null || filters . isEmpty ( ) ) { logger . trace ( "No pattern always matches." ) ; return true ; } logger . trace ( "content = [{}], filters = {}" , content , filters ) ; for ( String filter : filters ) { Pattern pattern = Pattern . compile ( filter , Pattern . MULTILINE | Pattern . UNIX_LINES ) ; logger . trace ( "Testing filter [{}]" , filter ) ; if ( ! pattern . matcher ( content ) . find ( ) ) { logger . trace ( "Filter [{}] is not matching." , filter ) ; return false ; } else { logger . trace ( "Filter [{}] is matching." , filter ) ; } } return true ; |
public class LogMailBean {
    /**
     * Returns the current system time, formatted for display.
     * (Original comment, translated from Chinese: "get the system's current time and format it".)
     *
     * @return the current time formatted as {@code yyyy.MM.dd 'at' HH:mm:ss}
     */
    private static String getSystemDate() {
        final SimpleDateFormat formatter = new SimpleDateFormat("yyyy.MM.dd 'at' HH:mm:ss");
        return formatter.format(new Date());
    }
}
public class CPInstancePersistenceImpl { /** * Returns the first cp instance in the ordered set where displayDate & lt ; & # 63 ; and status = & # 63 ; .
* @ param displayDate the display date
* @ param status the status
* @ param orderByComparator the comparator to order the set by ( optionally < code > null < / code > )
* @ return the first matching cp instance
* @ throws NoSuchCPInstanceException if a matching cp instance could not be found */
@ Override public CPInstance findByLtD_S_First ( Date displayDate , int status , OrderByComparator < CPInstance > orderByComparator ) throws NoSuchCPInstanceException { } } | CPInstance cpInstance = fetchByLtD_S_First ( displayDate , status , orderByComparator ) ; if ( cpInstance != null ) { return cpInstance ; } StringBundler msg = new StringBundler ( 6 ) ; msg . append ( _NO_SUCH_ENTITY_WITH_KEY ) ; msg . append ( "displayDate=" ) ; msg . append ( displayDate ) ; msg . append ( ", status=" ) ; msg . append ( status ) ; msg . append ( "}" ) ; throw new NoSuchCPInstanceException ( msg . toString ( ) ) ; |
public class PkgRes {
    /**
     * Gets a resource path using obj's class's package name as the prefix.
     *
     * @param name the name of the resource
     * @param obj the object whose class determines the package-name prefix
     * @return path of the resource prefixed by obj's class package name
     * @throws NullPointerException if name or obj are null
     */
    public static String getResourcePathFor(CharSequence name, Object obj) {
        if (obj == null) {
            throw new NullPointerException("obj is null");
        }
        // Delegate to the Class-based overload using obj's runtime class.
        return getResourcePathFor(name, obj.getClass());
    }
}
public class Ifc2x3tc1PackageImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
public EClass getIfcDimensionCurve ( ) { } } | if ( ifcDimensionCurveEClass == null ) { ifcDimensionCurveEClass = ( EClass ) EPackage . Registry . INSTANCE . getEPackage ( Ifc2x3tc1Package . eNS_URI ) . getEClassifiers ( ) . get ( 149 ) ; } return ifcDimensionCurveEClass ; |
public class OptimizerNode { @ Override public Iterable < OptimizerNode > getPredecessors ( ) { } } | List < OptimizerNode > allPredecessors = new ArrayList < OptimizerNode > ( ) ; for ( DagConnection dagConnection : getIncomingConnections ( ) ) { allPredecessors . add ( dagConnection . getSource ( ) ) ; } for ( DagConnection conn : getBroadcastConnections ( ) ) { allPredecessors . add ( conn . getSource ( ) ) ; } return allPredecessors ; |
public class Bounds { /** * Get intersection with another { @ link Bounds } .
* @ param b The { @ link Bounds } object to intersect this with .
* @ return < code > true < / code > is there is a non - empty intersection , < code > null < / code > otherwise . */
public final Bounds intersect ( Bounds b ) { } } | final long _min = Math . max ( this . min , b . min ) ; final long _max = Math . min ( this . max , b . max ) ; if ( _min <= _max ) return new Bounds ( _min , _max ) ; return null ; |
public class Utils { /** * Convert ( decode ) the given Base64 - encoded String to its binary form .
* @ param base64Value Base64 - encoded string .
* @ return Decoded binary value .
* @ throws IllegalArgumentException If the given string is not a valid Base64 value . */
public static byte [ ] base64ToBinary ( String base64Value ) throws IllegalArgumentException { } } | Utils . require ( base64Value . length ( ) % 4 == 0 , "Invalid base64 value (must be a multiple of 4 chars): " + base64Value ) ; return DatatypeConverter . parseBase64Binary ( base64Value ) ; |
public class Sigmoid { /** * Returns the direction of the sigmoid
* @ return the direction of the sigmoid */
public Direction direction ( ) { } } | if ( ! Op . isFinite ( slope ) || Op . isEq ( slope , 0.0 ) ) { return Direction . Zero ; } if ( Op . isGt ( slope , 0.0 ) ) { return Direction . Positive ; } return Direction . Negative ; |
public class GetV2LoggingOptionsRequestMarshaller { /** * Marshall the given parameter object . */
public void marshall ( GetV2LoggingOptionsRequest getV2LoggingOptionsRequest , ProtocolMarshaller protocolMarshaller ) { } } | if ( getV2LoggingOptionsRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; } |
public class Primes { /** * Returns the largest prime less than the given bits . */
public static int getBiggestPrime ( long value ) { } } | for ( int i = PRIMES . length - 1 ; i >= 0 ; i -- ) { if ( PRIMES [ i ] <= value ) { return PRIMES [ i ] ; } } return 2 ; |
public class ParserService { /** * Returns all parser providers sorted based on priority if required .
* @ param sort
* @ return */
synchronized public List < ParserProvider > getAllProviders ( boolean sort ) { } } | List < ParserProvider > providers = new ArrayList < > ( ) ; for ( ParserProvider pp : loader ) { providers . add ( pp ) ; } if ( sort ) { Collections . sort ( providers , PARSER_PROVIDER_COMPARATOR ) ; } return providers ; |
public class DumpProcessingController { /** * Processes the most recent main ( complete ) dump that is available .
* Convenience method : same as retrieving a dump with
* { @ link # getMostRecentDump ( DumpContentType ) } with
* { @ link DumpContentType # CURRENT } or { @ link DumpContentType # FULL } , and
* processing it with { @ link # processDump ( MwDumpFile ) } . The individual
* methods should be used for better control and error handling .
* @ see DumpProcessingController # processAllRecentRevisionDumps ( ) */
public void processMostRecentMainDump ( ) { } } | DumpContentType dumpContentType ; if ( this . preferCurrent ) { dumpContentType = DumpContentType . CURRENT ; } else { dumpContentType = DumpContentType . FULL ; } processDump ( getMostRecentDump ( dumpContentType ) ) ; |
public class VINT {
    /**
     * Constructs a {@link VINT} based on its value.
     *
     * @param value value of the {@link VINT}
     * @return {@link VINT} corresponding to this value
     */
    public static VINT fromValue(long value) {
        // Bits of the value; bs.length() is (index of highest set bit) + 1.
        BitSet bs = BitSet.valueOf(new long[] { value });
        // Bytes needed to hold the value bits plus a length marker.
        byte length = (byte) (1 + bs.length() / BIT_IN_BYTE);
        // Grow by one byte when the payload is exactly full — presumably to keep
        // the marker bit from colliding with value bits; TODO confirm reachability.
        if (bs.length() == length * BIT_IN_BYTE) {
            length++;
        }
        // Set the length-marker bit (appears to follow EBML-style VINT encoding — confirm).
        bs.set(length * BIT_IN_BYTE - length);
        long binary = bs.toLongArray()[0];
        return new VINT(binary, length, value);
    }
}
public class EvaluatorManager {
    /**
     * Packages the ContextControlProto in an EvaluatorControlProto and forwards it
     * to the EvaluatorRuntime.
     *
     * @param contextControlProto message containing context control info
     */
    public void sendContextControlMessage(final EvaluatorRuntimeProtocol.ContextControlProto contextControlProto) {
        // Locking on the descriptor appears to serialize control messages per
        // evaluator — NOTE(review): confirm this matches the class's lock convention.
        synchronized (this.evaluatorDescriptor) {
            LOG.log(Level.FINEST, "Context control message to {0}", this.evaluatorId);
            this.contextControlHandler.send(contextControlProto);
        }
    }
}
public class MapDBEngine {
    /**
     * Opens the storage (if not already open) and returns the storage object.
     * Configuration (compression, caching, async writes) is applied from
     * {@code storageConfiguration}; transactions are disabled and the DB is
     * registered for close on JVM shutdown.
     *
     * @param storageType which storage to open (primary/secondary, persistent/temp)
     * @return the open DB for the given type
     * @throws IllegalArgumentException for unsupported storage types
     */
    private DB openStorage(StorageType storageType) {
        DB storage = storageRegistry.get(storageType);
        if (!isOpenStorage(storage)) {
            DBMaker m;
            if (storageType == StorageType.PRIMARY_STORAGE || storageType == StorageType.SECONDARY_STORAGE) {
                // main storage: file-backed under the engine's root path
                Path rootPath = getRootPath(storageName);
                try {
                    createDirectoryIfNotExists(rootPath);
                } catch (IOException ex) {
                    throw new UncheckedIOException(ex);
                }
                m = DBMaker.newFileDB(new File(rootPath.toFile(), storageType.toString()));
            } else if (storageType == StorageType.TEMP_PRIMARY_STORAGE || storageType == StorageType.TEMP_SECONDARY_STORAGE) {
                // temporary storage: backing files are removed on close
                m = DBMaker.newTempFileDB().deleteFilesAfterClose();
            } else {
                throw new IllegalArgumentException("Unsupported StorageType.");
            }
            if (storageConfiguration.isCompressed()) {
                m = m.compressionEnable();
            }
            // Caching is only allowed for the primary storages (persistent or temp).
            boolean permitCaching = storageType == StorageType.PRIMARY_STORAGE || storageType == StorageType.TEMP_PRIMARY_STORAGE;
            if (permitCaching && storageConfiguration.getCacheSize() > 0) {
                m = m.cacheLRUEnable().cacheSize(storageConfiguration.getCacheSize());
            } else {
                m = m.cacheDisable();
            }
            if (storageConfiguration.isAsynchronous()) {
                m = m.asyncWriteEnable();
            }
            m = m.transactionDisable();
            m = m.closeOnJvmShutdown();
            storage = m.make();
            // Remember the open DB so subsequent calls reuse it.
            storageRegistry.put(storageType, storage);
        }
        return storage;
    }
}
public class LDataObjectQueue {
    /**
     * {@inheritDoc} Frames are returned in insertion order. If consuming read behavior is
     * enabled, the frame will be removed from the queue before return.
     */
    public synchronized CEMILData getFrame() {
        final CEMILData[] c = (CEMILData[]) value;
        final int first = first();
        final CEMILData f = c[first];
        // In consuming mode, drop the returned slot and its timestamp.
        if (size > 0 && consuming) {
            --size;
            c[first] = null;
            timestamps[first] = 0;
            // Queue drained: reset the write cursor to the start.
            if (size == 0)
                next = 0;
        }
        return f;
    }
}
public class ControlPanel { /** * Link a emitter configurable value to a value panel
* @ param value The configurable value from the emitter
* @ param panel The component to link against */
private void link ( Value value , ValuePanel panel ) { } } | controlToData . put ( panel , value ) ; if ( value instanceof SimpleValue ) panel . setValue ( ( int ) ( ( SimpleValue ) value ) . getValue ( 0 ) ) ; else if ( value instanceof RandomValue ) panel . setValue ( ( int ) ( ( RandomValue ) value ) . getValue ( ) ) ; |
public class AbstractSequenceClassifier {
    /**
     * Loads a classifier from the given input stream. This does not close the
     * InputStream.
     *
     * <p>NOTE(review): the historical javadoc claimed the JVM shuts down via
     * System.exit(1) on failure, but this implementation rethrows any loading
     * exception wrapped in a RuntimeException instead — doc updated to match code.
     *
     * @param in the InputStream to read from
     * @param props properties controlling classifier loading
     */
    public void loadClassifierNoExceptions(InputStream in, Properties props) {
        // load the classifier
        try {
            loadClassifier(in, props);
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }
}
public class StatisticsJDBCStorageConnection { /** * { @ inheritDoc } */
public long getNodesCount ( ) throws RepositoryException { } } | Statistics s = ALL_STATISTICS . get ( NODES_COUNT ) ; try { s . begin ( ) ; return wcs . getNodesCount ( ) ; } finally { s . end ( ) ; } |
public class ContainerMetadataUpdateTransaction { /** * Creates a new UpdateableSegmentMetadata for the given Segment and registers it . */
private UpdateableSegmentMetadata createSegmentMetadata ( String segmentName , long segmentId ) { } } | UpdateableSegmentMetadata metadata = new StreamSegmentMetadata ( segmentName , segmentId , this . containerId ) ; this . newSegments . put ( metadata . getId ( ) , metadata ) ; this . newSegmentNames . put ( metadata . getName ( ) , metadata . getId ( ) ) ; return metadata ; |
public class ThemeManager {
    /**
     * Resolves a theme resource id by flags. Resolution order: literal resource
     * ids pass through unchanged; registered theme getters (newest first) get the
     * first chance; then the static theme map; finally the default theme.
     *
     * @param themeTag either a raw resource id (>= _START_RESOURCES_ID) or a flag value
     * @param applyModifier whether to apply the modifier when preparing flags
     * @return the resolved style resource id
     */
    public static int getThemeResource(int themeTag, boolean applyModifier) {
        // Values at or above the resource-id threshold are already resource ids.
        if (themeTag >= _START_RESOURCES_ID) {
            return themeTag;
        }
        themeTag = prepareFlags(themeTag, applyModifier);
        // Ask registered getters, most recently added first; first non-zero wins.
        if (ThemeManager.sThemeGetters != null) {
            int getterResource;
            final ThemeTag tag = new ThemeTag(themeTag);
            for (int i = ThemeManager.sThemeGetters.size() - 1; i >= 0; i--) {
                getterResource = ThemeManager.sThemeGetters.get(i).getThemeResource(tag);
                if (getterResource != 0) {
                    return getterResource;
                }
            }
        }
        // Fall back to the static map, then to the default theme.
        final int i = _THEMES_MAP.get(themeTag, -1);
        if (i == -1) {
            return _THEMES_MAP.get(_DEFAULT_THEME, R.style.Holo_Theme);
        } else {
            return i;
        }
    }
}
public class DependencyBundlingAnalyzer { /** * Determine if the dependency name is equal in the given dependencies .
* @ param dependency a dependency to compare
* @ param nextDependency a dependency to compare
* @ return true if the name is equal in both dependencies ; otherwise false */
private boolean namesAreEqual ( Dependency dependency , Dependency nextDependency ) { } } | return dependency . getName ( ) != null && dependency . getName ( ) . equals ( nextDependency . getName ( ) ) ; |
public class IntArray {
    /**
     * Copies the given IntArray's values into this one. Throws
     * IllegalArgumentException if lengths are not the same.
     *
     * <p>NOTE(review): despite the name, {@code to} is the SOURCE of the copy and
     * this array is the destination — consider renaming in a future change.
     *
     * @param to the array whose values are copied into this one
     */
    public void copy(IntArray to) {
        if (length() != to.length()) {
            throw new IllegalArgumentException("array not same size");
        }
        // Fast path: both buffers expose backing arrays of the same component type.
        // NOTE(review): offsets of 0 ignore ByteBuffer.arrayOffset() — confirm
        // these buffers are never sliced views.
        if (buffer.hasArray() && to.buffer.hasArray() && buffer.array().getClass().getComponentType() == to.buffer.array().getClass().getComponentType()) {
            System.arraycopy(to.buffer.array(), 0, buffer.array(), 0, length());
        } else {
            // Slow path: element-wise copy through the accessor methods.
            int len = length();
            for (int ii = 0; ii < len; ii++) {
                put(ii, to.get(ii));
            }
        }
    }
}
public class AbstractRouter {
    /**
     * Gets the url of the route handled by the specified action method. The action
     * does not take parameters.
     *
     * @param controller the controller object
     * @param method the controller method
     * @return the url, {@literal null} if the action method is not found
     */
    @Override
    public String getReverseRouteFor(Controller controller, String method) {
        // Delegate to the Class-based overload; null = no route parameters.
        return getReverseRouteFor(controller.getClass(), method, null);
    }
}
public class CommerceCountryPersistenceImpl { /** * Returns the first commerce country in the ordered set where groupId = & # 63 ; and shippingAllowed = & # 63 ; and active = & # 63 ; .
* @ param groupId the group ID
* @ param shippingAllowed the shipping allowed
* @ param active the active
* @ param orderByComparator the comparator to order the set by ( optionally < code > null < / code > )
* @ return the first matching commerce country
* @ throws NoSuchCountryException if a matching commerce country could not be found */
@ Override public CommerceCountry findByG_S_A_First ( long groupId , boolean shippingAllowed , boolean active , OrderByComparator < CommerceCountry > orderByComparator ) throws NoSuchCountryException { } } | CommerceCountry commerceCountry = fetchByG_S_A_First ( groupId , shippingAllowed , active , orderByComparator ) ; if ( commerceCountry != null ) { return commerceCountry ; } StringBundler msg = new StringBundler ( 8 ) ; msg . append ( _NO_SUCH_ENTITY_WITH_KEY ) ; msg . append ( "groupId=" ) ; msg . append ( groupId ) ; msg . append ( ", shippingAllowed=" ) ; msg . append ( shippingAllowed ) ; msg . append ( ", active=" ) ; msg . append ( active ) ; msg . append ( "}" ) ; throw new NoSuchCountryException ( msg . toString ( ) ) ; |
public class PeerGroup {
    /**
     * <p>Sets the false positive rate of bloom filters given to peers. The default is {@link #DEFAULT_BLOOM_FILTER_FP_RATE}.</p>
     * <p>Be careful regenerating the bloom filter too often, as it decreases anonymity because remote nodes can
     * compare transactions against both the new and old filters to significantly decrease the false positive rate.</p>
     * <p>See the docs for {@link BloomFilter#BloomFilter(int, double, long, BloomFilter.BloomUpdate)} for a brief
     * explanation of anonymity when using bloom filters.</p>
     */
    public void setBloomFilterFalsePositiveRate(double bloomFilterFPRate) {
        // Update rate and recalculate under the group lock so the merger and the
        // recalculation see a consistent state; SEND_IF_CHANGED avoids resending
        // an identical filter to peers.
        lock.lock();
        try {
            bloomFilterMerger.setBloomFilterFPRate(bloomFilterFPRate);
            recalculateFastCatchupAndFilter(FilterRecalculateMode.SEND_IF_CHANGED);
        } finally {
            lock.unlock();
        }
    }
}
public class XMPPBOSHConnection {
    /**
     * Initialize the SmackDebugger which allows to log and debug XML traffic.
     * Wires a discarding writer for outgoing data, a pipe for incoming data, and
     * BOSH listeners that feed both; a daemon thread drains the reader side.
     */
    @Override
    protected void initDebugger() {
        // TODO: Maybe we want to extend the SmackDebugger for simplification
        // and a performance boost.
        // Initialize a empty writer which discards all data.
        writer = new Writer() {
            @Override
            public void write(char[] cbuf, int off, int len) { /* ignore */
            }
            @Override
            public void close() { /* ignore */
            }
            @Override
            public void flush() { /* ignore */
            }
        };
        // Initialize a pipe for received raw data.
        try {
            readerPipe = new PipedWriter();
            reader = new PipedReader(readerPipe);
        } catch (IOException e) {
            // Ignore
        }
        // Call the method from the parent class which initializes the debugger.
        super.initDebugger();
        // Add listeners for the received and sent raw data.
        client.addBOSHClientResponseListener(new BOSHClientResponseListener() {
            @Override
            public void responseReceived(BOSHMessageEvent event) {
                if (event.getBody() != null) {
                    try {
                        // Forward the received body XML into the debugger's reader pipe.
                        readerPipe.write(event.getBody().toXML());
                        readerPipe.flush();
                    } catch (Exception e) {
                        // Ignore
                    }
                }
            }
        });
        client.addBOSHClientRequestListener(new BOSHClientRequestListener() {
            @Override
            public void requestSent(BOSHMessageEvent event) {
                if (event.getBody() != null) {
                    try {
                        // Sent body XML goes to the (possibly debugger-wrapped) writer.
                        writer.write(event.getBody().toXML());
                    } catch (Exception e) {
                        // Ignore
                    }
                }
            }
        });
        // Create and start a thread which discards all read data.
        readerConsumer = new Thread() {
            private Thread thread = this;
            private int bufferLength = 1024;
            @Override
            public void run() {
                try {
                    char[] cbuf = new char[bufferLength];
                    // Stop when this thread is replaced (readerConsumer reassigned)
                    // or the connection is done.
                    while (readerConsumer == thread && !done) {
                        reader.read(cbuf, 0, bufferLength);
                    }
                } catch (IOException e) {
                    // Ignore
                }
            }
        };
        readerConsumer.setDaemon(true);
        readerConsumer.start();
    }
}
public class HoltWintersAnalysis {
    /**
     * Runs triple-exponential (Holt-Winters) smoothing over the bootstrapped
     * datapoints and collects forecast and deviation series for timestamps at or
     * after {@code startTimestamp}.
     *
     * @param bootstrappedDps time-ordered datapoints (iteration order is assumed
     *     chronological — TODO confirm the map is sorted, e.g. a TreeMap)
     * @param alpha level (intercept) smoothing factor
     * @param beta trend (slope) smoothing factor
     * @param gamma seasonal smoothing factor
     * @param seasonLength number of points per season
     * @param startTimestamp first timestamp to include in the output series
     * @return forecasted and deviation datapoints, keyed by timestamp
     */
    HoltWintersData _performHoltWintersAnalysis(Map<Long, Double> bootstrappedDps, double alpha, double beta, double gamma, int seasonLength, long startTimestamp) {
        List<Double> intercepts = new ArrayList<Double>();
        List<Double> slopes = new ArrayList<Double>();
        List<Double> seasonals = new ArrayList<Double>();
        List<Double> deviations = new ArrayList<Double>();
        Map<Long, Double> deviationDatapoints = new TreeMap<>();
        Map<Long, Double> forecastedDatapoints = new TreeMap<>();
        double next_pred = 0.0, prediction = 0.0;
        int i = 0;
        for (Map.Entry<Long, Double> entry : bootstrappedDps.entrySet()) {
            Long timestamp = entry.getKey();
            Double value = entry.getValue();
            double lastIntercept = 0.0;
            double lastSlope = 0.0;
            if (i == 0) {
                lastIntercept = value;
                lastSlope = 0;
                // seed the first prediction as the first actual
                prediction = value;
            } else {
                lastIntercept = intercepts.get(i - 1);
                lastSlope = slopes.get(i - 1);
                // Guard against a zero intercept poisoning the recurrence.
                if (lastIntercept == 0.0) {
                    lastIntercept = value;
                }
                prediction = next_pred;
            }
            // Seasonal terms one full season back (getLast handles the warm-up window).
            double last_seasonal = getLast(seasonals, i, seasonLength);
            double next_last_seasonal = getLast(seasonals, i + 1, seasonLength);
            double last_seasonal_dev = getLast(deviations, i, seasonLength);
            // Standard Holt-Winters update equations.
            double intercept = _holtWintersIntercept(alpha, value, last_seasonal, lastIntercept, lastSlope);
            double slope = _holtWintersSlope(beta, intercept, lastIntercept, lastSlope);
            double seasonal = _holtWintersSeasonal(gamma, value, intercept, last_seasonal);
            next_pred = intercept + slope + next_last_seasonal;
            double deviation = _holtWintersDeviation(gamma, value, prediction, last_seasonal_dev);
            intercepts.add(intercept);
            slopes.add(slope);
            seasonals.add(seasonal);
            deviations.add(deviation);
            // Only emit points in the requested window, rounded via DECIMAL_FORMAT.
            if (timestamp >= startTimestamp) {
                forecastedDatapoints.put(timestamp, Double.parseDouble(DECIMAL_FORMAT.format(prediction)));
                deviationDatapoints.put(timestamp, Double.parseDouble(DECIMAL_FORMAT.format(deviation)));
            }
            i++;
        }
        HoltWintersData data = new HoltWintersData(forecastedDatapoints, deviationDatapoints);
        return data;
    }
}
public class PrimitiveUtils { /** * Read byte .
* @ param value the value
* @ param defaultValue the default value
* @ return the byte */
public static Byte readByte ( String value , Byte defaultValue ) { } } | if ( ! StringUtils . hasText ( value ) ) return defaultValue ; return Byte . valueOf ( value ) ; |
public class HtmlParser { /** * Parses a given file that validates with the iText DTD and writes the content to a document .
* @ param document the document the parser will write to
* @ param file the file with the content */
public static void parse ( DocListener document , String file ) { } } | HtmlParser p = new HtmlParser ( ) ; p . go ( document , file ) ; |
public class PointSeries { /** * Adds a list of point with only numbers .
* @ param values
* the number values to add .
* @ return a PointSeries object with the new points added to it */
public PointSeries addNumbers ( final List < Number > values ) { } } | for ( Number number : values ) { addNumberPoint ( number ) ; } return this ; |
public class NativeArray {
    /**
     * Maps a prototype property name to its numeric id via a generated
     * length-then-character dispatch (faster than a string switch at the time it
     * was generated). The body between the #generated# markers is produced by the
     * Rhino id-map tool — do not hand-edit; regenerate instead.
     *
     * @param s the property name
     * @return the matching Id_* constant, or 0 if the name is unknown
     */
    @Override
    protected int findPrototypeId(String s) {
        int id;
        // #generated# Last update: 2019-03-10 12:44:45 MEZ
        L0: {
            id = 0;
            String X = null;
            int c;
            L: switch (s.length()) {
            case 3:
                c = s.charAt(0);
                if (c == 'm') { if (s.charAt(2) == 'p' && s.charAt(1) == 'a') { id = Id_map; break L0; } }
                else if (c == 'p') { if (s.charAt(2) == 'p' && s.charAt(1) == 'o') { id = Id_pop; break L0; } }
                break L;
            case 4:
                switch (s.charAt(2)) {
                case 'i': X = "join"; id = Id_join; break L;
                case 'l': X = "fill"; id = Id_fill; break L;
                case 'm': X = "some"; id = Id_some; break L;
                case 'n': X = "find"; id = Id_find; break L;
                case 'r': X = "sort"; id = Id_sort; break L;
                case 's': X = "push"; id = Id_push; break L;
                case 'y': X = "keys"; id = Id_keys; break L;
                }
                break L;
            case 5:
                c = s.charAt(1);
                if (c == 'h') { X = "shift"; id = Id_shift; }
                else if (c == 'l') { X = "slice"; id = Id_slice; }
                else if (c == 'v') { X = "every"; id = Id_every; }
                break L;
            case 6:
                switch (s.charAt(0)) {
                case 'c': X = "concat"; id = Id_concat; break L;
                case 'f': X = "filter"; id = Id_filter; break L;
                case 'r': X = "reduce"; id = Id_reduce; break L;
                case 's': X = "splice"; id = Id_splice; break L;
                case 'v': X = "values"; id = Id_values; break L;
                }
                break L;
            case 7:
                switch (s.charAt(0)) {
                case 'e': X = "entries"; id = Id_entries; break L;
                case 'f': X = "forEach"; id = Id_forEach; break L;
                case 'i': X = "indexOf"; id = Id_indexOf; break L;
                case 'r': X = "reverse"; id = Id_reverse; break L;
                case 'u': X = "unshift"; id = Id_unshift; break L;
                }
                break L;
            case 8:
                c = s.charAt(3);
                if (c == 'l') { X = "includes"; id = Id_includes; }
                else if (c == 'o') { X = "toSource"; id = Id_toSource; }
                else if (c == 't') { X = "toString"; id = Id_toString; }
                break L;
            case 9:
                X = "findIndex"; id = Id_findIndex; break L;
            case 11:
                c = s.charAt(0);
                if (c == 'c') { X = "constructor"; id = Id_constructor; }
                else if (c == 'l') { X = "lastIndexOf"; id = Id_lastIndexOf; }
                else if (c == 'r') { X = "reduceRight"; id = Id_reduceRight; }
                break L;
            case 14:
                X = "toLocaleString"; id = Id_toLocaleString; break L;
            }
            // Candidate chosen by partial characters: verify the full string matches.
            if (X != null && X != s && !X.equals(s)) id = 0;
            break L0;
        }
        // #/generated#
        return id;
    }
}
public class BidLandscapeLandscapePoint {
    /**
     * Gets the cost value for this BidLandscapeLandscapePoint.
     *
     * @return cost Estimated cost at this bid. For mobile bid modifier landscapes,
     *     this is the estimated cost for mobile only.
     *     <span class="constraint Selectable">This field can be selected using the
     *     value "LocalCost".</span>
     *     <span class="constraint Filterable">This field can be filtered on.</span>
     */
    public com.google.api.ads.adwords.axis.v201809.cm.Money getCost() {
        return cost;
    }
}
public class ChainedMapReduceJob { /** * [ END chain _ job _ example ] */
private MapSettings getSettings ( ) { } } | // [ START mapSettings ]
MapSettings settings = new MapSettings . Builder ( ) . setWorkerQueueName ( "mapreduce-workers" ) . setModule ( "mapreduce" ) . build ( ) ; // [ END mapSettings ]
return settings ; |
public class CommerceDiscountPersistenceImpl {
    /**
     * Returns the commerce discount where uuid = &#63; and groupId = &#63; or returns
     * <code>null</code> if it could not be found. Uses the finder cache.
     *
     * @param uuid the uuid
     * @param groupId the group ID
     * @return the matching commerce discount, or <code>null</code> if a matching
     *     commerce discount could not be found
     */
    @Override
    public CommerceDiscount fetchByUUID_G(String uuid, long groupId) {
        // Delegate to the overload; true = consult the finder cache.
        return fetchByUUID_G(uuid, groupId, true);
    }
}
public class AbstractTypedVisitor { /** * Given an arbitrary target type , filter out the target array types . For
* example , consider the following method :
* < pre >
* method f ( int x ) :
* null | int [ ] xs = [ x ]
* < / pre >
* When type checking the expression < code > [ x ] < / code > the flow type checker will
* attempt to determine an < i > expected < / i > array type . In order to then
* determine the appropriate expected type for expression < code > x < / code > it
* filters < code > null | int [ ] < / code > down to just < code > int [ ] < / code > .
* @ param target
* Target type for this value
* @ param expr
* Source expression for this value
* @ author David J . Pearce */
public Type . Array selectArray ( Type target , Expr expr , Environment environment ) { } } | Type . Array type = asType ( expr . getType ( ) , Type . Array . class ) ; Type . Array [ ] records = TYPE_ARRAY_FILTER . apply ( target ) ; return selectCandidate ( records , type , environment ) ; |
public class SCMController { /** * Updating form for a change log file filter */
@ RequestMapping ( value = "changeLog/fileFilter/{projectId}/{name}/update" , method = RequestMethod . GET ) public Form saveChangeLogFileFilterForm ( @ PathVariable ID projectId , @ PathVariable String name ) { } } | Resource < SCMFileChangeFilter > filter = getChangeLogFileFilter ( projectId , name ) ; return Form . create ( ) . with ( Text . of ( "name" ) . label ( "Name" ) . help ( "Name to use to save the filter." ) . readOnly ( ) . value ( filter . getData ( ) . getName ( ) ) ) . with ( Memo . of ( "patterns" ) . label ( "Filter(s)" ) . help ( "List of ANT-like patterns (one per line)." ) . value ( filter . getData ( ) . getPatterns ( ) . stream ( ) . collect ( Collectors . joining ( "\n" ) ) ) ) ; |
public class QueryLifecycle {
    /**
     * For callers where simplicity is desired over flexibility. This method does it all in one call. If the request
     * is unauthorized, an IllegalStateException will be thrown. Logs and metrics are emitted when the Sequence is
     * either fully iterated or throws an exception.
     *
     * @param query the query
     * @param authenticationResult authentication result indicating identity of the requester
     * @param remoteAddress remote address, for logging; or null if unknown
     * @return results
     */
    @SuppressWarnings("unchecked")
    public <T> Sequence<T> runSimple(final Query<T> query, final AuthenticationResult authenticationResult, @Nullable final String remoteAddress) {
        initialize(query);
        final Sequence<T> results;
        try {
            final Access access = authorize(authenticationResult);
            if (!access.isAllowed()) {
                throw new ISE("Unauthorized");
            }
            final QueryLifecycle.QueryResponse queryResponse = execute();
            results = queryResponse.getResults();
        } catch (Throwable e) {
            // Failure before iteration starts: emit here, since the wrapper below
            // will never run. -1 = bytes written unknown.
            emitLogsAndMetrics(e, remoteAddress, -1);
            throw e;
        }
        // Success path: defer emission until the sequence is fully iterated or throws.
        return Sequences.wrap(results, new SequenceWrapper() {
            @Override
            public void after(final boolean isDone, final Throwable thrown) {
                emitLogsAndMetrics(thrown, remoteAddress, -1);
            }
        });
    }
}
public class AccumuloClient { /** * Gets the TabletServer hostname for where the given key is located in the given table
* @ param table Fully - qualified table name
* @ param key Key to locate
* @ return The tablet location , or DUMMY _ LOCATION if an error occurs */
private Optional < String > getTabletLocation ( String table , Key key ) { } } | try { // Get the Accumulo table ID so we can scan some fun stuff
String tableId = connector . tableOperations ( ) . tableIdMap ( ) . get ( table ) ; // Create our scanner against the metadata table , fetching ' loc ' family
Scanner scanner = connector . createScanner ( "accumulo.metadata" , auths ) ; scanner . fetchColumnFamily ( new Text ( "loc" ) ) ; // Set the scan range to just this table , from the table ID to the default tablet
// row , which is the last listed tablet
Key defaultTabletRow = new Key ( tableId + '<' ) ; Key start = new Key ( tableId ) ; Key end = defaultTabletRow . followingKey ( PartialKey . ROW ) ; scanner . setRange ( new Range ( start , end ) ) ; Optional < String > location = Optional . empty ( ) ; if ( key == null ) { // if the key is null , then it is - inf , so get first tablet location
Iterator < Entry < Key , Value > > iter = scanner . iterator ( ) ; if ( iter . hasNext ( ) ) { location = Optional . of ( iter . next ( ) . getValue ( ) . toString ( ) ) ; } } else { // Else , we will need to scan through the tablet location data and find the location
// Create some text objects to do comparison for what we are looking for
Text splitCompareKey = new Text ( ) ; key . getRow ( splitCompareKey ) ; Text scannedCompareKey = new Text ( ) ; // Scan the table !
for ( Entry < Key , Value > entry : scanner ) { // Get the bytes of the key
byte [ ] keyBytes = entry . getKey ( ) . getRow ( ) . copyBytes ( ) ; // If the last byte is < , then we have hit the default tablet , so use this location
if ( keyBytes [ keyBytes . length - 1 ] == '<' ) { location = Optional . of ( entry . getValue ( ) . toString ( ) ) ; break ; } else { // Chop off some magic nonsense
scannedCompareKey . set ( keyBytes , 3 , keyBytes . length - 3 ) ; // Compare the keys , moving along the tablets until the location is found
if ( scannedCompareKey . getLength ( ) > 0 ) { int compareTo = splitCompareKey . compareTo ( scannedCompareKey ) ; if ( compareTo <= 0 ) { location = Optional . of ( entry . getValue ( ) . toString ( ) ) ; } else { // all future tablets will be greater than this key
break ; } } } } scanner . close ( ) ; } // If we were unable to find the location for some reason , return the default tablet
// location
return location . isPresent ( ) ? location : getDefaultTabletLocation ( table ) ; } catch ( Exception e ) { // Swallow this exception so the query does not fail due to being unable
// to locate the tablet server for the provided Key .
// This is purely an optimization , but we will want to log the error .
LOG . error ( "Failed to get tablet location, returning dummy location" , e ) ; return Optional . empty ( ) ; } |
public class LogManager {
    /**
     * Create logs with the given partition number.
     *
     * @param topic the topic name
     * @param partitions requested partition number
     * @param forceEnlarge enlarge the partition number of the log if smaller than runtime
     * @return the partition number of the log after enlarging
     */
    public int createLogs(String topic, final int partitions, final boolean forceEnlarge) {
        TopicNameValidator.validate(topic);
        // Serialize all creation/enlargement decisions on a single lock
        synchronized (logCreationLock) {
            final int configPartitions = getPartition(topic);
            // Nothing to do when already large enough, or when enlargement is not forced
            if (configPartitions >= partitions || !forceEnlarge) {
                return configPartitions;
            }
            topicPartitionsMap.put(topic, partitions);
            if (config.getEnableZookeeper()) {
                if (getLogPool(topic, 0) != null) { // created already
                    // Existing topic: schedule an ENLARGE registration task
                    topicRegisterTasks.add(new TopicTask(TopicTask.TaskType.ENLARGE, topic));
                } else {
                    // New topic: schedule a CREATE registration task
                    topicRegisterTasks.add(new TopicTask(TopicTask.TaskType.CREATE, topic));
                }
            }
            return partitions;
        }
    }
}
public class ServerBuilder {
    /**
     * Builds, initializes and starts the server.
     * The context class loader is swapped to the system manager's loader for the
     * duration of the build and always restored; on any failure before a successful
     * start, the system manager is shut down immediately.
     *
     * @return the started server
     */
    public ServerBase build() {
        EnvLoader.init();
        SystemManager systemManager = createSystemManager();
        Thread thread = Thread.currentThread();
        ClassLoader oldLoader = thread.getContextClassLoader();
        // Tracks whether startup completed; used in finally to decide on emergency shutdown
        boolean isValid = false;
        try {
            // All configuration runs under the system manager's class loader
            thread.setContextClassLoader(systemManager.getClassLoader());
            Vfs.setPwd(getRootDirectory());
            if (!isEmbedded()) {
                logCopyright();
            }
            preConfigureInit();
            configureRootDirectory();
            _serverSelf = initNetwork();
            Objects.requireNonNull(_serverSelf);
            addServices(_serverSelf);
            initHttpSystem(systemManager, _serverSelf);
            ServerBase server = build(systemManager, _serverSelf);
            init(server);
            isValid = true;
            systemManager.start();
            return server;
        } catch (RuntimeException e) {
            // Runtime exceptions propagate unchanged
            throw e;
        } catch (Exception e) {
            // Checked exceptions are wrapped as configuration failures
            throw ConfigException.wrap(e);
        } finally {
            thread.setContextClassLoader(oldLoader);
            if (!isValid) {
                // Startup failed part-way: tear down whatever was brought up
                systemManager.shutdown(ShutdownModeAmp.IMMEDIATE);
            }
        }
    }
}
public class ConfHelper {
    /**
     * Convert a string to a boolean.
     * Accepted values: "yes", "true", "t", "y", "1" for true;
     * "no", "false", "f", "n", "0" for false. All comparisons are case-insensitive
     * and surrounding whitespace is ignored.
     * If the value provided is null, defaultValue is returned.
     *
     * @param value the string to parse, may be null
     * @param defaultValue value returned when {@code value} is null
     * @return the parsed boolean
     * @throws IllegalArgumentException if value is non-null and matches none of the accepted strings
     */
    public static boolean parseBoolean(String value, boolean defaultValue) {
        if (value == null) {
            return defaultValue;
        }
        final String normalized = value.trim();
        for (String truthy : new String[] {"yes", "true", "t", "y", "1"}) {
            if (truthy.equalsIgnoreCase(normalized)) {
                return true;
            }
        }
        for (String falsy : new String[] {"no", "false", "f", "n", "0"}) {
            if (falsy.equalsIgnoreCase(normalized)) {
                return false;
            }
        }
        throw new IllegalArgumentException("Unrecognized boolean value '" + normalized + "'");
    }
}
public class UserManager {
    /**
     * Delete the user with the specified ID. This is a soft delete: the user is
     * marked deleted, not removed from storage.
     *
     * @param sUserID The ID of the user to delete. May be null.
     * @return {@link EChange#CHANGED} if the user was deleted,
     *         {@link EChange#UNCHANGED} otherwise (unknown ID or already deleted).
     */
    @Nonnull
    public EChange deleteUser(@Nullable final String sUserID) {
        final User aUser = getOfID(sUserID);
        if (aUser == null) {
            AuditHelper.onAuditDeleteFailure(User.OT, "no-such-user-id", sUserID);
            return EChange.UNCHANGED;
        }
        m_aRWLock.writeLock().lock();
        try {
            // setDeletionNow reports unchanged when the user was already marked deleted
            if (BusinessObjectHelper.setDeletionNow(aUser).isUnchanged()) {
                AuditHelper.onAuditDeleteFailure(User.OT, "already-deleted", sUserID);
                return EChange.UNCHANGED;
            }
            internalMarkItemDeleted(aUser);
        } finally {
            m_aRWLock.writeLock().unlock();
        }
        AuditHelper.onAuditDeleteSuccess(User.OT, sUserID);
        // Execute callback as the very last action, outside the write lock
        m_aCallbacks.forEach(aCB -> aCB.onUserDeleted(aUser));
        return EChange.CHANGED;
    }
}
public class DateTime { /** * Returns a copy of this datetime plus the specified number of days .
* The calculation will do its best to only change the day field
* retaining the same time of day .
* However , in certain circumstances , typically daylight savings cutover ,
* it may be necessary to alter the time fields .
* In spring an hour is typically removed . If adding one day results in
* the time being within the cutover then the time is adjusted to be
* within summer time . For example , if the cutover is from 01:59 to 03:00
* and the result of this method would have been 02:30 , then the result
* will be adjusted to 03:30.
* The following three lines are identical in effect :
* < pre >
* DateTime added = dt . plusDays ( 6 ) ;
* DateTime added = dt . plus ( Period . days ( 6 ) ) ;
* DateTime added = dt . withFieldAdded ( DurationFieldType . days ( ) , 6 ) ;
* < / pre >
* This datetime instance is immutable and unaffected by this method call .
* @ param days the amount of days to add , may be negative
* @ return the new datetime plus the increased days
* @ since 1.1 */
public DateTime plusDays ( int days ) { } } | if ( days == 0 ) { return this ; } long instant = getChronology ( ) . days ( ) . add ( getMillis ( ) , days ) ; return withMillis ( instant ) ; |
public class JDBCResultSet { /** * < ! - - start generic documentation - - >
* Updates the designated column with an < code > Object < / code > value .
* The updater methods are used to update column values in the
* current row or the insert row . The updater methods do not
* update the underlying database ; instead the < code > updateRow < / code > or
* < code > insertRow < / code > methods are called to update the database .
* < ! - - end generic documentation - - >
* < ! - - start release - specific documentation - - >
* < div class = " ReleaseSpecificDocumentation " >
* < h3 > HSQLDB - Specific Information : < / h3 > < p >
* HSQLDB supports this feature . < p >
* < / div >
* < ! - - end release - specific documentation - - >
* @ param columnLabel the label for the column specified with the SQL AS clause . If the SQL AS clause was not specified , then the label is the name of the column
* @ param x the new column value
* @ exception SQLException if a database access error occurs ,
* the result set concurrency is < code > CONCUR _ READ _ ONLY < / code >
* or this method is called on a closed result set
* @ exception SQLFeatureNotSupportedException if the JDBC driver does not support
* this method
* @ since JDK 1.2 ( JDK 1.1 . x developers : read the overview for
* JDBCResultSet ) */
public void updateObject ( String columnLabel , Object x ) throws SQLException { } } | updateObject ( findColumn ( columnLabel ) , x ) ; |
public class CPInstancePersistenceImpl { /** * Returns the first cp instance in the ordered set where displayDate & lt ; & # 63 ; and status = & # 63 ; .
* @ param displayDate the display date
* @ param status the status
* @ param orderByComparator the comparator to order the set by ( optionally < code > null < / code > )
* @ return the first matching cp instance , or < code > null < / code > if a matching cp instance could not be found */
@ Override public CPInstance fetchByLtD_S_First ( Date displayDate , int status , OrderByComparator < CPInstance > orderByComparator ) { } } | List < CPInstance > list = findByLtD_S ( displayDate , status , 0 , 1 , orderByComparator ) ; if ( ! list . isEmpty ( ) ) { return list . get ( 0 ) ; } return null ; |
public class ArrayCompare {
    /**
     * Lexicographically compares array A with array B: element by element over the
     * shared prefix, falling back to the length difference when one is a prefix of
     * the other.
     *
     * @param arrayA first array; must be non-null with non-null elements
     * @param arrayB second array; must be non-null with non-null elements
     * @return negative if A &lt; B, zero if equal, positive if A &gt; B
     */
    public static int compare(Long[] arrayA, Long[] arrayB) {
        final int shared = Math.min(arrayA.length, arrayB.length);
        for (int i = 0; i < shared; i++) {
            int elementOrder = arrayA[i].compareTo(arrayB[i]);
            if (elementOrder != 0) {
                return elementOrder;
            }
        }
        // Equal over the shared prefix: the shorter array sorts first.
        return arrayA.length - arrayB.length;
    }
}
public class LNGVector { /** * Pushes an element at the end of the vector .
* @ param element the element */
public void push ( final T element ) { } } | int newSize = this . size + 1 ; this . ensure ( newSize ) ; this . elements [ this . size ++ ] = element ; |
public class ApplicationGatewaysInner { /** * Gets the backend health of the specified application gateway in a resource group .
* @ param resourceGroupName The name of the resource group .
* @ param applicationGatewayName The name of the application gateway .
* @ param expand Expands BackendAddressPool and BackendHttpSettings referenced in backend health .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the observable for the request */
public Observable < ApplicationGatewayBackendHealthInner > backendHealthAsync ( String resourceGroupName , String applicationGatewayName , String expand ) { } } | return backendHealthWithServiceResponseAsync ( resourceGroupName , applicationGatewayName , expand ) . map ( new Func1 < ServiceResponse < ApplicationGatewayBackendHealthInner > , ApplicationGatewayBackendHealthInner > ( ) { @ Override public ApplicationGatewayBackendHealthInner call ( ServiceResponse < ApplicationGatewayBackendHealthInner > response ) { return response . body ( ) ; } } ) ; |
public class SHA1Hasher { /** * Start or continue a hash calculation with the given data ,
* starting at the given position , for the given length .
* @ param data input
* @ param pos start position
* @ param len length */
public void update ( byte [ ] data , int pos , int len ) { } } | update ( ByteBuffer . wrap ( data , pos , len ) ) ; |
public class Adapters {
    /**
     * Creates the composite mutation adapter used for HBase mutations.
     *
     * @param putAdapter a {@link com.google.cloud.bigtable.hbase.adapters.PutAdapter} object.
     * @return a {@link com.google.cloud.bigtable.hbase.adapters.HBaseMutationAdapter} object
     *         that supports deletes and puts and rejects increments and appends.
     */
    public static HBaseMutationAdapter createMutationsAdapter(PutAdapter putAdapter) {
        return new HBaseMutationAdapter(
            DELETE_ADAPTER,
            putAdapter,
            // Increment and Append are not supported through this path; these adapters
            // fail fast with the operation name in the error message
            new UnsupportedMutationAdapter<Increment>("increment"),
            new UnsupportedMutationAdapter<Append>("append"));
    }
}
public class RomanticActionCustomizer {
    /**
     * Registers an ActionExecute for every execute method declared on the action class,
     * then runs the verification hooks over the completed mapping.
     * Overloading of execute methods is rejected.
     *
     * @param actionMapping the mapping being customized
     */
    protected void setupMethod(ActionMapping actionMapping) {
        final Class<?> actionType = actionMapping.getActionDef().getComponentClass();
        for (Method declaredMethod : actionType.getDeclaredMethods()) {
            if (!isExecuteMethod(declaredMethod)) {
                continue;
            }
            // An already-registered execute with this method's name means overloading,
            // which is not allowed for execute methods
            final ActionExecute existing = actionMapping.getActionExecute(declaredMethod);
            if (existing != null) {
                throwOverloadMethodCannotDefinedException(actionType);
            }
            actionMapping.registerExecute(createActionExecute(actionMapping, declaredMethod));
        }
        // Verify only after all executes are registered so the checks see the full mapping
        verifyExecuteMethodSize(actionMapping, actionType);
        verifyExecuteMethodNotShadowingOthers(actionMapping, actionType);
        verifyExecuteMethodDefinedInConcreteClassOnly(actionMapping, actionType);
        verifyExecuteMethodRestfulIndependent(actionMapping, actionType);
    }
}
public class HCRenderer {
    /**
     * Convert the passed HC node to an HTML string without namespaces. Indent and
     * align status is determined from {@link GlobalDebug#isDebugMode()}.
     *
     * @param aHCNode The node to be converted. May not be <code>null</code>.
     * @return The node as XML with or without indentation.
     */
    @Nonnull
    public static String getAsHTMLStringWithoutNamespaces(@Nonnull final IHCNode aHCNode) {
        // Delegate to the generic renderer with namespace emission disabled.
        return getAsHTMLString(aHCNode, HCSettings.getConversionSettingsWithoutNamespaces());
    }
}
public class GitlabAPI { /** * Get a list of tags in specific project
* @ param project
* @ return */
public List < GitlabTag > getTags ( GitlabProject project ) { } } | String tailUrl = GitlabProject . URL + "/" + project . getId ( ) + GitlabTag . URL + PARAM_MAX_ITEMS_PER_PAGE ; return retrieve ( ) . getAll ( tailUrl , GitlabTag [ ] . class ) ; |
public class GeoJsonToAssembler { /** * Helpermethod that creates a geolatte pointsequence starting from an array containing coordinate arrays
* @ param coordinates an array containing coordinate arrays
* @ return a geolatte pointsequence or null if the coordinatesequence was null */
private PointSequence createPointSequence ( double [ ] [ ] coordinates , CrsId crsId ) { } } | if ( coordinates == null ) { return null ; } else if ( coordinates . length == 0 ) { return PointCollectionFactory . createEmpty ( ) ; } DimensionalFlag df = coordinates [ 0 ] . length == 4 ? DimensionalFlag . d3DM : coordinates [ 0 ] . length == 3 ? DimensionalFlag . d3D : DimensionalFlag . d2D ; PointSequenceBuilder psb = PointSequenceBuilders . variableSized ( df , crsId ) ; for ( double [ ] point : coordinates ) { psb . add ( point ) ; } return psb . toPointSequence ( ) ; |
public class CmsUpdateDBProjectId {
    /**
     * Creates the temp table for project ids if it does not exist yet.<p>
     *
     * @param dbCon the db connection interface
     * @throws SQLException if something goes wrong
     */
    @Override
    protected void createTempTable(CmsSetupDb dbCon) throws SQLException {
        // Debug trace of the current method location (style used throughout this setup module)
        System.out.println(new Exception().getStackTrace()[0].toString());
        if (!dbCon.hasTableOrColumn(TEMPORARY_TABLE_NAME, null)) {
            String createStatement = readQuery(QUERY_CREATE_TEMP_TABLE_UUIDS_MYSQL);
            // Substitute the configured MySQL storage engine into the CREATE statement
            Map<String, String> replacer = Collections.singletonMap("${tableEngine}", m_poolData.get("engine"));
            dbCon.updateSqlStatement(createStatement, replacer, null);
        } else {
            System.out.println("table " + TEMPORARY_TABLE_NAME + " already exists");
        }
    }
}
public class Hash {
    /**
     * Creates a hash of the UTF-8 encoding of the given string using the supplied
     * hash function.
     *
     * @param input string to encode; must not be null
     * @param function hash function to use
     * @return the resulting hash code
     */
    public static HashCode newHash(String input, HashFunction function) {
        return function.hashString(input, Charsets.UTF_8);
    }
}
public class Spies {
    /**
     * Proxies a ternary function, spying on its result and parameters.
     *
     * @param <R> the function result type
     * @param <T1> the function first parameter type
     * @param <T2> the function second parameter type
     * @param <T3> the function third parameter type
     * @param function the function to be spied
     * @param result a box that will contain the spied result
     * @param param1 a box that will contain the first spied parameter
     * @param param2 a box that will contain the second spied parameter
     * @param param3 a box that will contain the third spied parameter
     * @return the proxied function
     */
    public static <T1, T2, T3, R> TriFunction<T1, T2, T3, R> spy(TriFunction<T1, T2, T3, R> function, Box<R> result, Box<T1> param1, Box<T2> param2, Box<T3> param3) {
        // The capturing wrapper records arguments/result into the boxes on every invocation.
        return new TernaryCapturingFunction<T1, T2, T3, R>(function, result, param1, param2, param3);
    }
}
public class Period { /** * Returns a new period with the specified number of years added .
* This period instance is immutable and unaffected by this method call .
* @ param years the amount of years to add , may be negative
* @ return the new period with the increased years
* @ throws UnsupportedOperationException if the field is not supported */
public Period plusYears ( int years ) { } } | if ( years == 0 ) { return this ; } int [ ] values = getValues ( ) ; // cloned
getPeriodType ( ) . addIndexedField ( this , PeriodType . YEAR_INDEX , values , years ) ; return new Period ( values , getPeriodType ( ) ) ; |
public class HtmlDocletWriter {
    /**
     * Adds the annotation types for the given Element.
     *
     * @param element the element to write annotations for.
     * @param descList the list of {@link AnnotationMirror}.
     * @param htmltree the documentation tree to which the annotation info will be added
     */
    private void addAnnotationInfo(Element element, List<? extends AnnotationMirror> descList, Content htmltree) {
        // 0 and true are presumably the indent and line-break arguments of the
        // 5-arg overload — TODO confirm against its signature.
        addAnnotationInfo(0, element, descList, true, htmltree);
    }
}
public class SortWorker {
    /**
     * Write the contents of src to dest, without messing with src's position.
     *
     * @param src source buffer; its position is restored before returning
     * @param dest destination buffer (position is advanced)
     * @return the pos in dest where src is written
     */
    private static int spliceIn(ByteBuffer src, ByteBuffer dest) {
        final int destStart = dest.position();
        final int savedSrcPosition = src.position();
        dest.put(src); // advances both buffers' positions
        src.position(savedSrcPosition); // undo the side effect on src
        return destStart;
    }
}
public class FragmentArrayPagerAdapter { /** * Adds the specified fragments at the end of the array .
* @ param fragments */
public void addAll ( T ... fragments ) { } } | for ( T fragment : fragments ) { mItems . add ( fragment ) ; } notifyDataSetChanged ( ) ; |
public class FormulaFactoryImporter { /** * Gather the operands of an n - ary operator and returns its applied operands .
* @ param operator the n - ary operator
* @ return the applied operands of the given operator */
private LinkedHashSet < Formula > gatherAppliedOperands ( final NAryOperator operator ) { } } | final LinkedHashSet < Formula > applied = new LinkedHashSet < > ( ) ; for ( final Formula operand : operator ) { applied . add ( apply ( operand , false ) ) ; } return applied ; |
public class IllegalPatternArgumentException {
    /**
     * Returns {@link IllegalPatternArgumentException#DEFAULT_MESSAGE} formatted with
     * a string rendering of the given pattern.
     * (The previous Javadoc documented an {@code argumentName} parameter that this
     * method does not have.)
     *
     * @param pattern Pattern that a string or character sequence should correspond to; may be null
     * @return the formatted message
     */
    private static String format(@Nullable final Pattern pattern) {
        return String.format(DEFAULT_MESSAGE, patternToString(pattern));
    }
}
public class ByteArrayISO8859Writer { public void write ( char [ ] ca ) throws IOException { } } | ensureSpareCapacity ( ca . length ) ; for ( int i = 0 ; i < ca . length ; i ++ ) { char c = ca [ i ] ; if ( c >= 0 && c <= 0x7f ) _buf [ _size ++ ] = ( byte ) c ; else { writeEncoded ( ca , i , ca . length - i ) ; break ; } } |
public class InputsInner {
    /**
     * Updates an existing input under an existing streaming job. This can be used to
     * partially update (i.e. update one or two properties of) an input without affecting
     * the rest of the job or input definition.
     *
     * @param resourceGroupName The name of the resource group that contains the resource.
     * @param jobName The name of the streaming job.
     * @param inputName The name of the input.
     * @param input An Input object. Properties specified here overwrite the corresponding
     *        existing properties; properties set to null remain unchanged by this PATCH.
     * @param ifMatch The ETag of the input. Omit to always overwrite; pass the last-seen
     *        ETag to prevent accidentally overwriting concurrent changes.
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<InputInner> updateAsync(String resourceGroupName, String jobName, String inputName, InputInner input, String ifMatch, final ServiceCallback<InputInner> serviceCallback) {
        // NOTE(review): fromHeaderResponse implies the service response carries typed
        // headers (e.g. the new ETag) alongside the body — confirm against the return
        // type of updateWithServiceResponseAsync.
        return ServiceFuture.fromHeaderResponse(updateWithServiceResponseAsync(resourceGroupName, jobName, inputName, input, ifMatch), serviceCallback);
    }
}
public class ProtectedPromise { /** * Signify that no more { @ link # newPromise ( ) } allocations will be made . The aggregation can not be
* successful until this method is called .
* @ return { @ code this } promise . */
public ChannelPromise doneAllocatingPromises ( ) { } } | if ( ! doneAllocating ) { doneAllocating = true ; if ( successfulCount == expectedCount ) { trySuccessInternal ( null ) ; return super . setSuccess ( null ) ; } } return this ; |
public class ConcurrentIntrusiveList { /** * Returns all the elements from this list .
* @ return all the elements from this list . */
public synchronized Collection < T > getAll ( ) { } } | List < T > all = new ArrayList < T > ( size ) ; for ( T e = head ; e != null ; e = e . getNext ( ) ) { all . add ( e ) ; } return all ; |
public class VueComponentOptions {
    /**
     * Registers a Java method as the provider of the default value for a prop.
     * (The previous Javadoc said "validator", but the code assigns {@code defaultValue}.)
     *
     * @param javaMethod Function pointer to the method in the {@link IsVueComponent}
     * @param propertyName The name of the property whose default value is supplied
     */
    @JsOverlay
    public final void addJavaPropDefaultValue(Function javaMethod, String propertyName) {
        // NOTE(review): throws NPE if the prop was never declared — presumably
        // guaranteed by callers; confirm.
        PropOptions propDefinition = getProps().get(propertyName);
        propDefinition.defaultValue = javaMethod;
    }
}
public class CommonOps_DDRM { /** * Transposes matrix ' a ' and stores the results in ' b ' : < br >
* < br >
* b < sub > ij < / sub > = a < sub > ji < / sub > < br >
* where ' b ' is the transpose of ' a ' .
* @ param A The original matrix . Not modified .
* @ param A _ tran Where the transpose is stored . If null a new matrix is created . Modified .
* @ return The transposed matrix . */
public static DMatrixRMaj transpose ( DMatrixRMaj A , DMatrixRMaj A_tran ) { } } | if ( A_tran == null ) { A_tran = new DMatrixRMaj ( A . numCols , A . numRows ) ; } else { if ( A . numRows != A_tran . numCols || A . numCols != A_tran . numRows ) { throw new MatrixDimensionException ( "Incompatible matrix dimensions" ) ; } } if ( A . numRows > EjmlParameters . TRANSPOSE_SWITCH && A . numCols > EjmlParameters . TRANSPOSE_SWITCH ) TransposeAlgs_DDRM . block ( A , A_tran , EjmlParameters . BLOCK_WIDTH ) ; else TransposeAlgs_DDRM . standard ( A , A_tran ) ; return A_tran ; |
public class SignalRsInner {
    /**
     * Operation to update an existing SignalR service.
     *
     * @param resourceGroupName The name of the resource group that contains the resource.
     * @param resourceName The name of the SignalR resource.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the SignalRResourceInner object if successful.
     */
    public SignalRResourceInner beginUpdate(String resourceGroupName, String resourceName) {
        // Blocking bridge over the async implementation; single() asserts exactly one response.
        return beginUpdateWithServiceResponseAsync(resourceGroupName, resourceName).toBlocking().single().body();
    }
}
public class Sext {
    /**
     * Static factory creating a {@link org.rapidpm.frp.model.serial.Sext}
     * (a serializable 6-tuple) from the given values.
     *
     * @param t1 first element
     * @param t2 second element
     * @param t3 third element
     * @param t4 fourth element
     * @param t5 fifth element
     * @param t6 sixth element
     * @param <T1> type of the first element
     * @param <T2> type of the second element
     * @param <T3> type of the third element
     * @param <T4> type of the fourth element
     * @param <T5> type of the fifth element
     * @param <T6> type of the sixth element
     * @return a new {@link org.rapidpm.frp.model.serial.Sext} holding the six values
     */
    public static <T1 extends Serializable, T2 extends Serializable, T3 extends Serializable, T4 extends Serializable, T5 extends Serializable, T6 extends Serializable> Sext<T1, T2, T3, T4, T5, T6> next(final T1 t1, final T2 t2, final T3 t3, final T4 t4, final T5 t5, final T6 t6) {
        return new Sext<>(t1, t2, t3, t4, t5, t6);
    }
}
public class FessMessages { /** * Add the created action message for the key ' success . reindex _ started ' with parameters .
* < pre >
* message : Started reindexing .
* < / pre >
* @ param property The property name for the message . ( NotNull )
* @ return this . ( NotNull ) */
public FessMessages addSuccessReindexStarted ( String property ) { } } | assertPropertyNotNull ( property ) ; add ( property , new UserMessage ( SUCCESS_reindex_started ) ) ; return this ; |
public class PersianCalendar {
    /**
     * Obtains an alternative date view specific to the given calendar algorithm.
     * (German original: "Erh&auml;lt eine alternative Datumssicht spezifisch
     * f&uuml;r den angegebenen Algorithmus.")
     *
     * @param algorithm calendar computation
     * @return Persian date (possibly modified)
     * @throws IllegalArgumentException in case of date overflow
     * @since 3.33/4.28
     */
    public Date getDate(PersianAlgorithm algorithm) {
        ZonalOffset offset = PersianAlgorithm.STD_OFFSET;
        if (algorithm == DEFAULT_COMPUTATION) {
            // Already in the default view; no conversion necessary
            return new Date(this, DEFAULT_COMPUTATION, offset);
        }
        // Convert through UTC day numbers: default algorithm -> utcDays -> target algorithm
        long utcDays = DEFAULT_COMPUTATION.transform(this, offset);
        return new Date(algorithm.transform(utcDays, offset), algorithm, offset);
    }
}
public class FlowIdProducerOut {
    /**
     * Attaches the flow ID to outbound messages, dispatching to the request or
     * response handler depending on the message direction.
     *
     * @see org.apache.cxf.interceptor.Interceptor#handleMessage(org.apache.cxf.message.Message)
     */
    public void handleMessage(T message) throws Fault {
        if (LOG.isLoggable(Level.FINEST)) {
            LOG.finest("FlowIdProducerOut Interceptor called. isOutbound: "
                    + MessageUtils.isOutbound(message)
                    + ", isRequestor: " + MessageUtils.isRequestor(message));
        }
        if (MessageUtils.isRequestor(message)) {
            handleRequestOut(message);
        } else {
            handleResponseOut(message);
        }
        // Don't write flowId for Oneway responses
        if (isOnewayResponse(message)) {
            return;
        }
        // write FlowId to HTTP and Soap layer
        String flowId = FlowIdHelper.getFlowId(message);
        FlowIdProtocolHeaderCodec.writeFlowId(message, flowId);
        FlowIdSoapCodec.writeFlowId(message, flowId);
    }
}
public class HttpOutputStreamEE7 {
    /**
     * Write the given information to the output buffers.
     * If the stream went async during a flush, the remaining data is saved and the
     * loop stops; the completion callback writes the remainder later.
     *
     * @param value the bytes to write
     * @param start offset into value
     * @param len length from that offset to write
     * @throws IOException on flush failure
     */
    private void writeToBuffers(byte[] value, int start, int len) throws IOException {
        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
            Tr.debug(tc, "Writing " + len + ", buffered=" + this.bufferedCount);
        }
        if (value.length < (start + len)) {
            throw new IllegalArgumentException("Length outside value range");
        }
        this.writing = true;
        int remaining = len;
        int offset = start;
        while (0 < remaining) {
            // Async write in flight and not ready: stash the unwritten tail for the
            // completion callback and bail out of the loop.
            if ((_callback != null) && (!this.get_internalReady())) {
                // remaining is yet to write; save off the data and amount to write
                _remValue = new byte[remaining];
                if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                    Tr.debug(tc, "Save of bytesRemainingWhenAsync -->" + _bytesRemainingWhenAsync + ", value size -->" + value.length + ", remValue size -->" + _remValue.length);
                }
                // Copy under the lock and wake any waiter that needs the saved data
                synchronized (_lockObj) {
                    System.arraycopy(value, offset, _remValue, 0, remaining);
                    setDataSaved(true);
                    _lockObj.notifyAll();
                }
                break;
            }
            WsByteBuffer buffer = getBuffer();
            int avail = buffer.remaining();
            // Writing past a declared Content-Length: fill up to the limit, then throw
            if (contentLengthSet && bytesRemaining < bufferedCount + remaining) {
                // write what we can and throw an exception - it will be caught in servletWrapper
                int numberToWrite = (int) bytesRemaining - bufferedCount;
                boolean throwExceptionThisTime = true;
                if (numberToWrite > avail) {
                    // Current buffer can't even hold the allowed bytes; defer the throw
                    numberToWrite = avail;
                    throwExceptionThisTime = false;
                }
                this.bufferedCount += numberToWrite;
                buffer.put(value, offset, numberToWrite);
                remaining = remaining - numberToWrite;
                if (throwExceptionThisTime) {
                    throw new WriteBeyondContentLengthException();
                }
            }
            if (avail >= remaining) {
                // write all remaining data
                this.bufferedCount += remaining;
                buffer.put(value, offset, remaining);
                remaining = 0;
            } else {
                // write what we can
                this.bufferedCount += avail;
                buffer.put(value, offset, avail);
                offset += avail;
                remaining -= avail;
            }
            if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                Tr.debug(tc, "Writing " + len + ", buffered=" + this.bufferedCount + ", this.amountToBuffer=" + this.amountToBuffer);
                Tr.debug(tc, "buffer now -->" + buffer);
            }
            // Buffer quota reached: flush synchronously, or kick off an async flush
            // recording how much is still unwritten
            if (this.bufferedCount >= this.amountToBuffer) {
                this.ignoreFlush = false;
                if (_callback == null) {
                    flushBuffers();
                } else {
                    _bytesRemainingWhenAsync = remaining;
                    flushAsyncBuffers();
                }
            }
        }
    }
}
public class ConvolveImage {
    /**
     * Performs a 2D convolution across the image.
     * (Parameter order below matches the signature; the previous Javadoc listed them
     * in a different order.)
     *
     * @param kernel The kernel that is being convolved. Not modified.
     * @param input The original image. Not modified.
     * @param output Where the resulting image is written to. Modified.
     * @param border How the image borders are handled.
     */
    public static void convolve(Kernel2D_S32 kernel, GrayS32 input, GrayS32 output, ImageBorder_S32<GrayS32> border) {
        InputSanityCheck.checkSameShape(input, output);
        // Give a registered native/override implementation the chance to handle it first
        boolean processed = BOverrideConvolveImage.invokeNativeConvolve(kernel, input, output, border);
        if (!processed) {
            border.setImage(input);
            // Interior pixels first, then border pixels using the configured border rule
            ConvolveImageNoBorder.convolve(kernel, input, output);
            ConvolveJustBorder_General_SB.convolve(kernel, border, output);
        }
    }
}
public class RenderOptions { /** * Specifies alternative values to use for the root element { @ code viewBox } . Any existing { @ code viewBox }
* attribute value will be ignored .
* Note : will be overridden if a { @ link # view ( String ) } is set .
* @ param minX The left X coordinate of the viewBox
* @ param minY The top Y coordinate of the viewBox
* @ param width The width of the viewBox
* @ param height The height of the viewBox
* @ return this same < code > RenderOptions < / code > instance */
public RenderOptions viewBox ( float minX , float minY , float width , float height ) { } } | this . viewBox = new SVG . Box ( minX , minY , width , height ) ; return this ; |
public class DataFormatter {
    /**
     * Returns a signed string representation of the given number: zero and positive
     * values are prefixed with an explicit '+', negative values already carry '-'.
     *
     * @param number the value to format; must not be null
     * @return the signed string form of {@code number}
     */
    public static String formatBigDecimal(BigDecimal number) {
        String text = number.toString();
        return number.signum() < 0 ? text : "+" + text;
    }
}
public class BoxesRunTime { /** * arg . toLong */
public static java . lang . Long toLong ( Object arg ) throws NoSuchMethodException { } } | if ( arg instanceof java . lang . Integer ) return boxToLong ( ( long ) unboxToInt ( arg ) ) ; if ( arg instanceof java . lang . Double ) return boxToLong ( ( long ) unboxToDouble ( arg ) ) ; if ( arg instanceof java . lang . Float ) return boxToLong ( ( long ) unboxToFloat ( arg ) ) ; if ( arg instanceof java . lang . Long ) return ( java . lang . Long ) arg ; if ( arg instanceof java . lang . Character ) return boxToLong ( ( long ) unboxToChar ( arg ) ) ; if ( arg instanceof java . lang . Byte ) return boxToLong ( ( long ) unboxToByte ( arg ) ) ; if ( arg instanceof java . lang . Short ) return boxToLong ( ( long ) unboxToShort ( arg ) ) ; throw new NoSuchMethodException ( ) ; |
public class SequencingRunner { /** * Remove any existing nodes that were generated by previous sequencing operations of the node at the selected path .
* @ param parentOfOutput the parent of the output ; may not be null
* @ param outputNodeName the name of the output node ; may not be null or empty
* @ param selectedPath the path of the node that was selected for sequencing
* @ param logMsg the log message , or null if trace / debug logging is not being used ( this is passed in for efficiency reasons )
* @ throws RepositoryException if there is a problem accessing the repository content */
private void removeExistingOutputNodes ( AbstractJcrNode parentOfOutput , String outputNodeName , String selectedPath , String logMsg ) throws RepositoryException { } } | // Determine if there is an existing output node . . .
if ( TRACE ) { LOGGER . trace ( "Looking under '{0}' for existing output to be removed for {1}" , parentOfOutput . getPath ( ) , logMsg ) ; } NodeIterator outputIter = parentOfOutput . getNodesInternal ( outputNodeName ) ; while ( outputIter . hasNext ( ) ) { Node outputNode = outputIter . nextNode ( ) ; // See if this is indeed the output , which should have the ' mode : derived ' mixin . . .
if ( outputNode . isNodeType ( DERIVED_NODE_TYPE_NAME ) && outputNode . hasProperty ( DERIVED_FROM_PROPERTY_NAME ) ) { // See if it was an output for the same input node . . .
String derivedFrom = outputNode . getProperty ( DERIVED_FROM_PROPERTY_NAME ) . getString ( ) ; if ( selectedPath . equals ( derivedFrom ) ) { // Delete it . . .
if ( DEBUG ) { LOGGER . debug ( "Removing existing output node '{0}' for {1}" , outputNode . getPath ( ) , logMsg ) ; } outputNode . remove ( ) ; } } } |
public class VMath {
    /**
     * Multiply component-wise v1 = v1 * s, overwriting the vector v1.
     *
     * @param v1 original vector, scaled in place
     * @param s  scalar
     * @return v1 after scaling (the same array instance, for chaining)
     */
    public static double[] timesEquals(final double[] v1, final double s) {
        // Iterate backwards; element order is irrelevant for independent scaling.
        for (int i = v1.length; --i >= 0;) {
            v1[i] *= s;
        }
        return v1;
    }
}
public class ProvidenceConfigParser { /** * Parse a providence config into a message .
* @ param configFile The config file to be parsed .
* @ param parent The parent config message .
* @ param < M > The config message type .
* @ param < F > The config field type .
* @ return Pair of parsed config and set of included file paths .
* @ throws ProvidenceConfigException If parsing failed . */
@ Nonnull < M extends PMessage < M , F > , F extends PField > Pair < M , Set < String > > parseConfig ( @ Nonnull Path configFile , @ Nullable M parent ) throws ProvidenceConfigException { } } | try { configFile = canonicalFileLocation ( configFile ) ; } catch ( IOException e ) { throw new ProvidenceConfigException ( e , "Unable to resolve config file " + configFile ) . setFile ( configFile . getFileName ( ) . toString ( ) ) ; } Pair < M , Set < String > > result = checkAndParseInternal ( configFile , parent ) ; if ( result == null ) { throw new ProvidenceConfigException ( "No config: " + configFile . toString ( ) ) . setFile ( configFile . getFileName ( ) . toString ( ) ) ; } return result ; |
public class EJBWrapper { /** * F743-1756 */
private static void loadWrapperBase ( GeneratorAdapter mg , String className , EJBWrapperType wrapperType ) { } } | mg . loadThis ( ) ; if ( wrapperType == LOCAL_BEAN || wrapperType == MANAGED_BEAN ) { mg . visitFieldInsn ( GETFIELD , className , LOCAL_BEAN_WRAPPER_FIELD , LOCAL_BEAN_WRAPPER_FIELD_TYPE ) ; } else if ( wrapperType == MDB_NO_METHOD_INTERFACE_PROXY ) { mg . visitFieldInsn ( GETFIELD , className , MESSAGE_ENDPOINT_BASE_FIELD , MESSAGE_ENDPOINT_BASE_FIELD_TYPE ) ; } |
public class ReflectedHeap { /** * Delete an element
* @ param n
* a handle to the element */
private void delete ( ReflectedHandle < K , V > n ) { } } | if ( n . inner == null && free != n ) { throw new IllegalArgumentException ( "Invalid handle!" ) ; } if ( free == n ) { free = null ; } else { // delete from inner queue
AddressableHeap . Handle < K , HandleMap < K , V > > nInner = n . inner ; ReflectedHandle < K , V > nOuter = nInner . getValue ( ) . outer ; nInner . delete ( ) ; nOuter . inner = null ; nOuter . minNotMax = false ; // delete pair from inner queue
AddressableHeap . Handle < K , HandleMap < K , V > > otherInner = nInner . getValue ( ) . otherInner ; ReflectedHandle < K , V > otherOuter = otherInner . getValue ( ) . outer ; otherInner . delete ( ) ; otherOuter . inner = null ; otherOuter . minNotMax = false ; // reinsert either as free or as pair with free
if ( free == null ) { free = otherOuter ; } else { insertPair ( otherOuter , free ) ; free = null ; } } size -- ; |
public class TrifocalTransfer { /** * Specify the trifocaltensor
* @ param tensor tensor */
public void setTrifocal ( TrifocalTensor tensor ) { } } | this . tensor = tensor ; extract . setTensor ( tensor ) ; extract . extractFundmental ( F21 , F31 ) ; |
public class GradientPrewitt { /** * Computes the derivative in the X and Y direction using a floating point Prewitt edge detector .
* @ param orig Input image . Not modified .
* @ param derivX Storage for image derivative along the x - axis . Modified .
* @ param derivY Storage for image derivative along the y - axis . Modified .
* @ param border Specifies how the image border is handled . If null the border is not processed . */
public static void process ( GrayF32 orig , GrayF32 derivX , GrayF32 derivY , @ Nullable ImageBorder_F32 border ) { } } | InputSanityCheck . reshapeOneIn ( orig , derivX , derivY ) ; if ( BoofConcurrency . USE_CONCURRENT ) { GradientPrewitt_Shared_MT . process ( orig , derivX , derivY ) ; } else { GradientPrewitt_Shared . process ( orig , derivX , derivY ) ; } if ( border != null ) { border . setImage ( orig ) ; ConvolveJustBorder_General_SB . convolve ( kernelDerivX_F32 , border , derivX ) ; ConvolveJustBorder_General_SB . convolve ( kernelDerivY_F32 , border , derivY ) ; } |
public class KeyChainGroup { /** * Returns a key that hasn ' t been seen in a transaction yet , and which is suitable for displaying in a wallet
* user interface as " a convenient key to receive funds on " when the purpose parameter is
* { @ link KeyChain . KeyPurpose # RECEIVE _ FUNDS } . The returned key is stable until
* it ' s actually seen in a pending or confirmed transaction , at which point this method will start returning
* a different key ( for each purpose independently ) .
* < p > This method is not supposed to be used for married keychains and will throw UnsupportedOperationException if
* the active chain is married .
* For married keychains use { @ link # currentAddress ( KeyChain . KeyPurpose ) }
* to get a proper P2SH address < / p > */
public DeterministicKey currentKey ( KeyChain . KeyPurpose purpose ) { } } | DeterministicKeyChain chain = getActiveKeyChain ( ) ; if ( chain . isMarried ( ) ) { throw new UnsupportedOperationException ( "Key is not suitable to receive coins for married keychains." + " Use freshAddress to get P2SH address instead" ) ; } DeterministicKey current = currentKeys . get ( purpose ) ; if ( current == null ) { current = freshKey ( purpose ) ; currentKeys . put ( purpose , current ) ; } return current ; |
public class DefaultHttp2RemoteFlowController { /** * { @ inheritDoc }
* Any queued { @ link FlowControlled } objects will be sent . */
@ Override public void channelHandlerContext ( ChannelHandlerContext ctx ) throws Http2Exception { } } | this . ctx = checkNotNull ( ctx , "ctx" ) ; // Writing the pending bytes will not check writability change and instead a writability change notification
// to be provided by an explicit call .
channelWritabilityChanged ( ) ; // Don ' t worry about cleaning up queued frames here if ctx is null . It is expected that all streams will be
// closed and the queue cleanup will occur when the stream state transitions occur .
// If any frames have been queued up , we should send them now that we have a channel context .
if ( isChannelWritable ( ) ) { writePendingBytes ( ) ; } |
public class ObjectFactory { /** * Create an instance of { @ link JAXBElement } { @ code < }
* { @ link CmisExtensionType } { @ code > } */
@ XmlElementDecl ( namespace = "http://docs.oasis-open.org/ns/cmis/messaging/200908/" , name = "extension" , scope = GetObjectRelationships . class ) public JAXBElement < CmisExtensionType > createGetObjectRelationshipsExtension ( CmisExtensionType value ) { } } | return new JAXBElement < CmisExtensionType > ( _GetPropertiesExtension_QNAME , CmisExtensionType . class , GetObjectRelationships . class , value ) ; |
public class AmazonEC2Client { /** * Describes the regions that are currently available to you .
* For a list of the regions supported by Amazon EC2 , see < a
* href = " https : / / docs . aws . amazon . com / general / latest / gr / rande . html # ec2 _ region " > Regions and Endpoints < / a > .
* @ param describeRegionsRequest
* @ return Result of the DescribeRegions operation returned by the service .
* @ sample AmazonEC2 . DescribeRegions
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / ec2-2016-11-15 / DescribeRegions " target = " _ top " > AWS API
* Documentation < / a > */
@ Override public DescribeRegionsResult describeRegions ( DescribeRegionsRequest request ) { } } | request = beforeClientExecution ( request ) ; return executeDescribeRegions ( request ) ; |
public class RegexUtils { /** * Performs a wild - card matching for the text and pattern provided .
* @ param text
* the text to be tested for matches .
* @ param pattern
* the pattern to be matched for . This can contain the wildcard character ' * ' ( asterisk ) .
* @ return < tt > true < / tt > if a match is found , < tt > false < / tt > otherwise . */
public static boolean wildCardMatch ( String text , String pattern ) { } } | logger . entering ( new Object [ ] { text , pattern } ) ; Preconditions . checkArgument ( text != null , "The text on which the search is to be run cannot be null." ) ; Preconditions . checkArgument ( pattern != null , "The search pattern cannot be null." ) ; // Create the cards by splitting using a RegEx . If more speed
// is desired , a simpler character based splitting can be done .
String [ ] cards = pattern . split ( "\\*" ) ; // Iterate over the cards .
for ( String card : cards ) { int idx = text . indexOf ( card ) ; // Card not detected in the text .
if ( idx == - 1 ) { logger . exiting ( false ) ; return false ; } // Move ahead , towards the right of the text .
text = text . substring ( idx + card . length ( ) ) ; } logger . exiting ( true ) ; return true ; |
public class FilterChain { /** * 获取真正的过滤器列表
* @ param config provider配置或者consumer配置
* @ param autoActiveFilters 系统自动激活的过滤器映射
* @ return 真正的过滤器列表 */
private static List < Filter > selectActualFilters ( AbstractInterfaceConfig config , Map < String , ExtensionClass < Filter > > autoActiveFilters ) { } } | /* * 例如自动装载扩展 A ( a ) , B ( b ) , C ( c ) filter = [ - a , d ] filterRef = [ new E , new Exclude ( b ) ]
* 逻辑如下 :
* 1 . 解析config . getFilterRef ( ) , 记录E和 - b
* 2 . 解析config . getFilter ( ) 字符串 , 记录 d 和 - a , - b
* 3 . 再解析自动装载扩展 , a , b被排除了 , 所以拿到c , d
* 4 . 对c d进行排序
* 5 . 拿到C 、 D实现类
* 6 . 加上自定义 , 返回C 、 D 、 E */
// 用户通过自己new实例的方式注入的filter , 优先级高
List < Filter > customFilters = config . getFilterRef ( ) == null ? new ArrayList < Filter > ( ) : new CopyOnWriteArrayList < Filter > ( config . getFilterRef ( ) ) ; // 先解析是否有特殊处理
HashSet < String > excludes = parseExcludeFilter ( customFilters ) ; // 准备数据 : 用户通过别名的方式注入的filter , 需要解析
List < ExtensionClass < Filter > > extensionFilters = new ArrayList < ExtensionClass < Filter > > ( ) ; List < String > filterAliases = config . getFilter ( ) ; if ( CommonUtils . isNotEmpty ( filterAliases ) ) { for ( String filterAlias : filterAliases ) { if ( startsWithExcludePrefix ( filterAlias ) ) { // 排除用的特殊字符
excludes . add ( filterAlias . substring ( 1 ) ) ; } else { ExtensionClass < Filter > filter = EXTENSION_LOADER . getExtensionClass ( filterAlias ) ; if ( filter != null ) { extensionFilters . add ( filter ) ; } } } } // 解析自动加载的过滤器
if ( ! excludes . contains ( StringUtils . ALL ) && ! excludes . contains ( StringUtils . DEFAULT ) ) { // 配了 - * 和 - default表示不加载内置
for ( Map . Entry < String , ExtensionClass < Filter > > entry : autoActiveFilters . entrySet ( ) ) { if ( ! excludes . contains ( entry . getKey ( ) ) ) { extensionFilters . add ( entry . getValue ( ) ) ; } } } // 按order从小到大排序
if ( extensionFilters . size ( ) > 1 ) { Collections . sort ( extensionFilters , new OrderedComparator < ExtensionClass < Filter > > ( ) ) ; } List < Filter > actualFilters = new ArrayList < Filter > ( ) ; for ( ExtensionClass < Filter > extensionFilter : extensionFilters ) { actualFilters . add ( extensionFilter . getExtInstance ( ) ) ; } // 加入自定义的过滤器
actualFilters . addAll ( customFilters ) ; return actualFilters ; |
public class RetryingBlockFetcher { /** * Fires off a request to fetch all blocks that have not been fetched successfully or permanently
* failed ( i . e . , by a non - IOException ) . */
private void fetchAllOutstanding ( ) { } } | // Start by retrieving our shared state within a synchronized block .
String [ ] blockIdsToFetch ; int numRetries ; RetryingBlockFetchListener myListener ; synchronized ( this ) { blockIdsToFetch = outstandingBlocksIds . toArray ( new String [ outstandingBlocksIds . size ( ) ] ) ; numRetries = retryCount ; myListener = currentListener ; } // Now initiate the fetch on all outstanding blocks , possibly initiating a retry if that fails .
try { fetchStarter . createAndStart ( blockIdsToFetch , myListener ) ; } catch ( Exception e ) { logger . error ( String . format ( "Exception while beginning fetch of %s outstanding blocks %s" , blockIdsToFetch . length , numRetries > 0 ? "(after " + numRetries + " retries)" : "" ) , e ) ; if ( shouldRetry ( e ) ) { initiateRetry ( ) ; } else { for ( String bid : blockIdsToFetch ) { listener . onBlockFetchFailure ( bid , e ) ; } } } |
public class GedWriter { /** * Write the file as directed . */
public void write ( ) { } } | root . accept ( visitor ) ; try { Backup . backup ( root . getFilename ( ) ) ; } catch ( IOException e ) { logger . error ( "Problem backing up old copy of GEDCOM file" , e ) ; } final String filename = root . getFilename ( ) ; final String charset = new CharsetScanner ( ) . charset ( root ) ; try ( FileOutputStream fstream = new FileOutputStream ( filename ) ; BufferedOutputStream bstream = new BufferedOutputStream ( fstream ) ) { writeTheLines ( bstream , charset ) ; } catch ( IOException e ) { logger . error ( "Problem writing GEDCOM file" , e ) ; } |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.