signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class OnePassRealValueDataIndexer {

  /**
   * Updates the predicate set and counters with the features of one event context.
   *
   * @param ec           the contexts/features which occur in an event
   * @param predicateSet the set of predicates which will be used for model building;
   *                     predicates reaching the cutoff are added in place
   * @param counter      running per-predicate occurrence counts; updated in place
   * @param cutoff       minimum count a predicate needs before it is included
   */
  protected static void update(String[] ec, Set<String> predicateSet,
      Map<String, Integer> counter, int cutoff) {
    for (String s : ec) {
      // merge() replaces the get / null-check / put sequence with a single map update.
      int val = counter.merge(s, 1, Integer::sum);
      if (val >= cutoff && !predicateSet.contains(s)) {
        predicateSet.add(s);
      }
    }
  }
}
public class PropertyDoc { /** * Overridden to check both the read and write MethodDocs for the tag */ public String getTagValue ( String tagName ) { } }
String value = null ; if ( mReadMethod != null ) { value = mReadMethod . getTagValue ( tagName ) ; } if ( value == null && mWriteMethod != null ) { value = mWriteMethod . getTagValue ( tagName ) ; } return value ;
public class HttpRequest { /** * Append given name / value pairs as query parameters to the base URL * The params argument is interpreted as a sequence of name / value pairs so the * given number of params must be divisible by 2. * @ param url * @ param params * name / value pairs * @ return URL with appended query params */ public static String append ( final CharSequence url , final Object ... params ) { } }
final String baseUrl = url . toString ( ) ; if ( params == null || params . length == 0 ) return baseUrl ; if ( params . length % 2 != 0 ) throw new IllegalArgumentException ( "Must specify an even number of parameter names/values" ) ; final StringBuilder result = new StringBuilder ( baseUrl ) ; addPathSeparator ( baseUrl , result ) ; addParamPrefix ( baseUrl , result ) ; addParam ( params [ 0 ] , params [ 1 ] , result ) ; for ( int i = 2 ; i < params . length ; i += 2 ) { result . append ( '&' ) ; addParam ( params [ i ] , params [ i + 1 ] , result ) ; } return result . toString ( ) ;
public class ApiOvhPartners { /** * Get information on a created company * REST : GET / partners / register / company / { companyId } * @ param companyId [ required ] Company ' s id */ public OvhCompany register_company_companyId_GET ( String companyId ) throws IOException { } }
String qPath = "/partners/register/company/{companyId}" ; StringBuilder sb = path ( qPath , companyId ) ; String resp = exec ( qPath , "GET" , sb . toString ( ) , null ) ; return convertTo ( resp , OvhCompany . class ) ;
public class PowerShell {

  /**
   * Closes all the resources used to maintain the PowerShell context.
   * Attempts a graceful "exit" first; if that times out, force-kills the
   * process, then closes streams and shuts down the worker thread pool.
   */
  @Override
  public void close() {
    if (!this.closed) {
      try {
        // Ask PowerShell to exit and wait for the process from a worker thread.
        Future<String> closeTask = threadpool.submit(() -> {
          commandWriter.println("exit");
          p.waitFor();
          return "OK";
        });
        if (!closeAndWait(closeTask) && this.pid > 0) {
          // If it can be closed, force kill the process
          Logger.getLogger(PowerShell.class.getName()).log(Level.INFO,
              "Forcing PowerShell to close. PID: " + this.pid);
          try {
            // NOTE(review): Runtime.exec(String) with concatenated arguments —
            // consider ProcessBuilder(List<String>); confirm pid cannot be tampered with.
            Runtime.getRuntime().exec("taskkill.exe /PID " + pid + " /F /T");
            this.closed = true;
          } catch (IOException e) {
            Logger.getLogger(PowerShell.class.getName()).log(Level.SEVERE,
                "Unexpected error while killing powershell process", e);
          }
        }
      } catch (InterruptedException | ExecutionException ex) {
        // NOTE(review): InterruptedException is logged but the interrupt flag is
        // not restored — consider Thread.currentThread().interrupt().
        logger.log(Level.SEVERE, "Unexpected error when when closing PowerShell", ex);
      } finally {
        // Always release the writer, streams, and the thread pool, then mark closed.
        commandWriter.close();
        try {
          if (p.isAlive()) {
            p.getInputStream().close();
          }
        } catch (IOException ex) {
          logger.log(Level.SEVERE, "Unexpected error when when closing streams", ex);
        }
        if (this.threadpool != null) {
          try {
            this.threadpool.shutdownNow();
            this.threadpool.awaitTermination(5, TimeUnit.SECONDS);
          } catch (InterruptedException ex) {
            logger.log(Level.SEVERE, "Unexpected error when when shutting down thread pool", ex);
          }
        }
        this.closed = true;
      }
    }
  }
}
public class DependencyGraph { /** * Adds data which is useful for the time when the dependency graph is built up . * All this data will be cleaned once the dependency graph creation has finished . */ public < T > void putComputationalData ( Class < T > key , T value ) { } }
this . computationalData . put ( key , value ) ;
public class RuleBasedTimeZone {

  /**
   * {@inheritDoc}
   */
  @Override
  public boolean observesDaylightTime() {
    long time = System.currentTimeMillis();
    // Check if daylight saving time is observed now.
    int[] offsets = new int[2];
    getOffset(time, false, offsets);
    if (offsets[1] != 0) {
      return true;
    }
    // If DST is not used now, check if DST is used after each transition.
    // Track which final (recurring) rules we have already seen so the scan
    // can terminate once all of them were visited without finding DST.
    BitSet checkFinals = finalRules == null ? null : new BitSet(finalRules.length);
    while (true) {
      TimeZoneTransition tt = getNextTransition(time, false);
      if (tt == null) {
        // no more transition
        break;
      }
      TimeZoneRule toRule = tt.getTo();
      if (toRule.getDSTSavings() != 0) {
        return true;
      }
      if (checkFinals != null) {
        // final rules exist - check if we saw all of them
        for (int i = 0; i < finalRules.length; i++) {
          if (finalRules[i].equals(toRule)) {
            checkFinals.set(i);
          }
        }
        if (checkFinals.cardinality() == finalRules.length) {
          // already saw all final rules
          break;
        }
      }
      // Advance past this transition and keep scanning forward in time.
      time = tt.getTime();
    }
    return false;
  }
}
public class SweepHullDelaunay2D {

  /**
   * Run the actual sweep-hull algorithm: seed a first triangle, grow the
   * convex hull point by point (sorted by distance from the seed triangle's
   * circumcircle center), and finally flip triangles toward a Delaunay
   * triangulation.
   *
   * @param hullonly when true, only the convex hull is maintained and no
   *                 triangulation is stored
   */
  void run(boolean hullonly) {
    if (points.size() < 3) {
      throw new UnsupportedOperationException("There is no delaunay triangulation for less than three objects!");
    }
    int len = points.size() - 1;
    hull = new LinkedList<>();
    tris = hullonly ? null : new ArrayList<Triangle>(len);
    // 1. Seed point x_0
    final double[] seed;
    final int seedid = 0;
    final double[] sortd = new double[len];
    final int[] sorti = new int[len];
    Arrays.fill(sorti, -42); // To cause errors
    // TODO: remove duplicates.
    // 2. sort by squared Euclidean distance
    {
      Iterator<double[]> iter = points.iterator();
      seed = iter.next();
      for (int i = 0, j = 1; iter.hasNext(); j++, i++) {
        double dist = quadraticEuclidean(seed, iter.next());
        if (dist <= 0.) {
          // Duplicate.
          --len; // Decrease candidate set size
          --i; // Increase j, but not i.
          continue;
        }
        sortd[i] = dist;
        sorti[i] = j;
      }
      DoubleIntegerArrayQuickSort.sort(sortd, sorti, len);
    }
    // Detect some degenerate situations:
    if (len < 2) {
      hull.add(new IntIntPair(seedid, -1));
      if (len == 1) {
        hull.add(new IntIntPair(sorti[0], -1));
      }
      return;
    }
    assert (sortd[0] > 0);
    // final double[] seed2 = points.get(sort[0].second);
    final int seed2id = sorti[0];
    // 3. Find minimal triangle for these two points:
    Triangle besttri = findSmallest(seedid, seed2id, sortd, sorti, len);
    if (besttri == null) {
      // Degenerate
      hull.add(new IntIntPair(seedid, -1));
      hull.add(new IntIntPair(seed2id, -1));
      return;
    }
    // Note: sortd no longer accurate, recompute below!
    int start = 2; // First two points have already been processed.
    // 5. Make right-handed:
    besttri.makeClockwise(points);
    // Seed triangulation
    if (!hullonly) {
      tris.add(besttri);
    }
    // Seed convex hull (point, triangle)
    hull.add(new IntIntPair(besttri.a, 0));
    hull.add(new IntIntPair(besttri.b, 0));
    hull.add(new IntIntPair(besttri.c, 0));
    if (LOG.isDebuggingFinest()) {
      debugHull();
    }
    // 6. Resort from triangle circumcircle center
    double[] center = besttri.m;
    for (int i = start; i < len; i++) {
      sortd[i] = quadraticEuclidean(center, points.get(sorti[i]));
    }
    DoubleIntegerArrayQuickSort.sort(sortd, sorti, start, len);
    // Grow hull and triangles
    for (int i = start; i < len; i++) {
      final int pointId = sorti[i];
      final double[] newpoint = points.get(pointId);
      LinkedList<Triangle> newtris = hullonly ? null : new LinkedList<Triangle>();
      // We identify edges by their starting point. -1 is invalid.
      int hstart = -1, hend = -1;
      // Find first and last consecutive visible edge, backwards:
      {
        Iterator<IntIntPair> iter = hull.descendingIterator();
        IntIntPair next = hull.getFirst();
        double[] nextV = points.get(next.first);
        for (int pos = hull.size() - 1; iter.hasNext(); pos--) {
          IntIntPair prev = iter.next();
          double[] prevV = points.get(prev.first);
          // Not yet visible:
          if (hend < 0) {
            if (leftOf(prevV, nextV, newpoint)) {
              hstart = hend = pos;
              if (!hullonly) {
                // Clockwise, A is new point!
                Triangle tri = new Triangle(pointId, next.first, prev.first);
                assert (tri.isClockwise(points));
                assert (prev.second >= 0);
                tri.updateCircumcircle(points);
                tri.bc = prev.second;
                newtris.addFirst(tri);
              }
            }
          } else {
            if (leftOf(prevV, nextV, newpoint)) {
              hstart = pos;
              // Add triad:
              if (!hullonly) {
                // Clockwise, A is new point!
                Triangle tri = new Triangle(pointId, next.first, prev.first);
                assert (tri.isClockwise(points));
                assert (prev.second >= 0);
                tri.updateCircumcircle(points);
                tri.bc = prev.second;
                newtris.addFirst(tri);
              }
            } else {
              break;
            }
          }
          next = prev;
          nextV = prevV;
        }
      }
      // If the last edge was visible, we also need to scan forwards:
      if (hend == hull.size() - 1) {
        Iterator<IntIntPair> iter = hull.iterator();
        IntIntPair prev = iter.next();
        double[] prevV = points.get(prev.first);
        while (iter.hasNext()) {
          IntIntPair next = iter.next();
          double[] nextV = points.get(next.first);
          if (leftOf(prevV, nextV, newpoint)) {
            hend++;
            // Add triad:
            if (!hullonly) {
              // Clockwise, A is new point!
              Triangle tri = new Triangle(pointId, next.first, prev.first);
              assert (tri.isClockwise(points));
              assert (prev.second >= 0);
              tri.updateCircumcircle(points);
              tri.bc = prev.second;
              newtris.addLast(tri);
            }
          } else {
            break;
          }
          prev = next;
          prevV = nextV;
        }
      }
      assert (hstart >= 0 && hend >= hstart);
      // Note that hend can be larger than hull.size() now, interpret as
      // "hend % hull.size()"
      // Update hull, remove points
      final int firsttri, lasttri;
      if (hullonly) {
        firsttri = lasttri = -1;
      } else {
        final int tristart = tris.size();
        firsttri = tristart;
        lasttri = tristart + newtris.size() - 1;
      }
      final int hullsize = hull.size();
      if (LOG.isDebuggingFinest()) {
        LOG.debugFinest("Size: " + hullsize + " start: " + hstart + " end: " + hend);
      }
      if (hend < hullsize) {
        // Visible range does not wrap around the hull list.
        ListIterator<IntIntPair> iter = hull.listIterator();
        int p = 0;
        // Skip
        for (; p <= hstart; p++) {
          iter.next();
        }
        // Remove
        for (; p <= hend; p++) {
          iter.next();
          iter.remove();
        }
        // Insert, and update edge->triangle mapping
        iter.add(new IntIntPair(pointId, lasttri));
        iter.previous();
        if (!hullonly) {
          (iter.hasPrevious() ? iter.previous() : hull.getLast()).second = firsttri;
        }
      } else {
        // Visible range wraps past the end of the hull list.
        ListIterator<IntIntPair> iter = hull.listIterator();
        // Remove end
        int p = hullsize;
        for (; p <= hend; p++) {
          iter.next();
          iter.remove();
        }
        // Insert
        iter.add(new IntIntPair(pointId, lasttri));
        // Wrap around
        p -= hullsize;
        IntIntPair pre = null;
        for (; p <= hstart; p++) {
          pre = iter.next();
        }
        assert (pre != null);
        pre.second = firsttri;
        // Remove remainder
        while (iter.hasNext()) {
          iter.next();
          iter.remove();
        }
      }
      if (LOG.isDebuggingFinest()) {
        debugHull();
      }
      if (!hullonly) {
        final int tristart = tris.size();
        // Connect triads (they are ordered)
        Iterator<Triangle> iter = newtris.iterator();
        for (int o = 0; iter.hasNext(); o++) {
          // This triangle has num tristart + o.
          Triangle cur = iter.next();
          cur.ca = o > 0 ? tristart + o - 1 : -1; // previously added triangle
          cur.ab = iter.hasNext() ? tristart + o + 1 : -1; // next triangle
          // cur.bc was set upon creation
          assert (cur.bc >= 0);
          Triangle other = tris.get(cur.bc);
          Orientation orient = cur.findOrientation(other);
          assert (orient != null) : "Inconsistent triangles: " + cur + " " + other;
          switch (orient) {
          case ORIENT_BC_BA:
            assert (other.ab == -1) : "Inconsistent triangles: " + cur + " " + other;
            other.ab = tristart + o;
            break;
          case ORIENT_BC_CB:
            assert (other.bc == -1) : "Inconsistent triangles: " + cur + " " + other;
            other.bc = tristart + o;
            break;
          case ORIENT_BC_AC:
            assert (other.ca == -1) : "Inconsistent triangles: " + cur + " " + other;
            other.ca = tristart + o;
            break;
          default:
            assert (cur.isClockwise(points));
            assert (other.isClockwise(points));
            throw new RuntimeException("Inconsistent triangles: " + cur + " " + other + " size:" + tris.size());
          }
          tris.add(cur);
        }
        assert (tris.size() == lasttri + 1);
      }
    }
    // Now check for triangles that need flipping.
    if (!hullonly) {
      final int size = tris.size();
      long[] flippedA = BitsUtil.zero(size), flippedB = BitsUtil.zero(size);
      // Initial flip
      if (flipTriangles(flippedA) > 0) {
        // Alternate between the two flip sets; bounded to avoid infinite loops.
        for (int iterations = 1; iterations < 1000; iterations += 2) {
          if (LOG.isDebuggingFinest()) {
            debugHull();
          }
          if (flipTriangles(flippedA, flippedB) == 0) {
            break;
          }
          if (LOG.isDebuggingFinest()) {
            debugHull();
          }
          if (flipTriangles(flippedB, flippedA) == 0) {
            break;
          }
        }
      }
    }
  }
}
public class UtlInvLine { /** * < p > Reveal shared tax rules for invoice . . < / p > * @ param pReqVars request scoped vars * @ param pInv invoice * @ param pAs Accounting Settings * @ param pIsExtrTx if extract taxes * @ return tax rules , NULL if not taxable * @ throws Exception - an exception . */ public final TaxDestination revealTaxRules ( final Map < String , Object > pReqVars , final IInvoice pInv , final AccSettings pAs , final Boolean pIsExtrTx ) throws Exception { } }
return this . utlInvBase . revealTaxRules ( pReqVars , pInv , pAs , pIsExtrTx ) ;
public class SimpleSelect { /** * Performs a bulk select of consecutive ranks into a given array fragment . * @ param rank the first rank to select . * @ param dest the destination array ; it will be filled with { @ code length } positions of consecutive bits starting at position { @ code offset } . * @ param offset the first bit position written in { @ code dest } . * @ param length the number of bit positions in { @ code dest } starting at { @ code offset } . * @ return { @ code dest } * @ see # select ( long , long [ ] ) */ public long [ ] select ( long rank , long [ ] dest , final int offset , final int length ) { } }
if ( length == 0 ) return dest ; final long s = select ( rank ) ; dest [ offset ] = s ; int curr = ( int ) ( s / Long . SIZE ) ; long window = bits [ curr ] & - 1L << s ; window &= window - 1 ; for ( int i = 1 ; i < length ; i ++ ) { while ( window == 0 ) window = bits [ ++ curr ] ; dest [ offset + i ] = curr * Long . SIZE + Long . numberOfTrailingZeros ( window ) ; window &= window - 1 ; } return dest ;
public class IsoInterval { /** * < p > Yields the best available format pattern . < / p > * @ param printer chronological component printer * @ return localized format pattern for intervals * @ since 3.9/4.6 */ static String getIntervalPattern ( ChronoPrinter < ? > printer ) { } }
AttributeQuery attrs = printer . getAttributes ( ) ; if ( attrs . contains ( Attributes . LANGUAGE ) ) { Locale locale = attrs . get ( Attributes . LANGUAGE ) ; return CalendarText . patternForInterval ( locale ) ; } return "{0}/{1}" ;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class MergeLists {

  /**
   * Merges three provided lists of the same length in an interleaving pattern.
   * Example:
   *   mergeLists(Arrays.asList(1, 2, 3), Arrays.asList(10, 20, 30), Arrays.asList(100, 200, 300))
   *   Output: [1, 10, 100, 2, 20, 200, 3, 30, 300]
   *   mergeLists(Arrays.asList(10, 20), Arrays.asList(15, 2), Arrays.asList(5, 10))
   *   Output: [10, 15, 5, 20, 2, 10]
   *
   * @param lst1 first list
   * @param lst2 second list (same length as lst1)
   * @param lst3 third list (same length as lst1)
   * @return a new list interleaving the three inputs element by element
   */
  public static List<Integer> mergeLists(List<Integer> lst1, List<Integer> lst2, List<Integer> lst3) {
    // Presize: the result always holds exactly 3 * n elements.
    List<Integer> mergedList = new ArrayList<>(3 * lst1.size());
    for (int i = 0; i < lst1.size(); i++) {
      mergedList.add(lst1.get(i));
      mergedList.add(lst2.get(i));
      mergedList.add(lst3.get(i));
    }
    return mergedList;
  }
}
public class CodeGenerator {

  /**
   * (non-Javadoc)
   * @see com.baidu.bjf.remoting.protobuf.code.ICodeGenerator#getCode()
   */
  @Override
  public String getCode() {
    // Lazily resolve field metadata on first use.
    if (fields == null) {
      fields = fetchFieldInfos();
    }
    String className = getClassName();
    ClassCode code = new ClassCode(ClassCode.SCOPE_PUBLIC, className);
    // to implements Codec interface
    code.addInteface(Codec.class.getName() + "<" + ClassHelper.getInternalName(cls.getCanonicalName()) + ">");
    // package
    code.setPkg(getPackage());
    // import classes
    genImportCode(code);
    // define Descriptor field
    String descriptorClsName = ClassHelper.getInternalName(Descriptor.class.getCanonicalName());
    code.addField(ClassCode.SCOPE_DEFAULT, descriptorClsName, "descriptor", null);
    // define class: generate each codec method and assemble the source text.
    code.addMethod(getEncodeMethodCode());
    code.addMethod(getDecodeMethodCode());
    code.addMethod(getSizeMethodCode());
    code.addMethod(getWriteToMethodCode());
    code.addMethod(getReadFromMethodCode());
    code.addMethod(getGetDescriptorMethodCode());
    return code.toCode();
  }
}
public class Strman { /** * This method returns the index within the calling String object of the last occurrence of the specified value , searching backwards from the offset . * Returns - 1 if the value is not found . * @ param value The input String * @ param needle The search String * @ param offset The index to start search from * @ param caseSensitive whether search should be case sensitive * @ return Return position of the last occurrence of ' needle ' . */ public static int lastIndexOf ( final String value , final String needle , final int offset , final boolean caseSensitive ) { } }
validate ( value , NULL_STRING_PREDICATE , NULL_STRING_MSG_SUPPLIER ) ; validate ( needle , NULL_STRING_PREDICATE , NULL_STRING_MSG_SUPPLIER ) ; if ( caseSensitive ) { return value . lastIndexOf ( needle , offset ) ; } return value . toLowerCase ( ) . lastIndexOf ( needle . toLowerCase ( ) , offset ) ;
public class AffineGapAlignmentScoring { /** * Returns Nucleotide BLAST scoring * @ param gapOpenPenalty penalty for opening gap to be used in system * @ param gapExtensionPenalty penalty for extending gap to be used in system * @ return Nucleotide BLAST scoring */ public static AffineGapAlignmentScoring < NucleotideSequence > getNucleotideBLASTScoring ( int gapOpenPenalty , int gapExtensionPenalty ) { } }
return new AffineGapAlignmentScoring < > ( NucleotideSequence . ALPHABET , 5 , - 4 , gapOpenPenalty , gapExtensionPenalty ) ;
public class TableRef { /** * Applies a filter to the table reference . When fetched , it will return the non null values . * < pre > * StorageRef storage = new StorageRef ( " your _ app _ key " , " your _ token " ) ; * TableRef tableRef = storage . table ( " your _ table " ) ; * / / Retrieve all items where their " itemProperty " value is not null * tableRef . notNull ( " itemProperty " ) . getItems ( new OnItemSnapshot ( ) { * & # 064 ; Override * public void run ( ItemSnapshot itemSnapshot ) { * if ( itemSnapshot ! = null ) { * Log . d ( " TableRef " , " Item retrieved : " + itemSnapshot . val ( ) ) ; * } , new OnError ( ) { * & # 064 ; Override * public void run ( Integer code , String errorMessage ) { * Log . e ( " TableRef " , " Error retrieving items : " + errorMessage ) ; * < / pre > * @ param attributeName * The name of the property to filter . * @ return Current table reference */ public TableRef notNull ( String attributeName ) { } }
filters . add ( new Filter ( StorageFilter . NOTNULL , attributeName , null , null ) ) ; return this ;
public class ElementWithCardinalityImpl {

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public void setCardinality(String newCardinality) {
    // EMF-generated setter: remember the old value, store the new one, then
    // notify adapters only when somebody is listening.
    String oldCardinality = cardinality;
    cardinality = newCardinality;
    if (eNotificationRequired())
      eNotify(new ENotificationImpl(this, Notification.SET,
          SimpleAntlrPackage.ELEMENT_WITH_CARDINALITY__CARDINALITY, oldCardinality, cardinality));
  }
}
public class XmlStringTools { /** * Add a cdata section to a StringBuffer . * If the buffer is null , a new one is created . * @ param buffer * StringBuffer to fill * @ param cdataContent * the cdata content * @ return the buffer */ private static StringBuffer doAppendCdataSection ( StringBuffer buffer , String cdataContent ) { } }
buffer . append ( SEQUENCE__CDATA__OPEN ) . append ( cdataContent ) . append ( SEQUENCE__CDATA__CLOSE ) ; return buffer ;
public class PeriodDuration { /** * Subtracts this amount from the specified temporal object . * This returns a temporal object of the same observable type as the input * with this amount subtracted . This simply subtracts the period and duration from the temporal . * This instance is immutable and unaffected by this method call . * @ param temporal the temporal object to adjust , not null * @ return an object of the same type with the adjustment made , not null * @ throws DateTimeException if unable to subtract * @ throws UnsupportedTemporalTypeException if the DAYS unit is not supported * @ throws ArithmeticException if numeric overflow occurs */ @ Override public Temporal subtractFrom ( Temporal temporal ) { } }
return temporal . minus ( period ) . minus ( duration ) ;
public class ApiOvhDedicatedCloud { /** * Hourly consumption associated with this host . * REST : GET / dedicatedCloud / { serviceName } / datacenter / { datacenterId } / host / { hostId } / hourlyConsumption * @ param serviceName [ required ] Domain of the service * @ param datacenterId [ required ] * @ param hostId [ required ] Id of the host */ public net . minidev . ovh . api . dedicatedcloud . host . OvhHourlyConsumption serviceName_datacenter_datacenterId_host_hostId_hourlyConsumption_GET ( String serviceName , Long datacenterId , Long hostId ) throws IOException { } }
String qPath = "/dedicatedCloud/{serviceName}/datacenter/{datacenterId}/host/{hostId}/hourlyConsumption" ; StringBuilder sb = path ( qPath , serviceName , datacenterId , hostId ) ; String resp = exec ( qPath , "GET" , sb . toString ( ) , null ) ; return convertTo ( resp , net . minidev . ovh . api . dedicatedcloud . host . OvhHourlyConsumption . class ) ;
public class Faker {

  /**
   * Fills a TextView with text produced by the given FakerTextComponent.
   *
   * @param view      target view; must be non-null and a TextView
   * @param component source of the random text; must be non-null
   */
  public void fillWithText(TextView view, FakerTextComponent component) {
    // Validate all inputs first so failures surface before any UI mutation.
    validateNotNullableView(view);
    validateIfIsATextView(view);
    validateNotNullableFakerComponent(component);
    view.setText(component.randomText());
  }
}
public class MP3FileID3Controller {

  /**
   * Returns the length (in seconds) of the playing time of this mp3. This
   * will not return an accurate value for VBR files.
   *
   * @return the playing time (in seconds) of this mp3
   */
  public long getPlayingTime() {
    // Audio payload size in bits: total file size in bits minus the ID3v2 tag.
    // NOTE(review): id3v2.getSize() is subtracted from a bit count — if it
    // returns bytes, this under-subtracts by a factor of 8; confirm its unit.
    long datasize = (mp3File.length() * 8) - id3v2.getSize();
    // Bit rate in bits per second (header reports kbit/s).
    long bps = head.getBitRate() * 1000;
    // NOTE(review): divides by zero if the header reports a 0 bit rate.
    return datasize / bps;
  }
}
public class StringUtility {

  /**
   * Gets the approximate size (where k = 1024) of a file in this format:
   * "1 byte", "xx bytes", "xxx bytes", "x.x k", "xx.x k", "xxx k",
   * "x.x M" ... up to "xxx... T". Values under 100 units get one truncated
   * decimal digit.
   *
   * @param size size in bytes
   * @return human-readable approximate size
   */
  public static String getApproximateSize(long size) {
    if (size == 1) {
      return "1 byte";
    }
    if (size < 1024) {
      return (int) size + " bytes";
    }
    // Walk up the unit ladder: k, M, G, T (T is the final, unbounded unit).
    final String[] unitNames = {" k", " M", " G", " T"};
    long unitSize = 1024;
    int unit = 0;
    while (unit < unitNames.length - 1 && size >= unitSize * 1024) {
      unitSize *= 1024;
      unit++;
    }
    long whole = size / unitSize;
    if (whole >= 100) {
      return whole + unitNames[unit];
    }
    // One truncated decimal digit for values under 100 units.
    int fraction = (int) (((size % unitSize) * 10) / unitSize);
    return whole + "." + fraction + unitNames[unit];
  }
}
public class SparkApp { /** * Creates an instance of the requested tool , using classpath scanning if necessary */ private static RDDProcessor newProcessor ( String streamProcType ) throws Exception { } }
streamProcType = streamProcType . trim ( ) ; if ( "twitter-to-solr" . equals ( streamProcType ) ) return new TwitterToSolrStreamProcessor ( ) ; else if ( "word-count" . equals ( streamProcType ) ) return new WordCount ( ) ; else if ( "term-vectors" . equals ( streamProcType ) ) return new ReadTermVectors ( ) ; else if ( "docfilter" . equals ( streamProcType ) ) return new DocumentFilteringStreamProcessor ( ) ; else if ( "hdfs-to-solr" . equals ( streamProcType ) ) return new HdfsToSolrRDDProcessor ( ) ; else if ( "logs2solr" . equals ( streamProcType ) ) return new Logs2SolrRDDProcessor ( ) ; else if ( "query-solr-benchmark" . equals ( streamProcType ) ) return new QueryBenchmark ( ) ; else if ( "kmeans-anomaly" . equals ( streamProcType ) ) return new KMeansAnomaly ( ) ; else if ( "eventsim" . equals ( streamProcType ) ) return new EventsimIndexer ( ) ; // If you add a built - in RDDProcessor to this class , add it here to avoid // classpath scanning for ( Class < RDDProcessor > next : findProcessorClassesInPackage ( "com.lucidworks.spark" ) ) { RDDProcessor streamProc = next . newInstance ( ) ; if ( streamProcType . equals ( streamProc . getName ( ) ) ) return streamProc ; } System . err . println ( "\n\n " + streamProcType + " not supported! Please check your command-line arguments and re-try. \n\n" ) ; System . exit ( 1 ) ; return null ; // won ' t get here
public class FSDataset {

  /**
   * {@inheritDoc}
   */
  public void updateBlock(int namespaceId, Block oldblock, Block newblock) throws IOException {
    // The update must target the same block id.
    if (oldblock.getBlockId() != newblock.getBlockId()) {
      throw new IOException("Cannot update oldblock (=" + oldblock + ") to newblock (=" + newblock + ").");
    }
    // Protect against a straggler updateblock call moving a block backwards
    // in time.
    boolean isValidUpdate =
        (newblock.getGenerationStamp() > oldblock.getGenerationStamp())
        || (newblock.getGenerationStamp() == oldblock.getGenerationStamp()
            && newblock.getNumBytes() == oldblock.getNumBytes());
    if (!isValidUpdate) {
      throw new IOException("Cannot update oldblock=" + oldblock
          + " to newblock=" + newblock + " since generation stamps must "
          + "increase, or else length must not change.");
    }
    // Retry until no other thread is concurrently writing this block; each
    // round interrupts and joins the conflicting writer threads.
    for (;;) {
      final List<Thread> threads = tryUpdateBlock(namespaceId, oldblock, newblock);
      if (threads == null) {
        DataNode.LOG.info("Updated Block: namespaceid: " + namespaceId + " oldBlock: " + oldblock + " newBlock: " + newblock);
        return;
      }
      DataNode.LOG.info("Waiting other threads to update block: namespaceid: " + namespaceId + " oldBlock: " + oldblock + " newBlock: " + newblock);
      interruptAndJoinThreads(threads);
    }
  }
}
public class BaseDrawerItem { /** * helper method to decide for the correct color * @ param ctx * @ return */ protected int getColor ( Context ctx ) { } }
int color ; if ( this . isEnabled ( ) ) { color = ColorHolder . color ( getTextColor ( ) , ctx , R . attr . material_drawer_primary_text , R . color . material_drawer_primary_text ) ; } else { color = ColorHolder . color ( getDisabledTextColor ( ) , ctx , R . attr . material_drawer_hint_text , R . color . material_drawer_hint_text ) ; } return color ;
public class DemoController { /** * Directions page allowing caching for 15 minutes , but requiring * re - revalidation before serving user a potentially stale resource . */ @ CacheControl ( policy = { } }
CachePolicy . MUST_REVALIDATE } , maxAge = 15 * 60 ) @ RequestMapping ( "/directions.do" ) public String handleProducDirectionsRequest ( Model model ) { model . addAttribute ( "pageName" , "Directions" ) ; return "page" ;
public class BundleUtils { /** * Returns a optional { @ link android . os . Parcelable } { @ link java . util . ArrayList } . In other words , returns the value mapped by key if it exists and is a { @ link android . os . Parcelable } { @ link java . util . ArrayList } . * The bundle argument is allowed to be { @ code null } . If the bundle is null , this method returns null . * @ param bundle a bundle . If the bundle is null , this method will return null . * @ param key a key for the value . * @ return a { @ link android . os . Parcelable } { @ link java . util . ArrayList } value if exists , null otherwise . * @ see android . os . Bundle # getParcelableArrayList ( String ) */ @ Nullable // Since Bundle # getParcelableArrayList returns concrete ArrayList type , so this method follows that implementation . public static < T extends Parcelable > ArrayList < T > optParcelableArrayList ( @ Nullable Bundle bundle , @ Nullable String key ) { } }
return optParcelableArrayList ( bundle , key , new ArrayList < T > ( ) ) ;
public class AbstractJaxbMojo {

    /**
     * Retrieves a File to the JAXB Episode (which is normally written during the XJC process).
     * Moreover, ensures that the parent directory of that File is created, to enable writing the File.
     *
     * @param episodeFileName {@code null} to indicate that the standard episode file name
     *                        ("sun-jaxb.episode") should be used, and otherwise a non-empty
     *                        name which should be used as the episode file name.
     * @return A non-null File where the JAXB episode file should be written.
     * @throws MojoExecutionException if the parent directory of the episode file could not be created.
     */
    protected File getEpisodeFile(final String episodeFileName) throws MojoExecutionException {

        // Get the execution ID, if one is available; it participates in the default file name.
        final String executionID = getExecution() != null && getExecution().getExecutionId() != null
                ? getExecution().getExecutionId()
                : null;
        // Caller-supplied name wins; otherwise derive from the execution ID or fall back
        // to the standard episode file name.
        final String effectiveEpisodeFileName = episodeFileName == null
                ? (executionID == null ? STANDARD_EPISODE_FILENAME : "episode_" + executionID)
                : episodeFileName;
        // Only an explicitly supplied empty string can reach this branch.
        if (effectiveEpisodeFileName.isEmpty()) {
            throw new MojoExecutionException("Cannot handle null or empty JAXB Episode filename. "
                    + "Check 'episodeFileName' configuration property.");
        }

        // Find or create the episode directory (<outputDirectory>/META-INF/JAXB).
        final Path episodePath;
        final File generatedJaxbEpisodeDirectory;
        try {
            final Path path = Paths.get(getOutputDirectory().getAbsolutePath(), "META-INF", "JAXB");
            // createDirectories is a no-op for already-existing directories.
            episodePath = java.nio.file.Files.createDirectories(path);
            generatedJaxbEpisodeDirectory = episodePath.toFile();

            if (getLog().isInfoEnabled()) {
                getLog().info("Created EpisodePath [" + episodePath.toString() + "]: "
                        + (generatedJaxbEpisodeDirectory.exists() && generatedJaxbEpisodeDirectory.isDirectory()));
            }
        } catch (IOException e) {
            throw new MojoExecutionException("Could not create output directory.", e);
        }

        // Double-check the directory actually exists on disk.
        if (!generatedJaxbEpisodeDirectory.exists() || !generatedJaxbEpisodeDirectory.isDirectory()) {
            throw new MojoExecutionException("Could not create directory [" + episodePath.toString() + "]");
        }

        // Is there already an episode file here? If so, append _1, _2, ... until free.
        File episodeFile = new File(generatedJaxbEpisodeDirectory, effectiveEpisodeFileName + ".xjb");
        final AtomicInteger index = new AtomicInteger(1);
        while (episodeFile.exists()) {
            episodeFile = new File(generatedJaxbEpisodeDirectory,
                    effectiveEpisodeFileName + "_" + index.getAndIncrement() + ".xjb");
        }

        // Add the (generated) outputDirectory to the Resources so the episode file is packaged.
        final Resource outputDirectoryResource = new Resource();
        outputDirectoryResource.setDirectory(getOutputDirectory().getAbsolutePath());
        outputDirectoryResource.setIncludes(Collections.singletonList("**/" + episodeFile.getName()));
        this.addResource(outputDirectoryResource);

        // All Done.
        return episodeFile;
    }
}
public class MovingAverage { /** * Updates statistics with ' sample ' * @ param sample a sample added to the series */ public void update ( double sample ) { } }
// Adjust min & max min = ( sample < min ? sample : min ) ; max = ( sample > max ? sample : max ) ; // Update average average += ( sample - average ) / ++ count ; _pwrSumAverage += ( sample * sample - _pwrSumAverage ) / count ; // Update variance stdDev = Math . sqrt ( ( _pwrSumAverage * count - count * average * average ) / ( count - 1 ) ) ; cv = 100 * ( stdDev / average ) ;
public class Http2ConnectionManager { /** * Borrow an HTTP / 2 client channel . * @ param http2SrcHandler Relevant http / 2 source handler where the source connection belongs to * @ param httpRoute the http route * @ return Http2ClientChannel */ public Http2ClientChannel borrowChannel ( Http2SourceHandler http2SrcHandler , HttpRoute httpRoute ) { } }
EventLoopPool eventLoopPool ; String key = generateKey ( httpRoute ) ; EventLoopPool . PerRouteConnectionPool perRouteConnectionPool ; if ( http2SrcHandler != null ) { eventLoopPool = getOrCreateEventLoopPool ( http2SrcHandler . getChannelHandlerContext ( ) . channel ( ) . eventLoop ( ) ) ; perRouteConnectionPool = getOrCreatePerRoutePool ( eventLoopPool , key ) ; } else { if ( eventLoops . isEmpty ( ) ) { return null ; } eventLoopPool = getOrCreateEventLoopPool ( eventLoops . peek ( ) ) ; perRouteConnectionPool = getOrCreatePerRoutePool ( eventLoopPool , key ) ; } Http2ClientChannel http2ClientChannel = null ; if ( perRouteConnectionPool != null ) { http2ClientChannel = perRouteConnectionPool . fetchTargetChannel ( ) ; } return http2ClientChannel ;
public class UtilAbstractAction { /** * Get the message object . If we haven ' t already got one and * getMessageObjAttrName returns non - null create one and implant it in * the session . * @ param request Needed to locate session * @ param messages Resources * @ return MessageEmitSvlt */ private MessageEmitSvlt getMessageObj ( final HttpServletRequest request , final MessageResources messages ) { } }
return ( MessageEmitSvlt ) StrutsUtil . getMessageObj ( getId ( ) , this , request , messages , getMessageObjAttrName ( ) , getErrorObjErrProp ( ) , clearMessages ( ) ) ;
public class Delimiters { /** * check that the delimiters are valid * @ throws FHIRException */ public void check ( ) throws FHIRException { } }
rule ( componentDelimiter != fieldDelimiter , "Delimiter Error: \"" + componentDelimiter + "\" is used for both CPComponent and CPField" ) ; rule ( subComponentDelimiter != fieldDelimiter , "Delimiter Error: \"" + subComponentDelimiter + "\" is used for both CPSubComponent and CPField" ) ; rule ( subComponentDelimiter != componentDelimiter , "Delimiter Error: \"" + subComponentDelimiter + "\" is used for both CPSubComponent and CPComponent" ) ; rule ( repetitionDelimiter != fieldDelimiter , "Delimiter Error: \"" + repetitionDelimiter + "\" is used for both Repetition and CPField" ) ; rule ( repetitionDelimiter != componentDelimiter , "Delimiter Error: \"" + repetitionDelimiter + "\" is used for both Repetition and CPComponent" ) ; rule ( repetitionDelimiter != subComponentDelimiter , "Delimiter Error: \"" + repetitionDelimiter + "\" is used for both Repetition and CPSubComponent" ) ; rule ( escapeCharacter != fieldDelimiter , "Delimiter Error: \"" + escapeCharacter + "\" is used for both Escape and CPField" ) ; rule ( escapeCharacter != componentDelimiter , "Delimiter Error: \"" + escapeCharacter + "\" is used for both Escape and CPComponent" ) ; rule ( escapeCharacter != subComponentDelimiter , "Delimiter Error: \"" + escapeCharacter + "\" is used for both Escape and CPSubComponent" ) ; rule ( escapeCharacter != repetitionDelimiter , "Delimiter Error: \"" + escapeCharacter + "\" is used for both Escape and Repetition" ) ;
public class SimpleDateFormat { /** * give the NumberFormat used for the field like ' y ' ( year ) and ' M ' ( year ) * @ param field the field the user wants * @ return override NumberFormat used for the field */ public NumberFormat getNumberFormat ( char field ) { } }
Character ovrField ; ovrField = Character . valueOf ( field ) ; if ( overrideMap != null && overrideMap . containsKey ( ovrField ) ) { String nsName = overrideMap . get ( ovrField ) . toString ( ) ; NumberFormat nf = numberFormatters . get ( nsName ) ; return nf ; } else { return numberFormat ; }
public class BshClassPath { /** * Split class name into package and name */ public static String [ ] splitClassname ( String classname ) { } }
classname = canonicalizeClassName ( classname ) ; int i = classname . lastIndexOf ( "." ) ; String classn , packn ; if ( i == - 1 ) { // top level class classn = classname ; packn = "<unpackaged>" ; } else { packn = classname . substring ( 0 , i ) ; classn = classname . substring ( i + 1 ) ; } return new String [ ] { packn , classn } ;
public class PArray { /** * ARRAY contains the values . * < pre > { @ code * new QContact ( ) * . phoneNumbers . contains ( " 4321 " ) * . findList ( ) ; * } < / pre > * @ param values The values that should be contained in the array */ @ SafeVarargs public final R contains ( E ... values ) { } }
expr ( ) . arrayContains ( _name , ( Object [ ] ) values ) ; return _root ;
public class JsonSet { /** * May be used to change the contains behavior of the set . If set it will use the provided strategy to compare * elements in the set instead of JsonElement . equals ( ) . * Important : this creates a new set in order to remove duplicates . * @ param strategy * an implementation of IdStrategy * @ return a new set with the elements of the old set , minus the duplicates . */ @ Deprecated // use withIdStrategy public JsonSet applyIdStrategy ( IdStrategy strategy ) { } }
JsonSet newSet = new JsonSet ( ) ; newSet . strategy = strategy ; for ( JsonElement e : this ) { newSet . add ( e ) ; } return newSet ;
public class ModelsImpl {

    /**
     * Gets information about the regex entity models.
     *
     * @param appId the application ID
     * @param versionId the version ID
     * @param getRegexEntityInfosOptionalParameter the object representing the optional parameters
     *        to be set before calling this API
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<List<RegexEntityExtractor>> getRegexEntityInfosAsync(UUID appId, String versionId, GetRegexEntityInfosOptionalParameter getRegexEntityInfosOptionalParameter, final ServiceCallback<List<RegexEntityExtractor>> serviceCallback) {
        // Thin async adapter: wraps the service-response observable and wires the callback.
        return ServiceFuture.fromResponse(getRegexEntityInfosWithServiceResponseAsync(appId, versionId, getRegexEntityInfosOptionalParameter), serviceCallback);
    }
}
public class MissionQuitCommandsImplementation {

    /** ----- ICommandHandler methods ----- */
    @Override
    public void install(MissionInit missionInit) {
        // Register a quit producer so this handler can end the mission on demand.
        IWantToQuit quitProducer = new IWantToQuit() {
            @Override
            public void prepare(MissionInit missionInit) {
            }

            @Override
            public void cleanup() {
            }

            @Override
            public String getOutcome() {
                return MissionQuitCommandsImplementation.this.quitcomParams.getQuitDescription();
            }

            @Override
            public boolean doIWantToQuit(MissionInit missionInit) {
                // The flag is flipped elsewhere when a quit command arrives.
                return MissionQuitCommandsImplementation.this.iWantToQuit;
            }
        };
        MissionBehaviour mb = parentBehaviour();
        mb.addQuitProducer(quitProducer);
    }
}
public class XpathUtils { /** * Evaluates the specified expression on the specified node and returns the * result as a String . * @ param expression * The Xpath expression to evaluate . * @ param node * The node on which to evaluate the expression . * @ return The result of evaluating the specified expression , or null if the * evaluation didn ' t return any result . * @ throws XPathExpressionException * If there are any problems evaluating the Xpath expression . */ private static String evaluateAsString ( String expression , Node node , XPath xpath ) throws XPathExpressionException { } }
if ( isEmpty ( node ) ) return null ; if ( ! expression . equals ( "." ) ) { /* * If the expression being evaluated doesn ' t select a node , we want * to return null to distinguish between cases where a node isn ' t * present ( which should be represented as null ) and when a node is * present , but empty ( which should be represented as the empty * string ) . * We skip this test if the expression is " . " since we ' ve already * checked that the node exists . */ if ( asNode ( expression , node , xpath ) == null ) return null ; } String s = xpath . evaluate ( expression , node ) ; return s . trim ( ) ;
public class CPDefinitionLocalServiceBaseImpl {

    /**
     * Performs a dynamic query on the database and returns the matching rows.
     *
     * @param dynamicQuery the dynamic query
     * @return the matching rows
     */
    @Override
    public <T> List<T> dynamicQuery(DynamicQuery dynamicQuery) {
        // Delegate straight to the CPDefinition persistence layer.
        return cpDefinitionPersistence.findWithDynamicQuery(dynamicQuery);
    }
}
public class WidgetFactory { /** * Add a builder to the factory . * @ param key * @ param builder */ public static void put ( String key , WidgetBuilder builder ) { } }
if ( null != key && null != builder ) { WIDGETBUILDERS . put ( key , builder ) ; }
public class CreativeWrapper {

    /**
     * Gets the ordering value for this CreativeWrapper.
     *
     * @return ordering If there are multiple wrappers for a creative, then {@code ordering}
     *         defines the order in which the HTML snippets are rendered.
     */
    public com.google.api.ads.admanager.axis.v201808.CreativeWrapperOrdering getOrdering() {
        // Plain accessor for the generated 'ordering' field.
        return ordering;
    }
}
public class SpiderTransaction { /** * Add the given column update ; the value may be null . */ private void addColumn ( String storeName , String rowKey , String colName , byte [ ] colValue ) { } }
Map < String , Map < String , byte [ ] > > rowMap = m_columnAdds . get ( storeName ) ; if ( rowMap == null ) { rowMap = new HashMap < > ( ) ; m_columnAdds . put ( storeName , rowMap ) ; } Map < String , byte [ ] > colMap = rowMap . get ( rowKey ) ; if ( colMap == null ) { colMap = new HashMap < > ( ) ; rowMap . put ( rowKey , colMap ) ; } byte [ ] oldValue = colMap . put ( colName , colValue ) ; if ( oldValue == null ) { m_totalUpdates ++ ; } else if ( ! Arrays . equals ( oldValue , colValue ) ) { m_logger . debug ( "Warning: duplicate column mutation with different value: " + "store={}, row={}, col={}, old={}, new={}" , new Object [ ] { storeName , rowKey , colName , oldValue , colValue } ) ; }
public class DamVideoMediaMarkupBuilder { /** * Get additional parameters to be set as & lt ; param & gt ; elements on html object element for flash player . * @ param media Media metadata * @ param dimension Dimension * @ return Set of key / value pairs */ protected Map < String , String > getAdditionalFlashPlayerParameters ( Media media , Dimension dimension ) { } }
Map < String , String > parameters = new HashMap < String , String > ( ) ; parameters . put ( "allowFullScreen" , "true" ) ; parameters . put ( "wmode" , "opaque" ) ; return parameters ;
public class RaftServiceManager {

    /**
     * Takes a snapshot of every registered service at the last applied index.
     * Each service's payload is length-prefixed in the shared snapshot buffer so
     * it can be skipped or replayed independently on restore.
     */
    Snapshot snapshot() {
        Snapshot snapshot = raft.getSnapshotStore().newTemporarySnapshot(raft.getLastApplied(), new WallClockTimestamp());
        try (SnapshotWriter writer = snapshot.openWriter()) {
            for (RaftServiceContext service : raft.getServices()) {
                // Remember where this service's frame starts so we can back-patch its length.
                writer.buffer().mark();
                // Reserve 4 bytes for the length prefix, then hand the service a sliced writer.
                SnapshotWriter serviceWriter = new SnapshotWriter(writer.buffer().writeInt(0).slice(), writer.snapshot());
                snapshotService(serviceWriter, service);
                int length = serviceWriter.buffer().position();
                // Rewind to the mark, write the real length, then skip past the payload.
                writer.buffer().reset().writeInt(length).skip(length);
            }
        } catch (Exception e) {
            // Discard the partial snapshot before propagating.
            snapshot.close();
            logger.error("Failed to snapshot services", e);
            throw e;
        }
        return snapshot;
    }
}
public class ArrayUtil {

    /* ----- [Concat] ----- */

    /**
     * Concatenates two arrays into a newly allocated array of the given component type.
     *
     * @param array1        the leading elements (must not be null)
     * @param array2        the trailing elements (must not be null)
     * @param componentType the component type of the returned array
     * @return a new array of length {@code array1.length + array2.length} holding the
     *         elements of {@code array1} followed by those of {@code array2}
     * @throws ArrayStoreException if an element is not assignable to {@code componentType}
     */
    @SuppressWarnings({ "unchecked" })
    public static <T> T[] concat(Object[] array1, Object[] array2, Class<T> componentType) {
        // Fix: C-style array declarations (`Object array2[]`) replaced with Java style.
        // Array.newInstance creates a real T[] at runtime, so the final cast is safe;
        // System.arraycopy enforces element compatibility via ArrayStoreException.
        Object[] merged = (Object[]) Array.newInstance(componentType, array1.length + array2.length);
        System.arraycopy(array1, 0, merged, 0, array1.length);
        System.arraycopy(array2, 0, merged, array1.length, array2.length);
        return (T[]) merged;
    }
}
public class AttributesImpl {

    /**
     * Set the value of a specific attribute.
     *
     * @param index The index of the attribute (zero-based).
     * @param value The attribute's value.
     * @exception java.lang.ArrayIndexOutOfBoundsException When the supplied index does
     *            not point to an attribute in the list.
     */
    public void setValue(int index, String value) {
        if (index >= 0 && index < length) {
            // Attributes live in a flat array, 5 slots per attribute; slot 4 is the value.
            // NOTE(review): slots 0-3 presumably hold uri/localName/qName/type as in the
            // SAX AttributesImpl layout — confirm against the rest of this class.
            data[index * 5 + 4] = value;
        } else {
            // Delegates the exception wording to the shared out-of-range handler.
            badIndex(index);
        }
    }
}
public class CommandExecutor { /** * Destroy */ protected void destroy ( ) { } }
String str = mProcess . toString ( ) ; try { int i = str . indexOf ( "=" ) + 1 ; int j = str . indexOf ( "]" ) ; str = str . substring ( i , j ) ; int pid = Integer . parseInt ( str ) ; try { android . os . Process . killProcess ( pid ) ; } catch ( Exception e ) { try { mProcess . destroy ( ) ; } catch ( Exception ex ) { ex . printStackTrace ( ) ; } } } catch ( Exception e ) { e . printStackTrace ( ) ; }
public class ScreenshotComparator {

    /**
     * Compares screenshots between the base and target evidence directories.
     *
     * @param baseDir      the baseline evidence directory
     * @param targetDir    the evidence directory to compare against the baseline
     * @param evidenceFile the evidence file whose screenshots are compared
     * @return true if every screenshot of the target evidence matches the baseline
     */
    public boolean compare(EvidenceDir baseDir, EvidenceDir targetDir, File evidenceFile) {
        LOG.info("screenshot.compare", new Object[] { evidenceFile, baseDir.getDir(), targetDir.getDir() });
        if (!baseDir.exists()) {
            // Without a baseline there is nothing to compare against.
            LOG.info("base.dir.none", baseDir.getDir().getPath());
            return false;
        }
        Map<String, File> baseSsMap = baseDir.getScreenshotFilesAsMap(evidenceFile.getName());
        boolean match = true;
        for (Entry<String, File> targetEntry : targetDir.getScreenshotFilesAsMap(evidenceFile.getName()).entrySet()) {
            // Skip screenshots that have a masked counterpart in the baseline —
            // NOTE(review): presumably those are compared elsewhere via the mask; confirm.
            if (baseSsMap.get(EvidenceDir.toMaskSsName(targetEntry.getKey())) != null) {
                continue;
            }
            File baseSs = baseSsMap.get(targetEntry.getKey());
            File targetSs = targetEntry.getValue();
            if (baseSs == null) {
                // Target has a screenshot the baseline lacks: count as a mismatch but keep going.
                LOG.warn("base.screenshot.none", targetEntry.getKey());
                match = false;
                continue;
            }
            // Accumulate so one mismatch flips the overall result while still comparing the rest.
            match &= compareOneScreenshot(baseSs, targetSs, 10, 10);
        }
        return match;
    }
}
public class RVRendererAdapter { /** * Provides a ready to use diff update for our adapter based on the implementation of the * standard equals method from Object . * @ param newList to refresh our content */ public void diffUpdate ( List < T > newList ) { } }
if ( getCollection ( ) . size ( ) == 0 ) { addAll ( newList ) ; notifyDataSetChanged ( ) ; } else { DiffCallback diffCallback = new DiffCallback ( collection , newList ) ; DiffUtil . DiffResult diffResult = DiffUtil . calculateDiff ( diffCallback ) ; clear ( ) ; addAll ( newList ) ; diffResult . dispatchUpdatesTo ( this ) ; }
public class PuiBody {

    /**
     * Builds a nested JSON-like structure (maps of maps) in {@code model} from a
     * dotted JSF key such as {@code "a.b.c"}. Numeric values are stored as-is,
     * any other non-null value is stored as its {@code toString()}, and null
     * stays null.
     *
     * @param model the target model; intermediate maps are created on demand
     * @param key   the dotted key path
     * @param value the value to store at the path's leaf, may be null
     */
    @SuppressWarnings("unchecked")
    public void addJSFAttrbituteToAngularModel(Map<String, Object> model, String key, Object value) {
        String[] keys = key.split("\\.");
        Map<String, Object> currentMap = model;
        // Walk/create intermediate maps for all but the last key segment.
        for (int i = 0; i < keys.length - 1; i++) {
            if (!currentMap.containsKey(keys[i])) {
                currentMap.put(keys[i], new HashMap<String, Object>());
            }
            currentMap = (Map<String, Object>) currentMap.get(keys[i]);
        }
        Object storedValue = null;
        if (value != null) {
            // Fix: getClass() can never return a primitive class, so the original
            // comparisons against int.class/long.class/... were dead code;
            // Number covers every boxed numeric type.
            storedValue = value instanceof Number ? value : value.toString();
        }
        currentMap.put(keys[keys.length - 1], storedValue);
    }
}
public class BoyerMoore {

    /**
     * Computes the values of <code>suffix</code>, an auxiliary array that is the
     * backwards version of the KMP failure function.
     * <br>
     * suffix[i] = the smallest j &gt; i s.t. p[j..m-1] is a prefix of p[i..m-1];
     * if there is no such j, suffix[i] = m, i.e.<br>
     * p[suffix[i]..m-1] is the longest prefix of p[i..m-1], if suffix[i] &lt; m.
     * <br>
     * Runs in O(m) overall: j only moves right via the while loop as far as it
     * previously moved left.
     */
    private void computeSuffix() {
        // Base case: the empty suffix after the last character.
        suffix[suffix.length - 1] = suffix.length;
        int j = suffix.length - 1;
        // Invariant: p[j+1..m-1] is the longest proper prefix-match for p[i+1..m-1].
        for (int i = suffix.length - 2; i >= 0; i--) {
            // Shrink the match (following previously computed suffix links) until
            // the characters before both matched regions agree.
            while (j < suffix.length - 1 && pattern.charAt(j) != pattern.charAt(i)) {
                j = suffix[j + 1] - 1;
            }
            if (pattern.charAt(j) == pattern.charAt(i)) {
                // Extend the match by one character to the left.
                j--;
            }
            suffix[i] = j + 1;
        }
    }
}
public class SwaggerFeatureBinder { /** * Implement to provide binding definitions using the exposed binding * methods . */ @ Override protected void configure ( ) { } }
bind ( SwaggerServletContext . class ) . to ( ServletContext . class ) . in ( Singleton . class ) ; bind ( SwaggerServletConfig . class ) . to ( ServletConfig . class ) . in ( Singleton . class ) ;
public class TriangularDoubleDistribution { /** * Returns a triangularly distributed random number between { @ code low } ( inclusive ) and { @ code high } ( exclusive ) , where values * around { @ code mode } are more likely . * @ param low the lower limit * @ param high the upper limit * @ param mode the point around which the values are more likely */ static double randomTriangular ( double low , double high , double mode ) { } }
double u = MathUtils . random . nextDouble ( ) ; double d = high - low ; if ( u <= ( mode - low ) / d ) return low + Math . sqrt ( u * d * ( mode - low ) ) ; return high - Math . sqrt ( ( 1 - u ) * d * ( high - mode ) ) ;
public class CTCImpl {

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    // Generated EMF code: reports whether the given feature deviates from its default.
    @Override
    public boolean eIsSet(int featureID) {
        switch (featureID) {
        case AfplibPackage.CTC__CON_DATA:
            // "Set" means the current value differs from the (possibly null) default.
            return CON_DATA_EDEFAULT == null ? conData != null : !CON_DATA_EDEFAULT.equals(conData);
        }
        // All other features are handled by the superclass.
        return super.eIsSet(featureID);
    }
}
public class AbstractGenericRowMapper { /** * Extract attribute values from a BO for corresponding DB table columns * @ param bo * @ return */ public Object [ ] valuesForColumns ( T bo , String ... columns ) { } }
Map < String , ColAttrMapping > columnAttributeMappings = getColumnAttributeMappings ( ) ; Object [ ] result = new Object [ columns . length ] ; for ( int i = 0 ; i < columns . length ; i ++ ) { ColAttrMapping colAttrMapping = columnAttributeMappings . get ( columns [ i ] ) ; try { result [ i ] = colAttrMapping != null ? colAttrMapping . extractAttrValue ( bo ) : null ; } catch ( Exception e ) { throw e instanceof RuntimeException ? ( RuntimeException ) e : new RuntimeException ( e ) ; } } return result ;
public class JerseyTags { /** * Creates a { @ code exception } tag based on the { @ link Class # getSimpleName ( ) simple * name } of the class of the given { @ code exception } . * @ param event the request event * @ return the exception tag derived from the exception */ public static Tag exception ( RequestEvent event ) { } }
Throwable exception = event . getException ( ) ; if ( exception == null ) { return EXCEPTION_NONE ; } ContainerResponse response = event . getContainerResponse ( ) ; if ( response != null ) { int status = response . getStatus ( ) ; if ( status == 404 || isRedirection ( status ) ) { return EXCEPTION_NONE ; } } if ( exception . getCause ( ) != null ) { exception = exception . getCause ( ) ; } String simpleName = exception . getClass ( ) . getSimpleName ( ) ; return Tag . of ( "exception" , StringUtils . isNotEmpty ( simpleName ) ? simpleName : exception . getClass ( ) . getName ( ) ) ;
public class NameUtils {

    /**
     * Removes every character that is not a valid Java identifier part from {@code name}.
     *
     * <p>Fix: iterates by code point so supplementary (non-BMP) characters are either kept
     * whole or dropped whole; the original char-by-char loop tested the full code point at
     * a high surrogate but appended only one {@code char}, splitting surrogate pairs.
     *
     * @param name a {@link java.lang.String} object; must not be null
     * @return a {@link java.lang.String} containing only Java-identifier-part characters
     */
    public static String removeNonJavaIdentifierCharacters(String name) {
        StringBuilder javaIdentifier = new StringBuilder(name.length());
        int index = 0;
        while (index < name.length()) {
            int codePoint = name.codePointAt(index);
            if (Character.isJavaIdentifierPart(codePoint)) {
                javaIdentifier.appendCodePoint(codePoint);
            }
            // Advance by 1 for BMP characters, 2 for surrogate pairs.
            index += Character.charCount(codePoint);
        }
        return javaIdentifier.toString();
    }
}
public class DocBookBuilder { /** * Builds a DocBook Formatted Book using a Content Specification to define the structure and contents of the book . * @ param contentSpec The content specification to build from . * @ param requester The user who requested the build . * @ param buildingOptions The options to be used when building . * @ param zanataDetails The Zanata server details to be used when populating links * @ return Returns a mapping of file names / locations to files . This HashMap can be used to build a ZIP archive . * @ throws BuilderCreationException Thrown if the builder is unable to start due to incorrect passed variables . * @ throws BuildProcessingException Any build issue that should not occur under normal circumstances . Ie a Template can ' t be * converted to a DOM Document . */ public HashMap < String , byte [ ] > buildTranslatedBook ( final ContentSpec contentSpec , final String requester , final DocBookBuildingOptions buildingOptions , final ZanataDetails zanataDetails ) throws BuilderCreationException , BuildProcessingException { } }
return buildTranslatedBook ( contentSpec , requester , buildingOptions , new HashMap < String , byte [ ] > ( ) , zanataDetails ) ;
public class DatabaseMetaData {

    /**
     * {@inheritDoc}
     */
    public ResultSet getSchemas(final String catalog, final String schemaPattern) throws SQLException {
        // Builds a two-column (TABLE_SCHEM, TABLE_CATALOG) result set; the catalog and
        // schemaPattern filters are ignored.
        // NOTE(review): no rows are appended here, so this presumably yields an empty
        // result set — confirm RowLists.rowList2 starts empty.
        return RowLists.rowList2(String.class, String.class)
                .withLabel(1, "TABLE_SCHEM")
                .withLabel(2, "TABLE_CATALOG")
                .resultSet();
    }
}
public class AbstractGlobPatternCompiler { /** * This method converts the given { @ code pattern } to a { @ link Pattern # compile ( String ) regex - pattern } . * @ param pattern is the pattern to convert . * @ return the converted regex - pattern or { @ code null } if { @ link # isRequireWildcard ( ) } is { @ code true } and the given * { @ code pattern } contains no wildcard ( ' * ' or ' ? ' ) . */ protected String convertPattern ( String pattern ) { } }
boolean wildcard = false ; char [ ] chars = pattern . toCharArray ( ) ; StringBuilder buffer = new StringBuilder ( chars . length + 8 ) ; int i = 0 ; while ( i < chars . length ) { char c = chars [ i ] ; if ( ( c == '*' ) || ( c == '?' ) ) { wildcard = true ; } int next = process ( chars , i , buffer ) ; assert ( next > i ) ; i = next ; } if ( isRequireWildcard ( ) && ! wildcard ) { return null ; } else { return buffer . toString ( ) ; }
public class AsciiArtRenderer { /** * < b > Example : < / b > * { @ code / FOO / } */ @ Override public void renderAnyCase ( PositionedText target , double x , double y , String text ) { } }
target . add ( x , y , "/" + text + "/" ) ;
public class LIBORMarketModelStandard {

    /*
     * (non-Javadoc)
     * @see net.finmath.montecarlo.interestrate.LIBORMarketModel#getIntegratedLIBORCovariance()
     */
    // Lazily computes, caches, and returns the time-integrated covariance
    // integratedLIBORCovariance[t][i][j] = sum over s <= t of
    // factorLoading_i(s) . factorLoading_j(s) * dt(s), for j >= i.
    @Override
    public synchronized double[][][] getIntegratedLIBORCovariance() {
        // Cached after the first (synchronized) computation.
        if (integratedLIBORCovariance != null) {
            return integratedLIBORCovariance;
        }

        TimeDiscretization liborPeriodDiscretization = getLiborPeriodDiscretization();
        TimeDiscretization simulationTimeDiscretization = getTimeDiscretization();

        integratedLIBORCovariance = new double[simulationTimeDiscretization.getNumberOfTimeSteps()][liborPeriodDiscretization.getNumberOfTimeSteps()][liborPeriodDiscretization.getNumberOfTimeSteps()];
        for (int componentIndex1 = 0; componentIndex1 < liborPeriodDiscretization.getNumberOfTimeSteps(); componentIndex1++) {
            // Sum the libor cross terms (use symmetry): only j >= i is computed.
            // NOTE(review): the mirror entries [t][j][i] are left at 0.0 — confirm
            // callers exploit the upper-triangular layout.
            for (int componentIndex2 = componentIndex1; componentIndex2 < liborPeriodDiscretization.getNumberOfTimeSteps(); componentIndex2++) {
                double integratedLIBORCovarianceValue = 0.0;
                for (int timeIndex = 0; timeIndex < simulationTimeDiscretization.getNumberOfTimeSteps(); timeIndex++) {
                    double dt = getTime(timeIndex + 1) - getTime(timeIndex);
                    RandomVariable[] factorLoadingOfComponent1 = getCovarianceModel().getFactorLoading(timeIndex, componentIndex1, null);
                    RandomVariable[] factorLoadingOfComponent2 = getCovarianceModel().getFactorLoading(timeIndex, componentIndex2, null);
                    // Inner product of the two factor loadings, scaled by the time step.
                    for (int factorIndex = 0; factorIndex < getNumberOfFactors(); factorIndex++) {
                        integratedLIBORCovarianceValue += factorLoadingOfComponent1[factorIndex].get(0) * factorLoadingOfComponent2[factorIndex].get(0) * dt;
                    }
                    // Running (cumulative) integral: each time slice stores the sum up to that time.
                    integratedLIBORCovariance[timeIndex][componentIndex1][componentIndex2] = integratedLIBORCovarianceValue;
                }
            }
        }
        return integratedLIBORCovariance;
    }
}
public class HTTPConduit {

    /**
     * This method extracts the value of the "Location" HTTP response header
     * (matched case-insensitively).
     *
     * @param headers The HTTP response headers.
     * @return The value of the "Location" header, null if non-existent.
     * @throws MalformedURLException declared for caller compatibility; not thrown here.
     */
    protected String extractLocation(Map<String, List<String>> headers) throws MalformedURLException {
        for (Map.Entry<String, List<String>> head : headers.entrySet()) {
            if ("Location".equalsIgnoreCase(head.getKey())) {
                List<String> locs = head.getValue();
                // Fix: the original wrapped locs.get(0) in a redundant
                // "if (x != null) return x; else return null;" branch.
                if (locs != null && !locs.isEmpty()) {
                    return locs.get(0);
                }
                // Keep scanning: another case-variant key might carry a value,
                // matching the original fall-through behavior.
            }
        }
        return null;
    }
}
public class ServerImpl {

    /**
     * Gets or creates a role from its JSON representation.
     *
     * @param data The json data of the role; must contain an "id" field.
     * @return The existing role with that id, or a newly created and registered one.
     */
    public Role getOrCreateRole(JsonNode data) {
        long id = Long.parseLong(data.get("id").asText());
        // Synchronize so two threads can't both miss the lookup and register duplicate roles.
        synchronized (this) {
            return getRoleById(id).orElseGet(() -> {
                Role role = new RoleImpl(api, this, data);
                this.roles.put(role.getId(), role);
                return role;
            });
        }
    }
}
public class AbstractBaseParams {

    /**
     * Read a boolean string value.<br/>
     * Values allowed are (case insensitive):
     * <ul>
     * <li>true</li>
     * <li>yes</li>
     * <li>1</li>
     * </ul>
     *
     * @param parameter the boolean string value, may be null
     * @return true for "true"/"yes"/"1" in any case, false otherwise (including null)
     */
    protected boolean readBoolean(final String parameter) {
        // Fix: in the original, `parameter != null &&` bound only to the first
        // comparison (operator precedence) and silently relied on
        // equalsIgnoreCase(null) returning false for the other two.
        if (parameter == null) {
            return false;
        }
        return "true".equalsIgnoreCase(parameter)
                || "yes".equalsIgnoreCase(parameter)
                || "1".equalsIgnoreCase(parameter);
    }
}
public class DescribeVpcPeeringConnectionsResult {

    /**
     * Information about the VPC peering connections.
     *
     * @param vpcPeeringConnections Information about the VPC peering connections;
     *        passing null clears the current list.
     */
    public void setVpcPeeringConnections(java.util.Collection<VpcPeeringConnection> vpcPeeringConnections) {
        if (vpcPeeringConnections == null) {
            this.vpcPeeringConnections = null;
            return;
        }
        // Defensive copy into the SDK's internal list type.
        this.vpcPeeringConnections = new com.amazonaws.internal.SdkInternalList<VpcPeeringConnection>(vpcPeeringConnections);
    }
}
public class ColumnText {

    /**
     * Finds the intersection between the <CODE>yLine</CODE> and the column. It will
     * set the <CODE>lineStatus</CODE> appropriately.
     *
     * @param wall the column to intersect; each entry is a float[4] — evaluated as
     *             { yMin, yMax, slope, intercept }. NOTE(review): the slope/intercept
     *             roles are inferred from the r[2] * yLine + r[3] evaluation below;
     *             confirm against the code that builds the wall.
     * @return the x coordinate of the intersection, or 0 when off-limits / no segment matches
     */
    protected float findLimitsPoint(ArrayList wall) {
        lineStatus = LINE_STATUS_OK;
        // yLine outside the column's overall vertical extent.
        if (yLine < minY || yLine > maxY) {
            lineStatus = LINE_STATUS_OFFLIMITS;
            return 0;
        }
        for (int k = 0; k < wall.size(); ++k) {
            float r[] = (float[]) wall.get(k);
            // Skip segments whose vertical span does not contain yLine.
            if (yLine < r[0] || yLine > r[1])
                continue;
            // x = slope * y + intercept for the first matching segment.
            return r[2] * yLine + r[3];
        }
        // In range overall, but no individual segment covers yLine.
        lineStatus = LINE_STATUS_NOLINE;
        return 0;
    }
}
public class UserPoolType {

    /**
     * A container with the schema attributes of a user pool.
     * <b>NOTE:</b> This method appends the values to the existing list (if any). Use
     * {@link #setSchemaAttributes(java.util.Collection)} or
     * {@link #withSchemaAttributes(java.util.Collection)} if you want to override the
     * existing values.
     *
     * @param schemaAttributes A container with the schema attributes of a user pool.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public UserPoolType withSchemaAttributes(SchemaAttributeType... schemaAttributes) {
        if (this.schemaAttributes == null) {
            // Presize to the number of values being appended.
            setSchemaAttributes(new java.util.ArrayList<SchemaAttributeType>(schemaAttributes.length));
        }
        for (SchemaAttributeType ele : schemaAttributes) {
            this.schemaAttributes.add(ele);
        }
        return this;
    }
}
public class CallSimulator { /** * Debug statement to help users verify there are no lost or delayed events . */ void printSummary ( ) { } }
System . out . printf ( "There are %d agents outstanding and %d phones. %d entries waiting to go.\n" , agentsAvailable . size ( ) , phoneNumbersAvailable . size ( ) , delayedEvents . size ( ) ) ;
public class MMultiLineTableCellRenderer {

    /**
     * {@inheritDoc}
     */
    @Override
    public Component getTableCellRendererComponent(final JTable table, final Object value,
            final boolean isSelected, final boolean hasFocus, final int row, final int column) {
        // Overridden so we can call adjustRowHeight after the default rendering.
        final Component renderer =
                super.getTableCellRendererComponent(table, value, isSelected, hasFocus, row, column);
        adjustRowHeight(table, renderer, row);
        return renderer;
    }
}
public class WSConnectionRequestInfoImpl { /** * Change the value of the result set holdability property in the connection request information . * @ param holdability The new value . * @ throws IllegalArgumentException if the key is incorrect . * @ throws SQLException if the connection request information is not editable . */ public void setHoldability ( int holdability ) throws SQLException { } }
if ( ! changable ) { throw new SQLException ( AdapterUtil . getNLSMessage ( "WS_INTERNAL_ERROR" , new Object [ ] { "ConnectionRequestInfo cannot be modified, doing so may result in corruption: holdability, cri" , holdability , this } ) ) ; } if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) Tr . debug ( this , tc , "Setting holdability on the CRI to: " + holdability ) ; ivHoldability = holdability ;
public class SecurityViolationException { /** * Process security violation exception * @ param req - Http servlet request object * @ param res - Http servlet response object * @ throws IOException if error , otherwise redirects to appropriate error or login page */ public void processException ( HttpServletRequest req , HttpServletResponse res ) throws IOException { } }
if ( redirectURL != null ) { res . sendRedirect ( redirectURL ) ; return ; } if ( message == null ) { res . sendError ( statusCode ) ; } else { res . sendError ( statusCode , message ) ; }
public class Ifc2x3tc1FactoryImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ public IfcCurrencyEnum createIfcCurrencyEnumFromString ( EDataType eDataType , String initialValue ) { } }
IfcCurrencyEnum result = IfcCurrencyEnum . get ( initialValue ) ; if ( result == null ) throw new IllegalArgumentException ( "The value '" + initialValue + "' is not a valid enumerator of '" + eDataType . getName ( ) + "'" ) ; return result ;
public class DeleteComputeEnvironmentRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( DeleteComputeEnvironmentRequest deleteComputeEnvironmentRequest , ProtocolMarshaller protocolMarshaller ) { } }
if ( deleteComputeEnvironmentRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( deleteComputeEnvironmentRequest . getComputeEnvironment ( ) , COMPUTEENVIRONMENT_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class RuleBasedCollator {
    /**
     * {@inheritDoc}
     *
     * @param order the reordering codes to apply to this collator; if this is null or an empty
     *              array then this clears any existing reordering
     * @throws IllegalArgumentException if the reordering codes are malformed in any way
     *         (e.g. duplicates, multiple reset codes, overlapping equivalent scripts)
     * @see #getReorderCodes
     * @see Collator#getEquivalentReorderCodes
     * @see Collator.ReorderCodes
     * @see UScript
     */
    @Override
    public void setReorderCodes(int... order) {
        checkNotFrozen();
        int length = (order != null) ? order.length : 0;
        // A lone ReorderCodes.NONE is treated the same as clearing the reordering.
        if (length == 1 && order[0] == ReorderCodes.NONE) {
            length = 0;
        }
        // Fast path: no change when the requested ordering already matches the current one.
        if (length == 0 ?
                settings.readOnly().reorderCodes.length == 0 :
                Arrays.equals(order, settings.readOnly().reorderCodes)) {
            return;
        }
        CollationSettings defaultSettings = getDefaultSettings();
        // A lone ReorderCodes.DEFAULT restores the default settings' reordering.
        if (length == 1 && order[0] == Collator.ReorderCodes.DEFAULT) {
            if (settings.readOnly() != defaultSettings) {
                CollationSettings ownedSettings = getOwnedSettings();
                ownedSettings.copyReorderingFrom(defaultSettings);
                setFastLatinOptions(ownedSettings);
            }
            return;
        }
        CollationSettings ownedSettings = getOwnedSettings();
        if (length == 0) {
            ownedSettings.resetReordering();
        } else {
            // Clone defensively so later mutation by the caller cannot affect the settings.
            ownedSettings.setReordering(data, order.clone());
        }
        setFastLatinOptions(ownedSettings);
    }
}
public class KeyManagementServiceClient { /** * Encrypts data , so that it can only be recovered by a call to * [ Decrypt ] [ google . cloud . kms . v1 . KeyManagementService . Decrypt ] . The * [ CryptoKey . purpose ] [ google . cloud . kms . v1 . CryptoKey . purpose ] must be * [ ENCRYPT _ DECRYPT ] [ google . cloud . kms . v1 . CryptoKey . CryptoKeyPurpose . ENCRYPT _ DECRYPT ] . * < p > Sample code : * < pre > < code > * try ( KeyManagementServiceClient keyManagementServiceClient = KeyManagementServiceClient . create ( ) ) { * CryptoKeyPathName name = CryptoKeyPathName . of ( " [ PROJECT ] " , " [ LOCATION ] " , " [ KEY _ RING ] " , " [ CRYPTO _ KEY _ PATH ] " ) ; * ByteString plaintext = ByteString . copyFromUtf8 ( " " ) ; * EncryptResponse response = keyManagementServiceClient . encrypt ( name , plaintext ) ; * < / code > < / pre > * @ param name Required . The resource name of the [ CryptoKey ] [ google . cloud . kms . v1 . CryptoKey ] or * [ CryptoKeyVersion ] [ google . cloud . kms . v1 . CryptoKeyVersion ] to use for encryption . * < p > If a [ CryptoKey ] [ google . cloud . kms . v1 . CryptoKey ] is specified , the server will use its * [ primary version ] [ google . cloud . kms . v1 . CryptoKey . primary ] . * @ param plaintext Required . The data to encrypt . Must be no larger than 64KiB . * < p > The maximum size depends on the key version ' s * [ protection _ level ] [ google . cloud . kms . v1 . CryptoKeyVersionTemplate . protection _ level ] . For * [ SOFTWARE ] [ google . cloud . kms . v1 . ProtectionLevel . SOFTWARE ] keys , the plaintext must be no * larger than 64KiB . For [ HSM ] [ google . cloud . kms . v1 . ProtectionLevel . HSM ] keys , the combined * length of the plaintext and additional _ authenticated _ data fields must be no larger than * 8KiB . * @ throws com . google . api . gax . rpc . ApiException if the remote call fails */ public final EncryptResponse encrypt ( CryptoKeyPathName name , ByteString plaintext ) { } }
EncryptRequest request = EncryptRequest . newBuilder ( ) . setName ( name == null ? null : name . toString ( ) ) . setPlaintext ( plaintext ) . build ( ) ; return encrypt ( request ) ;
public class ClassPath { /** * Returns all top level classes loadable from the current class path . */ public ImmutableSet < ClassInfo > getTopLevelClasses ( ) { } }
return FluentIterable . from ( resources ) . filter ( ClassInfo . class ) . filter ( IS_TOP_LEVEL ) . toImmutableSet ( ) ;
public class SqlRepositoryBuilder { /** * This function sets a schema to be used for creation and updating tables . When passing a schema name make sure * that the schema has been created in the database before running JaVers . If schemaName is null or empty , the default * schema is used instead . * @ since 2.4 */ public SqlRepositoryBuilder withSchema ( String schemaName ) { } }
if ( schemaName != null && ! schemaName . isEmpty ( ) ) { this . schemaName = schemaName ; } return this ;
public class AtmosRequestMappingHandlerMapping {
    /**
     * Registers a native-function handler for the given URL.
     *
     * @param url the request URL pattern to map
     * @param handlerDefinition definition holding the native function and its HTTP methods
     * @param handlerClassType the handler class used to resolve the handler instance
     * @throws SecurityException if reflective access to the handler method is denied
     * @throws NoSuchMethodException if the expected handler method is missing
     */
    private void registerNativeFunctionHandler(String url, HandlerDefinition handlerDefinition,
            Class<? extends AbstractNativeFunctionHandler> handlerClassType)
            throws SecurityException, NoSuchMethodException {
        NativeFunction atmosFunction = (NativeFunction) handlerDefinition.getHandler();
        Object atmosHandler = getHandler(atmosFunction, handlerClassType);
        // The handler may be a bean name (String) or an actual handler instance.
        Class<?> handlerType = (atmosHandler instanceof String) ? getApplicationContext().getType((String) atmosHandler) : atmosHandler.getClass();
        // Model-and-view handlers additionally need the view name mapped to this URL.
        if (atmosHandler instanceof NativeFunctionModelAndViewHandler) {
            ((NativeFunctionModelAndViewHandler) atmosHandler).setViewName(handlerMappingInfoStorage.getViewName(url));
        }
        final Class<?> userType = ClassUtils.getUserClass(handlerType);
        // Every native-function handler exposes the same handle(request, response) method.
        Method method = userType.getMethod(AbstractNativeFunctionHandler.HANDLER_METHOD_NAME, HttpServletRequest.class, HttpServletResponse.class);
        RequestMethodsRequestCondition requestMethodsRequestCondition = getRequestMethodsRequestCondition(handlerDefinition.getHttpMethods());
        // Only pattern and HTTP-method conditions are used; the remaining conditions stay null
        // (a ProducesRequestCondition("application/xml") was considered but left disabled).
        RequestMappingInfo mapping = new RequestMappingInfo(new PatternsRequestCondition(url), requestMethodsRequestCondition, null, null, null, null, null);
        registerHandlerMethod(atmosHandler, method, mapping);
    }
}
public class QueryService { /** * Helper function to return a specification for filtering on a { @ link String } field , where equality , containment , * and null / non - null conditions are supported . * @ param filter the individual attribute filter coming from the frontend . * @ param field the JPA static metamodel representing the field . * @ return a Specification */ protected Specification < ENTITY > buildStringSpecification ( StringFilter filter , SingularAttribute < ? super ENTITY , String > field ) { } }
return buildSpecification ( filter , root -> root . get ( field ) ) ;
public class RestfulServerUtils { /** * Returns null if the request doesn ' t express that it wants FHIR . If it expresses that it wants XML and JSON * equally , returns thePrefer . */ public static ResponseEncoding determineResponseEncodingNoDefault ( RequestDetails theReq , EncodingEnum thePrefer ) { } }
return determineResponseEncodingNoDefault ( theReq , thePrefer , null ) ;
public class SearchParamExtractorDstu3 {
    /**
     * Extracts QUANTITY-type indexed search parameters from the given resource.
     *
     * @see ca.uhn.fhir.jpa.dao.ISearchParamExtractor#extractSearchParamQuantity(ca.uhn.fhir.jpa.entity.ResourceTable, ca.uhn.fhir.model.api.IBaseResource)
     */
    @Override
    public Set<ResourceIndexedSearchParamQuantity> extractSearchParamQuantity(ResourceTable theEntity, IBaseResource theResource) {
        HashSet<ResourceIndexedSearchParamQuantity> retVal = new HashSet<ResourceIndexedSearchParamQuantity>();
        Collection<RuntimeSearchParam> searchParams = getSearchParams(theResource);
        for (RuntimeSearchParam nextSpDef : searchParams) {
            // Only QUANTITY search parameters with a usable path are indexed here.
            if (nextSpDef.getParamType() != RestSearchParameterTypeEnum.QUANTITY) {
                continue;
            }
            String nextPath = nextSpDef.getPath();
            if (isBlank(nextPath)) {
                continue;
            }
            for (Object nextObject : extractValues(nextPath, theResource)) {
                if (nextObject == null || ((IBase) nextObject).isEmpty()) {
                    continue;
                }
                String resourceName = nextSpDef.getName();
                boolean multiType = false;
                // A "[x]" suffix marks a choice element that may hold several datatypes,
                // so unexpected types are skipped instead of treated as errors.
                if (nextPath.endsWith("[x]")) {
                    multiType = true;
                }
                if (nextObject instanceof Quantity) {
                    Quantity nextValue = (Quantity) nextObject;
                    addQuantity(theEntity, retVal, resourceName, nextValue);
                } else if (nextObject instanceof Range) {
                    // A Range is indexed as its two quantity bounds.
                    Range nextValue = (Range) nextObject;
                    addQuantity(theEntity, retVal, resourceName, nextValue.getLow());
                    addQuantity(theEntity, retVal, resourceName, nextValue.getHigh());
                } else if (nextObject instanceof LocationPositionComponent) {
                    // Location positions are not quantities; handled elsewhere.
                    continue;
                } else {
                    if (!multiType) {
                        throw new ConfigurationException("Search param " + resourceName + " is of unexpected datatype: " + nextObject.getClass());
                    } else {
                        continue;
                    }
                }
            }
        }
        return retVal;
    }
}
public class MongodbBinaryStore {
    /**
     * Gets attribute's value from the stored content's header document.
     *
     * @param content stored content
     * @param fieldName attribute name
     * @return attribute's value
     */
    private Object getAttribute(DBCollection content, String fieldName) {
        // NOTE(review): findOne(HEADER_QUERY) may return null if no header document matches,
        // which would make this line throw a NullPointerException — confirm callers guarantee
        // the header document exists before reaching this point.
        return content.findOne(HEADER_QUERY).get(fieldName);
    }
}
public class GetResolverRuleRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( GetResolverRuleRequest getResolverRuleRequest , ProtocolMarshaller protocolMarshaller ) { } }
if ( getResolverRuleRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( getResolverRuleRequest . getResolverRuleId ( ) , RESOLVERRULEID_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class TypeUtility { /** * generate begin string to translate in code to used in content value or parameter need to be converted in string through String . valueOf * @ param methodBuilder * the method builder * @ param property * the property */ public static void beginStringConversion ( Builder methodBuilder , ModelProperty property ) { } }
TypeName modelType = typeName ( property . getElement ( ) . asType ( ) ) ; beginStringConversion ( methodBuilder , modelType ) ;
public class GlobalTracer {
    /**
     * Register a {@link Tracer} to back the behaviour of the {@link #get() global tracer}.
     * Registration is a one-time operation; attempting to call it more often will result in a
     * runtime exception. Every application intending to use the global tracer is responsible
     * for registering it once during its initialization.
     *
     * @param tracer Tracer to use as global tracer
     * @throws RuntimeException if there is already a different tracer registered
     * @see #registerIfAbsent(Callable)
     * @deprecated Please use {@link #registerIfAbsent(Tracer)} or
     *             {@link #registerIfAbsent(Callable)} instead.
     */
    @Deprecated
    public static void register(final Tracer tracer) {
        // Fails only when a DIFFERENT tracer is already registered: re-registering the same
        // instance (or a GlobalTracer wrapper) is tolerated for backwards compatibility.
        if (!registerIfAbsent(provide(tracer))
                && !tracer.equals(GlobalTracer.tracer)
                && !(tracer instanceof GlobalTracer)) {
            throw new IllegalStateException("There is already a current global Tracer registered.");
        }
    }
}
public class Ifc2x3tc1PackageImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ public EClass getIfcMagneticFluxDensityMeasure ( ) { } }
if ( ifcMagneticFluxDensityMeasureEClass == null ) { ifcMagneticFluxDensityMeasureEClass = ( EClass ) EPackage . Registry . INSTANCE . getEPackage ( Ifc2x3tc1Package . eNS_URI ) . getEClassifiers ( ) . get ( 703 ) ; } return ifcMagneticFluxDensityMeasureEClass ;
public class RetouchedBloomFilter { /** * Removes a given key from < i > this < / i > filer . * @ param k The key to remove . * @ param vector The counting vector associated to the key . */ private void removeKey ( Key k , List < Key > [ ] vector ) { } }
if ( k == null ) { throw new NullPointerException ( "Key can not be null" ) ; } if ( vector == null ) { throw new NullPointerException ( "ArrayList<Key>[] can not be null" ) ; } int [ ] h = hash . hash ( k ) ; hash . clear ( ) ; for ( int i = 0 ; i < nbHash ; i ++ ) { vector [ h [ i ] ] . remove ( k ) ; }
public class WSKeyStore {
    /**
     * Initialize the wrapped keystore.
     *
     * @param reinitialize true to force re-loading of the keystore
     * @throws Exception if the keystore cannot be loaded
     */
    public void initializeKeyStore(boolean reinitialize) throws Exception {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            Tr.entry(tc, "initializeKeyStore");
        try {
            String initAtStartup = getProperty(Constants.SSLPROP_KEY_STORE_INITIALIZE_AT_STARTUP);
            // Only the default keystore reference is created on disk when missing.
            boolean createIfMissing = LibertyConstants.DEFAULT_KEYSTORE_REF_ID.equals(getProperty("id"));
            // Load eagerly when configured for startup initialization or when forced.
            if (Boolean.parseBoolean(initAtStartup) || reinitialize) {
                if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                    Tr.debug(tc, "Initializing keystore at startup.");
                getKeyStore(reinitialize, createIfMissing);
            }
        } catch (Exception e) {
            if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                Tr.debug(tc, "Exception initializing KeyStore; " + e);
            // Re-throw so the caller can react; the debug trace above records the failure.
            throw e;
        }
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            Tr.exit(tc, "initializeKeyStore");
    }
}
public class AtomicLongMap {
    /**
     * Adds {@code delta} to the value currently associated with {@code key}, and returns the
     * new value.
     */
    public long addAndGet(K key, long delta) {
        outer: for (;;) {
            AtomicLong atomic = map.get(key);
            if (atomic == null) {
                // No mapping yet: race to install one holding delta directly.
                atomic = map.putIfAbsent(key, new AtomicLong(delta));
                if (atomic == null) {
                    return delta;
                }
                // atomic is now non-null; fall through
            }
            for (;;) {
                long oldValue = atomic.get();
                if (oldValue == 0L) {
                    // don't compareAndSet a zero
                    // A zero marks an entry being removed; swap in a fresh AtomicLong instead.
                    if (map.replace(key, atomic, new AtomicLong(delta))) {
                        return delta;
                    }
                    // atomic replaced
                    continue outer;
                }
                long newValue = oldValue + delta;
                if (atomic.compareAndSet(oldValue, newValue)) {
                    return newValue;
                }
                // value changed
            }
        }
    }
}
public class DatabaseAccountsInner {
    /**
     * Changes the failover priority for the Azure Cosmos DB database account. A failover
     * priority of 0 indicates a write region. The maximum value for a failover priority =
     * (total number of regions - 1). Failover priority values must be unique for each of the
     * regions in which the database account exists.
     *
     * @param resourceGroupName Name of an Azure resource group.
     * @param accountName Cosmos DB database account name.
     * @param failoverPolicies List of failover policies.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceResponse} object if successful.
     */
    public Observable<ServiceResponse<Void>> beginFailoverPriorityChangeWithServiceResponseAsync(String resourceGroupName, String accountName, List<FailoverPolicy> failoverPolicies) {
        // Validate all required client state and parameters before issuing the call.
        if (this.client.subscriptionId() == null) {
            throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
        }
        if (resourceGroupName == null) {
            throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
        }
        if (accountName == null) {
            throw new IllegalArgumentException("Parameter accountName is required and cannot be null.");
        }
        if (this.client.apiVersion() == null) {
            throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null.");
        }
        if (failoverPolicies == null) {
            throw new IllegalArgumentException("Parameter failoverPolicies is required and cannot be null.");
        }
        Validator.validate(failoverPolicies);
        // Wrap the policy list in the request envelope expected by the service.
        FailoverPolicies failoverParameters = new FailoverPolicies();
        failoverParameters.withFailoverPolicies(failoverPolicies);
        return service.beginFailoverPriorityChange(this.client.subscriptionId(), resourceGroupName, accountName, this.client.apiVersion(), this.client.acceptLanguage(), failoverParameters, this.client.userAgent()).flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Void>>>() {
            @Override
            public Observable<ServiceResponse<Void>> call(Response<ResponseBody> response) {
                try {
                    // Translate the raw HTTP response into a typed ServiceResponse.
                    ServiceResponse<Void> clientResponse = beginFailoverPriorityChangeDelegate(response);
                    return Observable.just(clientResponse);
                } catch (Throwable t) {
                    return Observable.error(t);
                }
            }
        });
    }
}
public class BpmnParse {
    /**
     * Parses a scope: a process, subprocess, etc. Note that a process definition is a scope on
     * itself.
     *
     * @param scopeElement the XML element defining the scope
     * @param parentScope the scope that contains the nested scope
     */
    public void parseScope(Element scopeElement, ScopeImpl parentScope) {
        // Not yet supported on process level (PVM additions needed):
        // parseProperties(processElement);

        // Intermediate catch events and compensation handlers are parsed separately,
        // so pull them out of the general activity list first.
        List<Element> activityElements = new ArrayList<Element>(scopeElement.elements());
        Map<String, Element> intermediateCatchEvents = filterIntermediateCatchEvents(activityElements);
        activityElements.removeAll(intermediateCatchEvents.values());
        Map<String, Element> compensationHandlers = filterCompensationHandlers(activityElements);
        activityElements.removeAll(compensationHandlers.values());
        // Parse in dependency order: starts, activities, catch events, ends, boundaries,
        // then the flows/listeners/associations that reference the parsed activities.
        parseStartEvents(scopeElement, parentScope);
        parseActivities(activityElements, scopeElement, parentScope);
        parseIntermediateCatchEvents(scopeElement, parentScope, intermediateCatchEvents);
        parseEndEvents(scopeElement, parentScope);
        parseBoundaryEvents(scopeElement, parentScope);
        parseSequenceFlow(scopeElement, parentScope, compensationHandlers);
        parseExecutionListenersOnScope(scopeElement, parentScope);
        parseAssociations(scopeElement, parentScope, compensationHandlers);
        parseCompensationHandlers(parentScope, compensationHandlers);
        // Flush error callbacks deferred while parsing this scope.
        for (ScopeImpl.BacklogErrorCallback callback : parentScope.getBacklogErrorCallbacks()) {
            callback.callback();
        }
        if (parentScope instanceof ProcessDefinition) {
            parseProcessDefinitionCustomExtensions(scopeElement, (ProcessDefinition) parentScope);
        }
    }
}
public class SplitBuilder { /** * Build a constraint . * @ param args the parameters of the constraint . Must be 2 non - empty set of virtual machines . * @ return the constraint */ @ Override public List < ? extends SatConstraint > buildConstraint ( BtrPlaceTree t , List < BtrpOperand > args ) { } }
if ( checkConformance ( t , args ) ) { @ SuppressWarnings ( "unchecked" ) Collection < Collection < VM > > s = ( Collection < Collection < VM > > ) params [ 0 ] . transform ( this , t , args . get ( 0 ) ) ; return s != null ? Collections . singletonList ( new Split ( s , false ) ) : Collections . emptyList ( ) ; } return Collections . emptyList ( ) ;
public class LifecycleHooks { /** * Get the description of the indicated child object from the runner for the specified test class instance . * @ param target test class instance * @ param child child object * @ return { @ link Description } object for the indicated child */ public static Description describeChild ( Object target , Object child ) { } }
Object runner = getRunnerForTarget ( target ) ; return invoke ( runner , "describeChild" , child ) ;
public class sslservicegroup_binding { /** * Use this API to fetch sslservicegroup _ binding resources of given names . */ public static sslservicegroup_binding [ ] get ( nitro_service service , String servicegroupname [ ] ) throws Exception { } }
if ( servicegroupname != null && servicegroupname . length > 0 ) { sslservicegroup_binding response [ ] = new sslservicegroup_binding [ servicegroupname . length ] ; sslservicegroup_binding obj [ ] = new sslservicegroup_binding [ servicegroupname . length ] ; for ( int i = 0 ; i < servicegroupname . length ; i ++ ) { obj [ i ] = new sslservicegroup_binding ( ) ; obj [ i ] . set_servicegroupname ( servicegroupname [ i ] ) ; response [ i ] = ( sslservicegroup_binding ) obj [ i ] . get_resource ( service ) ; } return response ; } return null ;
public class J4pExecRequest { /** * { @ inheritDoc } */ @ Override List < String > getRequestParts ( ) { } }
List < String > ret = super . getRequestParts ( ) ; ret . add ( operation ) ; if ( arguments . size ( ) > 0 ) { for ( int i = 0 ; i < arguments . size ( ) ; i ++ ) { ret . add ( serializeArgumentToRequestPart ( arguments . get ( i ) ) ) ; } } return ret ;
public class OCommandExecutorSQLSelect {
    /**
     * Extract the content of collections and/or links and put it as result.
     */
    private void applyFlatten() {
        if (flattenTarget == null)
            return;
        final List<OIdentifiable> finalResult = new ArrayList<OIdentifiable>();
        Object fieldValue;
        if (tempResult != null)
            for (OIdentifiable id : tempResult) {
                // Resolve the flatten target for the current record: it may be a filter item,
                // a runtime function result, or a plain literal.
                if (flattenTarget instanceof OSQLFilterItem)
                    fieldValue = ((OSQLFilterItem) flattenTarget).getValue(id.getRecord(), context);
                else if (flattenTarget instanceof OSQLFunctionRuntime)
                    fieldValue = ((OSQLFunctionRuntime) flattenTarget).getResult();
                else
                    fieldValue = flattenTarget.toString();
                if (fieldValue != null)
                    if (fieldValue instanceof Collection<?>) {
                        // Unwrap collection values (including nested lists) into records.
                        for (Object o : ((Collection<?>) fieldValue)) {
                            if (o instanceof OIdentifiable)
                                finalResult.add(((OIdentifiable) o).getRecord());
                            else if (o instanceof List) {
                                List<OIdentifiable> list = (List<OIdentifiable>) o;
                                for (int i = 0; i < list.size(); i++)
                                    finalResult.add(list.get(i).getRecord());
                            }
                        }
                    } else
                        finalResult.add((OIdentifiable) fieldValue);
            }
        // Replace the intermediate result with the flattened record list.
        tempResult = finalResult;
    }
}
public class AbstractTrainer { /** * { @ inheritDoc } */ @ Override public void save ( String storageName ) { } }
logger . info ( "save()" ) ; String knowledgeBaseName = createKnowledgeBaseName ( storageName , knowledgeBase . getConfiguration ( ) . getStorageConfiguration ( ) . getStorageNameSeparator ( ) ) ; knowledgeBase . save ( knowledgeBaseName ) ; stored = true ;