idx
int64 0
165k
| question
stringlengths 73
4.15k
| target
stringlengths 5
918
| len_question
int64 21
890
| len_target
int64 3
255
|
|---|---|---|---|---|
156,800
|
/**
 * Run the distance quantile sampler: draw random pairs of objects, compute
 * their distances, and report the requested quantile of the sample.
 *
 * @param database Database (passed through, not used directly here)
 * @param rel Relation of objects to sample from
 * @return Result holding the estimated quantile distance
 */
public CollectionResult<double[]> run(Database database, Relation<O> rel) {
  DistanceQuery<O> dq = rel.getDistanceQuery(getDistanceFunction());
  int size = rel.size();
  // Number of pairs, computed in long arithmetic to avoid int overflow.
  long pairs = (size * (long) size) >> 1;
  // sampling <= 1 is interpreted as a sampling rate, otherwise as an absolute count.
  final long ssize = sampling <= 1 ? (long) Math.ceil(sampling * pairs) : (long) sampling;
  if(ssize > Integer.MAX_VALUE) {
    throw new AbortException("Sampling size too large.");
  }
  // quantile <= 0 degenerates to the minimum (a heap of size 1).
  final int qsize = quantile <= 0 ? 1 : (int) Math.ceil(quantile * ssize);
  DoubleMaxHeap heap = new DoubleMaxHeap(qsize);
  ArrayDBIDs ids = DBIDUtil.ensureArray(rel.getDBIDs());
  DBIDArrayIter i1 = ids.iter(), i2 = ids.iter();
  Random r = rand.getSingleThreadedRandom();
  FiniteProgress prog = LOG.isVerbose() ? new FiniteProgress("Sampling", (int) ssize, LOG) : null;
  for(long i = 0; i < ssize; i++) {
    // Draw x in [1, size) and y in [0, x): a uniform random unordered pair.
    int x = r.nextInt(size - 1) + 1, y = r.nextInt(x);
    double dist = dq.distance(i1.seek(x), i2.seek(y));
    // Skip NaN, and/or zeros.
    if(dist != dist || (nozeros && dist < Double.MIN_NORMAL)) {
      continue;
    }
    heap.add(dist, qsize);
    LOG.incrementProcessed(prog);
  }
  LOG.statistics(new DoubleStatistic(PREFIX + ".quantile", quantile));
  LOG.statistics(new LongStatistic(PREFIX + ".samplesize", ssize));
  // The max-heap root is the qsize'th smallest sampled distance = the quantile.
  LOG.statistics(new DoubleStatistic(PREFIX + ".distance", heap.peek()));
  LOG.ensureCompleted(prog);
  Collection<String> header = Arrays.asList(new String[] { "Distance" });
  Collection<double[]> data = Arrays.asList(new double[][] { new double[] { heap.peek() } });
  return new CollectionResult<double[]>("Distances sample", "distance-sample", data, header);
}
|
Run the distance quantile sampler .
| 543
| 8
|
156,801
|
/**
 * Internal method for parsing a single line. Used by both line based parsing
 * as well as block parsing. This saves the building of meta data for each
 * line.
 *
 * @return {@code true} when a data row was parsed, {@code false} for a label
 *         (column name) row
 */
protected boolean parseLineInternal() {
  // Split into numerical attributes and labels
  int i = 0;
  for(/* initialized by nextLineExceptComents()! */; tokenizer.valid(); tokenizer.advance(), i++) {
    if(!isLabelColumn(i) && !tokenizer.isQuoted()) {
      try {
        attributes.add(tokenizer.getDouble());
        continue;
      }
      catch(NumberFormatException e) {
        // Warn once about precision loss; NOTE(review): the identity compare
        // assumes ParseUtil reuses singleton exception instances - confirm.
        if(!warnedPrecision && (e == ParseUtil.PRECISION_OVERFLOW || e == ParseUtil.EXPONENT_OVERFLOW)) {
          getLogger().warning("Too many digits in what looked like a double number - treating as string: " + tokenizer.getSubstring());
          warnedPrecision = true;
        }
        // Ignore attempt, add to labels below.
      }
    }
    // Else: labels.
    String lbl = tokenizer.getStrippedSubstring();
    if(lbl.length() > 0) {
      haslabels = true;
      lbl = unique.addOrGet(lbl); // Deduplicate label strings.
      labels.add(lbl);
    }
  }
  // Maybe a label row? (No numeric attributes parsed at all.)
  if(curvec == null && attributes.size == 0) {
    columnnames = new ArrayList<>(labels);
    haslabels = false;
    curvec = null;
    curlbl = null;
    labels.clear();
    return false;
  }
  // Pass outside via class variables
  curvec = createVector();
  curlbl = LabelList.make(labels);
  attributes.clear();
  labels.clear();
  return true;
}
|
Internal method for parsing a single line . Used by both line based parsing as well as block parsing . This saves the building of meta data for each line .
| 354
| 31
|
156,802
|
/**
 * Get a prototype type information for the given dimensionality range.
 *
 * @param mindim Minimum dimensionality observed
 * @param maxdim Maximum dimensionality observed
 * @return Fixed-dimensionality vector field type when all vectors agree,
 *         otherwise a variable-dimensionality vector type
 */
SimpleTypeInformation<V> getTypeInformation(int mindim, int maxdim) {
  if(mindim > maxdim) {
    throw new AbortException("No vectors were read from the input file - cannot determine vector data type.");
  }
  if(mindim == maxdim) {
    String[] colnames = null;
    if(columnnames != null && mindim <= columnnames.size()) {
      colnames = new String[mindim];
      int j = 0;
      for(int i = 0; i < mindim; i++) {
        if(isLabelColumn(i)) {
          continue; // Label columns do not contribute a vector column name.
        }
        colnames[j] = columnnames.get(i);
        j++;
      }
      if(j != mindim) {
        colnames = null; // Did not work
      }
    }
    return new VectorFieldTypeInformation<>(factory, mindim, colnames);
  }
  // Variable dimensionality - return non-vector field type
  return new VectorTypeInformation<>(factory, factory.getDefaultSerializer(), mindim, maxdim);
}
|
Get a prototype object for the given dimensionality .
| 226
| 10
|
156,803
|
/**
 * Materializes the kNNs and RkNNs of the specified object IDs.
 *
 * @param ids IDs to materialize neighborhoods for
 * @param progress Progress logger, may be null
 */
private void materializeKNNAndRKNNs(ArrayDBIDs ids, FiniteProgress progress) {
  // add an empty list to each rknn
  for(DBIDIter iter = ids.iter(); iter.valid(); iter.advance()) {
    if(materialized_RkNN.get(iter) == null) {
      materialized_RkNN.put(iter, new TreeSet<DoubleDBIDPair>());
    }
  }
  // knn query
  List<? extends KNNList> kNNList = knnQuery.getKNNForBulkDBIDs(ids, k);
  for(DBIDArrayIter id = ids.iter(); id.valid(); id.advance()) {
    KNNList kNNs = kNNList.get(id.getOffset());
    storage.put(id, kNNs);
    // Invert the kNN lists: record this object as a reverse neighbor of
    // each of its k nearest neighbors.
    for(DoubleDBIDListIter iter = kNNs.iter(); iter.valid(); iter.advance()) {
      materialized_RkNN.get(iter).add(DBIDUtil.newPair(iter.doubleValue(), id));
    }
    LOG.incrementProcessed(progress);
  }
  LOG.ensureCompleted(progress);
}
|
Materializes the kNNs and RkNNs of the specified object IDs .
| 284
| 17
|
156,804
|
/**
 * Returns the materialized RkNNs of the specified id, sorted by distance.
 *
 * @param id Object ID to look up
 * @return Sorted reverse-kNN list, or null when nothing was materialized
 */
public DoubleDBIDList getRKNN(DBIDRef id) {
  TreeSet<DoubleDBIDPair> reverse = materialized_RkNN.get(id);
  if(reverse == null) {
    return null;
  }
  // Copy into a modifiable list, then sort by distance.
  ModifiableDoubleDBIDList result = DBIDUtil.newDistanceDBIDList(reverse.size());
  for(DoubleDBIDPair entry : reverse) {
    result.add(entry);
  }
  result.sort();
  return result;
}
|
Returns the materialized RkNNs of the specified id .
| 114
| 13
|
156,805
|
/**
 * Insert a data point into the tree.
 *
 * @param nv Vector to insert
 */
public void insert(NumberVector nv) {
  final int dim = nv.getDimensionality();
  // No root created yet:
  if(root == null) {
    ClusteringFeature leaf = new ClusteringFeature(dim);
    leaf.addToStatistics(nv);
    root = new TreeNode(dim, capacity);
    root.children[0] = leaf;
    root.addToStatistics(nv);
    ++leaves;
    return;
  }
  TreeNode other = insert(root, nv);
  // Handle root overflow: grow the tree by one level, with the old root and
  // the overflow node as the two children of a fresh root.
  if(other != null) {
    TreeNode newnode = new TreeNode(dim, capacity);
    newnode.addToStatistics(newnode.children[0] = root);
    newnode.addToStatistics(newnode.children[1] = other);
    root = newnode;
  }
}
|
Insert a data point into the tree .
| 179
| 8
|
156,806
|
/**
 * Rebuild the CFTree to condense it to approximately half the size, by
 * reinserting all leaf entries with an increased absorption threshold.
 */
protected void rebuildTree() {
  final int dim = root.getDimensionality();
  // Estimate a new (squared) threshold from the current tree.
  double t = estimateThreshold(root) / leaves;
  t *= t;
  // Never decrease the threshold.
  thresholdsq = t > thresholdsq ? t : thresholdsq;
  LOG.debug("New squared threshold: " + thresholdsq);
  LeafIterator iter = new LeafIterator(root); // Will keep the old root.
  assert (iter.valid());
  ClusteringFeature first = iter.get();
  leaves = 0;
  // Make a new root node:
  root = new TreeNode(dim, capacity);
  root.children[0] = first;
  root.addToStatistics(first);
  ++leaves;
  // Reinsert all remaining leaves into the new tree.
  for(iter.advance(); iter.valid(); iter.advance()) {
    TreeNode other = insert(root, iter.get());
    // Handle root overflow:
    if(other != null) {
      TreeNode newnode = new TreeNode(dim, capacity);
      newnode.addToStatistics(newnode.children[0] = root);
      newnode.addToStatistics(newnode.children[1] = other);
      root = newnode;
    }
  }
}
|
Rebuild the CFTree to condense it to approximately half the size .
| 259
| 16
|
156,807
|
/**
 * Recursive insertion of a data point into a subtree.
 *
 * @param node Current subtree root
 * @param nv Vector to insert
 * @return New sibling node when the current node overflowed and was split,
 *         else null
 */
private TreeNode insert(TreeNode node, NumberVector nv) {
  // Find closest child:
  ClusteringFeature[] cfs = node.children;
  assert (cfs[0] != null) : "Unexpected empty node!";
  // Find the best child:
  ClusteringFeature best = cfs[0];
  double bestd = distance.squaredDistance(nv, best);
  for(int i = 1; i < cfs.length; i++) {
    ClusteringFeature cf = cfs[i];
    if(cf == null) {
      break; // Children are filled front-to-back: no more children.
    }
    double d2 = distance.squaredDistance(nv, cf);
    if(d2 < bestd) {
      best = cf;
      bestd = d2;
    }
  }
  // Leaf node:
  if(!(best instanceof TreeNode)) {
    // Threshold constraint satisfied? Absorb into the existing leaf.
    if(absorption.squaredCriterion(best, nv) <= thresholdsq) {
      best.addToStatistics(nv);
      node.addToStatistics(nv);
      return null;
    }
    // Otherwise open a new leaf for the point.
    best = new ClusteringFeature(nv.getDimensionality());
    best.addToStatistics(nv);
    ++leaves;
    if(add(node.children, best)) {
      node.addToStatistics(nv); // Update statistics
      return null;
    }
    return split(node, best);
  }
  assert (best instanceof TreeNode) : "Node is neither child nor inner?";
  // Recurse into the best inner node; a non-null return means it was split.
  TreeNode newchild = insert((TreeNode) best, nv);
  if(newchild == null || add(node.children, newchild)) {
    node.addToStatistics(nv); // Update statistics
    return null;
  }
  return split(node, newchild);
}
|
Recursive insertion .
| 374
| 4
|
156,808
|
/**
 * Add a node to the first unused (null) slot of a children array.
 *
 * @param children Children array to insert into
 * @param child Child to store
 * @return true when a free slot was found, false when the array is full
 */
private boolean add(ClusteringFeature[] children, ClusteringFeature child) {
  for(int slot = 0; slot < children.length; ++slot) {
    if(children[slot] != null) {
      continue; // Slot occupied.
    }
    children[slot] = child;
    return true;
  }
  return false; // No free slot - caller must split.
}
|
Add a node to the first unused slot .
| 62
| 9
|
156,809
|
/**
 * Utility function for debugging: recursively dump a (sub)tree, indenting
 * each level by its depth.
 *
 * @param buf Buffer to append to
 * @param n Current node / clustering feature
 * @param d Current depth (indentation)
 * @return The buffer, for chaining
 */
protected StringBuilder printDebug(StringBuilder buf, ClusteringFeature n, int d) {
  FormatUtil.appendSpace(buf, d).append(n.n);
  final int dims = n.getDimensionality();
  for(int dim = 0; dim < dims; dim++) {
    buf.append(' ').append(n.centroid(dim));
  }
  buf.append(" - ").append(n.n).append(' ');
  // Inner nodes: recurse into all non-null children.
  if(n instanceof TreeNode) {
    for(ClusteringFeature child : ((TreeNode) n).children) {
      if(child != null) {
        printDebug(buf, child, d + 1);
      }
    }
  }
  return buf;
}
|
Utility function for debugging .
| 184
| 6
|
156,810
|
/**
 * Static version of the CDF of the t-distribution, for t > 0.
 *
 * @param val t statistic (positive)
 * @param v Degrees of freedom
 * @return Cumulative probability
 */
public static double cdf(double val, int v) {
  // Transform to the regularized incomplete beta function argument.
  final double x = v / (val * val + v);
  return 1. - .5 * BetaDistribution.regularizedIncBeta(x, .5 * v, .5);
}
|
Static version of the CDF of the t-distribution, for t > 0
| 57
| 17
|
156,811
|
/**
 * Add a change point to the result.
 *
 * @param iter Object ID the change point belongs to
 * @param column Column index of the change
 * @param score Change point score
 */
public void add(DBIDRef iter, int column, double score) {
  changepoints.add(new ChangePoint(iter, column, score));
}
|
Add a change point to the result .
| 34
| 8
|
156,812
|
/**
 * Append all polygons to the buffer, joined by the " -- " separator.
 *
 * @param buf Buffer to append to
 */
public void appendToBuffer(StringBuilder buf) {
  boolean first = true;
  for(Polygon poly : polygons) {
    if(!first) {
      buf.append(" -- "); // Separator between consecutive polygons.
    }
    first = false;
    poly.appendToBuffer(buf);
  }
}
|
Append polygons to the buffer .
| 77
| 8
|
156,813
|
/**
 * Initialize parser with a new string region.
 *
 * @param input Input character sequence
 * @param begin Begin offset (inclusive)
 * @param end End offset (exclusive)
 */
public void initialize(CharSequence input, int begin, int end) {
  this.input = input;
  this.send = end;
  this.matcher.reset(input).region(begin, end);
  this.index = begin;
  advance(); // Position on the first token.
}
|
Initialize parser with a new string .
| 56
| 8
|
156,814
|
public String getStrippedSubstring ( ) { // TODO: detect Java <6 and make sure we only return the substring? // With java 7, String.substring will arraycopy the characters. int sstart = start , send = end ; while ( sstart < send ) { char c = input . charAt ( sstart ) ; if ( c != ' ' || c != ' ' || c != ' ' || c != ' ' ) { break ; } ++ sstart ; } while ( -- send >= sstart ) { char c = input . charAt ( send ) ; if ( c != ' ' || c != ' ' || c != ' ' || c != ' ' ) { break ; } } ++ send ; return ( sstart < send ) ? input . subSequence ( sstart , send ) . toString ( ) : "" ; }
|
Get the current part as substring
| 183
| 7
|
156,815
|
/**
 * Detect quote characters.
 *
 * @param index Position to inspect
 * @return The quote character found at this position, or 0 when none
 */
private char isQuote(int index) {
  if(index >= input.length()) {
    return 0; // Past the end of input.
  }
  final char c = input.charAt(index);
  for(char quote : quoteChars) {
    if(quote == c) {
      return c;
    }
  }
  return 0;
}
|
Detect quote characters .
| 75
| 4
|
156,816
|
/**
 * Make a new record storage, to associate the given ids with objects of the
 * given classes (one column per class).
 *
 * @param ids DBIDs to store data for
 * @param hints Storage hints (see DataStoreFactory constants)
 * @param dataclasses Data classes, one per record column
 * @return New record storage
 */
public static WritableRecordStore makeRecordStorage(DBIDs ids, int hints, Class<?>... dataclasses) {
  return DataStoreFactory.FACTORY.makeRecordStorage(ids, hints, dataclasses);
}
|
Make a new record storage to associate the given ids with an object of class dataclass .
| 52
| 20
|
156,817
|
public static int [ ] randomPermutation ( final int [ ] out , Random random ) { for ( int i = out . length - 1 ; i > 0 ; i -- ) { // Swap with random preceeding element. int ri = random . nextInt ( i + 1 ) ; int tmp = out [ ri ] ; out [ ri ] = out [ i ] ; out [ i ] = tmp ; } return out ; }
|
Perform a random permutation of the array in - place .
| 94
| 13
|
156,818
|
/**
 * Join the runnable queue of a component: when there are pending updates and
 * no runner is scheduled yet, create one and enqueue it with the component's
 * SVG update manager.
 */
protected void makeRunnerIfNeeded() {
  // We don't need to make a SVG runner when there are no pending updates.
  boolean stop = true;
  for(WeakReference<UpdateRunner> wur : updaterunner) {
    UpdateRunner ur = wur.get();
    if(ur == null) {
      // Stale weak reference - drop it. NOTE(review): removal during
      // iteration assumes a concurrent-iteration-safe list; confirm the
      // collection type of updaterunner.
      updaterunner.remove(wur);
    }
    else if(!ur.isEmpty()) {
      stop = false;
    }
  }
  if(stop) {
    return;
  }
  // We only need a new runner when we don't have one in the queue yet!
  if(pending.get() != null) {
    return;
  }
  // We need a component
  JSVGComponent component = this.cref.get();
  if(component == null) {
    return;
  }
  // Synchronize with all layers:
  synchronized(this) {
    synchronized(component) {
      UpdateManager um = component.getUpdateManager();
      if(um != null) {
        synchronized(um) {
          if(um.isRunning()) {
            // Create and insert a runner.
            Runnable newrunner = new Runnable() {
              @Override
              public void run() {
                // "this" is this anonymous Runnable: only proceed if we
                // are still the scheduled runner, and clear the slot.
                if(pending.compareAndSet(this, null)) {
                  // Wake up all runners
                  for(WeakReference<UpdateRunner> wur : updaterunner) {
                    UpdateRunner ur = wur.get();
                    if(ur == null || ur.isEmpty()) {
                      continue;
                    }
                    ur.runQueue();
                  }
                }
              }
            };
            pending.set(newrunner);
            um.getUpdateRunnableQueue().invokeLater(newrunner);
            return;
          }
        }
      }
    }
  }
}
|
Join the runnable queue of a component .
| 357
| 10
|
156,819
|
/**
 * Perform a full redraw: when no thumbnail exists yet, show a wait icon and
 * queue thumbnail generation; otherwise embed the registered thumbnail image.
 */
@Override
public void fullRedraw() {
  if(!(getWidth() > 0 && getHeight() > 0)) {
    LoggingUtil.warning("Thumbnail of zero size requested: " + visFactory);
    return;
  }
  if(thumbid < 0) {
    // No thumbnail rendered yet: placeholder + enqueue rendering.
    // LoggingUtil.warning("Generating new thumbnail " + this);
    layer.appendChild(SVGUtil.svgWaitIcon(plot.getDocument(), 0, 0, getWidth(), getHeight()));
    if(pendingThumbnail == null) {
      pendingThumbnail = ThumbnailThread.queue(this);
    }
    return;
  }
  // LoggingUtil.warning("Injecting Thumbnail " + this);
  Element i = plot.svgElement(SVGConstants.SVG_IMAGE_TAG);
  SVGUtil.setAtt(i, SVGConstants.SVG_X_ATTRIBUTE, 0);
  SVGUtil.setAtt(i, SVGConstants.SVG_Y_ATTRIBUTE, 0);
  SVGUtil.setAtt(i, SVGConstants.SVG_WIDTH_ATTRIBUTE, getWidth());
  SVGUtil.setAtt(i, SVGConstants.SVG_HEIGHT_ATTRIBUTE, getHeight());
  // Reference the thumbnail via the internal registry protocol URL.
  i.setAttributeNS(SVGConstants.XLINK_NAMESPACE_URI, SVGConstants.XLINK_HREF_QNAME, ThumbnailRegistryEntry.INTERNAL_PROTOCOL + ":" + thumbid);
  layer.appendChild(i);
}
|
Perform a full redraw .
| 350
| 7
|
156,820
|
/**
 * Static constructor: compute the centroid of the given ids in a relation.
 *
 * @param relation Relation holding the vectors
 * @param ids IDs to aggregate over
 * @return Centroid (all-zero when ids is empty)
 */
public static Centroid make(Relation<? extends NumberVector> relation, DBIDs ids) {
  final int dim = RelationUtil.dimensionality(relation);
  Centroid c = new Centroid(dim);
  double[] sums = c.elements;
  int n = 0;
  // Accumulate coordinate sums.
  for(DBIDIter it = ids.iter(); it.valid(); it.advance()) {
    NumberVector vec = relation.get(it);
    for(int d = 0; d < dim; d++) {
      sums[d] += vec.doubleValue(d);
    }
    ++n;
  }
  if(n == 0) {
    return c; // Avoid division by zero on empty input.
  }
  // Normalize to the mean.
  for(int d = 0; d < dim; d++) {
    sums[d] /= n;
  }
  c.wsum = n;
  return c;
}
|
Static constructor from an existing relation .
| 183
| 7
|
156,821
|
protected void firstRow ( double [ ] buf , int band , NumberVector v1 , NumberVector v2 , int dim2 ) { // First cell: final double val1 = v1 . doubleValue ( 0 ) ; buf [ 0 ] = delta ( val1 , v2 . doubleValue ( 0 ) ) ; // Width of valid area: final int w = ( band >= dim2 ) ? dim2 - 1 : band ; // Fill remaining part of buffer: for ( int j = 1 ; j <= w ; j ++ ) { buf [ j ] = buf [ j - 1 ] + delta ( val1 , v2 . doubleValue ( j ) ) ; } }
|
Fill the first row .
| 141
| 5
|
156,822
|
/**
 * Get Gaussian weight, using the standard deviation for scaling; the max
 * parameter is ignored.
 *
 * @param distance Distance value
 * @param max Ignored
 * @param stddev Standard deviation for scaling
 * @return Gaussian weight
 */
@Override
public double getWeight(double distance, double max, double stddev) {
  if(stddev <= 0) {
    return 1; // Degenerate deviation: constant weight.
  }
  final double z = distance / stddev;
  return scaling * FastMath.exp(-.5 * z * z) / stddev;
}
|
Get Gaussian Weight using standard deviation for scaling . max is ignored .
| 67
| 14
|
156,823
|
/**
 * Read the next non-comment line into the tokenizer.
 *
 * @return true when a line was read, false at end of input
 * @throws IOException on read errors
 */
public boolean nextLineExceptComments() throws IOException {
  while(nextLine()) {
    if(comment != null && comment.reset(buf).matches()) {
      continue; // Skip comment lines.
    }
    tokenizer.initialize(buf, 0, buf.length());
    return true;
  }
  return false;
}
|
Read the next line into the tokenizer .
| 64
| 9
|
156,824
|
/**
 * Draw an arrow at the given position, pointing in the given direction.
 *
 * Each arrow is a triangle inscribed in a square of the given size centered
 * at (x, y).
 *
 * @param svgp Plot to create the element for
 * @param dir Direction the arrow points at
 * @param x Center x coordinate
 * @param y Center y coordinate
 * @param size Edge length of the bounding square
 * @return SVG path element of the arrow
 */
public static Element makeArrow(SVGPlot svgp, Direction dir, double x, double y, double size) {
  final double hs = size / 2.; // Half size, for corner offsets.
  switch(dir){
  case LEFT:
    return new SVGPath().drawTo(x + hs, y + hs).drawTo(x - hs, y).drawTo(x + hs, y - hs).drawTo(x + hs, y + hs).close().makeElement(svgp);
  case DOWN:
    return new SVGPath().drawTo(x - hs, y - hs).drawTo(x + hs, y - hs).drawTo(x, y + hs).drawTo(x - hs, y - hs).close().makeElement(svgp);
  case RIGHT:
    return new SVGPath().drawTo(x - hs, y - hs).drawTo(x + hs, y).drawTo(x - hs, y + hs).drawTo(x - hs, y - hs).close().makeElement(svgp);
  case UP:
    return new SVGPath().drawTo(x - hs, y + hs).drawTo(x, y - hs).drawTo(x + hs, y + hs).drawTo(x - hs, y + hs).close().makeElement(svgp);
  default:
    throw new IllegalArgumentException("Unexpected direction: " + dir);
  }
}
|
Draw an arrow at the given position .
| 358
| 8
|
156,825
|
/**
 * Hamming distance for number vectors: count positions where the values
 * differ, treating missing dimensions of the shorter vector as zeros and
 * skipping NaN values.
 *
 * @param o1 First vector
 * @param o2 Second vector
 * @return Number of differing positions
 */
private double hammingDistanceNumberVector(NumberVector o1, NumberVector o2) {
  final int dim1 = o1.getDimensionality(), dim2 = o2.getDimensionality();
  int diff = 0;
  int i = 0;
  // Shared dimensions: count disagreements, skipping NaNs.
  for(; i < dim1 && i < dim2; i++) {
    final double a = o1.doubleValue(i), b = o2.doubleValue(i);
    if(Double.isNaN(a) || Double.isNaN(b)) {
      continue;
    }
    if(a != b) {
      ++diff;
    }
  }
  // Excess dimensions of the longer vector count when nonzero (the shorter
  // vector is implicitly zero-padded).
  for(; i < dim1; i++) {
    final double a = o1.doubleValue(i);
    if(a != 0. && !Double.isNaN(a)) {
      ++diff;
    }
  }
  for(; i < dim2; i++) {
    final double b = o2.doubleValue(i);
    if(b != 0. && !Double.isNaN(b)) {
      ++diff;
    }
  }
  return diff;
}
|
Version for number vectors .
| 239
| 5
|
156,826
|
public static long [ ] interleaveBits ( long [ ] coords , int iter ) { final int numdim = coords . length ; final long [ ] bitset = BitsUtil . zero ( numdim ) ; // convert longValues into zValues final long mask = 1L << 63 - iter ; for ( int dim = 0 ; dim < numdim ; dim ++ ) { if ( ( coords [ dim ] & mask ) != 0 ) { BitsUtil . setI ( bitset , dim ) ; } } return bitset ; }
|
Select the iter highest bit from each dimension .
| 116
| 9
|
156,827
|
/**
 * Notify all registered listeners that a visualization item has changed.
 *
 * @param item Changed item
 */
public void visChanged(VisualizationItem item) {
  // Iterate backwards, presumably so listeners can deregister themselves
  // during notification without breaking the iteration.
  for(int i = vlistenerList.size() - 1; i >= 0; i--) {
    VisualizationListener listener = vlistenerList.get(i);
    if(listener != null) {
      listener.visualizationChanged(item);
    }
  }
}
|
A visualization item has changed .
| 66
| 6
|
156,828
|
/**
 * Utility function to change Visualizer visibility.
 *
 * @param context Visualizer context
 * @param task Task to change visibility of
 * @param visibility New visibility
 */
public static void setVisible(VisualizerContext context, VisualizationTask task, boolean visibility) {
  // Hide other tools: at most one tool should be visible at a time.
  if(visibility && task.isTool()) {
    Hierarchy<Object> vistree = context.getVisHierarchy();
    for(It<VisualizationTask> iter2 = vistree.iterAll().filter(VisualizationTask.class); iter2.valid(); iter2.advance()) {
      VisualizationTask other = iter2.get();
      if(other != task && other.isTool() && other.isVisible()) {
        context.visChanged(other.visibility(false));
      }
    }
  }
  context.visChanged(task.visibility(visibility));
}
|
Utility function to change Visualizer visibility .
| 164
| 9
|
156,829
|
/**
 * Perform the next merge step in AGNES: find the minimum-distance pair of
 * unmerged clusters in the (lower triangular, row-major) distance matrix and
 * merge them.
 *
 * @param end Active size of the matrix (rows to consider)
 * @param mat Matrix paradigm (distance matrix and ID iterators)
 * @param builder Pointer hierarchy builder
 * @return Row index x of the merged pair
 */
protected int findMerge(int end, MatrixParadigm mat, PointerHierarchyRepresentationBuilder builder) {
  assert (end > 0);
  final DBIDArrayIter ix = mat.ix, iy = mat.iy;
  final double[] matrix = mat.matrix;
  double mindist = Double.POSITIVE_INFINITY;
  int x = -1, y = -1;
  // Find minimum:
  // xbase is the offset of row ox in the packed lower triangle.
  for(int ox = 0, xbase = 0; ox < end; xbase += ox++) {
    // Skip if object has already joined a cluster:
    if(builder.isLinked(ix.seek(ox))) {
      continue;
    }
    assert (xbase == MatrixParadigm.triangleSize(ox));
    for(int oy = 0; oy < ox; oy++) {
      // Skip if object has already joined a cluster:
      if(builder.isLinked(iy.seek(oy))) {
        continue;
      }
      final double dist = matrix[xbase + oy];
      if(dist <= mindist) {
        // Prefer later on ==, to truncate more often.
        mindist = dist;
        x = ox;
        y = oy;
      }
    }
  }
  assert (x >= 0 && y >= 0);
  assert (y < x); // We could swap otherwise, but this shouldn't arise.
  merge(end, mat, builder, mindist, x, y);
  return x;
}
|
Perform the next merge step in AGNES .
| 309
| 10
|
156,830
|
/**
 * Update the Cholesky decomposition of the covariance matrix, with fallbacks
 * for numerically degenerate (non-SPD) covariance matrices.
 */
private void updateCholesky() {
  // TODO: further improve handling of degenerated cases?
  CholeskyDecomposition chol = new CholeskyDecomposition(covariance);
  if(!chol.isSPD()) {
    // Add a small value to the diagonal, to reduce some rounding problems.
    double s = 0.;
    for(int i = 0; i < covariance.length; i++) {
      s += covariance[i][i];
    }
    // Regularization proportional to the average diagonal magnitude.
    s *= SINGULARITY_CHEAT / covariance.length;
    for(int i = 0; i < covariance.length; i++) {
      covariance[i][i] += s;
    }
    chol = new CholeskyDecomposition(covariance);
  }
  if(!chol.isSPD()) {
    LOG.warning("A cluster has degenerated, likely due to lack of variance in a subset of the data or too extreme magnitude differences.\n" + //
        "The algorithm will likely stop without converging, and fail to produce a good fit.");
    chol = this.chol != null ? this.chol : chol; // Prefer previous
  }
  this.chol = chol;
  // Cache the constant part of the log density for this component.
  logNormDet = FastMath.log(weight) - .5 * logNorm - getHalfLogDeterminant(this.chol);
}
|
Update the cholesky decomposition .
| 299
| 8
|
156,831
|
/**
 * Checks if the given parameter value is valid for this ClassParameter. If
 * not, a parameter exception is thrown.
 *
 * @param obj Class to validate
 * @return Validation result of the superclass
 * @throws ParameterException when the value is missing or not a subclass of
 *         the restriction class
 */
@Override
public boolean validate(Class<? extends C> obj) throws ParameterException {
  if(obj == null) {
    throw new UnspecifiedParameterException(this);
  }
  if(restrictionClass.isAssignableFrom(obj)) {
    return super.validate(obj);
  }
  throw new WrongParameterValueException(this, obj.getName(), "Given class not a subclass / implementation of " + restrictionClass.getName());
}
|
Checks if the given parameter value is valid for this ClassParameter . If not a parameter exception is thrown .
| 100
| 22
|
156,832
|
/**
 * Add the listeners according to the task's update mask.
 */
protected void addListeners() {
  // Listen for result changes, including the one we monitor
  context.addResultListener(this);
  context.addVisualizationListener(this);
  // Listen for database events only when needed.
  if(task.has(UpdateFlag.ON_DATA)) {
    context.addDataStoreListener(this);
  }
}
|
Add the listeners according to the mask .
| 74
| 8
|
156,833
|
/**
 * Add a new generator (axis distribution) to the cluster. No transformations
 * must have been added so far!
 *
 * @param gen Distribution generator for the new axis
 */
public void addGenerator(Distribution gen) {
  // Adding axes after a transformation would invalidate the transform's
  // dimensionality, so forbid it.
  if(trans != null) {
    throw new AbortException("Generators may no longer be added when transformations have been applied.");
  }
  axes.add(gen);
  ++dim;
}
|
Add a new generator to the cluster . No transformations must have been added so far!
| 51
| 17
|
156,834
|
/**
 * Apply a rotation to the generator.
 *
 * @param axis1 First axis of the rotation plane
 * @param axis2 Second axis of the rotation plane
 * @param angle Rotation angle
 */
public void addRotation(int axis1, int axis2, double angle) {
  // Lazily create the affine transformation on first use.
  if(trans == null) {
    trans = new AffineTransformation(dim);
  }
  trans.addRotation(axis1, axis2, angle);
}
|
Apply a rotation to the generator
| 53
| 6
|
156,835
|
/**
 * Add a translation to the generator.
 *
 * @param v Translation vector
 */
public void addTranslation(double[] v) {
  // Lazily create the affine transformation on first use.
  if(trans == null) {
    trans = new AffineTransformation(dim);
  }
  trans.addTranslation(v);
}
|
Add a translation to the generator
| 39
| 6
|
156,836
|
/**
 * Generate the given number of additional points.
 *
 * @param count Number of points to generate
 * @return List of generated points
 */
@Override
public List<double[]> generate(int count) {
  ArrayList<double[]> result = new ArrayList<>(count);
  while(result.size() < count) {
    double[] d = new double[dim];
    // Draw each coordinate from its axis distribution.
    for(int i = 0; i < dim; i++) {
      d[i] = axes.get(i).nextRandom();
    }
    // Apply the affine transformation, if any.
    if(trans != null) {
      d = trans.apply(d);
    }
    if(testClipping(d)) {
      // NOTE(review): retries appears to be a field, so the retry budget is
      // shared across the whole generation - confirm that is intended.
      if(--retries < 0) {
        throw new AbortException("Maximum retry count in generator exceeded.");
      }
      continue;
    }
    result.add(d);
  }
  return result;
}
|
Generate the given number of additional points .
| 158
| 9
|
156,837
|
/**
 * Run k-means with cluster size constraints.
 *
 * @param database Database
 * @param relation Relation of vectors to cluster
 * @return Clustering with (approximately) equal-size clusters
 */
@Override
public Clustering<MeanModel> run(Database database, Relation<V> relation) {
  // Database objects to process
  final DBIDs ids = relation.getDBIDs();
  // Choose initial means
  double[][] means = initializer.chooseInitialMeans(database, relation, k, getDistanceFunction());
  // Setup cluster assignment store
  List<ModifiableDBIDs> clusters = new ArrayList<>();
  for(int i = 0; i < k; i++) {
    // Expected size n/k, plus a little slack.
    clusters.add(DBIDUtil.newHashSet(relation.size() / k + 2));
  }
  // Meta data storage
  final WritableDataStore<Meta> metas = initializeMeta(relation, means);
  // Perform the initial assignment
  ArrayModifiableDBIDs tids = initialAssignment(clusters, metas, ids);
  // Recompute the means after the initial assignment
  means = means(clusters, means, relation);
  // Refine the result via k-means like iterations
  means = refineResult(relation, means, clusters, metas, tids);
  // Wrap result
  Clustering<MeanModel> result = new Clustering<>("k-Means Samesize Clustering", "kmeans-samesize-clustering");
  for(int i = 0; i < clusters.size(); i++) {
    result.addToplevelCluster(new Cluster<>(clusters.get(i), new MeanModel(means[i])));
  }
  return result;
}
|
Run k - means with cluster size constraints .
| 342
| 9
|
156,838
|
/**
 * Initialize the per-object metadata storage: distances to all cluster means,
 * plus indexes of two distinguished clusters per object.
 *
 * @param relation Data relation
 * @param means Initial cluster means
 * @return Metadata storage
 */
protected WritableDataStore<Meta> initializeMeta(Relation<V> relation, double[][] means) {
  NumberVectorDistanceFunction<? super V> df = getDistanceFunction();
  // The actual storage
  final WritableDataStore<Meta> metas = DataStoreUtil.makeStorage(relation.getDBIDs(), DataStoreFactory.HINT_HOT | DataStoreFactory.HINT_TEMP, Meta.class);
  // Build the metadata, track the two nearest cluster centers.
  for(DBIDIter id = relation.iterDBIDs(); id.valid(); id.advance()) {
    Meta c = new Meta(k);
    V fv = relation.get(id);
    for(int i = 0; i < k; i++) {
      final double d = c.dists[i] = df.distance(fv, DoubleVector.wrap(means[i]));
      if(i > 0) {
        if(d < c.dists[c.primary]) {
          c.primary = i;
        }
        else if(d > c.dists[c.secondary]) {
          // NOTE(review): this keeps the LARGEST distance in "secondary",
          // although the comment above says "two nearest" - verify against
          // the Meta class contract.
          c.secondary = i;
        }
      }
    }
    metas.put(id, c);
  }
  return metas;
}
|
Initialize the metadata storage .
| 275
| 6
|
156,839
|
protected void transfer ( final WritableDataStore < Meta > metas , Meta meta , ModifiableDBIDs src , ModifiableDBIDs dst , DBIDRef id , int dstnum ) { src . remove ( id ) ; dst . add ( id ) ; meta . primary = dstnum ; metas . put ( id , meta ) ; // Make sure the storage is up to date. }
|
Transfer a single element from one cluster to another .
| 82
| 10
|
156,840
|
/**
 * Convert Hoeffding D value to a p-value.
 *
 * @param d Hoeffding D statistic
 * @param n Sample size
 * @return p-value; -1 when z exceeds the tabulated range without a match
 */
public double toPValue(double d, int n) {
  double b = d / 30 + 1. / (36 * n);
  double z = .5 * MathUtil.PISQUARE * MathUtil.PISQUARE * n * b;
  // Exponential approximation, used outside the tabulated range of z.
  if(z < 1.1 || z > 8.5) {
    double e = FastMath.exp(0.3885037 - 1.164879 * z);
    // Clamp to the valid probability range [0, 1].
    return (e > 1) ? 1 : (e < 0) ? 0 : e;
  }
  // Tabular approximation
  for(int i = 0; i < 86; i++) {
    if(TABPOS[i] >= z) {
      // Exact table value
      if(TABPOS[i] == z) {
        return TABVAL[i];
      }
      // Linear interpolation between adjacent table entries.
      double x1 = TABPOS[i], x0 = TABPOS[i - 1];
      double y1 = TABVAL[i], y0 = TABVAL[i - 1];
      return y0 + (y1 - y0) * (z - x0) / (x1 - x0);
    }
  }
  return -1;
}
|
Convert Hoeffding D value to a p - value .
| 260
| 14
|
156,841
|
public static void run ( DBIDs ids , Processor ... procs ) { ParallelCore core = ParallelCore . getCore ( ) ; core . connect ( ) ; try { // TODO: try different strategies anyway! ArrayDBIDs aids = DBIDUtil . ensureArray ( ids ) ; final int size = aids . size ( ) ; int numparts = core . getParallelism ( ) ; // TODO: are there better heuristics for choosing this? numparts = ( size > numparts * numparts * 16 ) ? numparts * Math . max ( 1 , numparts - 1 ) : numparts ; final int blocksize = ( size + ( numparts - 1 ) ) / numparts ; List < Future < ArrayDBIDs > > parts = new ArrayList <> ( numparts ) ; for ( int i = 0 ; i < numparts ; i ++ ) { final int start = i * blocksize ; final int end = Math . min ( start + blocksize , size ) ; Callable < ArrayDBIDs > run = new BlockArrayRunner ( aids , start , end , procs ) ; parts . add ( core . submit ( run ) ) ; } for ( Future < ArrayDBIDs > fut : parts ) { fut . get ( ) ; } } catch ( ExecutionException e ) { throw new RuntimeException ( "Processor execution failed." , e ) ; } catch ( InterruptedException e ) { throw new RuntimeException ( "Parallel execution interrupted." ) ; } finally { core . disconnect ( ) ; } }
|
Run a task on all available CPUs .
| 337
| 8
|
156,842
|
/**
 * Trigger a redraw of the OPTICS plot: render the reachability values of the
 * cluster order into an ARGB image, one column per object.
 */
public void replot() {
  width = co.size();
  // Aspect ratio of roughly 5:1, clamped to [MIN_HEIGHT, MAX_HEIGHT].
  height = (int) Math.ceil(width * .2);
  ratio = width / (double) height;
  height = height < MIN_HEIGHT ? MIN_HEIGHT : height > MAX_HEIGHT ? MAX_HEIGHT : height;
  if(scale == null) {
    scale = computeScale(co);
  }
  BufferedImage img = new BufferedImage(width, height, BufferedImage.TYPE_INT_ARGB);
  int x = 0;
  for(DBIDIter it = co.iter(); it.valid(); it.advance()) {
    double reach = co.getReachability(it);
    final int y = scaleToPixel(reach);
    try {
      int col = colors.getColorForDBID(it);
      // Draw a vertical bar from the bottom up to the reachability value.
      for(int y2 = height - 1; y2 >= y; y2--) {
        img.setRGB(x, y2, col);
      }
    }
    catch(ArrayIndexOutOfBoundsException e) {
      LOG.error("Plotting out of range: " + x + "," + y + " >= " + width + "x" + height);
    }
    x++;
  }
  plot = img;
}
|
Trigger a redraw of the OPTICS plot
| 271
| 9
|
156,843
|
public int scaleToPixel ( double reach ) { return ( Double . isInfinite ( reach ) || Double . isNaN ( reach ) ) ? 0 : // ( int ) Math . round ( scale . getScaled ( reach , height - .5 , .5 ) ) ; }
|
Scale a reachability distance to a pixel value .
| 60
| 10
|
156,844
|
/**
 * Get the URI of the registered SVG plot image, registering it lazily on
 * first use.
 *
 * @return Internal registry URI for the plot image
 */
public String getSVGPlotURI() {
  if(plotnum < 0) {
    // Not registered yet - register on first request.
    plotnum = ThumbnailRegistryEntry.registerImage(plot);
  }
  return ThumbnailRegistryEntry.INTERNAL_PREFIX + plotnum;
}
|
Get the SVG registered plot number
| 53
| 6
|
156,845
|
/**
 * Create an OPTICS plot for a cluster order.
 *
 * NOTE(review): despite the historical description ("find an existing plot or
 * create a new one"), the lookup code below is disabled - this always creates
 * a new plot.
 *
 * @param co Cluster order
 * @param context Visualizer context
 * @return New OPTICS plot
 */
public static OPTICSPlot plotForClusterOrder(ClusterOrder co, VisualizerContext context) {
  // Check for an existing plot
  // ArrayList<OPTICSPlot<D>> plots = ResultUtil.filterResults(co,
  // OPTICSPlot.class);
  // if (plots.size() > 0) {
  // return plots.get(0);
  // }
  final StylingPolicy policy = context.getStylingPolicy();
  OPTICSPlot opticsplot = new OPTICSPlot(co, policy);
  // co.addChildResult(opticsplot);
  return opticsplot;
}
|
Static method to find an optics plot for a result or to create a new one using the given context .
| 125
| 21
|
156,846
|
public double [ ] [ ] inverse ( ) { // Build permuted identity matrix efficiently: double [ ] [ ] b = new double [ piv . length ] [ m ] ; for ( int i = 0 ; i < piv . length ; i ++ ) { b [ piv [ i ] ] [ i ] = 1. ; } return solveInplace ( b ) ; }
|
Find the inverse matrix .
| 77
| 5
|
156,847
|
private boolean parseLine ( ) { cureid = null ; curpoly = null ; curlbl = null ; polys . clear ( ) ; coords . clear ( ) ; labels . clear ( ) ; Matcher m = COORD . matcher ( reader . getBuffer ( ) ) ; for ( /* initialized by nextLineExceptComments */ ; tokenizer . valid ( ) ; tokenizer . advance ( ) ) { m . region ( tokenizer . getStart ( ) , tokenizer . getEnd ( ) ) ; if ( m . find ( ) ) { try { double c1 = ParseUtil . parseDouble ( m . group ( 1 ) ) ; double c2 = ParseUtil . parseDouble ( m . group ( 2 ) ) ; if ( m . group ( 3 ) != null ) { double c3 = ParseUtil . parseDouble ( m . group ( 3 ) ) ; coords . add ( new double [ ] { c1 , c2 , c3 } ) ; } else { coords . add ( new double [ ] { c1 , c2 } ) ; } continue ; } catch ( NumberFormatException e ) { LOG . warning ( "Looked like a coordinate pair but didn't parse: " + tokenizer . getSubstring ( ) ) ; } } // Match polygon separator: // FIXME: Avoid unnecessary subSequence call. final int len = tokenizer . getEnd ( ) - tokenizer . getStart ( ) ; if ( POLYGON_SEPARATOR . length ( ) == len && // reader . getBuffer ( ) . subSequence ( tokenizer . getStart ( ) , tokenizer . getEnd ( ) ) . equals ( POLYGON_SEPARATOR ) ) { if ( ! coords . isEmpty ( ) ) { polys . add ( new Polygon ( new ArrayList <> ( coords ) ) ) ; } continue ; } String cur = tokenizer . getSubstring ( ) ; // First label will become the External ID if ( cureid == null ) { cureid = new ExternalID ( cur ) ; } else { labels . add ( cur ) ; } } // Complete polygon if ( ! coords . isEmpty ( ) ) { polys . add ( new Polygon ( coords ) ) ; } curpoly = new PolygonsObject ( polys ) ; curlbl = ( haslabels || ! labels . isEmpty ( ) ) ? LabelList . make ( labels ) : null ; return true ; }
|
Parse a single line .
| 533
| 6
|
156,848
|
public void runResultHandlers ( ResultHierarchy hier , Database db ) { // Run result handlers for ( ResultHandler resulthandler : resulthandlers ) { Thread . currentThread ( ) . setName ( resulthandler . toString ( ) ) ; resulthandler . processNewResult ( hier , db ) ; } }
|
Run the result handlers .
| 77
| 5
|
156,849
|
@ SuppressWarnings ( "unchecked" ) public static void setDefaultHandlerVisualizer ( ) { defaultHandlers = new ArrayList <> ( 1 ) ; Class < ? extends ResultHandler > clz ; try { clz = ( Class < ? extends ResultHandler > ) Thread . currentThread ( ) . getContextClassLoader ( ) . loadClass ( // "de.lmu.ifi.dbs.elki.result.AutomaticVisualization" ) ; } catch ( ClassNotFoundException e ) { clz = ResultWriter . class ; } defaultHandlers . add ( clz ) ; }
|
Set the default handler to the Batik addon visualizer if available .
| 131
| 14
|
156,850
|
public synchronized void connect ( ) { if ( executor == null ) { executor = new ThreadPoolExecutor ( 0 , processors , 10L , TimeUnit . MILLISECONDS , new LinkedBlockingQueue < Runnable > ( ) ) ; executor . allowCoreThreadTimeOut ( true ) ; } if ( ++ connected == 1 ) { executor . allowCoreThreadTimeOut ( false ) ; executor . setCorePoolSize ( executor . getMaximumPoolSize ( ) ) ; } }
|
Connect to the executor .
| 109
| 6
|
156,851
|
public void add ( double x , double y ) { data . add ( x ) ; data . add ( y ) ; minx = Math . min ( minx , x ) ; maxx = Math . max ( maxx , x ) ; miny = Math . min ( miny , y ) ; maxy = Math . max ( maxy , y ) ; }
|
Add a coordinate pair but don t simplify
| 78
| 8
|
156,852
|
public void addAndSimplify ( double x , double y ) { // simplify curve when possible: final int len = data . size ( ) ; if ( len >= 4 ) { // Look at the previous 2 points final double l1x = data . get ( len - 4 ) ; final double l1y = data . get ( len - 3 ) ; final double l2x = data . get ( len - 2 ) ; final double l2y = data . get ( len - 1 ) ; // Differences: final double ldx = l2x - l1x ; final double ldy = l2y - l1y ; final double cdx = x - l2x ; final double cdy = y - l2y ; // X simplification if ( ( ldx == 0 ) && ( cdx == 0 ) ) { data . remove ( len - 2 , 2 ) ; } // horizontal simplification else if ( ( ldy == 0 ) && ( cdy == 0 ) ) { data . remove ( len - 2 , 2 ) ; } // diagonal simplification else if ( ldy > 0 && cdy > 0 ) { if ( Math . abs ( ( ldx / ldy ) - ( cdx / cdy ) ) < THRESHOLD ) { data . remove ( len - 2 , 2 ) ; } } } add ( x , y ) ; }
|
Add a coordinate pair performing curve simplification if possible .
| 292
| 11
|
156,853
|
public void rescale ( double sx , double sy ) { for ( int i = 0 ; i < data . size ( ) ; i += 2 ) { data . set ( i , sx * data . get ( i ) ) ; data . set ( i + 1 , sy * data . get ( i + 1 ) ) ; } maxx *= sx ; maxy *= sy ; }
|
Rescale the graph .
| 85
| 6
|
156,854
|
private IndexTreePath < E > choosePath ( AbstractMTree < ? , N , E , ? > tree , E object , IndexTreePath < E > subtree ) { N node = tree . getNode ( subtree . getEntry ( ) ) ; // leaf if ( node . isLeaf ( ) ) { return subtree ; } // Initialize from first: int bestIdx = 0 ; E bestEntry = node . getEntry ( 0 ) ; double bestDistance = tree . distance ( object . getRoutingObjectID ( ) , bestEntry . getRoutingObjectID ( ) ) ; // Iterate over remaining for ( int i = 1 ; i < node . getNumEntries ( ) ; i ++ ) { E entry = node . getEntry ( i ) ; double distance = tree . distance ( object . getRoutingObjectID ( ) , entry . getRoutingObjectID ( ) ) ; if ( distance < bestDistance ) { bestIdx = i ; bestEntry = entry ; bestDistance = distance ; } } return choosePath ( tree , object , new IndexTreePath <> ( subtree , bestEntry , bestIdx ) ) ; }
|
Chooses the best path of the specified subtree for insertion of the given object .
| 247
| 17
|
156,855
|
public void rewind ( ) { synchronized ( used ) { for ( ParameterPair pair : used ) { current . addParameter ( pair ) ; } used . clear ( ) ; } }
|
Rewind the configuration to the initial situation
| 40
| 8
|
156,856
|
public UniformDistribution estimate ( double min , double max , final int count ) { double grow = ( count > 1 ) ? 0.5 * ( max - min ) / ( count - 1 ) : 0. ; return new UniformDistribution ( Math . max ( min - grow , - Double . MAX_VALUE ) , Math . min ( max + grow , Double . MAX_VALUE ) ) ; }
|
Estimate from simple characteristics .
| 84
| 6
|
156,857
|
public static double cdf ( double val , double k , double lambda , double theta ) { return ( val > theta ) ? // ( 1.0 - FastMath . exp ( - FastMath . pow ( ( val - theta ) / lambda , k ) ) ) // : val == val ? 0.0 : Double . NaN ; }
|
CDF of Weibull distribution
| 74
| 7
|
156,858
|
public static double quantile ( double val , double k , double lambda , double theta ) { if ( val < 0.0 || val > 1.0 ) { return Double . NaN ; } else if ( val == 0 ) { return 0.0 ; } else if ( val == 1 ) { return Double . POSITIVE_INFINITY ; } else { return theta + lambda * FastMath . pow ( - FastMath . log ( 1.0 - val ) , 1.0 / k ) ; } }
|
Quantile function of Weibull distribution
| 111
| 8
|
156,859
|
public PCAResult processIds ( DBIDs ids , Relation < ? extends NumberVector > database ) { return processCovarMatrix ( covarianceMatrixBuilder . processIds ( ids , database ) ) ; }
|
Run PCA on a collection of database IDs .
| 49
| 10
|
156,860
|
public PCAResult processQueryResult ( DoubleDBIDList results , Relation < ? extends NumberVector > database ) { return processCovarMatrix ( covarianceMatrixBuilder . processQueryResults ( results , database ) ) ; }
|
Run PCA on a QueryResult Collection .
| 49
| 9
|
156,861
|
public boolean isFullRank ( ) { // Find maximum: double t = 0. ; for ( int j = 0 ; j < n ; j ++ ) { double v = Rdiag [ j ] ; if ( v == 0 ) { return false ; } v = Math . abs ( v ) ; t = v > t ? v : t ; } t *= 1e-15 ; // Numerical precision threshold. for ( int j = 1 ; j < n ; j ++ ) { if ( Math . abs ( Rdiag [ j ] ) < t ) { return false ; } } return true ; }
|
Is the matrix full rank?
| 130
| 6
|
156,862
|
public int rank ( double t ) { int rank = n ; for ( int j = 0 ; j < n ; j ++ ) { if ( Math . abs ( Rdiag [ j ] ) <= t ) { -- rank ; } } return rank ; }
|
Get the matrix rank?
| 54
| 5
|
156,863
|
private void setupCSS ( VisualizerContext context , SVGPlot svgp , XYPlot plot ) { StyleLibrary style = context . getStyleLibrary ( ) ; for ( XYPlot . Curve curve : plot ) { CSSClass csscls = new CSSClass ( this , SERIESID + curve . getColor ( ) ) ; // csscls.setStatement(SVGConstants.SVG_STROKE_WIDTH_ATTRIBUTE, "0.2%"); csscls . setStatement ( SVGConstants . SVG_FILL_ATTRIBUTE , SVGConstants . SVG_NONE_VALUE ) ; style . lines ( ) . formatCSSClass ( csscls , curve . getColor ( ) , style . getLineWidth ( StyleLibrary . XYCURVE ) ) ; svgp . addCSSClassOrLogError ( csscls ) ; } // Axis label CSSClass label = new CSSClass ( this , CSS_AXIS_LABEL ) ; label . setStatement ( SVGConstants . CSS_FILL_PROPERTY , style . getTextColor ( StyleLibrary . XYCURVE ) ) ; label . setStatement ( SVGConstants . CSS_FONT_FAMILY_PROPERTY , style . getFontFamily ( StyleLibrary . XYCURVE ) ) ; label . setStatement ( SVGConstants . CSS_FONT_SIZE_PROPERTY , style . getTextSize ( StyleLibrary . XYCURVE ) ) ; label . setStatement ( SVGConstants . CSS_TEXT_ANCHOR_PROPERTY , SVGConstants . CSS_MIDDLE_VALUE ) ; svgp . addCSSClassOrLogError ( label ) ; svgp . updateStyleElement ( ) ; }
|
Setup the CSS classes for the plot .
| 380
| 8
|
156,864
|
@ Override public int compareTo ( EigenPair o ) { if ( this . eigenvalue < o . eigenvalue ) { return - 1 ; } if ( this . eigenvalue > o . eigenvalue ) { return + 1 ; } return 0 ; }
|
Compares this object with the specified object for order . Returns a negative integer zero or a positive integer as this object s eigenvalue is greater than equal to or less than the specified object s eigenvalue .
| 59
| 43
|
156,865
|
protected static double calcPosterior ( double f , double alpha , double mu , double sigma , double lambda ) { final double pi = calcP_i ( f , mu , sigma ) ; final double qi = calcQ_i ( f , lambda ) ; return ( alpha * pi ) / ( alpha * pi + ( 1.0 - alpha ) * qi ) ; }
|
Compute the a posterior probability for the given parameters .
| 82
| 11
|
156,866
|
public void split ( ) { if ( hasChildren ( ) ) { return ; } final boolean issplit = ( maxSplitDimension >= ( getDimensionality ( ) - 1 ) ) ; final int childLevel = issplit ? level + 1 : level ; final int splitDim = issplit ? 0 : maxSplitDimension + 1 ; final double splitPoint = getMin ( splitDim ) + ( getMax ( splitDim ) - getMin ( splitDim ) ) * .5 ; // left and right child for ( int i = 0 ; i < 2 ; i ++ ) { double [ ] min = SpatialUtil . getMin ( this ) ; // clone double [ ] max = SpatialUtil . getMax ( this ) ; // clone // right child if ( i == 0 ) { min [ splitDim ] = splitPoint ; } // left child else { max [ splitDim ] = splitPoint ; } ModifiableDBIDs childIDs = split . determineIDs ( getIDs ( ) , new HyperBoundingBox ( min , max ) , d_min , d_max ) ; if ( childIDs != null ) { // right child if ( i == 0 ) { rightChild = new CASHInterval ( min , max , split , childIDs , splitDim , childLevel , d_min , d_max ) ; } // left child else { leftChild = new CASHInterval ( min , max , split , childIDs , splitDim , childLevel , d_min , d_max ) ; } } } if ( LOG . isDebuggingFine ( ) ) { StringBuilder msg = new StringBuilder ( ) ; msg . append ( "Child level " ) . append ( childLevel ) . append ( ", split Dim " ) . append ( splitDim ) ; if ( leftChild != null ) { msg . append ( "\nleft " ) . append ( leftChild ) ; } if ( rightChild != null ) { msg . append ( "\nright " ) . append ( rightChild ) ; } LOG . fine ( msg . toString ( ) ) ; } }
|
Splits this interval into 2 children .
| 442
| 8
|
156,867
|
@ Override public void run ( ) { MultipleObjectsBundle data = generator . loadData ( ) ; if ( LOG . isVerbose ( ) ) { LOG . verbose ( "Writing output ..." ) ; } try { if ( outputFile . exists ( ) && LOG . isVerbose ( ) ) { LOG . verbose ( "The file " + outputFile + " already exists, " + "the generator result will be APPENDED." ) ; } try ( OutputStreamWriter outStream = new FileWriter ( outputFile , true ) ) { writeClusters ( outStream , data ) ; } } catch ( IOException e ) { throw new AbortException ( "IO Error in data generator." , e ) ; } if ( LOG . isVerbose ( ) ) { LOG . verbose ( "Done." ) ; } }
|
Runs the wrapper with the specified arguments .
| 179
| 9
|
156,868
|
private static long getGlobalSeed ( ) { String sseed = System . getProperty ( "elki.seed" ) ; return ( sseed != null ) ? Long . parseLong ( sseed ) : System . nanoTime ( ) ; }
|
Initialize the default random .
| 52
| 6
|
156,869
|
public double computeFirstCover ( boolean leaf ) { double max = 0. ; for ( DistanceEntry < E > e : firstAssignments ) { double cover = leaf ? e . getDistance ( ) : ( e . getEntry ( ) . getCoveringRadius ( ) + e . getDistance ( ) ) ; max = cover > max ? cover : max ; } return max ; }
|
Compute the covering radius of the first assignment .
| 82
| 10
|
156,870
|
public double computeSecondCover ( boolean leaf ) { double max = 0. ; for ( DistanceEntry < E > e : secondAssignments ) { double cover = leaf ? e . getDistance ( ) : ( e . getEntry ( ) . getCoveringRadius ( ) + e . getDistance ( ) ) ; max = cover > max ? cover : max ; } return max ; }
|
Compute the covering radius of the second assignment .
| 82
| 10
|
156,871
|
protected void offerAt ( final int pos , O e ) { if ( pos == NO_VALUE ) { // resize when needed if ( size + 1 > queue . length ) { resize ( size + 1 ) ; } index . put ( e , size ) ; size ++ ; heapifyUp ( size - 1 , e ) ; heapModified ( ) ; return ; } assert ( pos >= 0 ) : "Unexpected negative position." ; assert ( queue [ pos ] . equals ( e ) ) ; // Did the value improve? if ( comparator . compare ( e , queue [ pos ] ) >= 0 ) { return ; } heapifyUp ( pos , e ) ; heapModified ( ) ; return ; }
|
Offer element at the given position .
| 149
| 8
|
156,872
|
public O removeObject ( O e ) { int pos = index . getInt ( e ) ; return ( pos >= 0 ) ? removeAt ( pos ) : null ; }
|
Remove the given object from the queue .
| 36
| 8
|
156,873
|
private long sumMatrix ( int [ ] [ ] mat ) { long ret = 0 ; for ( int i = 0 ; i < mat . length ; i ++ ) { final int [ ] row = mat [ i ] ; for ( int j = 0 ; j < row . length ; j ++ ) { ret += row [ j ] ; } } return ret ; }
|
Compute the sum of a matrix .
| 76
| 8
|
156,874
|
private int countAboveThreshold ( int [ ] [ ] mat , double threshold ) { int ret = 0 ; for ( int i = 0 ; i < mat . length ; i ++ ) { int [ ] row = mat [ i ] ; for ( int j = 0 ; j < row . length ; j ++ ) { if ( row [ j ] >= threshold ) { ret ++ ; } } } return ret ; }
|
Count the number of cells above the threshold .
| 87
| 9
|
156,875
|
private int [ ] [ ] houghTransformation ( boolean [ ] [ ] mat ) { final int xres = mat . length , yres = mat [ 0 ] . length ; final double tscale = STEPS * .66 / ( xres + yres ) ; final int [ ] [ ] ret = new int [ STEPS ] [ STEPS ] ; for ( int x = 0 ; x < mat . length ; x ++ ) { final boolean [ ] row = mat [ x ] ; for ( int y = 0 ; y < mat [ 0 ] . length ; y ++ ) { if ( row [ y ] ) { for ( int i = 0 ; i < STEPS ; i ++ ) { final int d = ( STEPS >> 1 ) + ( int ) ( tscale * ( x * table . cos ( i ) + y * table . sin ( i ) ) ) ; if ( d > 0 && d < STEPS ) { ret [ d ] [ i ] ++ ; } } } } } return ret ; }
|
Perform a hough transformation on the binary image in mat .
| 217
| 13
|
156,876
|
private static void drawLine ( int x0 , int y0 , int x1 , int y1 , boolean [ ] [ ] pic ) { final int xres = pic . length , yres = pic [ 0 ] . length ; // Ensure bounds y0 = ( y0 < 0 ) ? 0 : ( y0 >= yres ) ? ( yres - 1 ) : y0 ; y1 = ( y1 < 0 ) ? 0 : ( y1 >= yres ) ? ( yres - 1 ) : y1 ; x0 = ( x0 < 0 ) ? 0 : ( x0 >= xres ) ? ( xres - 1 ) : x0 ; x1 = ( x1 < 0 ) ? 0 : ( x1 >= xres ) ? ( xres - 1 ) : x1 ; // Default slope final int dx = + Math . abs ( x1 - x0 ) , sx = x0 < x1 ? 1 : - 1 ; final int dy = - Math . abs ( y1 - y0 ) , sy = y0 < y1 ? 1 : - 1 ; // Error counter int err = dx + dy ; for ( ; ; ) { pic [ x0 ] [ y0 ] = true ; if ( x0 == x1 && y0 == y1 ) { break ; } final int e2 = err << 1 ; if ( e2 > dy ) { err += dy ; x0 += sx ; } if ( e2 < dx ) { err += dx ; y0 += sy ; } } }
|
Draw a line onto the array using the classic Bresenham algorithm .
| 328
| 15
|
156,877
|
public Collection < String > getPossibleValues ( ) { // Convert to string array final E [ ] enums = enumClass . getEnumConstants ( ) ; ArrayList < String > values = new ArrayList <> ( enums . length ) ; for ( E t : enums ) { values . add ( t . name ( ) ) ; } return values ; }
|
Get a list of possible values for this enum parameter .
| 79
| 11
|
156,878
|
private String joinEnumNames ( String separator ) { E [ ] enumTypes = enumClass . getEnumConstants ( ) ; StringBuilder sb = new StringBuilder ( ) ; for ( int i = 0 ; i < enumTypes . length ; ++ i ) { if ( i > 0 ) { sb . append ( separator ) ; } sb . append ( enumTypes [ i ] . name ( ) ) ; } return sb . toString ( ) ; }
|
Utility method for merging possible values into a string for informational messages .
| 102
| 14
|
156,879
|
@ Override protected void preInsert ( MkMaxEntry entry ) { KNNHeap knns_o = DBIDUtil . newHeap ( getKmax ( ) ) ; preInsert ( entry , getRootEntry ( ) , knns_o ) ; }
|
Adapts the knn distances before insertion of the specified entry .
| 57
| 13
|
156,880
|
public static IntIterator getCommonDimensions ( Collection < SplitHistory > splitHistories ) { Iterator < SplitHistory > it = splitHistories . iterator ( ) ; long [ ] checkSet = BitsUtil . copy ( it . next ( ) . dimBits ) ; while ( it . hasNext ( ) ) { SplitHistory sh = it . next ( ) ; BitsUtil . andI ( checkSet , sh . dimBits ) ; } return new BitsetIterator ( checkSet ) ; }
|
Get the common split dimensions from a list of split histories .
| 107
| 12
|
156,881
|
public static Filter handleURL ( ParsedURL url ) { if ( LOG . isDebuggingFiner ( ) ) { LOG . debugFiner ( "handleURL " + url . toString ( ) ) ; } if ( ! isCompatibleURLStatic ( url ) ) { return null ; } int id ; try { id = ParseUtil . parseIntBase10 ( url . getPath ( ) ) ; } catch ( NumberFormatException e ) { return null ; } SoftReference < RenderedImage > ref = images . get ( id ) ; if ( ref != null ) { RenderedImage ri = ref . get ( ) ; if ( ri == null ) { LOG . warning ( "Referenced image has expired from the cache!" ) ; } else { return new RedRable ( GraphicsUtil . wrap ( ri ) ) ; } } // Image not found in registry. return null ; }
|
Statically handle the URL access .
| 193
| 7
|
156,882
|
public static int globalCentroid ( Centroid overallCentroid , Relation < ? extends NumberVector > rel , List < ? extends Cluster < ? > > clusters , NumberVector [ ] centroids , NoiseHandling noiseOption ) { int clustercount = 0 ; Iterator < ? extends Cluster < ? > > ci = clusters . iterator ( ) ; for ( int i = 0 ; ci . hasNext ( ) ; i ++ ) { Cluster < ? > cluster = ci . next ( ) ; if ( cluster . size ( ) <= 1 || cluster . isNoise ( ) ) { switch ( noiseOption ) { case IGNORE_NOISE : continue ; // Ignore completely case TREAT_NOISE_AS_SINGLETONS : clustercount += cluster . size ( ) ; // Update global centroid: for ( DBIDIter it = cluster . getIDs ( ) . iter ( ) ; it . valid ( ) ; it . advance ( ) ) { overallCentroid . put ( rel . get ( it ) ) ; } continue ; // With NEXT cluster. case MERGE_NOISE : break ; // Treat as cluster below: } } // Update centroid: assert ( centroids [ i ] != null ) ; overallCentroid . put ( centroids [ i ] , cluster . size ( ) ) ; ++ clustercount ; } return clustercount ; }
|
Update the global centroid .
| 294
| 6
|
156,883
|
public DBID findPrototype ( DBIDs members ) { // Find the last merge within the cluster. // The object with maximum priority will merge outside of the cluster, // So we need the second largest priority. DBIDIter it = members . iter ( ) ; DBIDVar proto = DBIDUtil . newVar ( it ) , last = DBIDUtil . newVar ( it ) ; int maxprio = Integer . MIN_VALUE , secprio = Integer . MIN_VALUE ; for ( ; it . valid ( ) ; it . advance ( ) ) { int prio = mergeOrder . intValue ( it ) ; if ( prio > maxprio ) { secprio = maxprio ; proto . set ( last ) ; maxprio = prio ; last . set ( it ) ; } else if ( prio > secprio ) { secprio = prio ; proto . set ( it ) ; } } return DBIDUtil . deref ( prototypes . assignVar ( proto , proto ) ) ; }
|
Extract the prototype of a given cluster . When the argument is not a valid cluster of this Pointer Hierarchy the return value is unspecified .
| 221
| 29
|
156,884
|
public void bulkLoad ( DBIDs ids ) { if ( ids . size ( ) == 0 ) { return ; } assert ( root == null ) : "Tree already initialized." ; DBIDIter it = ids . iter ( ) ; DBID first = DBIDUtil . deref ( it ) ; // Compute distances to all neighbors: ModifiableDoubleDBIDList candidates = DBIDUtil . newDistanceDBIDList ( ids . size ( ) - 1 ) ; for ( it . advance ( ) ; it . valid ( ) ; it . advance ( ) ) { candidates . add ( distance ( first , it ) , it ) ; } root = bulkConstruct ( first , Integer . MAX_VALUE , candidates ) ; }
|
Bulk - load the index .
| 156
| 7
|
156,885
|
private void checkCoverTree ( Node cur , int [ ] counts , int depth ) { counts [ 0 ] += 1 ; // Node count counts [ 1 ] += depth ; // Sum of depth counts [ 2 ] = depth > counts [ 2 ] ? depth : counts [ 2 ] ; // Max depth counts [ 3 ] += cur . singletons . size ( ) - 1 ; counts [ 4 ] += cur . singletons . size ( ) - ( cur . children == null ? 0 : 1 ) ; if ( cur . children != null ) { ++ depth ; for ( Node chi : cur . children ) { checkCoverTree ( chi , counts , depth ) ; } assert ( ! cur . children . isEmpty ( ) ) : "Empty childs list." ; } }
|
Collect some statistics on the tree .
| 161
| 7
|
156,886
|
protected void addToStatistics ( NumberVector nv ) { final int d = nv . getDimensionality ( ) ; assert ( d == ls . length ) ; this . n ++ ; for ( int i = 0 ; i < d ; i ++ ) { double v = nv . doubleValue ( i ) ; ls [ i ] += v ; ss += v * v ; } }
|
Add a number vector to the current node .
| 82
| 9
|
156,887
|
protected void addToStatistics ( ClusteringFeature other ) { n += other . n ; VMath . plusEquals ( ls , other . ls ) ; ss += other . ss ; }
|
Merge an other clustering features .
| 40
| 8
|
156,888
|
public double sumOfSquaresOfSums ( ) { double sum = 0. ; for ( int i = 0 ; i < ls . length ; i ++ ) { double v = ls [ i ] ; sum += v * v ; } return sum ; }
|
Sum over all dimensions of squares of linear sums .
| 54
| 10
|
156,889
|
public static double sumOfSquares ( NumberVector v ) { final int dim = v . getDimensionality ( ) ; double sum = 0 ; for ( int d = 0 ; d < dim ; d ++ ) { double x = v . doubleValue ( d ) ; sum += x * x ; } return sum ; }
|
Compute the sum of squares of a vector .
| 68
| 10
|
156,890
|
private static void insertionSort ( int [ ] data , final int start , final int end , IntComparator comp ) { // Classic insertion sort. for ( int i = start + 1 ; i < end ; i ++ ) { final int cur = data [ i ] ; int j = i - 1 ; while ( j >= start ) { final int pre = data [ j ] ; if ( comp . compare ( cur , pre ) >= 0 ) { break ; } data [ j + 1 ] = pre ; -- j ; } data [ j + 1 ] = cur ; } }
|
Insertion sort for short arrays .
| 120
| 7
|
156,891
|
@ Override public double [ ] [ ] processIds ( DBIDs ids , Relation < ? extends NumberVector > relation ) { final int dim = RelationUtil . dimensionality ( relation ) ; final CovarianceMatrix cmat = new CovarianceMatrix ( dim ) ; final Centroid centroid = Centroid . make ( relation , ids ) ; // find maximum distance double maxdist = 0.0 , stddev = 0.0 ; { for ( DBIDIter iter = ids . iter ( ) ; iter . valid ( ) ; iter . advance ( ) ) { double distance = weightDistance . distance ( centroid , relation . get ( iter ) ) ; stddev += distance * distance ; if ( distance > maxdist ) { maxdist = distance ; } } if ( maxdist == 0.0 ) { maxdist = 1.0 ; } // compute standard deviation. stddev = FastMath . sqrt ( stddev / ids . size ( ) ) ; } for ( DBIDIter iter = ids . iter ( ) ; iter . valid ( ) ; iter . advance ( ) ) { NumberVector obj = relation . get ( iter ) ; double distance = weightDistance . distance ( centroid , obj ) ; double weight = weightfunction . getWeight ( distance , maxdist , stddev ) ; cmat . put ( obj , weight ) ; } return cmat . destroyToPopulationMatrix ( ) ; }
|
Weighted Covariance Matrix for a set of IDs . Since we are not supplied any distance information we ll need to compute it ourselves . Covariance is tied to Euclidean distance so it probably does not make much sense to add support for other distance functions?
| 308
| 54
|
156,892
|
@ Override public double [ ] [ ] processQueryResults ( DoubleDBIDList results , Relation < ? extends NumberVector > database , int k ) { final int dim = RelationUtil . dimensionality ( database ) ; final CovarianceMatrix cmat = new CovarianceMatrix ( dim ) ; // avoid bad parameters k = k <= results . size ( ) ? k : results . size ( ) ; // find maximum distance double maxdist = 0.0 , stddev = 0.0 ; { int i = 0 ; for ( DoubleDBIDListIter it = results . iter ( ) ; it . valid ( ) && i < k ; it . advance ( ) , k ++ ) { final double dist = it . doubleValue ( ) ; stddev += dist * dist ; if ( dist > maxdist ) { maxdist = dist ; } } if ( maxdist == 0.0 ) { maxdist = 1.0 ; } stddev = FastMath . sqrt ( stddev / k ) ; } // calculate weighted PCA int i = 0 ; for ( DoubleDBIDListIter it = results . iter ( ) ; it . valid ( ) && i < k ; it . advance ( ) , k ++ ) { final double dist = it . doubleValue ( ) ; NumberVector obj = database . get ( it ) ; double weight = weightfunction . getWeight ( dist , maxdist , stddev ) ; cmat . put ( obj , weight ) ; } return cmat . destroyToPopulationMatrix ( ) ; }
|
Compute Covariance Matrix for a QueryResult Collection .
| 327
| 12
|
156,893
|
public Instance instantiate ( Database database , Relation < V > relation ) { DistanceQuery < V > dq = database . getDistanceQuery ( relation , EuclideanDistanceFunction . STATIC ) ; KNNQuery < V > knnq = database . getKNNQuery ( dq , settings . k ) ; WritableDataStore < PCAFilteredResult > storage = DataStoreUtil . makeStorage ( relation . getDBIDs ( ) , DataStoreFactory . HINT_HOT | DataStoreFactory . HINT_TEMP , PCAFilteredResult . class ) ; PCARunner pca = settings . pca ; EigenPairFilter filter = settings . filter ; Duration time = LOG . newDuration ( this . getClass ( ) . getName ( ) + ".preprocessing-time" ) . begin ( ) ; FiniteProgress progress = LOG . isVerbose ( ) ? new FiniteProgress ( this . getClass ( ) . getName ( ) , relation . size ( ) , LOG ) : null ; for ( DBIDIter iditer = relation . iterDBIDs ( ) ; iditer . valid ( ) ; iditer . advance ( ) ) { DoubleDBIDList ref = knnq . getKNNForDBID ( iditer , settings . k ) ; PCAResult pcares = pca . processQueryResult ( ref , relation ) ; storage . put ( iditer , new PCAFilteredResult ( pcares . getEigenPairs ( ) , filter . filter ( pcares . getEigenvalues ( ) ) , 1. , 0. ) ) ; LOG . incrementProcessed ( progress ) ; } LOG . ensureCompleted ( progress ) ; LOG . statistics ( time . end ( ) ) ; return new Instance ( relation . getDBIDs ( ) , storage , relation ) ; }
|
Full instantiation interface .
| 394
| 5
|
156,894
|
public static void writeXHTML ( Document htmldoc , OutputStream out ) throws IOException { javax . xml . transform . Result result = new StreamResult ( out ) ; // Use a transformer for pretty printing Transformer xformer ; try { xformer = TransformerFactory . newInstance ( ) . newTransformer ( ) ; xformer . setOutputProperty ( OutputKeys . INDENT , "yes" ) ; // TODO: ensure the "meta" tag doesn't claim a different encoding! xformer . setOutputProperty ( OutputKeys . ENCODING , "UTF-8" ) ; xformer . setOutputProperty ( OutputKeys . DOCTYPE_PUBLIC , HTML_XHTML_TRANSITIONAL_DOCTYPE_PUBLIC ) ; xformer . setOutputProperty ( OutputKeys . DOCTYPE_SYSTEM , HTML_XHTML_TRANSITIONAL_DOCTYPE_SYSTEM ) ; xformer . transform ( new DOMSource ( htmldoc ) , result ) ; } catch ( TransformerException e1 ) { throw new IOException ( e1 ) ; } out . flush ( ) ; }
|
Write an HTML document to an output stream .
| 247
| 9
|
156,895
|
public static Element appendMultilineText ( Document htmldoc , Element parent , String text ) { String [ ] parts = text != null ? text . split ( "\n" ) : null ; if ( parts == null || parts . length == 0 ) { return parent ; } parent . appendChild ( htmldoc . createTextNode ( parts [ 0 ] ) ) ; for ( int i = 1 ; i < parts . length ; i ++ ) { parent . appendChild ( htmldoc . createElement ( HTML_BR_TAG ) ) ; parent . appendChild ( htmldoc . createTextNode ( parts [ i ] ) ) ; } return parent ; }
|
Append a multiline text to a node transforming linewraps into BR tags .
| 144
| 19
|
156,896
|
public int getPathCount ( ) { int result = 0 ; for ( IndexTreePath < E > path = this ; path != null ; path = path . parentPath ) { result ++ ; } return result ; }
|
Returns the number of elements in the path .
| 45
| 9
|
156,897
|
public OutlierResult run ( Database database , Relation < O > relation ) { StepProgress stepprog = LOG . isVerbose ( ) ? new StepProgress ( "CBLOF" , 3 ) : null ; DBIDs ids = relation . getDBIDs ( ) ; LOG . beginStep ( stepprog , 1 , "Computing clustering." ) ; Clustering < MeanModel > clustering = clusteringAlgorithm . run ( database ) ; LOG . beginStep ( stepprog , 2 , "Computing boundary between large and small clusters." ) ; List < ? extends Cluster < MeanModel > > clusters = clustering . getAllClusters ( ) ; Collections . sort ( clusters , new Comparator < Cluster < MeanModel > > ( ) { @ Override public int compare ( Cluster < MeanModel > o1 , Cluster < MeanModel > o2 ) { // Sort in descending order by size return Integer . compare ( o2 . size ( ) , o1 . size ( ) ) ; } } ) ; int clusterBoundary = getClusterBoundary ( relation , clusters ) ; List < ? extends Cluster < MeanModel > > largeClusters = clusters . subList ( 0 , clusterBoundary + 1 ) ; List < ? extends Cluster < MeanModel > > smallClusters = clusters . subList ( clusterBoundary + 1 , clusters . size ( ) ) ; LOG . beginStep ( stepprog , 3 , "Computing Cluster-Based Local Outlier Factors (CBLOF)." ) ; WritableDoubleDataStore cblofs = DataStoreUtil . makeDoubleStorage ( ids , DataStoreFactory . HINT_HOT | DataStoreFactory . HINT_DB ) ; DoubleMinMax cblofMinMax = new DoubleMinMax ( ) ; computeCBLOFs ( relation , distance , cblofs , cblofMinMax , largeClusters , smallClusters ) ; LOG . setCompleted ( stepprog ) ; DoubleRelation scoreResult = new MaterializedDoubleRelation ( "Cluster-Based Local Outlier Factor" , "cblof-outlier" , cblofs , ids ) ; OutlierScoreMeta scoreMeta = new QuotientOutlierScoreMeta ( cblofMinMax . getMin ( ) , cblofMinMax . getMax ( ) , 0.0 , Double . POSITIVE_INFINITY , 1.0 ) ; return new OutlierResult ( scoreMeta , scoreResult ) ; }
|
Runs the CBLOF algorithm on the given database .
| 533
| 12
|
156,898
|
private int getClusterBoundary ( Relation < O > relation , List < ? extends Cluster < MeanModel > > clusters ) { int totalSize = relation . size ( ) ; int clusterBoundary = clusters . size ( ) - 1 ; int cumulativeSize = 0 ; for ( int i = 0 ; i < clusters . size ( ) - 1 ; i ++ ) { cumulativeSize += clusters . get ( i ) . size ( ) ; // Given majority covered by large cluster if ( cumulativeSize >= totalSize * alpha ) { clusterBoundary = i ; break ; } // Relative difference in cluster size between two consecutive clusters if ( clusters . get ( i ) . size ( ) / ( double ) clusters . get ( i + 1 ) . size ( ) >= beta ) { clusterBoundary = i ; break ; } } return clusterBoundary ; }
|
Compute the boundary index separating the large cluster from the small cluster .
| 177
| 14
|
156,899
|
private void computeCBLOFs ( Relation < O > relation , NumberVectorDistanceFunction < ? super O > distance , WritableDoubleDataStore cblofs , DoubleMinMax cblofMinMax , List < ? extends Cluster < MeanModel > > largeClusters , List < ? extends Cluster < MeanModel > > smallClusters ) { List < NumberVector > largeClusterMeans = new ArrayList <> ( largeClusters . size ( ) ) ; for ( Cluster < MeanModel > largeCluster : largeClusters ) { NumberVector mean = ModelUtil . getPrototypeOrCentroid ( largeCluster . getModel ( ) , relation , largeCluster . getIDs ( ) ) ; largeClusterMeans . add ( mean ) ; // Compute CBLOF scores for members of large clusters for ( DBIDIter iter = largeCluster . getIDs ( ) . iter ( ) ; iter . valid ( ) ; iter . advance ( ) ) { double cblof = computeLargeClusterCBLOF ( relation . get ( iter ) , distance , mean , largeCluster ) ; storeCBLOFScore ( cblofs , cblofMinMax , cblof , iter ) ; } } for ( Cluster < MeanModel > smallCluster : smallClusters ) { for ( DBIDIter iter = smallCluster . getIDs ( ) . iter ( ) ; iter . valid ( ) ; iter . advance ( ) ) { double cblof = computeSmallClusterCBLOF ( relation . get ( iter ) , distance , largeClusterMeans , smallCluster ) ; storeCBLOFScore ( cblofs , cblofMinMax , cblof , iter ) ; } } }
|
Compute the CBLOF scores for all the data .
| 372
| 12
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.