idx
int64
0
165k
question
stringlengths
73
4.15k
target
stringlengths
5
918
len_question
int64
21
890
len_target
int64
3
255
156,700
private boolean nextSearchItemset ( BitVector bv , int [ ] scratchi , int [ ] iters ) { final int last = scratchi . length - 1 ; for ( int j = last ; j >= 0 ; j -- ) { int n = bv . iterAdvance ( iters [ j ] ) ; if ( n >= 0 && ( j == last || n != iters [ j + 1 ] ) ) { iters [ j ] = n ; scratchi [ j ] = bv . iterDim ( n ) ; return true ; // Success } } return false ; }
Advance scratch itemset to the next .
125
9
156,701
/**
 * Binary search for the next-larger element.
 *
 * NOTE(review): with the inclusive {@code --end} and the exclusive loop
 * condition {@code begin < end}, the element at the final position is never
 * compared; an exact match there is reported as -(pos + 1), i.e. "not
 * found" at its own position. This appears intentional for a next-larger
 * lookup — confirm against the callers before changing.
 *
 * @param candidates sorted candidate list
 * @param scratch key itemset to locate
 * @param begin search range start (inclusive)
 * @param end search range end (exclusive on entry)
 * @return index of the key if matched, otherwise -(insertion point + 1)
 */
private int binarySearch(List<SparseItemset> candidates, SparseItemset scratch, int begin, int end) {
  --end;
  while(begin < end) {
    final int mid = (begin + end) >>> 1;
    SparseItemset midVal = candidates.get(mid);
    int cmp = midVal.compareTo(scratch);
    if(cmp < 0) {
      begin = mid + 1;
    }
    else if(cmp > 0) {
      end = mid - 1;
    }
    else {
      return mid; // key found
    }
  }
  return -(begin + 1); // key not found, return next
}
Binary - search for the next - larger element .
133
11
156,702
private < A > ArrayList < int [ ] > buildPartitions ( NumberArrayAdapter < ? , A > adapter1 , A data1 , int len , int depth ) { final int [ ] idx = new int [ len ] ; final double [ ] tmp = new double [ len ] ; for ( int i = 0 ; i < len ; ++ i ) { idx [ i ] = i ; tmp [ i ] = adapter1 . getDouble ( data1 , i ) ; } // Sort indexes: IntegerArrayQuickSort . sort ( idx , ( x , y ) -> Double . compare ( tmp [ x ] , tmp [ y ] ) ) ; Arrays . sort ( tmp ) ; // Should yield the same ordering ArrayList < int [ ] > ret = new ArrayList <> ( 1 << depth ) ; divide ( idx , tmp , ret , 0 , tmp . length , depth ) ; return ret ; }
Partitions an attribute .
195
5
156,703
/**
 * Recursively subdivide the sorted value range [start, end) into 2^depth
 * partitions, splitting at the mean value of each segment.
 *
 * @param idx indexes, ordered by value
 * @param data sorted attribute values (parallel ordering to idx)
 * @param ret output list receiving one sorted index array per partition
 * @param start segment start (inclusive)
 * @param end segment end (exclusive)
 * @param depth remaining recursion depth
 */
private void divide(int[] idx, double[] data, ArrayList<int[]> ret, int start, int end, int depth) {
  if(depth == 0) {
    // Leaf: emit the (sorted) index set of this segment.
    int[] a = Arrays.copyOfRange(idx, start, end);
    Arrays.sort(a);
    ret.add(a);
    return;
  }
  final int count = end - start;
  if(count == 0) {
    // Corner case, that should barely happen. But for ties, we currently
    // Do not yet assure that it doesn't happen!
    // Emit all 2^depth empty partitions this subtree would have produced.
    for(int j = 1 << depth; j > 0; --j) {
      ret.add(new int[0]);
    }
    return;
  }
  // Split position: mean of the segment values.
  double m = 0.;
  for(int i = start; i < end; i++) {
    m += data[i];
  }
  m /= count;
  int pos = Arrays.binarySearch(data, start, end, m);
  if(pos >= 0) {
    // Ties: try to choose the most central element.
    final int opt = (start + end) >> 1;
    while(data[pos] == m) {
      if(pos < opt) {
        pos++;
      }
      else if(pos > opt) {
        pos--;
      }
      else {
        break;
      }
    }
  }
  else {
    // Not found: decode the insertion point.
    pos = (-pos - 1);
  }
  divide(idx, data, ret, start, pos, depth - 1);
  divide(idx, data, ret, pos, end, depth - 1);
}
Recursive call to further subdivide the array .
324
10
156,704
/**
 * Intersect two 1-d grid decompositions into a 2-d contingency matrix of
 * intersection sizes.
 *
 * @param res output matrix (gridsize x gridsize)
 * @param partsx partitions of the first attribute
 * @param partsy partitions of the second attribute
 * @param gridsize grid resolution
 */
private void intersectionMatrix(int[][] res, ArrayList<int[]> partsx, ArrayList<int[]> partsy, int gridsize) {
  for(int x = 0; x < gridsize; x++) {
    final int[] cellx = partsx.get(x);
    final int[] row = res[x];
    for(int y = 0; y < gridsize; y++) {
      row[y] = intersectionSize(cellx, partsy.get(y));
    }
  }
}
Intersect the two 1d grid decompositions to obtain a 2d matrix .
127
17
156,705
/**
 * Compute the size of the intersection of two sorted integer lists via a
 * linear merge.
 *
 * @param px first sorted list
 * @param py second sorted list
 * @return number of common elements
 */
private int intersectionSize(int[] px, int[] py) {
  int a = 0, b = 0, common = 0;
  while(a < px.length && b < py.length) {
    final int va = px[a], vb = py[b];
    if(va < vb) {
      ++a;
    }
    else if(va > vb) {
      ++b;
    }
    else {
      ++common;
      ++a;
      ++b;
    }
  }
  return common;
}
Compute the intersection of two sorted integer lists .
109
10
156,706
/**
 * Compute the normalized maximum margin-entropy (MCE) value from the
 * intersection matrix.
 *
 * @param mat intersection count matrix
 * @param partsx partitions of the first attribute (row margins)
 * @param partsy partitions of the second attribute (column margins)
 * @param size data set size
 * @param gridsize grid resolution
 * @param loggrid log of the grid size, for normalization
 * @return larger of the two weighted margin entropies, divided by size * loggrid
 */
private double getMCEntropy(int[][] mat, ArrayList<int[]> partsx, ArrayList<int[]> partsy, int size, int gridsize, double loggrid) {
  // Margin entropies:
  double[] mx = new double[gridsize];
  double[] my = new double[gridsize];
  for(int i = 0; i < gridsize; i++) {
    // Note: indexes are a bit tricky here, because we compute both margin
    // entropies at the same time!
    final double sumx = (double) partsx.get(i).length;
    final double sumy = (double) partsy.get(i).length;
    for(int j = 0; j < gridsize; j++) {
      double px = mat[i][j] / sumx; // Row-conditional probability.
      double py = mat[j][i] / sumy; // Column-conditional probability.
      if(px > 0.) {
        mx[i] -= px * FastMath.log(px);
      }
      if(py > 0.) {
        my[i] -= py * FastMath.log(py);
      }
    }
  }
  // Weighted sums of margin entropies.
  double sumx = 0., sumy = 0.;
  for(int i = 0; i < gridsize; i++) {
    sumx += mx[i] * partsx.get(i).length;
    sumy += my[i] * partsy.get(i).length;
  }
  double max = ((sumx > sumy) ? sumx : sumy);
  return max / (size * loggrid);
}
Compute the MCE entropy value .
361
8
156,707
public void add ( E e ) { // resize when needed if ( size + 1 > queue . length ) { resize ( size + 1 ) ; } // final int pos = size; this . size += 1 ; heapifyUp ( size - 1 , e ) ; heapModified ( ) ; }
Add an element to the heap .
62
7
156,708
/**
 * Combined operation: remove the top element and insert a replacement.
 *
 * @param e replacement element
 * @return the previous top element
 */
@SuppressWarnings("unchecked")
public E replaceTopElement(E e) {
  final E previous = (E) queue[0];
  // Sift the new element down from the root.
  heapifyDown(0, e);
  heapModified();
  return previous;
}
Combined operation that removes the top element and inserts a new element instead .
54
15
156,709
@ SuppressWarnings ( "unchecked" ) protected E removeAt ( int pos ) { if ( pos < 0 || pos >= size ) { return null ; } final E ret = ( E ) queue [ pos ] ; // Replacement object: final Object reinsert = queue [ size - 1 ] ; queue [ size - 1 ] = null ; size -- ; heapifyDown ( pos , reinsert ) ; heapModified ( ) ; return ret ; }
Remove the element at the given position .
96
8
156,710
protected final void resize ( int requiredSize ) { // Double until 64, then increase by 50% each time. int newCapacity = ( ( queue . length < 64 ) ? ( ( queue . length + 1 ) << 1 ) : ( ( queue . length >> 1 ) + queue . length ) ) ; // overflow? if ( newCapacity < 0 ) { throw new OutOfMemoryError ( ) ; } if ( requiredSize > newCapacity ) { newCapacity = requiredSize ; } queue = Arrays . copyOf ( queue , newCapacity ) ; }
Resize the heap's backing array to provide at least the requested capacity .
120
12
156,711
public void clear ( ) { // clean up references in the array for memory management for ( int i = 0 ; i < size ; i ++ ) { queue [ i ] = null ; } this . size = 0 ; heapModified ( ) ; }
Clear the heap .
52
4
156,712
/**
 * Validate the heap invariant (debugging aid).
 *
 * NOTE(review): the "&lt;" in the returned message reads backwards for the
 * detected violation (the parent compares greater than the child) — confirm
 * whether the message text is intentional before changing it.
 *
 * @return null if the heap is valid, otherwise a description of the first
 *         parent/child pair violating the ordering
 */
protected String checkHeap() {
  for(int i = 1; i < size; i++) {
    final int parent = (i - 1) >>> 1;
    // Violation: parent orders after child.
    if(comparator.compare(queue[parent], queue[i]) > 0) {
      return "@" + parent + ": " + queue[parent] + " < @" + i + ": " + queue[i];
    }
  }
  return null;
}
Test whether the heap is still valid .
93
8
156,713
/**
 * Run the full KDD pipeline: load the database, run the data-mining
 * algorithms, attach the settings, evaluate, and hand the results to the
 * output / visualization handlers.
 */
public void run() {
  // Input step
  Database db = inputStep.getDatabase();
  hier = db.getHierarchy();
  // Algorithms - Data Mining Step
  algorithmStep.runAlgorithms(db);
  // TODO: this could be nicer
  hier.add(db, new SettingsResult(settings));
  // Evaluation
  evaluationStep.runEvaluators(hier, db);
  // Output / Visualization
  outputStep.runResultHandlers(hier, db);
}
Method to run the specified algorithm using the specified database connection .
107
12
156,714
/**
 * Copy the bounds of another spatial object into this bounding box.
 *
 * @param obj object to copy from; must have the same dimensionality
 */
public void set(SpatialComparable obj) {
  final int dim = min.length;
  assert (obj.getDimensionality() == dim);
  if(obj instanceof ModifiableHyperBoundingBox) {
    // Fast path: bulk-copy the backing arrays.
    ModifiableHyperBoundingBox other = (ModifiableHyperBoundingBox) obj;
    System.arraycopy(other.getMinRef(), 0, min, 0, dim);
    System.arraycopy(other.getMaxRef(), 0, max, 0, dim);
    return;
  }
  for(int d = 0; d < dim; d++) {
    min[d] = obj.getMin(d);
    max[d] = obj.getMax(d);
  }
}
Set the bounding box to the same as some other spatial object .
154
14
156,715
/**
 * Grow this bounding box to also cover the given spatial object.
 *
 * @param obj object to cover; must have the same dimensionality
 * @return true if the box was modified
 */
public boolean extend(SpatialComparable obj) {
  final int dim = min.length;
  assert (obj.getDimensionality() == dim);
  boolean modified = false;
  for(int d = 0; d < dim; d++) {
    final double lo = obj.getMin(d), hi = obj.getMax(d);
    if(lo < min[d]) {
      min[d] = lo;
      modified = true;
    }
    if(hi > max[d]) {
      max[d] = hi;
      modified = true;
    }
  }
  return modified;
}
Extend the bounding box by some other spatial object .
129
12
156,716
/**
 * Find the first class-label column in the given bundle.
 *
 * @param bundle data bundle to search
 * @return column index, or -1 if no class-label column exists
 */
public static int findClassLabelColumn(MultipleObjectsBundle bundle) {
  final int cols = bundle.metaLength();
  for(int col = 0; col < cols; ++col) {
    if(TypeUtil.CLASSLABEL.isAssignableFromType(bundle.meta(col))) {
      return col;
    }
  }
  return -1;
}
Find the class label column in the given data set .
78
11
156,717
/**
 * Compare two HierarchicalClassLabels level by level, top-down: names at a
 * lower level are compared only if the parent names are equal. Each level
 * is compared via Comparable; when the two names are of incompatible types
 * (mixed Integer/String), both are converted to strings and compared
 * lexicographically. With an equal prefix, the shorter chain orders first.
 */
@SuppressWarnings({ "unchecked", "rawtypes" })
@Override
public int compareTo(ClassLabel o) {
  HierarchicalClassLabel h = (HierarchicalClassLabel) o;
  for(int i = 0; i < this.levelwiseNames.length && i < h.levelwiseNames.length; i++) {
    int comp = 0;
    try {
      Comparable first = this.levelwiseNames[i];
      Comparable second = h.levelwiseNames[i];
      comp = first.compareTo(second);
    }
    catch(RuntimeException e) {
      // Incompatible types at this level: fall back to string comparison.
      String h1 = (String) (this.levelwiseNames[i] instanceof Integer ? this.levelwiseNames[i].toString() : this.levelwiseNames[i]);
      String h2 = (String) (h.levelwiseNames[i] instanceof Integer ? h.levelwiseNames[i].toString() : h.levelwiseNames[i]);
      comp = h1.compareTo(h2);
    }
    if(comp != 0) {
      return comp;
    }
  }
  // Equal prefix: order by chain length.
  return (this.levelwiseNames.length < h.levelwiseNames.length) ? -1 : ((this.levelwiseNames.length == h.levelwiseNames.length) ? 0 : 1);
}
Compares two HierarchicalClassLabels . Names at higher levels are compared first . Names at a lower level are compared only if their parent - names are equal . Names at a level are tried to be compared as integer values . If this does not succeed both names are compared as Strings .
290
60
156,718
/**
 * Return the name at the given hierarchy level as a String, converting
 * Integer names on the fly.
 *
 * @param level hierarchy level
 * @return name at that level
 */
public String getNameAt(int level) {
  final Object name = this.levelwiseNames[level];
  return name instanceof Integer ? name.toString() : (String) name;
}
Returns the name at the given level as a String .
50
11
156,719
/**
 * Recursively append a textual dump of the result tree to the buffer, one
 * line per result, indented by depth.
 *
 * @param buf output buffer
 * @param hier result hierarchy
 * @param result current node (null is reported and skipped)
 * @param depth nesting depth, used for indentation and as a cycle guard
 */
private void recursiveLogResult(StringBuilder buf, Hierarchy<Result> hier, Result result, int depth) {
  if(result == null) {
    buf.append("null");
    LOG.warning("null result!");
    return;
  }
  if(depth > 50) {
    // Safety guard against cyclic hierarchies.
    LOG.warning("Probably infinitely nested results, aborting!");
    return;
  }
  for(int i = 0; i < depth; i++) {
    buf.append(' ');
  }
  buf.append(result.getClass().getSimpleName()).append(": ").append(result.getLongName()) //
      .append(" (").append(result.getShortName()).append(")\n");
  if(hier.numChildren(result) > 0) {
    for(It<Result> iter = hier.iterChildren(result); iter.valid(); iter.advance()) {
      recursiveLogResult(buf, hier, iter.get(), depth + 1);
    }
  }
}
Recursively walk through the result tree .
228
9
156,720
/**
 * Add a boolean flag (set) to the parameter list.
 *
 * @param optionid option to set
 * @return this, for call chaining
 */
public ListParameterization addFlag(OptionID optionid) {
  parameters.add(new ParameterPair(optionid, Flag.SET));
  return this;
}
Add a flag to the parameter list
36
7
156,721
public ArrayList < String > serialize ( ) { ArrayList < String > params = new ArrayList <> ( ) ; for ( ParameterPair pair : parameters ) { params . add ( SerializedParameterization . OPTION_PREFIX + pair . option . toString ( ) ) ; if ( pair . value instanceof String ) { params . add ( ( String ) pair . value ) ; } else if ( pair . value instanceof Class ) { params . add ( ( ( Class < ? > ) pair . value ) . getCanonicalName ( ) ) ; } else { // Fallback: params . add ( pair . value . toString ( ) ) ; } } return params ; }
Serialize parameters .
149
4
156,722
/**
 * Run (Fast-)ABOD on the data set: for the linear kernel, try the kNN-based
 * variant first (which can use an index), otherwise use FastABOD directly.
 *
 * @param db database
 * @param relation data relation
 * @return outlier result with inverted score meta (low ABOF = outlier)
 */
@Override
public OutlierResult run(Database db, Relation<V> relation) {
  DBIDs ids = relation.getDBIDs();
  WritableDoubleDataStore abodvalues = DataStoreUtil.makeDoubleStorage(ids, DataStoreFactory.HINT_STATIC);
  DoubleMinMax minmaxabod = new DoubleMinMax();
  if(kernelFunction.getClass() == LinearKernelFunction.class) {
    if(!kNNABOD(db, relation, ids, abodvalues, minmaxabod)) {
      // Fallback, if we do not have an index.
      fastABOD(db, relation, ids, abodvalues, minmaxabod);
    }
  }
  else {
    fastABOD(db, relation, ids, abodvalues, minmaxabod);
  }
  // Build result representation.
  DoubleRelation scoreResult = new MaterializedDoubleRelation("Angle-Based Outlier Degree", "abod-outlier", abodvalues, relation.getDBIDs());
  OutlierScoreMeta scoreMeta = new InvertedOutlierScoreMeta(minmaxabod.getMin(), minmaxabod.getMax(), 0.0, Double.POSITIVE_INFINITY);
  return new OutlierResult(scoreMeta, scoreResult);
}
Run Fast - ABOD on the data set .
297
10
156,723
/**
 * kNN-based ABOD for the linear kernel: uses an optimized (squared)
 * Euclidean kNN index if available and exploits bilinearity of the scalar
 * product to avoid materializing difference vectors.
 *
 * @return false if no optimized kNN query was available (caller falls back
 *         to fastABOD)
 */
private boolean kNNABOD(Database db, Relation<V> relation, DBIDs ids, WritableDoubleDataStore abodvalues, DoubleMinMax minmaxabod) {
  DistanceQuery<V> dq = db.getDistanceQuery(relation, SquaredEuclideanDistanceFunction.STATIC);
  KNNQuery<V> knnq = db.getKNNQuery(dq, DatabaseQuery.HINT_OPTIMIZED_ONLY);
  boolean squared = true;
  if(knnq == null) {
    // Retry with plain Euclidean distance.
    dq = db.getDistanceQuery(relation, EuclideanDistanceFunction.STATIC);
    knnq = db.getKNNQuery(dq, DatabaseQuery.HINT_OPTIMIZED_ONLY);
    if(knnq == null) {
      return false;
    }
    squared = false;
  }
  SimilarityQuery<V> lk = db.getSimilarityQuery(relation, LinearKernelFunction.STATIC);
  int k1 = k + 1; // We will get the query point back by the knnq.
  MeanVariance s = new MeanVariance();
  for(DBIDIter pA = ids.iter(); pA.valid(); pA.advance()) {
    KNNList nl = knnq.getKNNForDBID(pA, k1);
    double simAA = lk.similarity(pA, pA);
    s.reset();
    DoubleDBIDListIter iB = nl.iter(), iC = nl.iter();
    for(; iB.valid(); iB.advance()) {
      double dAB = iB.doubleValue();
      double simAB = lk.similarity(pA, iB);
      if(!(dAB > 0.)) {
        continue; // Skip duplicates of the query point.
      }
      for(iC.seek(iB.getOffset() + 1); iC.valid(); iC.advance()) {
        double dAC = iC.doubleValue();
        double simAC = lk.similarity(pA, iC);
        if(!(dAC > 0.)) {
          continue;
        }
        // Exploit bilinearity of scalar product:
        // <B-A, C-A> = <B, C-A> - <A,C-A>
        // = <B,C> - <B,A> - <A,C> + <A,A>
        double simBC = lk.similarity(iB, iC);
        double numerator = simBC - simAB - simAC + simAA;
        if(squared) {
          double div = 1. / (dAB * dAC);
          s.put(numerator * div, FastMath.sqrt(div));
        }
        else {
          // Distances are not squared here; square the weight factor.
          double sqrtdiv = 1. / (dAB * dAC);
          s.put(numerator * sqrtdiv * sqrtdiv, sqrtdiv);
        }
      }
    }
    final double abof = s.getNaiveVariance();
    minmaxabod.put(abof);
    abodvalues.putDouble(pA, abof);
  }
  return true;
}
Simpler kNN - based variant that can make better use of index structures .
708
11
156,724
/**
 * Full kernel-based FastABOD: precompute the kernel matrix, select the k
 * nearest neighbors in the kernel-induced squared distance, and accumulate
 * the weighted angle variance (ABOF) per point.
 */
private void fastABOD(Database db, Relation<V> relation, DBIDs ids, WritableDoubleDataStore abodvalues, DoubleMinMax minmaxabod) {
  // Build a kernel matrix, to make O(n^3) slightly less bad.
  SimilarityQuery<V> sq = db.getSimilarityQuery(relation, kernelFunction);
  KernelMatrix kernelMatrix = new KernelMatrix(sq, relation, ids);
  MeanVariance s = new MeanVariance();
  KNNHeap nn = DBIDUtil.newHeap(k); // Reused across iterations.
  for(DBIDIter pA = ids.iter(); pA.valid(); pA.advance()) {
    final double simAA = kernelMatrix.getSimilarity(pA, pA);
    // Choose the k-min nearest
    nn.clear();
    for(DBIDIter nB = relation.iterDBIDs(); nB.valid(); nB.advance()) {
      if(DBIDUtil.equal(nB, pA)) {
        continue;
      }
      double simBB = kernelMatrix.getSimilarity(nB, nB);
      double simAB = kernelMatrix.getSimilarity(pA, nB);
      // Squared distance in kernel space: k(A,A) + k(B,B) - 2 k(A,B).
      double sqdAB = simAA + simBB - simAB - simAB;
      if(!(sqdAB > 0.)) {
        continue; // Skip duplicates.
      }
      nn.insert(sqdAB, nB);
    }
    KNNList nl = nn.toKNNList();
    s.reset();
    DoubleDBIDListIter iB = nl.iter(), iC = nl.iter();
    for(; iB.valid(); iB.advance()) {
      double sqdAB = iB.doubleValue();
      double simAB = kernelMatrix.getSimilarity(pA, iB);
      if(!(sqdAB > 0.)) {
        continue;
      }
      for(iC.seek(iB.getOffset() + 1); iC.valid(); iC.advance()) {
        double sqdAC = iC.doubleValue();
        double simAC = kernelMatrix.getSimilarity(pA, iC);
        if(!(sqdAC > 0.)) {
          continue;
        }
        // Exploit bilinearity of scalar product:
        // <B-A, C-A> = <B, C-A> - <A,C-A>
        // = <B,C> - <B,A> - <A,C> + <A,A>
        double simBC = kernelMatrix.getSimilarity(iB, iC);
        double numerator = simBC - simAB - simAC + simAA;
        double div = 1. / (sqdAB * sqdAC);
        s.put(numerator * div, FastMath.sqrt(div));
      }
    }
    final double abof = s.getNaiveVariance();
    minmaxabod.put(abof);
    abodvalues.putDouble(pA, abof);
  }
}
Full kernel - based version .
691
6
156,725
/**
 * Return the Erfc weight scaled by the standard deviation; the max
 * parameter is ignored.
 *
 * @param distance distance value
 * @param max ignored
 * @param stddev standard deviation; non-positive values yield weight 1
 * @return weight value
 */
@Override
public double getWeight(double distance, double max, double stddev) {
  return stddev <= 0 ? 1 : NormalDistribution.erfc(MathUtil.SQRTHALF * distance / stddev);
}
Return Erfc weight scaled by standard deviation . max is ignored .
59
13
156,726
/**
 * Run the LOF algorithm: materialize the kNN sets, compute the local
 * reachability densities (LRD), then the LOF score of each object.
 *
 * @param database database
 * @param relation data relation
 * @return outlier result with quotient score meta (baseline 1.0)
 */
public OutlierResult run(Database database, Relation<O> relation) {
  StepProgress stepprog = LOG.isVerbose() ? new StepProgress("LOF", 3) : null;
  DBIDs ids = relation.getDBIDs();
  LOG.beginStep(stepprog, 1, "Materializing nearest-neighbor sets.");
  KNNQuery<O> knnq = DatabaseUtil.precomputedKNNQuery(database, relation, getDistanceFunction(), k);
  // Compute LRDs
  LOG.beginStep(stepprog, 2, "Computing Local Reachability Densities (LRD).");
  WritableDoubleDataStore lrds = DataStoreUtil.makeDoubleStorage(ids, DataStoreFactory.HINT_HOT | DataStoreFactory.HINT_TEMP);
  computeLRDs(knnq, ids, lrds);
  // compute LOF_SCORE of each db object
  LOG.beginStep(stepprog, 3, "Computing Local Outlier Factors (LOF).");
  WritableDoubleDataStore lofs = DataStoreUtil.makeDoubleStorage(ids, DataStoreFactory.HINT_HOT | DataStoreFactory.HINT_DB);
  // track the maximum value for normalization.
  DoubleMinMax lofminmax = new DoubleMinMax();
  computeLOFScores(knnq, ids, lrds, lofs, lofminmax);
  LOG.setCompleted(stepprog);
  // Build result representation.
  DoubleRelation scoreResult = new MaterializedDoubleRelation("Local Outlier Factor", "lof-outlier", lofs, ids);
  OutlierScoreMeta scoreMeta = new QuotientOutlierScoreMeta(lofminmax.getMin(), lofminmax.getMax(), 0.0, Double.POSITIVE_INFINITY, 1.0);
  return new OutlierResult(scoreMeta, scoreResult);
}
Runs the LOF algorithm on the given database .
452
11
156,727
/**
 * Compute the local reachability density (LRD) for every object.
 *
 * @param knnq kNN query
 * @param ids objects to process
 * @param lrds output storage for the LRD values
 */
private void computeLRDs(KNNQuery<O> knnq, DBIDs ids, WritableDoubleDataStore lrds) {
  FiniteProgress prog = LOG.isVerbose() ? new FiniteProgress("Local Reachability Densities (LRD)", ids.size(), LOG) : null;
  for(DBIDIter it = ids.iter(); it.valid(); it.advance()) {
    lrds.putDouble(it, computeLRD(knnq, it));
    LOG.incrementProcessed(prog);
  }
  LOG.ensureCompleted(prog);
}
Compute local reachability distances .
160
7
156,728
protected double computeLRD ( KNNQuery < O > knnq , DBIDIter curr ) { final KNNList neighbors = knnq . getKNNForDBID ( curr , k ) ; double sum = 0.0 ; int count = 0 ; for ( DoubleDBIDListIter neighbor = neighbors . iter ( ) ; neighbor . valid ( ) ; neighbor . advance ( ) ) { if ( DBIDUtil . equal ( curr , neighbor ) ) { continue ; } KNNList neighborsNeighbors = knnq . getKNNForDBID ( neighbor , k ) ; sum += MathUtil . max ( neighbor . doubleValue ( ) , neighborsNeighbors . getKNNDistance ( ) ) ; count ++ ; } // Avoid division by 0 return ( sum > 0 ) ? ( count / sum ) : Double . POSITIVE_INFINITY ; }
Compute a single local reachability distance .
188
9
156,729
private void computeLOFScores ( KNNQuery < O > knnq , DBIDs ids , DoubleDataStore lrds , WritableDoubleDataStore lofs , DoubleMinMax lofminmax ) { FiniteProgress progressLOFs = LOG . isVerbose ( ) ? new FiniteProgress ( "Local Outlier Factor (LOF) scores" , ids . size ( ) , LOG ) : null ; double lof ; for ( DBIDIter iter = ids . iter ( ) ; iter . valid ( ) ; iter . advance ( ) ) { lof = computeLOFScore ( knnq , iter , lrds ) ; lofs . putDouble ( iter , lof ) ; // update minimum and maximum lofminmax . put ( lof ) ; LOG . incrementProcessed ( progressLOFs ) ; } LOG . ensureCompleted ( progressLOFs ) ; }
Compute local outlier factors .
198
7
156,730
/**
 * Compute a single LOF score: the average LRD of the neighbors divided by
 * the object's own LRD.
 *
 * NOTE(review): if the kNN list contains only the query point itself,
 * count remains 0 and the result is 0/0 = NaN — presumably k &gt;= 1
 * guarantees at least one real neighbor; confirm upstream.
 *
 * @param knnq kNN query
 * @param cur current object
 * @param lrds precomputed LRD values
 * @return LOF score; 1.0 for objects with infinite LRD
 */
protected double computeLOFScore(KNNQuery<O> knnq, DBIDRef cur, DoubleDataStore lrds) {
  final double lrdp = lrds.doubleValue(cur);
  if(Double.isInfinite(lrdp)) {
    // Infinite own density: define the score as 1 (inlier).
    return 1.0;
  }
  double sum = 0.;
  int count = 0;
  final KNNList neighbors = knnq.getKNNForDBID(cur, k);
  for(DBIDIter neighbor = neighbors.iter(); neighbor.valid(); neighbor.advance()) {
    // skip the point itself
    if(DBIDUtil.equal(cur, neighbor)) {
      continue;
    }
    sum += lrds.doubleValue(neighbor);
    ++count;
  }
  return sum / (lrdp * count);
}
Compute a single LOF score .
176
8
156,731
/**
 * Compute the aggregated (maximum) knn distance over all entries of this
 * node.
 *
 * @return largest knn distance of any entry
 */
protected double kNNDistance() {
  double maxDist = getEntry(0).getKnnDistance();
  for(int i = 1; i < getNumEntries(); i++) {
    final double d = getEntry(i).getKnnDistance();
    if(d > maxDist) {
      maxDist = d;
    }
  }
  return maxDist;
}
Computes and returns the aggregated knn distance of this node
83
13
156,732
public boolean readLine ( Appendable buf ) throws IOException { boolean success = false ; while ( true ) { // Process buffer: while ( pos < end ) { success = true ; final char c = buffer [ pos ++ ] ; if ( c == ' ' ) { return success ; } if ( c == ' ' ) { continue ; } buf . append ( c ) ; } // Refill buffer: assert ( pos >= end ) : "Buffer wasn't empty when refilling!" ; end = in . read ( buffer , 0 , buffer . length ) ; pos = 0 ; if ( end < 0 ) { // End of stream. return success ; } } }
Read a line into the given buffer .
139
8
156,733
/**
 * Choose the best pivot for the given rank, given five ordered candidate
 * positions m1 &lt;= m2 &lt;= m3 &lt;= m4 &lt;= m5: clamp to the outer pair
 * first, then the inner pair, otherwise use the median.
 */
private static final int bestPivot(int rank, int m1, int m2, int m3, int m4, int m5) {
  return rank < m1 ? m1 //
      : rank > m5 ? m5 //
          : rank < m2 ? m2 //
              : rank > m4 ? m4 //
                  : m3;
}
Choose the best pivot for the given rank .
89
9
156,734
/**
 * Write this entry: the super data, then k_max and the knn distances.
 *
 * @param out output stream
 * @throws IOException on write errors
 */
@Override
public void writeExternal(ObjectOutput out) throws IOException {
  super.writeExternal(out);
  out.writeInt(knnDistances.length);
  for(double dist : knnDistances) {
    out.writeDouble(dist);
  }
}
Calls the super method and writes the parameter k_max and the knn distances of this entry to the specified stream .
79
25
156,735
/**
 * Read this entry: the super data, then k_max and the knn distances.
 *
 * @param in input stream
 * @throws IOException on read errors
 * @throws ClassNotFoundException propagated from the super call
 */
@Override
public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException {
  super.readExternal(in);
  final int kmax = in.readInt();
  knnDistances = new double[kmax];
  for(int i = 0; i < kmax; i++) {
    knnDistances[i] = in.readDouble();
  }
}
Calls the super method and reads the parameter k_max and knn distance of this entry from the specified input stream .
88
25
156,736
/**
 * Evaluate the approximation line at k: m * log(k) + t; positive infinity
 * below the valid range k_0.
 *
 * @param k position to evaluate at
 * @return approximated value
 */
public double getValueAt(int k) {
  return k < k_0 ? Double.POSITIVE_INFINITY : m * FastMath.log(k) + t;
}
Returns the function value of the approximation line at the specified k .
45
13
156,737
/**
 * Perform one Levenberg-Marquardt iteration: solve the damped normal
 * equations, evaluate the trial parameters, and adapt lambda depending on
 * whether chi-squared improved.
 */
public void iterate() {
  // build covmat out of fitting matrix by multiplying diagonal elements with
  // 1+lambda
  for(int i = 0; i < numfit; i++) {
    System.arraycopy(alpha[i], 0, covmat[i], 0, numfit);
    covmat[i][i] *= (1.0 + lambda);
  }
  // Solve the equation system (Gauss-Jordan)
  LinearEquationSystem ls = new LinearEquationSystem(covmat, beta);
  ls.solveByTotalPivotSearch();
  // update covmat with the inverse
  covmat = ls.getCoefficents();
  // and deltaparams with the solution vector
  deltaparams = ls.getRHS();
  // deltaparams = beta;
  // Apply the parameter deltas to the fitted parameters only.
  for(int i = 0, i2 = 0; i < numparams; i++) {
    if(dofit[i]) {
      paramstry[i] = params[i] + deltaparams[i2++];
    }
  }
  double newchisq = simulateParameters(paramstry);
  // have the results improved?
  if(newchisq < chisq) {
    // Improved: reduce the damping factor.
    // TODO: Do we need a larger limit than MIN_NORMAL?
    if(lambda * 0.1 > Double.MIN_NORMAL) {
      lambda *= 0.1;
    }
    chisq = newchisq;
    // keep modified covmat as new alpha matrix
    // and da as new beta
    for(int i = 0; i < numfit; i++) {
      System.arraycopy(covmat[i], 0, alpha[i], 0, numfit);
      beta[i] = deltaparams[i];
    }
    System.arraycopy(paramstry, 0, params, 0, numparams);
  }
  else {
    // Worse: increase the damping factor and discard the trial step.
    // TODO: Do we need a larger limit than MAX_VALUE?
    // Does it ever make sense to go as far up?
    // Anyway, this should prevent overflows.
    if(lambda * 10 < Double.MAX_VALUE) {
      lambda *= 10;
    }
  }
}
Perform an iteration of the approximation loop .
462
9
156,738
/**
 * Run the fitting loop: at most maxruns iterations, stopping early after
 * maxsmall iterations whose chi-squared improvement was smaller than
 * {@code small}.
 */
public void run() {
  int maxruns = this.maxruns, maxsmall = this.maxsmall;
  double oldchi = getChiSq();
  while(maxruns-- > 0) {
    iterate();
    double newchi = getChiSq(), deltachi = newchi - oldchi;
    oldchi = newchi;
    // stop condition: only a small improvement in Chi.
    if(deltachi < 0 && deltachi > -small && --maxsmall < 0) {
      break;
    }
  }
}
Iterate until convergence at most 100 times .
115
9
156,739
/**
 * Get a defensive copy of the position data of the given object.
 *
 * @param object query object
 * @return cloned coordinates, or null if the object is unknown
 */
public double[] get(T object) {
  final double[] position = map.get(object);
  return position == null ? null : position.clone();
}
Get the position data of the object
41
7
156,740
/**
 * Compute the relative fill: the fraction of the total area covered by
 * used cells. Useful for triggering a relayout if the ratio is
 * unsatisfactory.
 *
 * @return covered area divided by total area
 */
public double relativeFill() {
  final int cols = widths.size(), rows = heights.size();
  double covered = 0.0;
  for(int y = 0; y < rows; y++) {
    for(int x = 0; x < cols; x++) {
      if(usage.get(y).get(x) != null) {
        covered += widths.get(x) * heights.get(y);
      }
    }
  }
  return covered / (twidth * theight);
}
Compute the relative fill . Useful for triggering a relayout if the relative fill is not satisfactory .
118
20
156,741
/**
 * Test whether the intervals of this unit contain the given feature vector
 * (lower bound inclusive, upper bound exclusive).
 *
 * @param vector feature vector to test
 * @return true if every dimension's value lies inside its interval
 */
public boolean contains(NumberVector vector) {
  for(int i = 0; i < dims.length; i++) {
    final double v = vector.doubleValue(dims[i]);
    if(v < bounds[i << 1] || v >= bounds[(i << 1) + 1]) {
      return false;
    }
  }
  return true;
}
Returns true if the intervals of this unit contain the specified feature vector .
77
14
156,742
/**
 * Add the id of the feature vector to this unit if the unit's intervals
 * contain the vector.
 *
 * @param id id to add
 * @param vector feature vector to test
 * @return true if the id was added
 */
public boolean addFeatureVector(DBIDRef id, NumberVector vector) {
  if(!contains(vector)) {
    return false;
  }
  ids.add(id);
  return true;
}
Adds the id of the specified feature vector to this unit if this unit contains the feature vector .
40
19
156,743
/**
 * Test whether this unit is the right neighbor of the given unit: all but
 * the last dimension must agree, and this unit's lower bound in the last
 * dimension must equal the other unit's upper bound.
 *
 * NOTE(review): the parameter d is not used in this body — confirm whether
 * it is vestigial or the last-dimension index should be derived from it.
 */
protected boolean containsRightNeighbor(CLIQUEUnit unit, int d) {
  final int e = dims.length - 1;
  return checkDimensions(unit, e) && bounds[e << 1] == unit.bounds[(e << 1) + 1];
}
Returns true if this unit is the right neighbor of the given unit .
59
14
156,744
/**
 * Join this unit with the specified unit, Apriori style: the first n-1
 * dimensions must agree, this unit's last dimension must order before the
 * other's (canonical order avoids duplicate joins), and the joint id set
 * must reach the density threshold.
 *
 * @param other unit to join with
 * @param all total number of feature vectors (selectivity denominator)
 * @param tau density threshold
 * @return the joined unit, or null if the units cannot be joined or the
 *         result is not dense enough
 */
protected CLIQUEUnit join(CLIQUEUnit other, double all, double tau) {
  if(other.dimensionality() != this.dimensionality()) {
    return null;
  }
  // n-1 dimensions must be the same:
  int e = dims.length - 1;
  if(!checkDimensions(other, e)) {
    return null;
  }
  // Canonical ordering of the last dimension.
  if(dims[e] >= other.dims[e]) {
    return null;
  }
  HashSetModifiableDBIDs resultIDs = DBIDUtil.newHashSet(this.ids);
  resultIDs.retainAll(other.ids);
  // Prune: selectivity below the density threshold.
  if(resultIDs.size() / all < tau) {
    return null;
  }
  return new CLIQUEUnit(this, other.dims[e], other.bounds[e << 1], other.bounds[(e << 1) + 1], resultIDs);
}
Joins this unit with the specified unit .
199
9
156,745
/**
 * Check that the first e dimensions of both units agree: same attribute
 * and same interval bounds.
 *
 * Bug fix: the upper bound was compared against itself
 * ({@code bounds[j + 1] != bounds[j + 1]}), which is always false (except
 * for NaN), so differing upper bounds were never detected. It now compares
 * against the other unit's upper bound.
 *
 * @param other unit to compare with
 * @param e number of leading dimensions to check
 * @return true if the first e dimensions match
 */
private boolean checkDimensions(CLIQUEUnit other, int e) {
  for(int i = 0, j = 0; i < e; i++, j += 2) {
    if(dims[i] != other.dims[i] || bounds[j] != other.bounds[j] || bounds[j + 1] != other.bounds[j + 1]) {
      return false;
    }
  }
  return true;
}
Check that the first e dimensions agree .
91
8
156,746
/**
 * Merge the min/max range observed by another instance into this one
 * (synchronized for concurrent use).
 *
 * @param minmax range to merge in
 */
protected synchronized void merge(DoubleMinMax minmax) {
  this.minmax.put(minmax.getMin());
  this.minmax.put(minmax.getMax());
}
Merge the result of an instance .
45
8
156,747
/**
 * Register this handler for mousedown events on the element.
 */
public void enableStart() {
  ((EventTarget) element).addEventListener(SVGConstants.SVG_EVENT_MOUSEDOWN, this, false);
}
Enable capturing of mousedown events .
43
8
156,748
/**
 * Unregister this handler from mousedown events on the element.
 */
public void disableStart() {
  ((EventTarget) element).removeEventListener(SVGConstants.SVG_EVENT_MOUSEDOWN, this, false);
}
Disable capturing of mousedown events .
43
8
156,749
/**
 * Register this handler for mousemove, mouseup and mouseout events on the
 * document root, to track the drag until it ends.
 */
protected void enableStop() {
  EventTarget targ = svgp.getDocument().getRootElement();
  targ.addEventListener(SVGConstants.SVG_EVENT_MOUSEMOVE, this, false);
  targ.addEventListener(SVGConstants.SVG_EVENT_MOUSEUP, this, false);
  // FIXME: listen on the background object!
  targ.addEventListener(SVGConstants.SVG_EVENT_MOUSEOUT, this, false);
}
Enable capturing of mousemove and mouseup events .
112
10
156,750
/**
 * Unregister this handler from the mousemove, mouseup and mouseout events
 * registered by enableStop().
 */
protected void disableStop() {
  EventTarget targ = svgp.getDocument().getRootElement();
  targ.removeEventListener(SVGConstants.SVG_EVENT_MOUSEMOVE, this, false);
  targ.removeEventListener(SVGConstants.SVG_EVENT_MOUSEUP, this, false);
  // FIXME: listen on the background object!
  targ.removeEventListener(SVGConstants.SVG_EVENT_MOUSEOUT, this, false);
}
Disable capturing of mousemove and mouseup events .
112
10
156,751
/**
 * Translate the event position into coordinates of the reference element.
 *
 * @param evt event to translate
 * @return coordinates relative to the reference element
 */
protected SVGPoint getCoordinates(Event evt) {
  return SVGUtil.elementCoordinatesFromEvent(this.svgp.getDocument(), this.coordref, evt);
}
Return the event coordinates for this event .
45
8
156,752
/**
 * Notify the listener (if any) that a drag started.
 *
 * @param startPoint drag start position
 * @param evt originating event
 * @return the listener's verdict, or true when no listener is registered
 */
protected boolean startDrag(SVGPoint startPoint, Event evt) {
  return listener == null || listener.startDrag(startPoint, evt);
}
Action to do on drag start .
40
7
156,753
/**
 * Notify the listener (if any) of drag progress.
 *
 * @param startPoint drag start position
 * @param dragPoint current drag position
 * @param evt originating event
 * @param inside whether the pointer is inside the target
 * @return the listener's verdict, or true when no listener is registered
 */
protected boolean duringDrag(SVGPoint startPoint, SVGPoint dragPoint, Event evt, boolean inside) {
  return listener == null || listener.duringDrag(startPoint, dragPoint, evt, inside);
}
Method called during drags .
53
6
156,754
/**
 * Make the rectangle invisible: fully transparent fill, pointer cursor
 * kept so it remains interactive.
 */
public void makeInvisible() {
  CSSClass css = new CSSClass(this, "unused");
  css.setStatement(SVGConstants.CSS_FILL_OPACITY_PROPERTY, "0");
  css.setStatement(SVGConstants.CSS_CURSOR_PROPERTY, SVGConstants.CSS_POINTER_VALUE);
  SVGUtil.setAtt(element, SVGConstants.SVG_STYLE_ATTRIBUTE, css.inlineCSS());
}
Make the rectangle invisible .
120
5
156,755
/**
 * Make the rectangle visible for debugging: translucent green fill with a
 * pointer cursor.
 */
public void makeVisible() {
  CSSClass css = new CSSClass(this, "unused");
  css.setStatement(SVGConstants.CSS_FILL_PROPERTY, SVGConstants.CSS_GREEN_VALUE);
  css.setStatement(SVGConstants.CSS_FILL_OPACITY_PROPERTY, "0.2");
  css.setStatement(SVGConstants.CSS_CURSOR_PROPERTY, SVGConstants.CSS_POINTER_VALUE);
  SVGUtil.setAtt(element, SVGConstants.SVG_STYLE_ATTRIBUTE, css.inlineCSS());
}
Make the rectangle visible for debug purposes .
152
8
156,756
/**
 * Set the score-bar value and its value range.
 *
 * @param val current value
 * @param min range minimum
 * @param max range maximum
 */
public void setFill(double val, double min, double max) {
  this.val = val;
  this.min = min;
  this.max = max;
}
Set the fill of the score bar .
34
8
156,757
public Element build ( SVGPlot svgp , double x , double y , double width , double height ) { Element barchart = svgp . svgElement ( SVGConstants . SVG_G_TAG ) ; // TODO: use style library for colors! Element bar = svgp . svgRect ( x , y , width , height ) ; bar . setAttribute ( SVGConstants . SVG_FILL_ATTRIBUTE , "#a0a0a0" ) ; bar . setAttribute ( SVGConstants . SVG_STROKE_ATTRIBUTE , "#a0a0a0" ) ; bar . setAttribute ( SVGConstants . SVG_STROKE_WIDTH_ATTRIBUTE , String . valueOf ( height * 0.01 ) ) ; barchart . appendChild ( bar ) ; if ( val >= min && val <= max && min < max ) { final double frame = 0.02 * height ; double fpos = ( val - min ) / ( max - min ) * ( width - 2 * frame ) ; Element chart ; if ( reversed ) { chart = svgp . svgRect ( x + frame + fpos , y + frame , width - fpos - 2 * frame , height - 2 * frame ) ; } else { chart = svgp . svgRect ( x + frame , y + frame , fpos , height - 2 * frame ) ; } chart . setAttribute ( SVGConstants . SVG_FILL_ATTRIBUTE , "#d4e4f1" ) ; chart . setAttribute ( SVGConstants . SVG_STROKE_ATTRIBUTE , "#a0a0a0" ) ; chart . setAttribute ( SVGConstants . SVG_STROKE_WIDTH_ATTRIBUTE , String . valueOf ( height * 0.01 ) ) ; barchart . appendChild ( chart ) ; } // Draw the values: if ( format != null ) { String num = Double . isNaN ( val ) ? "NaN" : format . format ( val ) ; Element lbl = svgp . svgText ( x + 0.05 * width , y + 0.75 * height , num ) ; lbl . setAttribute ( SVGConstants . SVG_STYLE_ATTRIBUTE , "font-size: " + 0.75 * height + "; font-weight: bold" ) ; barchart . appendChild ( lbl ) ; } // Draw the label if ( label != null ) { Element lbl = svgp . svgText ( x + 1.05 * width , y + 0.75 * height , label ) ; lbl . setAttribute ( SVGConstants . SVG_STYLE_ATTRIBUTE , "font-size: " + 0.75 * height + "; font-weight: normal" ) ; barchart . appendChild ( lbl ) ; } return barchart ; }
Build the actual element
636
4
156,758
void openBuffer ( ) { if ( buffer == null ) { try { buffer = input . map ( MapMode . READ_ONLY , 0 , input . size ( ) ) ; } catch ( IOException e ) { throw new AbortException ( "Cannot map input bundle." , e ) ; } } }
Map the input file .
66
5
156,759
void readMeta ( ) { final int check = buffer . getInt ( ) ; if ( check != MAGIC ) { throw new AbortException ( "File does not start with expected magic." ) ; } final int nummeta = buffer . getInt ( ) ; assert ( nummeta > 0 ) : "Empty bundle?" ; meta = new BundleMeta ( nummeta ) ; sers = new ByteBufferSerializer < ? > [ nummeta ] ; data = new Object [ nummeta ] ; for ( int i = 0 ; i < nummeta ; i ++ ) { try { @ SuppressWarnings ( "unchecked" ) SimpleTypeInformation < ? extends Object > type = ( SimpleTypeInformation < ? extends Object > ) TypeInformationSerializer . STATIC . fromByteBuffer ( buffer ) ; sers [ i ] = type . getSerializer ( ) ; if ( i == 0 && DBID . class . isAssignableFrom ( type . getRestrictionClass ( ) ) ) { hasids = true ; } else { meta . add ( type ) ; } } catch ( UnsupportedOperationException e ) { throw new AbortException ( "Deserialization failed: " + e . getMessage ( ) , e ) ; } catch ( IOException e ) { throw new AbortException ( "IO error" , e ) ; } } }
Read the metadata .
285
4
156,760
void readObject ( ) { for ( int i = 0 ; i < sers . length ; ++ i ) { try { data [ i ] = sers [ i ] . fromByteBuffer ( buffer ) ; } catch ( UnsupportedOperationException e ) { throw new AbortException ( "Deserialization failed." , e ) ; } catch ( IOException e ) { throw new AbortException ( "IO error" , e ) ; } } }
Read an object .
95
4
156,761
public Clustering < PrototypeModel < O > > run ( Relation < O > relation ) { RangeQuery < O > rq = relation . getRangeQuery ( getDistanceFunction ( ) , threshold ) ; ModifiableDBIDs seen = DBIDUtil . newHashSet ( relation . size ( ) ) ; Clustering < PrototypeModel < O > > clustering = new Clustering <> ( "Prototype clustering" , "prototype-clustering" ) ; int queries = 0 ; FiniteProgress prog = LOG . isVerbose ( ) ? new FiniteProgress ( "Leader clustering" , relation . size ( ) , LOG ) : null ; for ( DBIDIter it = relation . iterDBIDs ( ) ; it . valid ( ) && seen . size ( ) < relation . size ( ) ; it . advance ( ) ) { if ( seen . contains ( it ) ) { continue ; } DoubleDBIDList res = rq . getRangeForDBID ( it , threshold ) ; ++ queries ; ModifiableDBIDs ids = DBIDUtil . newArray ( res . size ( ) ) ; for ( DBIDIter cand = res . iter ( ) ; cand . valid ( ) ; cand . advance ( ) ) { if ( seen . add ( cand ) ) { LOG . incrementProcessed ( prog ) ; ids . add ( cand ) ; } } assert ( ids . size ( ) > 0 && ids . contains ( it ) ) ; PrototypeModel < O > mod = new SimplePrototypeModel <> ( relation . get ( it ) ) ; clustering . addToplevelCluster ( new Cluster <> ( ids , mod ) ) ; } LOG . statistics ( new LongStatistic ( this . getClass ( ) . getName ( ) + ".queries" , queries ) ) ; LOG . ensureCompleted ( prog ) ; return clustering ; }
Run the leader clustering algorithm .
405
7
156,762
@ Override public void writeToText ( TextWriterStream out , String label ) { String name = getNameAutomatic ( ) ; if ( name != null ) { out . commentPrintLn ( "Cluster name: " + name ) ; } out . commentPrintLn ( "Cluster noise flag: " + isNoise ( ) ) ; out . commentPrintLn ( "Cluster size: " + ids . size ( ) ) ; // also print model, if any and printable if ( getModel ( ) != null && ( getModel ( ) instanceof TextWriteable ) ) { ( ( TextWriteable ) getModel ( ) ) . writeToText ( out , label ) ; } }
Write to a textual representation . Writing the actual group data will be handled by the caller this is only meant to write the meta information .
152
27
156,763
private void scheduleSetPlot ( final SVGPlot oldplot , final SVGPlot newplot ) { UpdateManager um = this . getUpdateManager ( ) ; if ( um != null ) { synchronized ( um ) { if ( um . isRunning ( ) ) { // LoggingUtil.warning("Scheduling detach: " + this + " " + oldplot); final Runnable detach = new Runnable ( ) { @ Override public void run ( ) { if ( latest . compareAndSet ( this , null ) ) { detachPlot ( oldplot ) ; attachPlot ( newplot ) ; } } } ; latest . set ( detach ) ; um . getUpdateRunnableQueue ( ) . preemptLater ( detach ) ; return ; } } } else { if ( oldplot != null ) { LoggingUtil . warning ( "No update manager, but a previous plot exists. Incorrectly initialized?" ) ; } } detachPlot ( oldplot ) ; attachPlot ( newplot ) ; }
Schedule a detach .
211
5
156,764
private void attachPlot ( SVGPlot newplot ) { this . plot = newplot ; if ( newplot == null ) { super . setSVGDocument ( null ) ; return ; } newplot . synchronizeWith ( synchronizer ) ; super . setSVGDocument ( newplot . getDocument ( ) ) ; super . setDisableInteractions ( newplot . getDisableInteractions ( ) ) ; }
Attach to a new plot and display .
85
8
156,765
private void detachPlot ( SVGPlot oldplot ) { if ( oldplot == null ) { return ; } this . plot = null ; oldplot . unsynchronizeWith ( synchronizer ) ; }
Execute the detaching event .
42
7
156,766
protected double estimateID ( DBIDRef ignore , DoubleDBIDListIter it , double [ ] p ) { int j = 0 ; for ( it . seek ( 0 ) ; it . valid ( ) ; it . advance ( ) ) { if ( it . doubleValue ( ) == 0. || DBIDUtil . equal ( ignore , it ) ) { continue ; } p [ j ++ ] = it . doubleValue ( ) ; } if ( j < 2 ) { throw new ArithmeticException ( "Too little data to estimate ID." ) ; } return estimator . estimate ( p , j ) ; }
Estimate the local intrinsic dimensionality .
128
8
156,767
public static double logpdf ( double val , double rate ) { return val < 0. ? Double . NEGATIVE_INFINITY : FastMath . log ( rate ) - rate * val ; }
log PDF static version
42
4
156,768
public static double quantile ( double val , double rate ) { return val >= 0 && val <= 1 ? - FastMath . log ( 1 - val ) / rate : Double . NaN ; }
Quantile function static version
41
5
156,769
private MarkdownDocStream pendingBreak ( ) { if ( newline == Newline . NONE ) { return this ; } out . append ( newline == Newline . BREAK ? "\\\n" : newline == Newline . PAR ? "\n\n" : "\n" ) ; for ( int i = indent , j = i ; i > 0 ; i -= j ) { out . append ( WHITESPACES , 0 , ( j = i > WHITESPACES . length ( ) ? WHITESPACES . length ( ) : i ) ) ; } newline = Newline . NONE ; return this ; }
Output any pending line breaks .
139
6
156,770
public MarkdownDocStream append ( char c ) { if ( c == ' ' ) { newline = newline == Newline . NONE ? Newline . NEWLINE : Newline . PAR ; return this ; } pendingBreak ( ) ; out . append ( c ) ; return this ; }
Append a single character .
62
6
156,771
public MarkdownDocStream append ( CharSequence p , int start , int end ) { for ( int pos = start ; pos < end ; ++ pos ) { final char c = p . charAt ( pos ) ; if ( c == ' ' ) { continue ; } append ( c ) ; // Uses \n magic. } return this ; }
Output part of a string .
73
6
156,772
public MarkdownDocStream indent ( int newindent ) { if ( newindent < indent ) { newline = newline == Newline . BREAK ? Newline . NEWLINE : Newline . PAR ; } indent = newindent ; return this ; }
Set the indent depth .
56
5
156,773
public final void render ( GL2 gl ) { gl . glMatrixMode ( GL2 . GL_PROJECTION ) ; gl . glPushMatrix ( ) ; gl . glLoadIdentity ( ) ; gl . glMatrixMode ( GL2 . GL_MODELVIEW ) ; gl . glPushMatrix ( ) ; gl . glLoadIdentity ( ) ; gl . glOrtho ( 0 , width , 0 , height , - 1 , + 1 ) ; gl . glColor4f ( 0f , 0f , 0f , .5f ) ; // Fade background: gl . glBegin ( GL2 . GL_QUADS ) ; gl . glVertex2f ( 0f , 0f ) ; gl . glVertex2f ( width , 0f ) ; gl . glVertex2f ( width , height ) ; gl . glVertex2f ( 0f , height ) ; gl . glEnd ( ) ; renderContents ( gl ) ; gl . glMatrixMode ( GL2 . GL_PROJECTION ) ; gl . glPopMatrix ( ) ; gl . glMatrixMode ( GL2 . GL_MODELVIEW ) ; gl . glPopMatrix ( ) ; }
Main render method
256
3
156,774
@ Override public int setPageID ( P page ) { int pageID = page . getPageID ( ) ; if ( pageID == - 1 ) { pageID = getNextEmptyPageID ( ) ; if ( pageID == - 1 ) { pageID = nextPageID ++ ; } page . setPageID ( pageID ) ; } return pageID ; }
Sets the id of the given page .
79
9
156,775
public StringBuilder appendToBuffer ( StringBuilder buf ) { Iterator < double [ ] > iter = points . iterator ( ) ; while ( iter . hasNext ( ) ) { double [ ] data = iter . next ( ) ; for ( int i = 0 ; i < data . length ; i ++ ) { if ( i > 0 ) { buf . append ( ' ' ) ; } buf . append ( data [ i ] ) ; } if ( iter . hasNext ( ) ) { buf . append ( ' ' ) ; } } return buf ; }
Append the polygon to the buffer .
116
9
156,776
public boolean containsPoint2D ( double [ ] v ) { assert ( v . length == 2 ) ; final double testx = v [ 0 ] ; final double testy = v [ 1 ] ; boolean c = false ; Iterator < double [ ] > it = points . iterator ( ) ; double [ ] pre = points . get ( points . size ( ) - 1 ) ; while ( it . hasNext ( ) ) { final double [ ] cur = it . next ( ) ; final double curx = cur [ 0 ] , cury = cur [ 1 ] ; final double prex = pre [ 0 ] , prey = pre [ 1 ] ; if ( ( ( cury > testy ) != ( prey > testy ) ) ) { if ( ( testx < ( prex - curx ) * ( testy - cury ) / ( prey - cury ) + curx ) ) { c = ! c ; } } pre = cur ; } return c ; }
Point in polygon test based on
207
7
156,777
public static AffineTransformation reorderAxesTransformation ( int dim , int ... axes ) { double [ ] [ ] m = zeroMatrix ( dim + 1 ) ; // insert ones appropriately: for ( int i = 0 ; i < axes . length ; i ++ ) { assert ( 0 < axes [ i ] && axes [ i ] <= dim ) ; m [ i ] [ axes [ i ] - 1 ] = 1.0 ; } int useddim = 1 ; for ( int i = axes . length ; i < dim + 1 ; i ++ ) { // find next "unused" dimension. { boolean search = true ; while ( search ) { search = false ; for ( int a : axes ) { if ( a == useddim ) { search = true ; useddim ++ ; break ; } } } } m [ i ] [ useddim - 1 ] = 1.0 ; useddim ++ ; } assert ( useddim - 2 == dim ) ; return new AffineTransformation ( dim , m , null ) ; }
Generate a transformation that reorders axes in the given way .
217
13
156,778
public void addTranslation ( double [ ] v ) { assert ( v . length == dim ) ; // reset inverse transformation - needs recomputation. inv = null ; double [ ] [ ] homTrans = unitMatrix ( dim + 1 ) ; for ( int i = 0 ; i < dim ; i ++ ) { homTrans [ i ] [ dim ] = v [ i ] ; } trans = times ( homTrans , trans ) ; }
Add a translation operation to the matrix
92
7
156,779
public void addMatrix ( double [ ] [ ] m ) { assert ( m . length == dim ) ; assert ( m [ 0 ] . length == dim ) ; // reset inverse transformation - needs recomputation. inv = null ; // extend the matrix with an extra row and column double [ ] [ ] ht = new double [ dim + 1 ] [ dim + 1 ] ; for ( int i = 0 ; i < dim ; i ++ ) { for ( int j = 0 ; j < dim ; j ++ ) { ht [ i ] [ j ] = m [ i ] [ j ] ; } } // the other cells default to identity matrix ht [ dim ] [ dim ] = 1.0 ; // Multiply from left. trans = times ( ht , trans ) ; }
Add a matrix operation to the matrix .
168
8
156,780
public void addRotation ( int axis1 , int axis2 , double angle ) { // TODO: throw an exception instead of using assert assert ( axis1 >= 0 ) ; assert ( axis1 < dim ) ; assert ( axis1 >= 0 ) ; assert ( axis2 < dim ) ; assert ( axis1 != axis2 ) ; // reset inverse transformation - needs recomputation. inv = null ; double [ ] [ ] ht = new double [ dim + 1 ] [ dim + 1 ] ; // identity matrix for ( int i = 0 ; i < dim + 1 ; i ++ ) { ht [ i ] [ i ] = 1.0 ; } // insert rotation values final DoubleWrapper tmp = new DoubleWrapper ( ) ; // To return cosine double s = FastMath . sinAndCos ( angle , tmp ) , c = tmp . value ; ht [ axis1 ] [ axis1 ] = + c ; ht [ axis1 ] [ axis2 ] = - s ; ht [ axis2 ] [ axis1 ] = + s ; ht [ axis2 ] [ axis2 ] = + c ; // Multiply from left trans = times ( ht , trans ) ; }
Convenience function to apply a rotation in 2 dimensions .
256
12
156,781
public void addAxisReflection ( int axis ) { assert ( 0 < axis && axis <= dim ) ; // reset inverse transformation - needs recomputation. inv = null ; // Formal: // Matrix homTrans = Matrix.unitMatrix(dim + 1); // homTrans[axis - 1][axis - 1] = -1; // trans = homTrans.times(trans); // Faster: for ( int i = 0 ; i <= dim ; i ++ ) { trans [ axis - 1 ] [ i ] = - trans [ axis - 1 ] [ i ] ; } }
Add a reflection along the given axis .
123
8
156,782
public double [ ] homogeneVector ( double [ ] v ) { assert ( v . length == dim ) ; double [ ] dv = Arrays . copyOf ( v , dim + 1 ) ; dv [ dim ] = 1.0 ; return dv ; }
Transform an absolute vector into homogeneous coordinates .
57
9
156,783
public double [ ] homogeneRelativeVector ( double [ ] v ) { assert ( v . length == dim ) ; // TODO: this only works properly when trans[dim][dim] == 1.0, right? double [ ] dv = Arrays . copyOf ( v , dim + 1 ) ; dv [ dim ] = 0.0 ; return dv ; }
Transform a relative vector into homogeneous coordinates .
81
9
156,784
private double computeConfidence ( int support , int samples ) { final double z = NormalDistribution . standardNormalQuantile ( alpha ) ; final double eprob = support / ( double ) samples ; return Math . max ( 0. , eprob - z * FastMath . sqrt ( ( eprob * ( 1 - eprob ) ) / samples ) ) ; }
Estimate the confidence probability of a clustering .
81
10
156,785
protected Clustering < ? > runClusteringAlgorithm ( ResultHierarchy hierarchy , Result parent , DBIDs ids , DataStore < DoubleVector > store , int dim , String title ) { SimpleTypeInformation < DoubleVector > t = new VectorFieldTypeInformation <> ( DoubleVector . FACTORY , dim ) ; Relation < DoubleVector > sample = new MaterializedRelation <> ( t , ids , title , store ) ; ProxyDatabase d = new ProxyDatabase ( ids , sample ) ; Clustering < ? > clusterResult = samplesAlgorithm . run ( d ) ; d . getHierarchy ( ) . remove ( sample ) ; d . getHierarchy ( ) . remove ( clusterResult ) ; hierarchy . add ( parent , sample ) ; hierarchy . add ( sample , clusterResult ) ; return clusterResult ; }
Run a clustering algorithm on a single instance .
181
10
156,786
public static void load ( Class < ? > parent , ClassLoader cl ) { char [ ] buf = new char [ 0x4000 ] ; try { String fullName = RESOURCE_PREFIX + parent . getName ( ) ; Enumeration < URL > configfiles = cl . getResources ( fullName ) ; while ( configfiles . hasMoreElements ( ) ) { URL nextElement = configfiles . nextElement ( ) ; URLConnection conn = nextElement . openConnection ( ) ; conn . setUseCaches ( false ) ; try ( InputStreamReader is = new InputStreamReader ( conn . getInputStream ( ) , "UTF-8" ) ; ) { int start = 0 , cur = 0 , valid = is . read ( buf , 0 , buf . length ) ; char c ; while ( cur < valid ) { // Find newline or end while ( cur < valid && ( c = buf [ cur ] ) != ' ' && c != ' ' ) { cur ++ ; } if ( cur == valid && is . ready ( ) ) { // Move consumed buffer contents: if ( start > 0 ) { System . arraycopy ( buf , start , buf , 0 , valid - start ) ; valid -= start ; cur -= start ; start = 0 ; } else if ( valid == buf . length ) { throw new IOException ( "Buffer size exceeded. Maximum line length in service files is: " + buf . length + " in file: " + fullName ) ; } valid = is . read ( buf , valid , buf . length - valid ) ; continue ; } parseLine ( parent , buf , start , cur ) ; while ( cur < valid && ( ( c = buf [ cur ] ) == ' ' || c == ' ' ) ) { cur ++ ; } start = cur ; } } catch ( IOException x ) { throw new AbortException ( "Error reading configuration file" , x ) ; } } } catch ( IOException x ) { throw new AbortException ( "Could not load service configuration files." , x ) ; } }
Load the service file .
434
5
156,787
private static void parseLine ( Class < ? > parent , char [ ] line , int begin , int end ) { while ( begin < end && line [ begin ] == ' ' ) { begin ++ ; } if ( begin >= end || line [ begin ] == ' ' ) { return ; // Empty/comment lines are okay, continue } // Find end of class name: int cend = begin + 1 ; while ( cend < end && line [ cend ] != ' ' ) { cend ++ ; } // Class name: String cname = new String ( line , begin , cend - begin ) ; ELKIServiceRegistry . register ( parent , cname ) ; for ( int abegin = cend + 1 , aend = - 1 ; abegin < end ; abegin = aend + 1 ) { // Skip whitespace: while ( abegin < end && line [ abegin ] == ' ' ) { abegin ++ ; } // Find next whitespace: aend = abegin + 1 ; while ( aend < end && line [ aend ] != ' ' ) { aend ++ ; } ELKIServiceRegistry . registerAlias ( parent , new String ( line , abegin , aend - abegin ) , cname ) ; } return ; }
Parse a single line from a service registry file .
283
11
156,788
@ Override public void readExternal ( ObjectInput in ) throws IOException , ClassNotFoundException { super . readExternal ( in ) ; this . knnDistance = in . readDouble ( ) ; }
Calls the super method and reads the knn distance of this entry from the specified input stream .
43
20
156,789
public COPACNeighborPredicate . Instance instantiate ( Database database , Relation < V > relation ) { DistanceQuery < V > dq = database . getDistanceQuery ( relation , EuclideanDistanceFunction . STATIC ) ; KNNQuery < V > knnq = database . getKNNQuery ( dq , settings . k ) ; WritableDataStore < COPACModel > storage = DataStoreUtil . makeStorage ( relation . getDBIDs ( ) , DataStoreFactory . HINT_HOT | DataStoreFactory . HINT_TEMP , COPACModel . class ) ; Duration time = LOG . newDuration ( this . getClass ( ) . getName ( ) + ".preprocessing-time" ) . begin ( ) ; FiniteProgress progress = LOG . isVerbose ( ) ? new FiniteProgress ( this . getClass ( ) . getName ( ) , relation . size ( ) , LOG ) : null ; for ( DBIDIter iditer = relation . iterDBIDs ( ) ; iditer . valid ( ) ; iditer . advance ( ) ) { DoubleDBIDList ref = knnq . getKNNForDBID ( iditer , settings . k ) ; storage . put ( iditer , computeLocalModel ( iditer , ref , relation ) ) ; LOG . incrementProcessed ( progress ) ; } LOG . ensureCompleted ( progress ) ; LOG . statistics ( time . end ( ) ) ; return new Instance ( relation . getDBIDs ( ) , storage ) ; }
Full instantiation method .
325
5
156,790
protected COPACModel computeLocalModel ( DBIDRef id , DoubleDBIDList knnneighbors , Relation < V > relation ) { PCAResult epairs = settings . pca . processIds ( knnneighbors , relation ) ; int pdim = settings . filter . filter ( epairs . getEigenvalues ( ) ) ; PCAFilteredResult pcares = new PCAFilteredResult ( epairs . getEigenPairs ( ) , pdim , 1. , 0. ) ; double [ ] [ ] mat = pcares . similarityMatrix ( ) ; double [ ] vecP = relation . get ( id ) . toArray ( ) ; if ( pdim == vecP . length ) { // Full dimensional - noise! return new COPACModel ( pdim , DBIDUtil . EMPTYDBIDS ) ; } // Check which neighbors survive HashSetModifiableDBIDs survivors = DBIDUtil . newHashSet ( ) ; for ( DBIDIter neighbor = relation . iterDBIDs ( ) ; neighbor . valid ( ) ; neighbor . advance ( ) ) { double [ ] diff = minusEquals ( relation . get ( neighbor ) . toArray ( ) , vecP ) ; double cdistP = transposeTimesTimes ( diff , mat , diff ) ; if ( cdistP <= epsilonsq ) { survivors . add ( neighbor ) ; } } return new COPACModel ( pdim , survivors ) ; }
COPAC model computation
313
5
156,791
public ModifiableHyperBoundingBox computeMBR ( ) { E firstEntry = getEntry ( 0 ) ; if ( firstEntry == null ) { return null ; } // Note: we deliberately get a cloned copy here, since we will modify it. ModifiableHyperBoundingBox mbr = new ModifiableHyperBoundingBox ( firstEntry ) ; for ( int i = 1 ; i < numEntries ; i ++ ) { mbr . extend ( getEntry ( i ) ) ; } return mbr ; }
Recomputing the MBR is rather expensive .
109
10
156,792
public void applyCamera ( GL2 gl ) { // Setup projection. gl . glMatrixMode ( GL2 . GL_PROJECTION ) ; gl . glLoadIdentity ( ) ; glu . gluPerspective ( 45f , // fov, width / ( float ) height , // ratio 0.f , 10.f ) ; // near, far clipping eye [ 0 ] = ( float ) Math . sin ( theta ) * 2.f ; eye [ 1 ] = .5f ; eye [ 2 ] = ( float ) Math . cos ( theta ) * 2.f ; glu . gluLookAt ( eye [ 0 ] , eye [ 1 ] , eye [ 2 ] , // eye .0f , .0f , 0.f , // center 0.f , 1.f , 0.f ) ; // up gl . glMatrixMode ( GL2 . GL_MODELVIEW ) ; gl . glLoadIdentity ( ) ; gl . glViewport ( 0 , 0 , width , height ) ; }
Apply the camera settings .
222
5
156,793
private void linearScanBatchKNN ( ArrayDBIDs ids , List < KNNHeap > heaps ) { final DistanceQuery < O > dq = distanceQuery ; // The distance is computed on database IDs for ( DBIDIter iter = getRelation ( ) . getDBIDs ( ) . iter ( ) ; iter . valid ( ) ; iter . advance ( ) ) { int index = 0 ; for ( DBIDIter iter2 = ids . iter ( ) ; iter2 . valid ( ) ; iter2 . advance ( ) , index ++ ) { KNNHeap heap = heaps . get ( index ) ; heap . insert ( dq . distance ( iter2 , iter ) , iter ) ; } } }
Linear batch knn for arbitrary distance functions .
155
10
156,794
public static double [ ] [ ] computeWeightMatrix ( int bpp ) { final int dim = bpp * bpp * bpp ; final double [ ] [ ] m = new double [ dim ] [ dim ] ; // maximum occurring distance in manhattan between bins: final double max = 3. * ( bpp - 1. ) ; for ( int x = 0 ; x < dim ; x ++ ) { final int rx = ( x / bpp ) / bpp ; final int gx = ( x / bpp ) % bpp ; final int bx = x % bpp ; for ( int y = x ; y < dim ; y ++ ) { final int ry = ( y / bpp ) / bpp ; final int gy = ( y / bpp ) % bpp ; final int by = y % bpp ; final double dr = Math . abs ( rx - ry ) ; final double dg = Math . abs ( gx - gy ) ; final double db = Math . abs ( bx - by ) ; final double val = 1 - ( dr + dg + db ) / max ; m [ x ] [ y ] = m [ y ] [ x ] = val ; } } return m ; }
Compute weight matrix for a RGB color histogram
264
10
156,795
protected void initializeDataExtends ( Relation < NumberVector > relation , int dim , double [ ] min , double [ ] extend ) { assert ( min . length == dim && extend . length == dim ) ; // if no parameter for min max compute min max values for each dimension // from dataset if ( minima == null || maxima == null || minima . length == 0 || maxima . length == 0 ) { double [ ] [ ] minmax = RelationUtil . computeMinMax ( relation ) ; final double [ ] dmin = minmax [ 0 ] , dmax = minmax [ 1 ] ; for ( int d = 0 ; d < dim ; d ++ ) { min [ d ] = dmin [ d ] ; extend [ d ] = dmax [ d ] - dmin [ d ] ; } return ; } if ( minima . length == dim ) { System . arraycopy ( minima , 0 , min , 0 , dim ) ; } else if ( minima . length == 1 ) { Arrays . fill ( min , minima [ 0 ] ) ; } else { throw new AbortException ( "Invalid minima specified: expected " + dim + " got minima dimensionality: " + minima . length ) ; } if ( maxima . length == dim ) { for ( int d = 0 ; d < dim ; d ++ ) { extend [ d ] = maxima [ d ] - min [ d ] ; } return ; } else if ( maxima . length == 1 ) { for ( int d = 0 ; d < dim ; d ++ ) { extend [ d ] = maxima [ 0 ] - min [ d ] ; } return ; } else { throw new AbortException ( "Invalid maxima specified: expected " + dim + " got maxima dimensionality: " + maxima . length ) ; } }
Initialize the uniform sampling area .
393
7
156,796
static protected int countSharedNeighbors ( DBIDs neighbors1 , DBIDs neighbors2 ) { int intersection = 0 ; DBIDIter iter1 = neighbors1 . iter ( ) ; DBIDIter iter2 = neighbors2 . iter ( ) ; while ( iter1 . valid ( ) && iter2 . valid ( ) ) { final int comp = DBIDUtil . compare ( iter1 , iter2 ) ; if ( comp == 0 ) { intersection ++ ; iter1 . advance ( ) ; iter2 . advance ( ) ; } else if ( comp < 0 ) { iter1 . advance ( ) ; } else // iter2 < iter1 { iter2 . advance ( ) ; } } return intersection ; }
Compute the intersection size
148
5
156,797
protected static < O > DoubleIntPair [ ] rankReferencePoints ( DistanceQuery < O > distanceQuery , O obj , ArrayDBIDs referencepoints ) { DoubleIntPair [ ] priority = new DoubleIntPair [ referencepoints . size ( ) ] ; // Compute distances to reference points. for ( DBIDArrayIter iter = referencepoints . iter ( ) ; iter . valid ( ) ; iter . advance ( ) ) { final int i = iter . getOffset ( ) ; final double dist = distanceQuery . distance ( obj , iter ) ; priority [ i ] = new DoubleIntPair ( dist , i ) ; } Arrays . sort ( priority ) ; return priority ; }
Sort the reference points by distance to the query object
145
10
156,798
protected static void binarySearch ( ModifiableDoubleDBIDList index , DoubleDBIDListIter iter , double val ) { // Binary search. TODO: move this into the DoubleDBIDList class. int left = 0 , right = index . size ( ) ; while ( left < right ) { final int mid = ( left + right ) >>> 1 ; final double curd = iter . seek ( mid ) . doubleValue ( ) ; if ( val < curd ) { right = mid ; } else if ( val > curd ) { left = mid + 1 ; } else { left = mid ; break ; } } if ( left >= index . size ( ) ) { -- left ; } iter . seek ( left ) ; }
Seek an iterator to the desired position using binary search .
153
12
156,799
public Result runAlgorithms ( Database database ) { ResultHierarchy hier = database . getHierarchy ( ) ; if ( LOG . isStatistics ( ) ) { boolean first = true ; for ( It < Index > it = hier . iterDescendants ( database ) . filter ( Index . class ) ; it . valid ( ) ; it . advance ( ) ) { if ( first ) { LOG . statistics ( "Index statistics before running algorithms:" ) ; first = false ; } it . get ( ) . logStatistics ( ) ; } } stepresult = new BasicResult ( "Algorithm Step" , "algorithm-step" ) ; for ( Algorithm algorithm : algorithms ) { Thread . currentThread ( ) . setName ( algorithm . toString ( ) ) ; Duration duration = LOG . isStatistics ( ) ? LOG . newDuration ( algorithm . getClass ( ) . getName ( ) + ".runtime" ) . begin ( ) : null ; Result res = algorithm . run ( database ) ; if ( duration != null ) { LOG . statistics ( duration . end ( ) ) ; } if ( LOG . isStatistics ( ) ) { boolean first = true ; for ( It < Index > it = hier . iterDescendants ( database ) . filter ( Index . class ) ; it . valid ( ) ; it . advance ( ) ) { if ( first ) { LOG . statistics ( "Index statistics after running algorithm " + algorithm . toString ( ) + ":" ) ; first = false ; } it . get ( ) . logStatistics ( ) ; } } if ( res != null ) { // Make sure the result is attached, but usually this is a noop: hier . add ( database , res ) ; } } return stepresult ; }
Run algorithms .
366
3