idx
int64 0
165k
| question
stringlengths 73
4.15k
| target
stringlengths 5
918
| len_question
int64 21
890
| len_target
int64 3
255
|
|---|---|---|---|---|
157,700
|
/**
 * Remove the first (and only) entry stored under the given key, if present.
 *
 * @param key key to remove from the store
 */
public void remove(String key) {
  for(Iterator<Pair<String, ArrayList<String>>> iter = store.iterator(); iter.hasNext();) {
    if(key.equals(iter.next().first)) {
      iter.remove();
      return;
    }
  }
}
|
Remove a given key from the file .
| 74
| 8
|
157,701
|
/**
 * Look up the values stored under a key by linear scan of the store.
 *
 * @param key key to look up
 * @return the stored value list, or {@code null} if the key is not present
 */
public ArrayList<String> get(String key) {
  Iterator<Pair<String, ArrayList<String>>> iter = store.iterator();
  while(iter.hasNext()) {
    Pair<String, ArrayList<String>> entry = iter.next();
    if(key.equals(entry.first)) {
      return entry.second;
    }
  }
  return null;
}
|
Find a saved setting by key .
| 85
| 7
|
157,702
|
/**
 * Run the ORCLUS algorithm on the given relation: start with k_c >= k random
 * seeds, then alternately assign points to seeds, recompute each non-empty
 * cluster's subspace basis, and merge seeds while shrinking both the seed
 * count (factor alpha) and the cluster dimensionality (factor beta, derived
 * so both reach their targets together) until exactly k clusters remain.
 *
 * @param database the database (not referenced in the body; kept for API
 *        conformance — TODO confirm callers still need it)
 * @param relation the relation holding the vectors to cluster
 * @return the resulting clustering, one top-level cluster per seed
 * @throws IllegalStateException if the data dimensionality is below l
 */
public Clustering < Model > run ( Database database , Relation < V > relation ) { // current dimensionality associated with each seed int dim_c = RelationUtil . dimensionality ( relation ) ; if ( dim_c < l ) { throw new IllegalStateException ( "Dimensionality of data < parameter l! " + "(" + dim_c + " < " + l + ")" ) ; } // current number of seeds int k_c = Math . min ( relation . size ( ) , k_i * k ) ; // pick k0 > k points from the database List < ORCLUSCluster > clusters = initialSeeds ( relation , k_c ) ; double beta = FastMath . exp ( - FastMath . log ( dim_c / ( double ) l ) * FastMath . log ( 1 / alpha ) / FastMath . log ( k_c / ( double ) k ) ) ; IndefiniteProgress cprogress = LOG . isVerbose ( ) ? new IndefiniteProgress ( "Current number of clusters:" , LOG ) : null ; while ( k_c > k ) { // find partitioning induced by the seeds of the clusters assign ( relation , clusters ) ; // determine current subspace associated with each cluster for ( ORCLUSCluster cluster : clusters ) { if ( cluster . objectIDs . size ( ) > 0 ) { cluster . basis = findBasis ( relation , cluster , dim_c ) ; } } // reduce number of seeds and dimensionality associated with // each seed k_c = ( int ) Math . max ( k , k_c * alpha ) ; dim_c = ( int ) Math . max ( l , dim_c * beta ) ; merge ( relation , clusters , k_c , dim_c , cprogress ) ; if ( cprogress != null ) { cprogress . setProcessed ( clusters . size ( ) , LOG ) ; } } assign ( relation , clusters ) ; LOG . setCompleted ( cprogress ) ; // get the result Clustering < Model > r = new Clustering <> ( "ORCLUS clustering" , "orclus-clustering" ) ; for ( ORCLUSCluster c : clusters ) { r . addToplevelCluster ( new Cluster < Model > ( c . objectIDs , ClusterModel . CLUSTER ) ) ; } return r ; }
|
Performs the ORCLUS algorithm on the given database .
| 504
| 11
|
157,703
|
/**
 * Draw a random sample of size k from the relation and wrap each sampled
 * vector as an initial cluster seed.
 *
 * @param database the relation to sample from
 * @param k number of seeds to create
 * @return list of k initial seeds
 */
private List<ORCLUSCluster> initialSeeds(Relation<V> database, int k) {
  DBIDs sample = DBIDUtil.randomSample(database.getDBIDs(), k, rnd);
  List<ORCLUSCluster> result = new ArrayList<>(k);
  for(DBIDIter it = sample.iter(); it.valid(); it.advance()) {
    result.add(new ORCLUSCluster(database.get(it).toArray(), it));
  }
  return result;
}
|
Initializes the list of seeds with a random sample of size k .
| 126
| 14
|
157,704
|
/**
 * Partition the data: clear all clusters, project each cluster centroid into
 * its own subspace, assign every object to the cluster whose projected
 * centroid is closest (squared Euclidean, both object and centroid projected
 * into that cluster's subspace), then recompute the centroid of every
 * non-empty cluster from its members.
 *
 * NOTE(review): assumes {@code clusters} is non-empty — otherwise
 * {@code minCluster} stays null and the add below throws a NPE.
 *
 * @param database relation holding the vectors
 * @param clusters current cluster seeds, modified in place
 */
private void assign ( Relation < V > database , List < ORCLUSCluster > clusters ) { NumberVectorDistanceFunction < ? super V > distFunc = SquaredEuclideanDistanceFunction . STATIC ; // clear the current clusters for ( ORCLUSCluster cluster : clusters ) { cluster . objectIDs . clear ( ) ; } // projected centroids of the clusters List < NumberVector > projectedCentroids = new ArrayList <> ( clusters . size ( ) ) ; for ( ORCLUSCluster c : clusters ) { projectedCentroids . add ( DoubleVector . wrap ( project ( c , c . centroid ) ) ) ; } // for each data point o do for ( DBIDIter it = database . iterDBIDs ( ) ; it . valid ( ) ; it . advance ( ) ) { double [ ] o = database . get ( it ) . toArray ( ) ; double minDist = Double . POSITIVE_INFINITY ; ORCLUSCluster minCluster = null ; // determine projected distance between o and cluster for ( int i = 0 ; i < clusters . size ( ) ; i ++ ) { ORCLUSCluster c = clusters . get ( i ) ; NumberVector o_proj = DoubleVector . wrap ( project ( c , o ) ) ; double dist = distFunc . distance ( o_proj , projectedCentroids . get ( i ) ) ; if ( dist < minDist ) { minDist = dist ; minCluster = c ; } } // add p to the cluster with the least value of projected distance minCluster . objectIDs . add ( it ) ; } // recompute the seed in each clusters for ( ORCLUSCluster cluster : clusters ) { if ( cluster . objectIDs . size ( ) > 0 ) { cluster . centroid = Centroid . make ( database , cluster . objectIDs ) . toArray ( ) ; } } }
|
Creates a partitioning of the database by assigning each object to its closest seed .
| 406
| 17
|
157,705
|
/**
 * Greedily reduce the number of seeds to k_new: precompute the projected
 * energy of every cluster pair, then repeatedly merge the pair with minimum
 * energy. After each merge, cluster c_i is replaced by the merged cluster,
 * c_j is discarded, cached energies touching i or j are dropped, the
 * remaining pair indexes above j are shifted down by one, and the energies
 * involving the new merged cluster are recomputed.
 *
 * @param relation relation holding the vectors
 * @param clusters current clusters; shrunk in place to k_new entries
 * @param k_new target number of seeds
 * @param d_new subspace dimensionality used for the merged clusters
 * @param cprogress progress logger, may be null
 */
private void merge ( Relation < V > relation , List < ORCLUSCluster > clusters , int k_new , int d_new , IndefiniteProgress cprogress ) { ArrayList < ProjectedEnergy > projectedEnergies = new ArrayList <> ( ( clusters . size ( ) * ( clusters . size ( ) - 1 ) ) >>> 1 ) ; for ( int i = 0 ; i < clusters . size ( ) ; i ++ ) { for ( int j = i + 1 ; j < clusters . size ( ) ; j ++ ) { // projected energy of c_ij in subspace e_ij ORCLUSCluster c_i = clusters . get ( i ) ; ORCLUSCluster c_j = clusters . get ( j ) ; projectedEnergies . add ( projectedEnergy ( relation , c_i , c_j , i , j , d_new ) ) ; } } while ( clusters . size ( ) > k_new ) { if ( cprogress != null ) { cprogress . setProcessed ( clusters . size ( ) , LOG ) ; } // find the smallest value of r_ij ProjectedEnergy minPE = Collections . min ( projectedEnergies ) ; // renumber the clusters by replacing cluster c_i with cluster c_ij // and discarding cluster c_j for ( int c = 0 ; c < clusters . size ( ) ; c ++ ) { if ( c == minPE . i ) { clusters . remove ( c ) ; clusters . add ( c , minPE . cluster ) ; } if ( c == minPE . j ) { clusters . remove ( c ) ; } } // remove obsolete projected energies and renumber the others ... int i = minPE . i , j = minPE . j ; for ( Iterator < ProjectedEnergy > it = projectedEnergies . iterator ( ) ; it . hasNext ( ) ; ) { ProjectedEnergy pe = it . next ( ) ; if ( pe . i == i || pe . i == j || pe . j == i || pe . j == j ) { it . remove ( ) ; } else { if ( pe . i > j ) { pe . i -= 1 ; } if ( pe . j > j ) { pe . j -= 1 ; } } } // ... and recompute them ORCLUSCluster c_ij = minPE . cluster ; for ( int c = 0 ; c < clusters . size ( ) ; c ++ ) { if ( c < i ) { projectedEnergies . add ( projectedEnergy ( relation , clusters . get ( c ) , c_ij , c , i , d_new ) ) ; } else if ( c > i ) { projectedEnergies . add ( projectedEnergy ( relation , clusters . get ( c ) , c_ij , i , c , d_new ) ) ; } } } }
|
Reduces the number of seeds to k_new
| 607
| 10
|
157,706
|
private ProjectedEnergy projectedEnergy ( Relation < V > relation , ORCLUSCluster c_i , ORCLUSCluster c_j , int i , int j , int dim ) { NumberVectorDistanceFunction < ? super V > distFunc = SquaredEuclideanDistanceFunction . STATIC ; // union of cluster c_i and c_j ORCLUSCluster c_ij = union ( relation , c_i , c_j , dim ) ; double sum = 0. ; NumberVector c_proj = DoubleVector . wrap ( project ( c_ij , c_ij . centroid ) ) ; for ( DBIDIter iter = c_ij . objectIDs . iter ( ) ; iter . valid ( ) ; iter . advance ( ) ) { NumberVector o_proj = DoubleVector . wrap ( project ( c_ij , relation . get ( iter ) . toArray ( ) ) ) ; sum += distFunc . distance ( o_proj , c_proj ) ; } sum /= c_ij . objectIDs . size ( ) ; return new ProjectedEnergy ( i , j , c_ij , sum ) ; }
|
Computes the projected energy of the specified clusters . The projected energy is given by the mean square distance of the points to the centroid of the union cluster c when all points in c are projected to the subspace of c .
| 247
| 46
|
157,707
|
/**
 * Build the union of two clusters: combined, deduplicated object ids (via a
 * hash set, then converted back to an array), a centroid recomputed from the
 * members and a fresh basis of dimensionality dim. If the union is empty,
 * the centroid falls back to the midpoint of the two old centroids and the
 * basis to the identity.
 *
 * @param relation relation holding the vectors
 * @param c1 first cluster
 * @param c2 second cluster
 * @param dim subspace dimensionality for the merged basis
 * @return the merged cluster
 */
private ORCLUSCluster union ( Relation < V > relation , ORCLUSCluster c1 , ORCLUSCluster c2 , int dim ) { ORCLUSCluster c = new ORCLUSCluster ( ) ; c . objectIDs = DBIDUtil . newHashSet ( c1 . objectIDs ) ; c . objectIDs . addDBIDs ( c2 . objectIDs ) ; c . objectIDs = DBIDUtil . newArray ( c . objectIDs ) ; if ( c . objectIDs . size ( ) > 0 ) { c . centroid = Centroid . make ( relation , c . objectIDs ) . getArrayRef ( ) ; c . basis = findBasis ( relation , c , dim ) ; } else { c . centroid = timesEquals ( plusEquals ( c1 . centroid , c2 . centroid ) , .5 ) ; c . basis = identity ( dim , c . centroid . length ) ; } return c ; }
|
Returns the union of the two specified clusters .
| 209
| 9
|
157,708
|
/**
 * Initialize the nearest-neighbor cache from a lower-triangular distance
 * matrix stored row-by-row in {@code scratch}: afterwards bestd[i] holds the
 * smallest distance seen for object i and besti[i] the matching neighbor
 * index (-1 if none).
 *
 * @param scratch linearized lower triangle of the distance matrix
 * @param bestd output: nearest-neighbor distance per object
 * @param besti output: nearest-neighbor index per object
 */
private static void initializeNNCache(double[] scratch, double[] bestd, int[] besti) {
  final int size = bestd.length;
  Arrays.fill(bestd, Double.POSITIVE_INFINITY);
  Arrays.fill(besti, -1);
  int p = 0;
  for(int x = 0; x < size; x++) {
    assert ( p == MatrixParadigm . triangleSize ( x ) ) ;
    double dx = Double.POSITIVE_INFINITY;
    int ix = -1;
    for(int y = 0; y < x; y++, p++) {
      final double v = scratch[p];
      // Row entry may improve y's cached neighbor ...
      if(v < bestd[y]) {
        bestd[y] = v;
        besti[y] = x;
      }
      // ... and x's own running minimum.
      if(v < dx) {
        dx = v;
        ix = y;
      }
    }
    bestd[x] = dx;
    besti[x] = ix;
  }
}
|
Initialize the NN cache .
| 216
| 7
|
157,709
|
/**
 * Find and perform the next merge: scan the nearest-neighbor cache for the
 * globally minimum distance, skipping entries with besti[cx] < 0 (objects
 * already absorbed into another cluster), preferring later candidates on
 * ties, then delegate the actual merge to {@link #merge}.
 *
 * @param size number of objects
 * @param mat distance matrix storage
 * @param bestd cached nearest-neighbor distances
 * @param besti cached nearest-neighbor indexes (-1 = inactive)
 * @param builder hierarchy builder receiving the merge
 * @return the index x that was merged away
 */
protected int findMerge ( int size , MatrixParadigm mat , double [ ] bestd , int [ ] besti , PointerHierarchyRepresentationBuilder builder ) { double mindist = Double . POSITIVE_INFINITY ; int x = - 1 , y = - 1 ; // Find minimum: for ( int cx = 0 ; cx < size ; cx ++ ) { // Skip if object has already joined a cluster: final int cy = besti [ cx ] ; if ( cy < 0 ) { continue ; } final double dist = bestd [ cx ] ; if ( dist <= mindist ) { // Prefer later on ==, to truncate more often. mindist = dist ; x = cx ; y = cy ; } } assert ( x >= 0 && y >= 0 ) ; assert ( y < x ) ; // We could swap otherwise, but this shouldn't arise. merge ( size , mat , bestd , besti , builder , mindist , x , y ) ; return x ; }
|
Perform the next merge step .
| 214
| 7
|
157,710
|
/**
 * Execute the merge of clusters x and y (requires y < x; y is kept, x is
 * dropped): record the merge in the builder with the restored linkage
 * distance, accumulate the cluster size on y, deactivate x in the NN cache,
 * update the matrix row/cache entries, and re-derive y's nearest neighbor if
 * it pointed at the removed x.
 *
 * @param size number of objects
 * @param mat matrix storage (its iterators are reused to avoid allocation)
 * @param bestd cached nearest-neighbor distances
 * @param besti cached nearest-neighbor indexes
 * @param builder hierarchy builder
 * @param mindist merge distance (as stored, possibly squared)
 * @param x cluster to drop
 * @param y cluster to keep (y < x)
 */
protected void merge ( int size , MatrixParadigm mat , double [ ] bestd , int [ ] besti , PointerHierarchyRepresentationBuilder builder , double mindist , int x , int y ) { // Avoid allocating memory, by reusing existing iterators: final DBIDArrayIter ix = mat . ix . seek ( x ) , iy = mat . iy . seek ( y ) ; if ( LOG . isDebuggingFine ( ) ) { LOG . debugFine ( "Merging: " + DBIDUtil . toString ( ix ) + " -> " + DBIDUtil . toString ( iy ) + " " + mindist ) ; } // Perform merge in data structure: x -> y assert ( y < x ) ; // Since y < x, prefer keeping y, dropping x. builder . add ( ix , linkage . restore ( mindist , getDistanceFunction ( ) . isSquared ( ) ) , iy ) ; // Update cluster size for y: final int sizex = builder . getSize ( ix ) , sizey = builder . getSize ( iy ) ; builder . setSize ( iy , sizex + sizey ) ; // Deactivate x in cache: besti [ x ] = - 1 ; // Note: this changes iy. updateMatrix ( size , mat . matrix , iy , bestd , besti , builder , mindist , x , y , sizex , sizey ) ; if ( besti [ y ] == x ) { findBest ( size , mat . matrix , bestd , besti , y ) ; } }
|
Execute the cluster merge .
| 350
| 6
|
157,711
|
private void updateCache ( int size , double [ ] scratch , double [ ] bestd , int [ ] besti , int x , int y , int j , double d ) { // New best if ( d <= bestd [ j ] ) { bestd [ j ] = d ; besti [ j ] = y ; return ; } // Needs slow update. if ( besti [ j ] == x || besti [ j ] == y ) { findBest ( size , scratch , bestd , besti , j ) ; } }
|
Update the cache .
| 113
| 4
|
157,712
|
/**
 * Create a new visualization context, attaching a random sampling result to
 * every relation that exceeds the configured sample size and does not have
 * one yet. Sampling is skipped entirely when samplesize == 0.
 *
 * @param hier result hierarchy
 * @param start starting result
 * @return the new visualizer context
 */
public VisualizerContext newContext(ResultHierarchy hier, Result start) {
  if(samplesize != 0) {
    for(Relation<?> rel : ResultUtil.filterResults(hier, Relation.class)) {
      // Skip relations that already carry a sampling result.
      if(!ResultUtil.filterResults(hier, rel, SamplingResult.class).isEmpty()) {
        continue;
      }
      if(rel.size() > samplesize) {
        SamplingResult sample = new SamplingResult(rel);
        sample.setSample(DBIDUtil.randomSample(sample.getSample(), samplesize, rnd));
        ResultUtil.addChildResult(rel, sample);
      }
    }
  }
  return new VisualizerContext(hier, start, stylelib, factories);
}
|
Make a new visualization context
| 182
| 5
|
157,713
|
/**
 * Try to generate a title of the form "Algorithm using Distance on dataset"
 * from the tracked parameter settings attached to the result: the algorithm
 * class, the distance function and the input file, in that order, each part
 * included only if the corresponding parameter was found.
 *
 * @param db database (not referenced in the body — TODO confirm callers)
 * @param result result whose settings are inspected
 * @return the generated title, or null if no relevant setting was found
 */
public static String getTitle ( Database db , Result result ) { List < TrackedParameter > settings = new ArrayList <> ( ) ; for ( SettingsResult sr : SettingsResult . getSettingsResults ( result ) ) { settings . addAll ( sr . getSettings ( ) ) ; } String algorithm = null ; String distance = null ; String dataset = null ; for ( TrackedParameter setting : settings ) { Parameter < ? > param = setting . getParameter ( ) ; OptionID option = param . getOptionID ( ) ; String value = param . isDefined ( ) ? param . getValueAsString ( ) : null ; if ( option . equals ( AlgorithmStep . Parameterizer . ALGORITHM_ID ) ) { algorithm = value ; } if ( option . equals ( DistanceBasedAlgorithm . DISTANCE_FUNCTION_ID ) ) { distance = value ; } if ( option . equals ( FileBasedDatabaseConnection . Parameterizer . INPUT_ID ) ) { dataset = value ; } } StringBuilder buf = new StringBuilder ( ) ; if ( algorithm != null ) { buf . append ( shortenClassname ( algorithm . split ( "," ) [ 0 ] , ' ' ) ) ; } if ( distance != null ) { if ( buf . length ( ) > 0 ) { buf . append ( " using " ) ; } buf . append ( shortenClassname ( distance , ' ' ) ) ; } if ( dataset != null ) { if ( buf . length ( ) > 0 ) { buf . append ( " on " ) ; } buf . append ( shortenClassname ( dataset , File . separatorChar ) ) ; } if ( buf . length ( ) > 0 ) { return buf . toString ( ) ; } return null ; }
|
Try to automatically generate a title for this .
| 375
| 9
|
157,714
|
/**
 * Strip everything up to and including the last occurrence of the separator
 * character, e.g. a package prefix or a directory path.
 *
 * @param nam name to shorten
 * @param c separator character (e.g. '.' or the file separator)
 * @return the suffix after the last separator, or the input if none occurs
 */
protected static String shortenClassname(String nam, char c) {
  final int cut = nam.lastIndexOf(c);
  return cut < 0 ? nam : nam.substring(cut + 1);
}
|
Shorten the class name .
| 59
| 6
|
157,715
|
/**
 * Determine the most general restriction class of an option: start with the
 * restriction of the first parameter, then widen it whenever another
 * ClassParameter or ClassListParameter registered for the same option id has
 * a strictly more general restriction (a proper superclass of the current
 * candidate).
 *
 * @param oid option id to inspect
 * @param firstopt first parameter seen for this option
 * @param byopt all (parameter, owner) pairs, grouped by option id
 * @return the most general restriction class found
 */
private static Class < ? > getRestrictionClass ( OptionID oid , final Parameter < ? > firstopt , Map < OptionID , List < Pair < Parameter < ? > , Class < ? > > > > byopt ) { Class < ? > superclass = getRestrictionClass ( firstopt ) ; // Also look for more general restrictions: for ( Pair < Parameter < ? > , Class < ? > > clinst : byopt . get ( oid ) ) { if ( clinst . getFirst ( ) instanceof ClassParameter ) { ClassParameter < ? > cls = ( ClassParameter < ? > ) clinst . getFirst ( ) ; if ( ! cls . getRestrictionClass ( ) . equals ( superclass ) && cls . getRestrictionClass ( ) . isAssignableFrom ( superclass ) ) { superclass = cls . getRestrictionClass ( ) ; } } if ( clinst . getFirst ( ) instanceof ClassListParameter ) { ClassListParameter < ? > cls = ( ClassListParameter < ? > ) clinst . getFirst ( ) ; if ( ! cls . getRestrictionClass ( ) . equals ( superclass ) && cls . getRestrictionClass ( ) . isAssignableFrom ( superclass ) ) { superclass = cls . getRestrictionClass ( ) ; } } } return superclass ; }
|
Get the restriction class of an option .
| 298
| 8
|
157,716
|
/**
 * Return a sorted copy of a collection; the input is left untouched.
 *
 * @param <T> element type
 * @param cls collection to copy and sort
 * @param c comparator defining the order
 * @return a new ArrayList containing the elements in sorted order
 */
private static <T> ArrayList<T> sorted(Collection<T> cls, Comparator<? super T> c) {
  final ArrayList<T> copy = new ArrayList<>(cls);
  copy.sort(c);
  return copy;
}
|
Sort a collection of classes .
| 57
| 6
|
157,717
|
/**
 * Handle a hover event: the tooltip element is expected to be the next
 * sibling of the event target; toggle it according to the event type.
 * Unexpected DOM shapes are logged as warnings.
 *
 * @param evt the DOM event to process
 */
protected void handleHoverEvent(Event evt) {
  if(!(evt.getTarget() instanceof Element)) {
    LoggingUtil.warning("Got event for non-Element?!?");
    return;
  }
  final Node sibling = ((Element) evt.getTarget()).getNextSibling();
  if(sibling instanceof Element) {
    toggleTooltip((Element) sibling, evt.getType());
  }
  else {
    LoggingUtil.warning("Tooltip sibling not found.");
  }
}
|
Handle the hover events .
| 121
| 5
|
157,718
|
/**
 * Toggle the visibility state of a tooltip element via its CSS class:
 * mouseover shows a hidden tooltip, mouseout hides a visible one, and a
 * click pins (sticky) or unpins it.
 *
 * @param elem tooltip element
 * @param type SVG event type (mouseover, mouseout or click)
 */
protected void toggleTooltip(Element elem, String type) {
  // Read the state once; all decisions below are based on this snapshot.
  final String css = elem.getAttribute(SVGConstants.SVG_CLASS_ATTRIBUTE);
  if(SVGConstants.SVG_MOUSEOVER_EVENT_TYPE.equals(type)) {
    if(TOOLTIP_HIDDEN.equals(css)) {
      SVGUtil.setAtt(elem, SVGConstants.SVG_CLASS_ATTRIBUTE, TOOLTIP_VISIBLE);
    }
  }
  else if(SVGConstants.SVG_MOUSEOUT_EVENT_TYPE.equals(type)) {
    if(TOOLTIP_VISIBLE.equals(css)) {
      SVGUtil.setAtt(elem, SVGConstants.SVG_CLASS_ATTRIBUTE, TOOLTIP_HIDDEN);
    }
  }
  else if(SVGConstants.SVG_CLICK_EVENT_TYPE.equals(type)) {
    // A click unpins a sticky tooltip ...
    if(TOOLTIP_STICKY.equals(css)) {
      SVGUtil.setAtt(elem, SVGConstants.SVG_CLASS_ATTRIBUTE, TOOLTIP_HIDDEN);
    }
    // ... and pins a hidden or merely-hovering one.
    if(TOOLTIP_HIDDEN.equals(css) || TOOLTIP_VISIBLE.equals(css)) {
      SVGUtil.setAtt(elem, SVGConstants.SVG_CLASS_ATTRIBUTE, TOOLTIP_STICKY);
    }
  }
}
|
Toggle the Tooltip of an element .
| 346
| 9
|
157,719
|
/**
 * Reverse k-nearest-neighbor query: traverse the MkApp tree best-first via a
 * priority queue; descend into a directory entry only if the minimum
 * distance to its covering ball does not exceed the entry's approximated kNN
 * distance (exponentiated when the approximation was fit in log space,
 * clamped to be non-negative), and report a leaf object if its actual
 * distance to the query is within its approximated kNN distance.
 *
 * NOTE(review): the result is appended in traversal order and not sorted
 * here — confirm whether callers rely on ascending distance order.
 *
 * @param id query object id
 * @param k number of neighbors defining the rkNN condition
 * @return the (approximate) reverse kNN of the query object
 */
@ Override public DoubleDBIDList reverseKNNQuery ( DBIDRef id , int k ) { ModifiableDoubleDBIDList result = DBIDUtil . newDistanceDBIDList ( ) ; final Heap < MTreeSearchCandidate > pq = new UpdatableHeap <> ( ) ; // push root pq . add ( new MTreeSearchCandidate ( 0. , getRootID ( ) , null , Double . NaN ) ) ; // search in tree while ( ! pq . isEmpty ( ) ) { MTreeSearchCandidate pqNode = pq . poll ( ) ; // FIXME: cache the distance to the routing object in the queue node! MkAppTreeNode < O > node = getNode ( pqNode . nodeID ) ; // directory node if ( ! node . isLeaf ( ) ) { for ( int i = 0 ; i < node . getNumEntries ( ) ; i ++ ) { MkAppEntry entry = node . getEntry ( i ) ; double distance = distance ( entry . getRoutingObjectID ( ) , id ) ; double minDist = ( entry . getCoveringRadius ( ) > distance ) ? 0. : distance - entry . getCoveringRadius ( ) ; double approxValue = settings . log ? FastMath . exp ( entry . approximatedValueAt ( k ) ) : entry . approximatedValueAt ( k ) ; if ( approxValue < 0 ) { approxValue = 0 ; } if ( minDist <= approxValue ) { pq . add ( new MTreeSearchCandidate ( minDist , getPageID ( entry ) , entry . getRoutingObjectID ( ) , Double . NaN ) ) ; } } } // data node else { for ( int i = 0 ; i < node . getNumEntries ( ) ; i ++ ) { MkAppLeafEntry entry = ( MkAppLeafEntry ) node . getEntry ( i ) ; double distance = distance ( entry . getRoutingObjectID ( ) , id ) ; double approxValue = settings . log ? FastMath . exp ( entry . approximatedValueAt ( k ) ) : entry . approximatedValueAt ( k ) ; if ( approxValue < 0 ) { approxValue = 0 ; } if ( distance <= approxValue ) { result . add ( distance , entry . getRoutingObjectID ( ) ) ; } } } } return result ; }
|
Performs a reverse k - nearest neighbor query for the given object ID . The query result is in ascending order of the distance to the query object .
| 516
| 30
|
157,720
|
/**
 * Collect the DBIDs of all leaf entries in the subtree rooted at the given
 * node, by recursive depth-first descent.
 *
 * @param node subtree root
 * @param result output set receiving the leaf entry ids
 */
private void leafEntryIDs(MkAppTreeNode<O> node, ModifiableDBIDs result) {
  final int n = node.getNumEntries();
  if(node.isLeaf()) {
    for(int i = 0; i < n; i++) {
      result.add(((LeafEntry) node.getEntry(i)).getDBID());
    }
    return;
  }
  for(int i = 0; i < n; i++) {
    leafEntryIDs(getNode(node.getEntry(i)), result);
  }
}
|
Determines the ids of the leaf entries stored in the specified subtree .
| 148
| 17
|
157,721
|
/**
 * Fit a polynomial of degree settings.p to the kNN distance sequence,
 * optionally in log-log space. In log mode, leading zero distances are
 * counted (k_0) and skipped, since log(0) is undefined.
 *
 * NOTE(review): in log mode with k_0 == 0 the first abscissa is
 * FastMath.log(0) == -Infinity — confirm this is handled by the regression.
 * Also note the regression is constructed as (y, x) — verify the expected
 * argument order of PolynomialRegression.
 *
 * @param knnDistances the distances to the 1..kmax nearest neighbors
 * @return the polynomial approximation of the kNN distances
 */
private PolynomialApproximation approximateKnnDistances ( double [ ] knnDistances ) { StringBuilder msg = new StringBuilder ( ) ; // count the zero distances (necessary of log-log space is used) int k_0 = 0 ; if ( settings . log ) { for ( int i = 0 ; i < settings . kmax ; i ++ ) { double dist = knnDistances [ i ] ; if ( dist == 0 ) { k_0 ++ ; } else { break ; } } } double [ ] x = new double [ settings . kmax - k_0 ] ; double [ ] y = new double [ settings . kmax - k_0 ] ; for ( int k = 0 ; k < settings . kmax - k_0 ; k ++ ) { if ( settings . log ) { x [ k ] = FastMath . log ( k + k_0 ) ; y [ k ] = FastMath . log ( knnDistances [ k + k_0 ] ) ; } else { x [ k ] = k + k_0 ; y [ k ] = knnDistances [ k + k_0 ] ; } } PolynomialRegression regression = new PolynomialRegression ( y , x , settings . p ) ; PolynomialApproximation approximation = new PolynomialApproximation ( regression . getEstimatedCoefficients ( ) ) ; if ( LOG . isDebugging ( ) ) { msg . append ( "approximation " ) . append ( approximation ) ; LOG . debugFine ( msg . toString ( ) ) ; } return approximation ; }
|
Computes the polynomial approximation of the specified knn - distances .
| 345
| 15
|
157,722
|
protected final int isLeft ( double [ ] a , double [ ] b , double [ ] o ) { final double cross = getRX ( a , o ) * getRY ( b , o ) - getRY ( a , o ) * getRX ( b , o ) ; if ( cross == 0 ) { // Compare manhattan distances - same angle! final double dista = Math . abs ( getRX ( a , o ) ) + Math . abs ( getRY ( a , o ) ) ; final double distb = Math . abs ( getRX ( b , o ) ) + Math . abs ( getRY ( b , o ) ) ; return Double . compare ( dista , distb ) ; } return Double . compare ( cross , 0 ) ; }
|
Test whether a point is left of the other wrt . the origin .
| 164
| 15
|
157,723
|
/**
 * Manhattan (L1) distance between two 2D points.
 *
 * @param a first point as {x, y}
 * @param b second point as {x, y}
 * @return |a.x - b.x| + |a.y - b.y|
 */
private double mdist(double[] a, double[] b) {
  final double dx = Math.abs(a[0] - b[0]);
  final double dy = Math.abs(a[1] - b[1]);
  return dx + dy;
}
|
Manhattan distance .
| 48
| 4
|
157,724
|
/**
 * Convexity test for the corner (a, b, c) via the signed doubled triangle
 * area; {@code factor} rescales the products to improve numerical contrast
 * for very small polygons. Near-degenerate (collinear) triples, with |area|
 * below 1e-13, fall back to a Manhattan-distance comparison instead.
 *
 * @param a first point
 * @param b second point
 * @param c third point
 * @return true if the corner is considered convex
 */
private boolean isConvex ( double [ ] a , double [ ] b , double [ ] c ) { // We're using factor to improve numerical contrast for small polygons. double area = ( b [ 0 ] - a [ 0 ] ) * factor * ( c [ 1 ] - a [ 1 ] ) - ( c [ 0 ] - a [ 0 ] ) * factor * ( b [ 1 ] - a [ 1 ] ) ; return ( - 1e-13 < area && area < 1e-13 ) ? ( mdist ( b , c ) > mdist ( a , b ) + mdist ( a , c ) ) : ( area < 0 ) ; }
|
Simple convexity test .
| 143
| 6
|
157,725
|
/**
 * Graham scan main loop: seed a stack with the first point and the first
 * subsequent point at nonzero Manhattan distance from it, then for each
 * further candidate pop stack points that are duplicates (distance 0) or
 * fail the convexity test, before pushing the candidate. The surviving
 * stack replaces the point list. No-op for fewer than 3 points.
 */
private void grahamScan ( ) { if ( points . size ( ) < 3 ) { return ; } Iterator < double [ ] > iter = points . iterator ( ) ; Stack < double [ ] > stack = new Stack <> ( ) ; // Start with the first two points on the stack final double [ ] first = iter . next ( ) ; stack . add ( first ) ; while ( iter . hasNext ( ) ) { double [ ] n = iter . next ( ) ; if ( mdist ( first , n ) > 0 ) { stack . add ( n ) ; break ; } } while ( iter . hasNext ( ) ) { double [ ] next = iter . next ( ) ; double [ ] curr = stack . pop ( ) ; double [ ] prev = stack . peek ( ) ; while ( ( stack . size ( ) > 1 ) && ( mdist ( curr , next ) == 0 || ! isConvex ( prev , curr , next ) ) ) { curr = stack . pop ( ) ; prev = stack . peek ( ) ; } stack . add ( curr ) ; stack . add ( next ) ; } points = stack ; }
|
The actual graham scan main loop .
| 249
| 8
|
157,726
|
/**
 * Return the convex hull as a polygon, lazily (re)computing it first when
 * the cached state is stale ({@code ok} unset), together with the bounding
 * box extrema of the points.
 *
 * @return the hull polygon
 */
public Polygon getHull ( ) { if ( ! ok ) { computeConvexHull ( ) ; } return new Polygon ( points , minmaxX . getMin ( ) , minmaxX . getMax ( ) , minmaxY . getMin ( ) , minmaxY . getMax ( ) ) ; }
|
Compute the convex hull and return the resulting polygon .
| 70
| 13
|
157,727
|
/**
 * Compute the cover radius of the partition containing point i: the maximum
 * distance from i to any other point assigned to the same partition.
 *
 * @param matrix pairwise distance matrix
 * @param idx partition assignment per point
 * @param i point whose partition is measured
 * @return the largest distance from i to a co-partition member (0 if alone)
 */
private static double coverRadius(double[][] matrix, int[] idx, int i) {
  final int part = idx[i];
  final double[] row = matrix[i];
  double radius = 0;
  for(int j = 0; j < row.length; j++) {
    if(j != i && idx[j] == part) {
      radius = Math.max(radius, row[j]);
    }
  }
  return radius;
}
|
Find the cover radius of a partition .
| 119
| 8
|
157,728
|
/**
 * Partition the data via the minimum spanning tree: for every MST edge whose
 * length reaches the threshold (median edge length), tentatively omit it,
 * derive the connected components via union-find, and keep the split whose
 * smallest component is largest (ties broken in favor of the longer omitted
 * edge).
 *
 * @param matrix pairwise distance matrix
 * @return best partition assignment (component representative per point)
 */
private static int [ ] mstPartition ( double [ ] [ ] matrix ) { final int n = matrix . length ; int [ ] edges = PrimsMinimumSpanningTree . processDense ( matrix ) ; // Note: Prims does *not* yield edges sorted by edge length! double meanlength = thresholdLength ( matrix , edges ) ; int [ ] idx = new int [ n ] , best = new int [ n ] , sizes = new int [ n ] ; int bestsize = - 1 ; double bestlen = 0 ; for ( int omit = n - 2 ; omit > 0 ; -- omit ) { final double len = edgelength ( matrix , edges , omit ) ; if ( len < meanlength ) { continue ; } omitEdge ( edges , idx , sizes , omit ) ; // Finalize array: int minsize = n ; for ( int i = 0 ; i < n ; i ++ ) { int j = idx [ i ] = follow ( i , idx ) ; if ( j == i && sizes [ i ] < minsize ) { minsize = sizes [ i ] ; } } if ( minsize > bestsize || ( minsize == bestsize && len > bestlen ) ) { bestsize = minsize ; bestlen = len ; System . arraycopy ( idx , 0 , best , 0 , n ) ; } } return best ; }
|
Partition the data using the minimum spanning tree .
| 295
| 11
|
157,729
|
private static double thresholdLength ( double [ ] [ ] matrix , int [ ] edges ) { double [ ] lengths = new double [ edges . length >> 1 ] ; for ( int i = 0 , e = edges . length - 1 ; i < e ; i += 2 ) { lengths [ i >> 1 ] = matrix [ edges [ i ] ] [ edges [ i + 1 ] ] ; } Arrays . sort ( lengths ) ; final int pos = ( lengths . length >> 1 ) ; // 50% return lengths [ pos ] ; }
|
Choose the threshold length of edges to consider omitting .
| 112
| 12
|
157,730
|
/**
 * Length of MST edge i, read from the distance matrix.
 *
 * @param matrix pairwise distance matrix
 * @param edges MST edges as a flat (a, b) pair list
 * @param i edge index
 * @return the length of the i-th edge
 */
private static double edgelength(double[][] matrix, int[] edges, int i) {
  final int p = i << 1;
  return matrix[edges[p]][edges[p + 1]];
}
|
Length of edge i .
| 46
| 5
|
157,731
|
/**
 * Compute the partition obtained by omitting one MST edge: reset the
 * union-find parents to singletons and all sizes to 1, then union the
 * endpoints of every edge except edge {@code omit}, always anchoring on the
 * smaller endpoint index and accumulating component sizes on the winner's
 * representative.
 *
 * @param edges MST edges as a flat (a, b) pair list
 * @param idx union-find parent array (output, reset here)
 * @param sizes component sizes (output, reset here)
 * @param omit index of the edge to leave out
 */
private static void omitEdge ( int [ ] edges , int [ ] idx , int [ ] sizes , int omit ) { for ( int i = 0 ; i < idx . length ; i ++ ) { idx [ i ] = i ; } Arrays . fill ( sizes , 1 ) ; for ( int i = 0 , j = 0 , e = edges . length - 1 ; j < e ; i ++ , j += 2 ) { if ( i == omit ) { continue ; } int ea = edges [ j + 1 ] , eb = edges [ j ] ; if ( eb < ea ) { // Swap int tmp = eb ; eb = ea ; ea = tmp ; } final int pa = follow ( ea , idx ) , pb = follow ( eb , idx ) ; assert ( pa != pb ) : "Must be disjoint - MST inconsistent." ; sizes [ idx [ pa ] ] += sizes [ idx [ pb ] ] ; idx [ pb ] = idx [ pa ] ; } }
|
Partition the data by omitting one edge .
| 226
| 10
|
157,732
|
/**
 * Union-find "find" with path compression: follow parent pointers until a
 * self-referencing root is reached, re-pointing each visited node at its
 * grandparent along the way.
 *
 * @param i element to resolve
 * @param partitions parent-pointer array, compressed in place
 * @return the representative (root) of i's partition
 */
private static int follow ( int i , int [ ] partitions ) { int next = partitions [ i ] , tmp ; while ( i != next ) { tmp = next ; next = partitions [ i ] = partitions [ next ] ; i = tmp ; } return i ; }
|
Union - find with simple path compression .
| 56
| 8
|
157,733
|
/**
 * Recompute the arithmetic mean vector of the given ids into the provided
 * buffer (overwritten in place).
 *
 * @param centroid output buffer; its length defines the dimensionality
 * @param relation relation holding the vectors
 * @param ids ids of the objects to average
 */
private static void computeCentroid(double[] centroid, Relation<? extends NumberVector> relation, DBIDs ids) {
  final int dim = centroid.length;
  Arrays.fill(centroid, 0);
  for(DBIDIter iter = ids.iter(); iter.valid(); iter.advance()) {
    final NumberVector vec = relation.get(iter);
    for(int d = 0; d < dim; d++) {
      centroid[d] += vec.doubleValue(d);
    }
  }
  timesEquals(centroid, 1. / ids.size());
}
|
Recompute the centroid of a set .
| 136
| 10
|
157,734
|
/**
 * Get a distance query for the given distance function, automatically
 * choosing a suitable relation by the function's input type restriction.
 *
 * @param <O> object type
 * @param database database to query
 * @param distanceFunction distance function to use
 * @param hints optimization hints
 * @return the distance query
 */
public static <O> DistanceQuery<O> getDistanceQuery(Database database, DistanceFunction<? super O> distanceFunction, Object... hints) {
  final Relation<O> relation = database.getRelation(distanceFunction.getInputTypeRestriction(), hints);
  return database.getDistanceQuery(relation, distanceFunction, hints);
}
|
Get a distance query for a given distance function automatically choosing a relation .
| 78
| 14
|
157,735
|
/**
 * Get a similarity query for the given similarity function, automatically
 * choosing a suitable relation by the function's input type restriction.
 *
 * @param <O> object type
 * @param database database to query
 * @param similarityFunction similarity function to use
 * @param hints optimization hints
 * @return the similarity query
 */
public static <O> SimilarityQuery<O> getSimilarityQuery(Database database, SimilarityFunction<? super O> similarityFunction, Object... hints) {
  final Relation<O> relation = database.getRelation(similarityFunction.getInputTypeRestriction(), hints);
  return database.getSimilarityQuery(relation, similarityFunction, hints);
}
|
Get a similarity query automatically choosing a relation .
| 82
| 9
|
157,736
|
/**
 * Get a reverse-kNN query object for the given distance function on a
 * relation, creating the underlying distance query first.
 *
 * @param <O> object type
 * @param relation relation to query
 * @param distanceFunction distance function to use
 * @param hints optimization hints
 * @return the rkNN query
 */
public static <O> RKNNQuery<O> getRKNNQuery(Relation<O> relation, DistanceFunction<? super O> distanceFunction, Object... hints) {
  final DistanceQuery<O> dq = relation.getDistanceQuery(distanceFunction, hints);
  return relation.getRKNNQuery(dq, hints);
}
|
Get a rKNN query object for the given distance function .
| 77
| 13
|
157,737
|
public static < O > RangeQuery < O > getLinearScanSimilarityRangeQuery ( SimilarityQuery < O > simQuery ) { // Slight optimizations of linear scans if ( simQuery instanceof PrimitiveSimilarityQuery ) { final PrimitiveSimilarityQuery < O > pdq = ( PrimitiveSimilarityQuery < O > ) simQuery ; return new LinearScanPrimitiveSimilarityRangeQuery <> ( pdq ) ; } return new LinearScanSimilarityRangeQuery <> ( simQuery ) ; }
|
Get a linear scan query for the given similarity query .
| 108
| 11
|
157,738
|
/**
 * Register a class name for a parent (interface) class, creating the
 * registry entry on first use.
 *
 * @param parent parent (interface) class
 * @param cname class name to register
 */
protected static void register(Class<?> parent, String cname) {
  data.computeIfAbsent(parent, key -> new Entry()).addName(cname);
}
|
Register a class with the registry .
| 58
| 7
|
157,739
|
/**
 * Register a loaded class for a parent (interface) class, including any
 * names declared via its {@code @Alias} annotation.
 *
 * @param parent parent (interface) class
 * @param clazz implementation class to register
 */
protected static void register(Class<?> parent, Class<?> clazz) {
  final Entry entry = data.computeIfAbsent(parent, key -> new Entry());
  final String cname = clazz.getCanonicalName();
  entry.addHit(cname, clazz);
  // Also register every declared alias for this class.
  final Alias aliases = clazz.getAnnotation(Alias.class);
  if(aliases != null) {
    for(String alias : aliases.value()) {
      entry.addAlias(alias, cname);
    }
  }
}
|
Register a class in the registry .
| 139
| 7
|
157,740
|
/**
 * Register an alias for a class name; the parent must already have a
 * registry entry.
 *
 * @param parent parent (interface) class
 * @param alias alias name to add
 * @param cname canonical class name the alias resolves to
 */
protected static void registerAlias(Class<?> parent, String alias, String cname) {
  final Entry entry = data.get(parent);
  assert ( entry != null ) ;
  entry.addAlias(alias, cname);
}
|
Register a class alias with the registry .
| 49
| 8
|
157,741
|
/**
 * Attempt to load a class by name, signaling failure with {@code null}
 * instead of an exception.
 *
 * @param value fully qualified class name
 * @return the loaded class, or {@code null} if it could not be found
 */
private static Class<?> tryLoadClass(String value) {
  try {
    return CLASSLOADER.loadClass(value);
  }
  catch(ClassNotFoundException ignored) {
    // Expected for speculative name lookups; the caller tries alternatives.
    return null;
  }
}
|
Attempt to load a class
| 44
| 5
|
157,742
|
/**
 * Find all registered implementations of an interface, triggering the
 * service-file loader and the classpath scanner on first access. Class names
 * are resolved lazily; a class that fails to load is logged once, remembered
 * via the FAILED_LOAD sentinel and skipped on subsequent calls. Duplicates
 * are filtered with a linear scan of the result list.
 *
 * @param restrictionClass interface to look up (null yields an empty list)
 * @return list of implementation classes, possibly empty
 */
public static List < Class < ? > > findAllImplementations ( Class < ? > restrictionClass ) { if ( restrictionClass == null ) { return Collections . emptyList ( ) ; } if ( ! contains ( restrictionClass ) ) { ELKIServiceLoader . load ( restrictionClass ) ; ELKIServiceScanner . load ( restrictionClass ) ; } Entry e = data . get ( restrictionClass ) ; if ( e == null ) { return Collections . emptyList ( ) ; } // Start loading classes: ArrayList < Class < ? > > ret = new ArrayList <> ( e . len ) ; for ( int pos = 0 ; pos < e . len ; pos ++ ) { Class < ? > c = e . clazzes [ pos ] ; if ( c == null ) { c = tryLoadClass ( e . names [ pos ] ) ; if ( c == null ) { LOG . warning ( "Failed to load class " + e . names [ pos ] + " for interface " + restrictionClass . getName ( ) ) ; c = FAILED_LOAD ; } e . clazzes [ pos ] = c ; } if ( c == FAILED_LOAD ) { continue ; } // Linear scan, but cheap enough. if ( ! ret . contains ( c ) ) { ret . add ( c ) ; } } return ret ; }
|
Find all implementations of a particular interface .
| 291
| 8
|
157,743
|
/**
 * Find implementations of a class on the classpath. With
 * {@code everything == false} and {@code parameterizable == true} the result
 * comes straight from the registry; otherwise non-indexed classes are
 * scanned as well, optionally keeping interfaces, abstract and private
 * classes (used by development utilities to find broken implementations).
 * When {@code parameterizable} is set, scanned classes must expose a
 * no-argument constructor or an ELKI parameterizer to be kept.
 *
 * @param c class/interface to look up (null yields an empty list)
 * @param everything also include interfaces, abstract and private classes
 * @param parameterizable restrict scanned classes to instantiable ones
 * @return known implementations
 */
public static List < Class < ? > > findAllImplementations ( Class < ? > c , boolean everything , boolean parameterizable ) { if ( c == null ) { return Collections . emptyList ( ) ; } // Default is served from the registry if ( ! everything && parameterizable ) { return findAllImplementations ( c ) ; } // This codepath is used by utility classes to also find buggy // implementations (e.g. non-instantiable, abstract) of the interfaces. List < Class < ? > > known = findAllImplementations ( c ) ; // For quickly skipping seen entries: HashSet < Class < ? > > dupes = new HashSet <> ( known ) ; for ( Iterator < Class < ? > > iter = ELKIServiceScanner . nonindexedClasses ( ) ; iter . hasNext ( ) ; ) { Class < ? > cls = iter . next ( ) ; if ( dupes . contains ( cls ) ) { continue ; } // skip abstract / private classes. if ( ! everything && ( Modifier . isInterface ( cls . getModifiers ( ) ) || Modifier . isAbstract ( cls . getModifiers ( ) ) || Modifier . isPrivate ( cls . getModifiers ( ) ) ) ) { continue ; } if ( ! c . isAssignableFrom ( cls ) ) { continue ; } if ( parameterizable ) { boolean instantiable = false ; try { instantiable = cls . getConstructor ( ) != null ; } catch ( Exception | Error e ) { // ignore } try { instantiable = instantiable || ClassGenericsUtil . getParameterizer ( cls ) != null ; } catch ( Exception | Error e ) { // ignore } if ( ! instantiable ) { continue ; } } known . add ( cls ) ; dupes . add ( cls ) ; } return known ; }
|
Find all implementations of a given class in the classpath .
| 409
| 12
|
157,744
|
private static < C > Class < ? > tryAlternateNames ( Class < ? super C > restrictionClass , String value , Entry e ) { StringBuilder buf = new StringBuilder ( value . length ( ) + 100 ) ; // Try with FACTORY_POSTFIX first: Class < ? > clazz = tryLoadClass ( buf . append ( value ) . append ( FACTORY_POSTFIX ) . toString ( ) ) ; if ( clazz != null ) { return clazz ; } clazz = tryLoadClass ( value ) ; // Without FACTORY_POSTFIX. if ( clazz != null ) { return clazz ; } buf . setLength ( 0 ) ; // Try prepending the package name: clazz = tryLoadClass ( buf . append ( restrictionClass . getPackage ( ) . getName ( ) ) . append ( ' ' ) // . append ( value ) . append ( FACTORY_POSTFIX ) . toString ( ) ) ; if ( clazz != null ) { return clazz ; } // Remove FACTORY_POSTFIX again. buf . setLength ( buf . length ( ) - FACTORY_POSTFIX . length ( ) ) ; String value2 = buf . toString ( ) ; // Will also be used below. clazz = tryLoadClass ( value2 ) ; if ( clazz != null ) { return clazz ; } // Last, try aliases: if ( e != null && e . aliaslen > 0 ) { for ( int i = 0 ; i < e . aliaslen ; i += 2 ) { if ( e . aliases [ i ] . equalsIgnoreCase ( value ) || e . aliases [ i ] . equalsIgnoreCase ( value2 ) ) { return findImplementation ( restrictionClass , e . aliases [ ++ i ] ) ; } } } return null ; }
|
Try loading alternative names .
| 391
| 5
|
157,745
|
protected Element setupCanvas ( ) { final double margin = context . getStyleLibrary ( ) . getSize ( StyleLibrary . MARGIN ) ; this . layer = setupCanvas ( svgp , this . proj , margin , getWidth ( ) , getHeight ( ) ) ; return layer ; }
|
Setup our canvas .
| 64
| 4
|
157,746
|
protected SimpleTypeInformation < ? > convertedType ( SimpleTypeInformation < ? > in , NumberVector . Factory < V > factory ) { return new VectorFieldTypeInformation <> ( factory , tdim ) ; }
|
Get the output type from the input type after conversion .
| 44
| 11
|
157,747
|
protected < O > Map < O , IntList > partition ( List < ? extends O > classcolumn ) { Map < O , IntList > classes = new HashMap <> ( ) ; Iterator < ? extends O > iter = classcolumn . iterator ( ) ; for ( int i = 0 ; iter . hasNext ( ) ; i ++ ) { O lbl = iter . next ( ) ; IntList ids = classes . get ( lbl ) ; if ( ids == null ) { ids = new IntArrayList ( ) ; classes . put ( lbl , ids ) ; } ids . add ( i ) ; } return classes ; }
|
Partition the bundle based on the class label .
| 140
| 10
|
157,748
|
public Curve makeCurve ( ) { Curve c = new Curve ( curves . size ( ) ) ; curves . add ( c ) ; return c ; }
|
Make a new curve .
| 32
| 5
|
157,749
|
public void publish ( String message , Level level ) { try { publish ( new LogRecord ( level , message ) ) ; } catch ( BadLocationException e ) { throw new RuntimeException ( "Error writing a log-like message." , e ) ; } }
|
Print a message as if it were logged without going through the full logger .
| 54
| 15
|
157,750
|
protected synchronized void publish ( LogRecord record ) throws BadLocationException { // choose an appropriate formatter final Formatter fmt ; final Style style ; // always format progress messages using the progress formatter. if ( record . getLevel ( ) . intValue ( ) >= Level . WARNING . intValue ( ) ) { // format errors using the error formatter fmt = errformat ; style = errStyle ; } else if ( record . getLevel ( ) . intValue ( ) <= Level . FINE . intValue ( ) ) { // format debug statements using the debug formatter. fmt = debugformat ; style = dbgStyle ; } else { // default to the message formatter. fmt = msgformat ; style = msgStyle ; } // format final String m ; m = fmt . format ( record ) ; StyledDocument doc = getStyledDocument ( ) ; if ( record instanceof ProgressLogRecord ) { if ( lastNewlinePos < doc . getLength ( ) ) { doc . remove ( lastNewlinePos , doc . getLength ( ) - lastNewlinePos ) ; } } else { // insert a newline, if we didn't see one yet. if ( lastNewlinePos < doc . getLength ( ) ) { doc . insertString ( doc . getLength ( ) , "\n" , style ) ; lastNewlinePos = doc . getLength ( ) ; } } int tail = tailingNonNewline ( m , 0 , m . length ( ) ) ; int headlen = m . length ( ) - tail ; if ( headlen > 0 ) { String pre = m . substring ( 0 , headlen ) ; doc . insertString ( doc . getLength ( ) , pre , style ) ; } lastNewlinePos = doc . getLength ( ) ; if ( tail > 0 ) { String post = m . substring ( m . length ( ) - tail ) ; doc . insertString ( lastNewlinePos , post , style ) ; } }
|
Publish a log record to the logging pane .
| 417
| 10
|
157,751
|
protected void optimizeSNE ( AffinityMatrix pij , double [ ] [ ] sol ) { final int size = pij . size ( ) ; if ( size * 3L * dim > 0x7FFF_FFFA L ) { throw new AbortException ( "Memory exceeds Java array size limit." ) ; } // Meta information on each point; joined for memory locality. // Gradient, Momentum, and learning rate // For performance, we use a flat memory layout! double [ ] meta = new double [ size * 3 * dim ] ; final int dim3 = dim * 3 ; for ( int off = 2 * dim ; off < meta . length ; off += dim3 ) { Arrays . fill ( meta , off , off + dim , 1. ) ; // Initial learning rate } // Affinity matrix in projected space double [ ] [ ] qij = new double [ size ] [ size ] ; FiniteProgress prog = LOG . isVerbose ( ) ? new FiniteProgress ( "Iterative Optimization" , iterations , LOG ) : null ; Duration timer = LOG . isStatistics ( ) ? LOG . newDuration ( this . getClass ( ) . getName ( ) + ".runtime.optimization" ) . begin ( ) : null ; // Optimize for ( int it = 0 ; it < iterations ; it ++ ) { double qij_sum = computeQij ( qij , sol ) ; computeGradient ( pij , qij , 1. / qij_sum , sol , meta ) ; updateSolution ( sol , meta , it ) ; LOG . incrementProcessed ( prog ) ; } LOG . ensureCompleted ( prog ) ; if ( timer != null ) { LOG . statistics ( timer . end ( ) ) ; } }
|
Perform the actual tSNE optimization .
| 371
| 9
|
157,752
|
protected double computeQij ( double [ ] [ ] qij , double [ ] [ ] solution ) { double qij_sum = 0 ; for ( int i = 1 ; i < qij . length ; i ++ ) { final double [ ] qij_i = qij [ i ] , vi = solution [ i ] ; for ( int j = 0 ; j < i ; j ++ ) { qij_sum += qij_i [ j ] = qij [ j ] [ i ] = MathUtil . exp ( - sqDist ( vi , solution [ j ] ) ) ; } } return qij_sum * 2 ; // Symmetry }
|
Compute the qij of the solution and the sum .
| 141
| 12
|
157,753
|
protected void computeGradient ( AffinityMatrix pij , double [ ] [ ] qij , double qij_isum , double [ ] [ ] sol , double [ ] meta ) { final int dim3 = dim * 3 ; int size = pij . size ( ) ; for ( int i = 0 , off = 0 ; i < size ; i ++ , off += dim3 ) { final double [ ] sol_i = sol [ i ] , qij_i = qij [ i ] ; Arrays . fill ( meta , off , off + dim , 0. ) ; // Clear gradient only for ( int j = 0 ; j < size ; j ++ ) { if ( i == j ) { continue ; } final double [ ] sol_j = sol [ j ] ; final double qij_ij = qij_i [ j ] ; // Qij after scaling! final double q = MathUtil . max ( qij_ij * qij_isum , MIN_QIJ ) ; double a = 4 * ( pij . get ( i , j ) - q ) ; // SNE gradient for ( int k = 0 ; k < dim ; k ++ ) { meta [ off + k ] += a * ( sol_i [ k ] - sol_j [ k ] ) ; } } } }
|
Compute the gradients .
| 281
| 6
|
157,754
|
public OutlierResult run ( Database database , Relation < O > relation ) { DistanceFunction < ? super O > df = clusterer . getDistanceFunction ( ) ; DistanceQuery < O > dq = database . getDistanceQuery ( relation , df ) ; // TODO: improve ELKI api to ensure we're using the same DBIDs! Clustering < ? > c = clusterer . run ( database , relation ) ; WritableDoubleDataStore scores = DataStoreUtil . makeDoubleStorage ( relation . getDBIDs ( ) , DataStoreFactory . HINT_DB ) ; DoubleMinMax mm = new DoubleMinMax ( ) ; @ SuppressWarnings ( "unchecked" ) NumberVector . Factory < O > factory = ( NumberVector . Factory < O > ) RelationUtil . assumeVectorField ( relation ) . getFactory ( ) ; List < ? extends Cluster < ? > > clusters = c . getAllClusters ( ) ; for ( Cluster < ? > cluster : clusters ) { // FIXME: use a primitive distance function on number vectors instead. O mean = factory . newNumberVector ( ModelUtil . getPrototype ( cluster . getModel ( ) , relation ) ) ; for ( DBIDIter iter = cluster . getIDs ( ) . iter ( ) ; iter . valid ( ) ; iter . advance ( ) ) { double dist = dq . distance ( mean , iter ) ; scores . put ( iter , dist ) ; mm . put ( dist ) ; } } // Build result representation. DoubleRelation scoreResult = new MaterializedDoubleRelation ( "KMeans outlier scores" , "kmeans-outlier" , scores , relation . getDBIDs ( ) ) ; OutlierScoreMeta scoreMeta = new BasicOutlierScoreMeta ( mm . getMin ( ) , mm . getMax ( ) , 0. , Double . POSITIVE_INFINITY , 0. ) ; return new OutlierResult ( scoreMeta , scoreResult ) ; }
|
Run the outlier detection algorithm .
| 424
| 7
|
157,755
|
@ Override public FittingFunctionResult eval ( double x , double [ ] params ) { final int len = params . length ; // We always need triples: (mean, stddev, scaling) assert ( len % 3 ) == 0 ; double y = 0.0 ; double [ ] gradients = new double [ len ] ; // Loosely based on the book: // Numerical Recipes in C: The Art of Scientific Computing // Due to their license, we cannot use their code, but we have to implement // the mathematics ourselves. We hope the loss in precision is not too big. for ( int i = 2 ; i < params . length ; i += 3 ) { // Standardized Gaussian parameter (centered, scaled by stddev) double stdpar = ( x - params [ i - 2 ] ) / params [ i - 1 ] ; double e = FastMath . exp ( - .5 * stdpar * stdpar ) ; double localy = params [ i ] / ( params [ i - 1 ] * MathUtil . SQRTTWOPI ) * e ; y += localy ; // mean gradient gradients [ i - 2 ] = localy * stdpar ; // stddev gradient gradients [ i - 1 ] = ( stdpar * stdpar - 1.0 ) * localy ; // amplitude gradient gradients [ i ] = e / ( params [ i - 1 ] * MathUtil . SQRTTWOPI ) ; } return new FittingFunctionResult ( y , gradients ) ; }
|
Compute the mixture of Gaussians at the given position
| 325
| 12
|
157,756
|
private void showVisualization ( VisualizerContext context , SimilarityMatrixVisualizer factory , VisualizationTask task ) { VisualizationPlot plot = new VisualizationPlot ( ) ; Visualization vis = factory . makeVisualization ( context , task , plot , 1.0 , 1.0 , null ) ; plot . getRoot ( ) . appendChild ( vis . getLayer ( ) ) ; plot . getRoot ( ) . setAttribute ( SVGConstants . SVG_WIDTH_ATTRIBUTE , "20cm" ) ; plot . getRoot ( ) . setAttribute ( SVGConstants . SVG_HEIGHT_ATTRIBUTE , "20cm" ) ; plot . getRoot ( ) . setAttribute ( SVGConstants . SVG_VIEW_BOX_ATTRIBUTE , "0 0 1 1" ) ; plot . updateStyleElement ( ) ; ( new SimpleSVGViewer ( ) ) . setPlot ( plot ) ; }
|
Show a single visualization .
| 201
| 5
|
157,757
|
public void put ( int [ ] data ) { final int l = data . length ; for ( int i = 0 ; i < l ; i ++ ) { put ( data [ i ] ) ; } }
|
Process a whole array of int values .
| 43
| 8
|
157,758
|
public OutlierResult run ( Database database , Relation < O > rel ) { final DBIDs ids = rel . getDBIDs ( ) ; LOG . verbose ( "Running kNN preprocessor." ) ; KNNQuery < O > knnq = DatabaseUtil . precomputedKNNQuery ( database , rel , getDistanceFunction ( ) , kmax + 1 ) ; // Initialize store for densities WritableDataStore < double [ ] > densities = DataStoreUtil . makeStorage ( ids , DataStoreFactory . HINT_HOT | DataStoreFactory . HINT_TEMP , double [ ] . class ) ; estimateDensities ( rel , knnq , ids , densities ) ; // Compute scores: WritableDoubleDataStore kofs = DataStoreUtil . makeDoubleStorage ( ids , DataStoreFactory . HINT_DB ) ; DoubleMinMax minmax = new DoubleMinMax ( ) ; computeOutlierScores ( knnq , ids , densities , kofs , minmax ) ; DoubleRelation scoreres = new MaterializedDoubleRelation ( "Kernel Density Estimation Outlier Scores" , "kdeos-outlier" , kofs , ids ) ; OutlierScoreMeta meta = new ProbabilisticOutlierScore ( minmax . getMin ( ) , minmax . getMax ( ) ) ; return new OutlierResult ( meta , scoreres ) ; }
|
Run the KDEOS outlier detection algorithm .
| 316
| 9
|
157,759
|
protected void estimateDensities ( Relation < O > rel , KNNQuery < O > knnq , final DBIDs ids , WritableDataStore < double [ ] > densities ) { final int dim = dimensionality ( rel ) ; final int knum = kmax + 1 - kmin ; // Initialize storage: for ( DBIDIter iter = ids . iter ( ) ; iter . valid ( ) ; iter . advance ( ) ) { densities . put ( iter , new double [ knum ] ) ; } // Distribute densities: FiniteProgress prog = LOG . isVerbose ( ) ? new FiniteProgress ( "Computing densities" , ids . size ( ) , LOG ) : null ; double iminbw = ( minBandwidth > 0. ) ? 1. / ( minBandwidth * scale ) : Double . POSITIVE_INFINITY ; for ( DBIDIter iter = ids . iter ( ) ; iter . valid ( ) ; iter . advance ( ) ) { KNNList neighbors = knnq . getKNNForDBID ( iter , kmax + 1 ) ; int k = 1 , idx = 0 ; double sum = 0. ; for ( DoubleDBIDListIter kneighbor = neighbors . iter ( ) ; k <= kmax && kneighbor . valid ( ) ; kneighbor . advance ( ) , k ++ ) { sum += kneighbor . doubleValue ( ) ; if ( k < kmin ) { continue ; } final double ibw = Math . min ( k / ( sum * scale ) , iminbw ) ; final double sca = MathUtil . powi ( ibw , dim ) ; for ( DoubleDBIDListIter neighbor = neighbors . iter ( ) ; neighbor . valid ( ) ; neighbor . advance ( ) ) { final double dens ; if ( sca < Double . POSITIVE_INFINITY ) { // NaNs with duplicate points! dens = sca * kernel . density ( neighbor . doubleValue ( ) * ibw ) ; } else { dens = neighbor . doubleValue ( ) == 0. ? 1. : 0. ; } densities . get ( neighbor ) [ idx ] += dens ; if ( dens < CUTOFF ) { break ; } } ++ idx ; // Only if k >= kmin } LOG . incrementProcessed ( prog ) ; } LOG . ensureCompleted ( prog ) ; }
|
Perform the kernel density estimation step .
| 521
| 8
|
157,760
|
private int dimensionality ( Relation < O > rel ) { // Explicit: if ( idim >= 0 ) { return idim ; } // Cast to vector field relation. @ SuppressWarnings ( "unchecked" ) final Relation < NumberVector > frel = ( Relation < NumberVector > ) rel ; int dim = RelationUtil . dimensionality ( frel ) ; if ( dim < 1 ) { throw new AbortException ( "When using KDEOS with non-vectorspace data, the intrinsic dimensionality parameter must be set!" ) ; } return dim ; }
|
Ugly hack to allow using this implementation without having a well - defined dimensionality .
| 125
| 17
|
157,761
|
protected void computeOutlierScores ( KNNQuery < O > knnq , final DBIDs ids , WritableDataStore < double [ ] > densities , WritableDoubleDataStore kdeos , DoubleMinMax minmax ) { final int knum = kmax + 1 - kmin ; FiniteProgress prog = LOG . isVerbose ( ) ? new FiniteProgress ( "Computing KDEOS scores" , ids . size ( ) , LOG ) : null ; double [ ] [ ] scratch = new double [ knum ] [ kmax + 5 ] ; MeanVariance mv = new MeanVariance ( ) ; for ( DBIDIter iter = ids . iter ( ) ; iter . valid ( ) ; iter . advance ( ) ) { double [ ] dens = densities . get ( iter ) ; KNNList neighbors = knnq . getKNNForDBID ( iter , kmax + 1 ) ; if ( scratch [ 0 ] . length < neighbors . size ( ) ) { // Resize scratch. Add some extra margin again. scratch = new double [ knum ] [ neighbors . size ( ) + 5 ] ; } { // Store density matrix of neighbors int i = 0 ; for ( DoubleDBIDListIter neighbor = neighbors . iter ( ) ; neighbor . valid ( ) ; neighbor . advance ( ) , i ++ ) { double [ ] ndens = densities . get ( neighbor ) ; for ( int k = 0 ; k < knum ; k ++ ) { scratch [ k ] [ i ] = ndens [ k ] ; } } assert ( i == neighbors . size ( ) ) ; } // Compute means and stddevs for each k double score = 0. ; for ( int i = 0 ; i < knum ; i ++ ) { mv . reset ( ) ; for ( int j = 0 ; j < neighbors . size ( ) ; j ++ ) { mv . put ( scratch [ i ] [ j ] ) ; } final double mean = mv . getMean ( ) , stddev = mv . getSampleStddev ( ) ; if ( stddev > 0. ) { score += ( mean - dens [ i ] ) / stddev ; } } score /= knum ; // average score = NormalDistribution . standardNormalCDF ( score ) ; minmax . put ( score ) ; kdeos . put ( iter , score ) ; LOG . incrementProcessed ( prog ) ; } LOG . ensureCompleted ( prog ) ; }
|
Compute the final KDEOS scores .
| 540
| 8
|
157,762
|
public Clustering < Model > run ( Relation < V > rel ) { fulldatabase = preprocess ( rel ) ; processedIDs = DBIDUtil . newHashSet ( fulldatabase . size ( ) ) ; noiseDim = dimensionality ( fulldatabase ) ; FiniteProgress progress = LOG . isVerbose ( ) ? new FiniteProgress ( "CASH Clustering" , fulldatabase . size ( ) , LOG ) : null ; Clustering < Model > result = doRun ( fulldatabase , progress ) ; LOG . ensureCompleted ( progress ) ; if ( LOG . isVerbose ( ) ) { StringBuilder msg = new StringBuilder ( 1000 ) ; for ( Cluster < Model > c : result . getAllClusters ( ) ) { if ( c . getModel ( ) instanceof LinearEquationModel ) { LinearEquationModel s = ( LinearEquationModel ) c . getModel ( ) ; msg . append ( "\n Cluster: Dim: " + s . getLes ( ) . subspacedim ( ) + " size: " + c . size ( ) ) ; } else { msg . append ( "\n Cluster: " + c . getModel ( ) . getClass ( ) . getName ( ) + " size: " + c . size ( ) ) ; } } LOG . verbose ( msg . toString ( ) ) ; } return result ; }
|
Run CASH on the relation .
| 296
| 7
|
157,763
|
private Relation < ParameterizationFunction > preprocess ( Relation < V > vrel ) { DBIDs ids = vrel . getDBIDs ( ) ; SimpleTypeInformation < ParameterizationFunction > type = new SimpleTypeInformation <> ( ParameterizationFunction . class ) ; WritableDataStore < ParameterizationFunction > prep = DataStoreUtil . makeStorage ( ids , DataStoreFactory . HINT_HOT , ParameterizationFunction . class ) ; // Project for ( DBIDIter iter = ids . iter ( ) ; iter . valid ( ) ; iter . advance ( ) ) { prep . put ( iter , new ParameterizationFunction ( vrel . get ( iter ) ) ) ; } return new MaterializedRelation <> ( type , ids , null , prep ) ; }
|
Preprocess the dataset precomputing the parameterization functions .
| 174
| 12
|
157,764
|
private void initHeap ( ObjectHeap < CASHInterval > heap , Relation < ParameterizationFunction > relation , int dim , DBIDs ids ) { CASHIntervalSplit split = new CASHIntervalSplit ( relation , minPts ) ; // determine minimum and maximum function value of all functions double [ ] minMax = determineMinMaxDistance ( relation , dim ) ; double d_min = minMax [ 0 ] , d_max = minMax [ 1 ] ; double dIntervalLength = d_max - d_min ; int numDIntervals = ( int ) FastMath . ceil ( dIntervalLength / jitter ) ; double dIntervalSize = dIntervalLength / numDIntervals ; double [ ] d_mins = new double [ numDIntervals ] , d_maxs = new double [ numDIntervals ] ; if ( LOG . isVerbose ( ) ) { LOG . verbose ( new StringBuilder ( ) . append ( "d_min " ) . append ( d_min ) // . append ( "\nd_max " ) . append ( d_max ) // . append ( "\nnumDIntervals " ) . append ( numDIntervals ) // . append ( "\ndIntervalSize " ) . append ( dIntervalSize ) . toString ( ) ) ; } // alpha intervals double [ ] alphaMin = new double [ dim - 1 ] , alphaMax = new double [ dim - 1 ] ; Arrays . fill ( alphaMax , Math . PI ) ; for ( int i = 0 ; i < numDIntervals ; i ++ ) { d_mins [ i ] = ( i == 0 ) ? d_min : d_maxs [ i - 1 ] ; d_maxs [ i ] = ( i < numDIntervals - 1 ) ? d_mins [ i ] + dIntervalSize : d_max - d_mins [ i ] ; HyperBoundingBox alphaInterval = new HyperBoundingBox ( alphaMin , alphaMax ) ; ModifiableDBIDs intervalIDs = split . determineIDs ( ids , alphaInterval , d_mins [ i ] , d_maxs [ i ] ) ; if ( intervalIDs != null && intervalIDs . size ( ) >= minPts ) { heap . add ( new CASHInterval ( alphaMin , alphaMax , split , intervalIDs , - 1 , 0 , d_mins [ i ] , d_maxs [ i ] ) ) ; } } if ( LOG . isDebuggingFiner ( ) ) { LOG . debugFiner ( new StringBuilder ( ) . append ( "heap.size: " ) . append ( heap . size ( ) ) . toString ( ) ) ; } }
|
Initializes the heap with the root intervals .
| 591
| 9
|
157,765
|
private MaterializedRelation < ParameterizationFunction > buildDB ( int dim , double [ ] [ ] basis , DBIDs ids , Relation < ParameterizationFunction > relation ) { ProxyDatabase proxy = new ProxyDatabase ( ids ) ; SimpleTypeInformation < ParameterizationFunction > type = new SimpleTypeInformation <> ( ParameterizationFunction . class ) ; WritableDataStore < ParameterizationFunction > prep = DataStoreUtil . makeStorage ( ids , DataStoreFactory . HINT_HOT , ParameterizationFunction . class ) ; // Project for ( DBIDIter iter = ids . iter ( ) ; iter . valid ( ) ; iter . advance ( ) ) { prep . put ( iter , project ( basis , relation . get ( iter ) ) ) ; } if ( LOG . isDebugging ( ) ) { LOG . debugFine ( "db fuer dim " + ( dim - 1 ) + ": " + ids . size ( ) ) ; } MaterializedRelation < ParameterizationFunction > prel = new MaterializedRelation <> ( type , ids , null , prep ) ; proxy . addRelation ( prel ) ; return prel ; }
|
Builds a dim - 1 dimensional database where the objects are projected into the specified subspace .
| 256
| 19
|
157,766
|
private ParameterizationFunction project ( double [ ] [ ] basis , ParameterizationFunction f ) { // Matrix m = new Matrix(new // double[][]{f.getPointCoordinates()}).times(basis); double [ ] m = transposeTimes ( basis , f . getColumnVector ( ) ) ; return new ParameterizationFunction ( DoubleVector . wrap ( m ) ) ; }
|
Projects the specified parameterization function into the subspace described by the given basis .
| 87
| 17
|
157,767
|
private double [ ] [ ] determineBasis ( double [ ] alpha ) { final int dim = alpha . length ; // Primary vector: double [ ] nn = new double [ dim + 1 ] ; for ( int i = 0 ; i < nn . length ; i ++ ) { double alpha_i = i == alpha . length ? 0 : alpha [ i ] ; nn [ i ] = ParameterizationFunction . sinusProduct ( 0 , i , alpha ) * FastMath . cos ( alpha_i ) ; } timesEquals ( nn , 1. / euclideanLength ( nn ) ) ; // Normalize // Find orthogonal system, in transposed form: double [ ] [ ] basis = new double [ dim ] [ ] ; int found = 0 ; for ( int i = 0 ; i < nn . length && found < dim ; i ++ ) { // ith unit vector. final double [ ] e_i = new double [ nn . length ] ; e_i [ i ] = 1.0 ; minusTimesEquals ( e_i , nn , scalarProduct ( e_i , nn ) ) ; double len = euclideanLength ( e_i ) ; // Make orthogonal to earlier (normal) basis vectors: for ( int j = 0 ; j < found ; j ++ ) { if ( len < 1e-9 ) { // Disappeared, probably linear dependent break ; } minusTimesEquals ( e_i , basis [ j ] , scalarProduct ( e_i , basis [ j ] ) ) ; len = euclideanLength ( e_i ) ; } if ( len < 1e-9 ) { continue ; } timesEquals ( e_i , 1. / len ) ; // Normalize basis [ found ++ ] = e_i ; } if ( found < dim ) { // Likely some numerical instability, should not happen. for ( int i = found ; i < dim ; i ++ ) { basis [ i ] = new double [ nn . length ] ; // Append zero vectors } } return transpose ( basis ) ; }
|
Determines a basis defining a subspace described by the specified alpha values .
| 455
| 16
|
157,768
|
private CASHInterval determineNextIntervalAtMaxLevel ( ObjectHeap < CASHInterval > heap ) { CASHInterval next = doDetermineNextIntervalAtMaxLevel ( heap ) ; // noise path was chosen while ( next == null ) { if ( heap . isEmpty ( ) ) { return null ; } next = doDetermineNextIntervalAtMaxLevel ( heap ) ; } return next ; }
|
Determines the next best interval at maximum level i . e . the next interval containing the most unprocessed objects .
| 92
| 25
|
157,769
|
private CASHInterval doDetermineNextIntervalAtMaxLevel ( ObjectHeap < CASHInterval > heap ) { CASHInterval interval = heap . poll ( ) ; int dim = interval . getDimensionality ( ) ; while ( true ) { // max level is reached if ( interval . getLevel ( ) >= maxLevel && interval . getMaxSplitDimension ( ) == ( dim - 1 ) ) { return interval ; } if ( heap . size ( ) % 10000 == 0 && LOG . isVerbose ( ) ) { LOG . verbose ( "heap size " + heap . size ( ) ) ; } if ( heap . size ( ) >= 40000 ) { LOG . warning ( "Heap size > 40.000! Stopping." ) ; heap . clear ( ) ; return null ; } if ( LOG . isDebuggingFiner ( ) ) { LOG . debugFiner ( "split " + interval . toString ( ) + " " + interval . getLevel ( ) + "-" + interval . getMaxSplitDimension ( ) ) ; } interval . split ( ) ; // noise if ( ! interval . hasChildren ( ) ) { return null ; } CASHInterval bestInterval ; if ( interval . getLeftChild ( ) != null && interval . getRightChild ( ) != null ) { int comp = interval . getLeftChild ( ) . compareTo ( interval . getRightChild ( ) ) ; if ( comp < 0 ) { bestInterval = interval . getRightChild ( ) ; heap . add ( interval . getLeftChild ( ) ) ; } else { bestInterval = interval . getLeftChild ( ) ; heap . add ( interval . getRightChild ( ) ) ; } } else if ( interval . getLeftChild ( ) == null ) { bestInterval = interval . getRightChild ( ) ; } else { bestInterval = interval . getLeftChild ( ) ; } interval = bestInterval ; } }
|
Recursive helper method to determine the next best interval at maximum level i . e . the next interval containing the most unprocessed objects
| 420
| 27
|
157,770
|
private double [ ] determineMinMaxDistance ( Relation < ParameterizationFunction > relation , int dimensionality ) { double [ ] min = new double [ dimensionality - 1 ] ; double [ ] max = new double [ dimensionality - 1 ] ; Arrays . fill ( max , Math . PI ) ; HyperBoundingBox box = new HyperBoundingBox ( min , max ) ; double d_min = Double . POSITIVE_INFINITY , d_max = Double . NEGATIVE_INFINITY ; for ( DBIDIter iditer = relation . iterDBIDs ( ) ; iditer . valid ( ) ; iditer . advance ( ) ) { ParameterizationFunction f = relation . get ( iditer ) ; HyperBoundingBox minMax = f . determineAlphaMinMax ( box ) ; double f_min = f . function ( SpatialUtil . getMin ( minMax ) ) ; double f_max = f . function ( SpatialUtil . getMax ( minMax ) ) ; d_min = Math . min ( d_min , f_min ) ; d_max = Math . max ( d_max , f_max ) ; } return new double [ ] { d_min , d_max } ; }
|
Determines the minimum and maximum function value of all parameterization functions stored in the specified database .
| 267
| 20
|
157,771
|
public HistogramResult run ( Database database , Relation < O > relation ) { final DistanceQuery < O > distanceQuery = database . getDistanceQuery ( relation , getDistanceFunction ( ) ) ; final KNNQuery < O > knnQuery = database . getKNNQuery ( distanceQuery , relation . size ( ) ) ; if ( LOG . isVerbose ( ) ) { LOG . verbose ( "Preprocessing clusters..." ) ; } // Cluster by labels Collection < Cluster < Model > > split = ( new ByLabelOrAllInOneClustering ( ) ) . run ( database ) . getAllClusters ( ) ; DoubleHistogram hist = new DoubleHistogram ( numbins , 0.0 , 1.0 ) ; if ( LOG . isVerbose ( ) ) { LOG . verbose ( "Processing points..." ) ; } FiniteProgress progress = LOG . isVerbose ( ) ? new FiniteProgress ( "Computing ROC AUC values" , relation . size ( ) , LOG ) : null ; ROCEvaluation roc = new ROCEvaluation ( ) ; MeanVariance mv = new MeanVariance ( ) ; // sort neighbors for ( Cluster < ? > clus : split ) { for ( DBIDIter iter = clus . getIDs ( ) . iter ( ) ; iter . valid ( ) ; iter . advance ( ) ) { KNNList knn = knnQuery . getKNNForDBID ( iter , relation . size ( ) ) ; double result = EvaluateClustering . evaluateRanking ( roc , clus , knn ) ; mv . put ( result ) ; hist . increment ( result , 1. / relation . size ( ) ) ; LOG . incrementProcessed ( progress ) ; } } LOG . ensureCompleted ( progress ) ; // Transform Histogram into a Double Vector array. Collection < double [ ] > res = new ArrayList <> ( relation . size ( ) ) ; for ( DoubleHistogram . Iter iter = hist . iter ( ) ; iter . valid ( ) ; iter . advance ( ) ) { res . add ( new double [ ] { iter . getCenter ( ) , iter . getValue ( ) } ) ; } HistogramResult result = new HistogramResult ( "Ranking Quality Histogram" , "ranking-histogram" , res ) ; result . addHeader ( "Mean: " + mv . getMean ( ) + " Variance: " + mv . getSampleVariance ( ) ) ; return result ; }
|
Process a database
| 544
| 3
|
157,772
|
public Clustering < M > run ( Database database , Relation < V > relation ) { if ( relation . size ( ) == 0 ) { throw new IllegalArgumentException ( "database empty: must contain elements" ) ; } // initial models List < ? extends EMClusterModel < M > > models = mfactory . buildInitialModels ( database , relation , k , SquaredEuclideanDistanceFunction . STATIC ) ; WritableDataStore < double [ ] > probClusterIGivenX = DataStoreUtil . makeStorage ( relation . getDBIDs ( ) , DataStoreFactory . HINT_HOT | DataStoreFactory . HINT_SORTED , double [ ] . class ) ; double loglikelihood = assignProbabilitiesToInstances ( relation , models , probClusterIGivenX ) ; DoubleStatistic likestat = LOG . isStatistics ( ) ? new DoubleStatistic ( this . getClass ( ) . getName ( ) + ".loglikelihood" ) : null ; if ( LOG . isStatistics ( ) ) { LOG . statistics ( likestat . setDouble ( loglikelihood ) ) ; } // iteration unless no change int it = 0 , lastimprovement = 0 ; double bestloglikelihood = loglikelihood ; // For detecting instabilities. for ( ++ it ; it < maxiter || maxiter < 0 ; it ++ ) { final double oldloglikelihood = loglikelihood ; recomputeCovarianceMatrices ( relation , probClusterIGivenX , models , prior ) ; // reassign probabilities loglikelihood = assignProbabilitiesToInstances ( relation , models , probClusterIGivenX ) ; if ( LOG . isStatistics ( ) ) { LOG . statistics ( likestat . setDouble ( loglikelihood ) ) ; } if ( loglikelihood - bestloglikelihood > delta ) { lastimprovement = it ; bestloglikelihood = loglikelihood ; } if ( Math . abs ( loglikelihood - oldloglikelihood ) <= delta || lastimprovement < it >> 1 ) { break ; } } if ( LOG . isStatistics ( ) ) { LOG . statistics ( new LongStatistic ( KEY + ".iterations" , it ) ) ; } // fill result with clusters and models List < ModifiableDBIDs > hardClusters = new ArrayList <> ( k ) ; for ( int i = 0 ; i < k ; i ++ ) { hardClusters . add ( DBIDUtil . 
newArray ( ) ) ; } // provide a hard clustering for ( DBIDIter iditer = relation . iterDBIDs ( ) ; iditer . valid ( ) ; iditer . advance ( ) ) { hardClusters . get ( argmax ( probClusterIGivenX . get ( iditer ) ) ) . add ( iditer ) ; } Clustering < M > result = new Clustering <> ( "EM Clustering" , "em-clustering" ) ; // provide models within the result for ( int i = 0 ; i < k ; i ++ ) { result . addToplevelCluster ( new Cluster <> ( hardClusters . get ( i ) , models . get ( i ) . finalizeCluster ( ) ) ) ; } if ( isSoft ( ) ) { result . addChildResult ( new MaterializedRelation <> ( "cluster assignments" , "em-soft-score" , SOFT_TYPE , probClusterIGivenX , relation . getDBIDs ( ) ) ) ; } else { probClusterIGivenX . destroy ( ) ; } return result ; }
|
Performs the EM clustering algorithm on the given database .
| 777
| 12
|
157,773
|
public static void recomputeCovarianceMatrices ( Relation < ? extends NumberVector > relation , WritableDataStore < double [ ] > probClusterIGivenX , List < ? extends EMClusterModel < ? > > models , double prior ) { final int k = models . size ( ) ; boolean needsTwoPass = false ; for ( EMClusterModel < ? > m : models ) { m . beginEStep ( ) ; needsTwoPass |= m . needsTwoPass ( ) ; } // First pass, only for two-pass models. if ( needsTwoPass ) { for ( DBIDIter iditer = relation . iterDBIDs ( ) ; iditer . valid ( ) ; iditer . advance ( ) ) { double [ ] clusterProbabilities = probClusterIGivenX . get ( iditer ) ; NumberVector instance = relation . get ( iditer ) ; for ( int i = 0 ; i < clusterProbabilities . length ; i ++ ) { final double prob = clusterProbabilities [ i ] ; if ( prob > 1e-10 ) { models . get ( i ) . firstPassE ( instance , prob ) ; } } } for ( EMClusterModel < ? > m : models ) { m . finalizeFirstPassE ( ) ; } } double [ ] wsum = new double [ k ] ; for ( DBIDIter iditer = relation . iterDBIDs ( ) ; iditer . valid ( ) ; iditer . advance ( ) ) { double [ ] clusterProbabilities = probClusterIGivenX . get ( iditer ) ; NumberVector instance = relation . get ( iditer ) ; for ( int i = 0 ; i < clusterProbabilities . length ; i ++ ) { final double prob = clusterProbabilities [ i ] ; if ( prob > 1e-10 ) { models . get ( i ) . updateE ( instance , prob ) ; } wsum [ i ] += prob ; } } for ( int i = 0 ; i < models . size ( ) ; i ++ ) { // MLE / MAP final double weight = prior <= 0. ? wsum [ i ] / relation . size ( ) : ( wsum [ i ] + prior - 1 ) / ( relation . size ( ) + prior * k - k ) ; models . get ( i ) . finalizeEStep ( weight , prior ) ; } }
|
Recompute the covariance matrixes .
| 512
| 9
|
157,774
|
public static double assignProbabilitiesToInstances ( Relation < ? extends NumberVector > relation , List < ? extends EMClusterModel < ? > > models , WritableDataStore < double [ ] > probClusterIGivenX ) { final int k = models . size ( ) ; double emSum = 0. ; for ( DBIDIter iditer = relation . iterDBIDs ( ) ; iditer . valid ( ) ; iditer . advance ( ) ) { NumberVector vec = relation . get ( iditer ) ; double [ ] probs = new double [ k ] ; for ( int i = 0 ; i < k ; i ++ ) { double v = models . get ( i ) . estimateLogDensity ( vec ) ; probs [ i ] = v > MIN_LOGLIKELIHOOD ? v : MIN_LOGLIKELIHOOD ; } final double logP = logSumExp ( probs ) ; for ( int i = 0 ; i < k ; i ++ ) { probs [ i ] = FastMath . exp ( probs [ i ] - logP ) ; } probClusterIGivenX . put ( iditer , probs ) ; emSum += logP ; } return emSum / relation . size ( ) ; }
|
Assigns the current probability values to the instances in the database and compute the expectation value of the current mixture of distributions .
| 272
| 25
|
157,775
|
protected synchronized void updateVisualizerMenus ( ) { Projection proj = null ; if ( svgCanvas . getPlot ( ) instanceof DetailView ) { PlotItem item = ( ( DetailView ) svgCanvas . getPlot ( ) ) . getPlotItem ( ) ; proj = item . proj ; } menubar . removeAll ( ) ; menubar . add ( filemenu ) ; ResultHierarchy hier = context . getHierarchy ( ) ; Hierarchy < Object > vistree = context . getVisHierarchy ( ) ; Result start = context . getBaseResult ( ) ; ArrayList < JMenuItem > items = new ArrayList <> ( ) ; if ( start == null ) { for ( It < Result > iter = hier . iterAll ( ) ; iter . valid ( ) ; iter . advance ( ) ) { if ( hier . numParents ( iter . get ( ) ) == 0 ) { recursiveBuildMenu ( items , iter . get ( ) , hier , vistree , proj ) ; } } } else { for ( It < Result > iter = hier . iterChildren ( start ) ; iter . valid ( ) ; iter . advance ( ) ) { recursiveBuildMenu ( items , iter . get ( ) , hier , vistree , proj ) ; } } // Add all items. for ( JMenuItem item : items ) { menubar . add ( item ) ; } menubar . revalidate ( ) ; menubar . repaint ( ) ; }
|
Update the visualizer menus .
| 328
| 6
|
157,776
|
public OutlierResult run ( Relation < V > relation ) { final DBIDs ids = relation . getDBIDs ( ) ; ArrayList < ArrayDBIDs > subspaceIndex = buildOneDimIndexes ( relation ) ; Set < HiCSSubspace > subspaces = calculateSubspaces ( relation , subspaceIndex , rnd . getSingleThreadedRandom ( ) ) ; if ( LOG . isVerbose ( ) ) { LOG . verbose ( "Number of high-contrast subspaces: " + subspaces . size ( ) ) ; } List < DoubleRelation > results = new ArrayList <> ( ) ; FiniteProgress prog = LOG . isVerbose ( ) ? new FiniteProgress ( "Calculating Outlier scores for high Contrast subspaces" , subspaces . size ( ) , LOG ) : null ; // run outlier detection and collect the result // TODO extend so that any outlierAlgorithm can be used (use materialized // relation instead of SubspaceEuclideanDistanceFunction?) for ( HiCSSubspace dimset : subspaces ) { if ( LOG . isVerbose ( ) ) { LOG . verbose ( "Performing outlier detection in subspace " + dimset ) ; } ProxyDatabase pdb = new ProxyDatabase ( ids ) ; pdb . addRelation ( new ProjectedView <> ( relation , new NumericalFeatureSelection < V > ( dimset ) ) ) ; // run LOF and collect the result OutlierResult result = outlierAlgorithm . run ( pdb ) ; results . add ( result . getScores ( ) ) ; LOG . incrementProcessed ( prog ) ; } LOG . ensureCompleted ( prog ) ; WritableDoubleDataStore scores = DataStoreUtil . makeDoubleStorage ( relation . getDBIDs ( ) , DataStoreFactory . HINT_STATIC ) ; DoubleMinMax minmax = new DoubleMinMax ( ) ; for ( DBIDIter iditer = relation . iterDBIDs ( ) ; iditer . valid ( ) ; iditer . advance ( ) ) { double sum = 0.0 ; for ( DoubleRelation r : results ) { final double s = r . doubleValue ( iditer ) ; if ( ! Double . isNaN ( s ) ) { sum += s ; } } scores . putDouble ( iditer , sum ) ; minmax . put ( sum ) ; } OutlierScoreMeta meta = new BasicOutlierScoreMeta ( minmax . getMin ( ) , minmax . getMax ( ) ) ; DoubleRelation scoreres = new MaterializedDoubleRelation ( "HiCS" , "HiCS-outlier" , scores , relation . 
getDBIDs ( ) ) ; return new OutlierResult ( meta , scoreres ) ; }
|
Perform HiCS on a given database .
| 601
| 9
|
157,777
|
private ArrayList < ArrayDBIDs > buildOneDimIndexes ( Relation < ? extends NumberVector > relation ) { final int dim = RelationUtil . dimensionality ( relation ) ; ArrayList < ArrayDBIDs > subspaceIndex = new ArrayList <> ( dim + 1 ) ; SortDBIDsBySingleDimension comp = new VectorUtil . SortDBIDsBySingleDimension ( relation ) ; for ( int i = 0 ; i < dim ; i ++ ) { ArrayModifiableDBIDs amDBIDs = DBIDUtil . newArray ( relation . getDBIDs ( ) ) ; comp . setDimension ( i ) ; amDBIDs . sort ( comp ) ; subspaceIndex . add ( amDBIDs ) ; } return subspaceIndex ; }
|
Calculates index structures for every attribute i . e . sorts a ModifiableArray of every DBID in the database for every dimension and stores them in a list
| 164
| 33
|
157,778
|
private double [ ] max ( double [ ] distances1 , double [ ] distances2 ) { if ( distances1 . length != distances2 . length ) { throw new RuntimeException ( "different lengths!" ) ; } double [ ] result = new double [ distances1 . length ] ; for ( int i = 0 ; i < distances1 . length ; i ++ ) { result [ i ] = Math . max ( distances1 [ i ] , distances2 [ i ] ) ; } return result ; }
|
Returns an array that holds the maximum values of the both specified arrays in each index .
| 103
| 17
|
157,779
|
public static int compileShader ( Class < ? > context , GL2 gl , int type , String name ) throws ShaderCompilationException { int prog = - 1 ; try ( InputStream in = context . getResourceAsStream ( name ) ) { int [ ] error = new int [ 1 ] ; String shaderdata = FileUtil . slurp ( in ) ; prog = gl . glCreateShader ( type ) ; gl . glShaderSource ( prog , 1 , new String [ ] { shaderdata } , null , 0 ) ; gl . glCompileShader ( prog ) ; // This worked best for me to capture error messages: gl . glGetObjectParameterivARB ( prog , GL2 . GL_OBJECT_INFO_LOG_LENGTH_ARB , error , 0 ) ; if ( error [ 0 ] > 1 ) { byte [ ] info = new byte [ error [ 0 ] ] ; gl . glGetInfoLogARB ( prog , info . length , error , 0 , info , 0 ) ; String out = new String ( info ) ; gl . glDeleteShader ( prog ) ; throw new ShaderCompilationException ( "Shader compilation error in '" + name + "': " + out ) ; } // Different way of catching errors. gl . glGetShaderiv ( prog , GL2 . GL_COMPILE_STATUS , error , 0 ) ; if ( error [ 0 ] > 1 ) { throw new ShaderCompilationException ( "Shader compilation of '" + name + "' failed." ) ; } } catch ( IOException e ) { throw new ShaderCompilationException ( "IO error loading shader: " + name , e ) ; } return prog ; }
|
Compile a shader from a file .
| 365
| 8
|
157,780
|
protected int effectiveBandSize ( final int dim1 , final int dim2 ) { if ( bandSize == Double . POSITIVE_INFINITY ) { return ( dim1 > dim2 ) ? dim1 : dim2 ; } if ( bandSize >= 1. ) { return ( int ) bandSize ; } // Max * bandSize: return ( int ) Math . ceil ( ( dim1 >= dim2 ? dim1 : dim2 ) * bandSize ) ; }
|
Compute the effective band size .
| 100
| 7
|
157,781
|
@ Override public final int addLeafEntry ( E entry ) { // entry is not a leaf entry if ( ! ( entry instanceof LeafEntry ) ) { throw new UnsupportedOperationException ( "Entry is not a leaf entry!" ) ; } // this is a not a leaf node if ( ! isLeaf ( ) ) { throw new UnsupportedOperationException ( "Node is not a leaf node!" ) ; } // leaf node return addEntry ( entry ) ; }
|
Adds a new leaf entry to this node s children and returns the index of the entry in this node s children array . An UnsupportedOperationException will be thrown if the entry is not a leaf entry or this node is not a leaf node .
| 99
| 49
|
157,782
|
@ Override public final int addDirectoryEntry ( E entry ) { // entry is not a directory entry if ( entry instanceof LeafEntry ) { throw new UnsupportedOperationException ( "Entry is not a directory entry!" ) ; } // this is a not a directory node if ( isLeaf ( ) ) { throw new UnsupportedOperationException ( "Node is not a directory node!" ) ; } return addEntry ( entry ) ; }
|
Adds a new directory entry to this node s children and returns the index of the entry in this node s children array . An UnsupportedOperationException will be thrown if the entry is not a directory entry or this node is not a directory node .
| 91
| 49
|
157,783
|
public boolean deleteEntry ( int index ) { System . arraycopy ( entries , index + 1 , entries , index , numEntries - index - 1 ) ; entries [ -- numEntries ] = null ; return true ; }
|
Deletes the entry at the specified index and shifts all entries after the index to left .
| 47
| 18
|
157,784
|
@ SuppressWarnings ( "unchecked" ) @ Deprecated public final List < E > getEntries ( ) { List < E > result = new ArrayList <> ( numEntries ) ; for ( Entry entry : entries ) { if ( entry != null ) { result . add ( ( E ) entry ) ; } } return result ; }
|
Returns a list of the entries .
| 75
| 7
|
157,785
|
public void removeMask ( long [ ] mask ) { int dest = BitsUtil . nextSetBit ( mask , 0 ) ; if ( dest < 0 ) { return ; } int src = BitsUtil . nextSetBit ( mask , dest ) ; while ( src < numEntries ) { if ( ! BitsUtil . get ( mask , src ) ) { entries [ dest ] = entries [ src ] ; dest ++ ; } src ++ ; } int rm = src - dest ; while ( dest < numEntries ) { entries [ dest ] = null ; dest ++ ; } numEntries -= rm ; }
|
Remove entries according to the given mask .
| 129
| 8
|
157,786
|
public final void splitTo ( AbstractNode < E > newNode , List < E > sorting , int splitPoint ) { assert ( isLeaf ( ) == newNode . isLeaf ( ) ) ; deleteAllEntries ( ) ; StringBuilder msg = LoggingConfiguration . DEBUG ? new StringBuilder ( 1000 ) : null ; for ( int i = 0 ; i < splitPoint ; i ++ ) { addEntry ( sorting . get ( i ) ) ; if ( msg != null ) { msg . append ( "n_" ) . append ( getPageID ( ) ) . append ( ' ' ) . append ( sorting . get ( i ) ) . append ( ' ' ) ; } } for ( int i = splitPoint ; i < sorting . size ( ) ; i ++ ) { newNode . addEntry ( sorting . get ( i ) ) ; if ( msg != null ) { msg . append ( "n_" ) . append ( newNode . getPageID ( ) ) . append ( ' ' ) . append ( sorting . get ( i ) ) . append ( ' ' ) ; } } if ( msg != null ) { Logging . getLogger ( this . getClass ( ) . getName ( ) ) . fine ( msg . toString ( ) ) ; } }
|
Redistribute entries according to the given sorting .
| 272
| 10
|
157,787
|
public static void ensureClusteringResult ( final Database db , final Result result ) { Collection < Clustering < ? > > clusterings = ResultUtil . filterResults ( db . getHierarchy ( ) , result , Clustering . class ) ; if ( clusterings . isEmpty ( ) ) { ResultUtil . addChildResult ( db , new ByLabelOrAllInOneClustering ( ) . run ( db ) ) ; } }
|
Ensure that the result contains at least one Clustering .
| 97
| 13
|
157,788
|
public static < A > double [ ] toPrimitiveDoubleArray ( A data , NumberArrayAdapter < ? , A > adapter ) { if ( adapter == DoubleArrayAdapter . STATIC ) { return ( ( double [ ] ) data ) . clone ( ) ; } final int len = adapter . size ( data ) ; double [ ] x = new double [ len ] ; for ( int i = 0 ; i < len ; i ++ ) { x [ i ] = adapter . getDouble ( data , i ) ; } return x ; }
|
Local copy see ArrayLikeUtil . toPrimitiveDoubleArray .
| 112
| 14
|
157,789
|
@ Override public void flush ( ) { try { out . flush ( ) ; } catch ( Exception ex ) { reportError ( null , ex , ErrorManager . FLUSH_FAILURE ) ; } try { err . flush ( ) ; } catch ( Exception ex ) { reportError ( null , ex , ErrorManager . FLUSH_FAILURE ) ; } }
|
Flush output streams
| 78
| 4
|
157,790
|
@ Override public void publish ( final LogRecord record ) { // determine destination final Writer destination ; if ( record . getLevel ( ) . intValue ( ) >= Level . WARNING . intValue ( ) ) { destination = this . err ; } else { destination = this . out ; } // format final String m ; // Progress records are handled specially. if ( record instanceof ProgressLogRecord ) { ProgressLogRecord prec = ( ProgressLogRecord ) record ; ptrack . addProgress ( prec . getProgress ( ) ) ; Collection < Progress > completed = ptrack . removeCompleted ( ) ; Collection < Progress > progresses = ptrack . getProgresses ( ) ; StringBuilder buf = new StringBuilder ( ) ; if ( ! completed . isEmpty ( ) ) { buf . append ( OutputStreamLogger . CARRIAGE_RETURN ) ; for ( Progress prog : completed ) { // TODO: use formatter, somehow? prog . appendToBuffer ( buf ) ; buf . append ( OutputStreamLogger . NEWLINE ) ; } } if ( ! progresses . isEmpty ( ) ) { boolean first = true ; buf . append ( OutputStreamLogger . CARRIAGE_RETURN ) ; for ( Progress prog : progresses ) { if ( first ) { first = false ; } else { buf . append ( ' ' ) ; } // TODO: use formatter, somehow? prog . appendToBuffer ( buf ) ; } } m = buf . toString ( ) ; } else { // choose an appropriate formatter final Formatter fmt ; // always format progress messages using the progress formatter. if ( record . getLevel ( ) . intValue ( ) >= Level . WARNING . intValue ( ) ) { // format errors using the error formatter fmt = errformat ; } else if ( record . getLevel ( ) . intValue ( ) <= Level . FINE . intValue ( ) ) { // format debug statements using the debug formatter. fmt = debugformat ; } else { // default to the message formatter. fmt = msgformat ; } try { m = fmt . format ( record ) ; } catch ( Exception ex ) { reportError ( null , ex , ErrorManager . FORMAT_FAILURE ) ; return ; } } // write try { destination . write ( m ) ; // always flush (although the streams should auto-flush already) destination . 
flush ( ) ; } catch ( Exception ex ) { reportError ( null , ex , ErrorManager . WRITE_FAILURE ) ; return ; } }
|
Publish a log record .
| 531
| 6
|
157,791
|
private boolean checkForNaNs ( NumberVector vec ) { for ( int i = 0 , d = vec . getDimensionality ( ) ; i < d ; i ++ ) { double v = vec . doubleValue ( i ) ; if ( v != v ) { // NaN! return true ; } } return false ; }
|
Check for NaN values .
| 69
| 6
|
157,792
|
public static Relation < String > guessLabelRepresentation ( Database database ) throws NoSupportedDataTypeException { try { Relation < ? extends ClassLabel > classrep = database . getRelation ( TypeUtil . CLASSLABEL ) ; if ( classrep != null ) { return new ConvertToStringView ( classrep ) ; } } catch ( NoSupportedDataTypeException e ) { // retry. } try { Relation < ? extends LabelList > labelsrep = database . getRelation ( TypeUtil . LABELLIST ) ; if ( labelsrep != null ) { return new ConvertToStringView ( labelsrep ) ; } } catch ( NoSupportedDataTypeException e ) { // retry. } try { Relation < String > stringrep = database . getRelation ( TypeUtil . STRING ) ; if ( stringrep != null ) { return stringrep ; } } catch ( NoSupportedDataTypeException e ) { // retry. } throw new NoSupportedDataTypeException ( "No label-like representation was found." ) ; }
|
Guess a potentially label - like representation preferring class labels .
| 226
| 12
|
157,793
|
public static ArrayModifiableDBIDs getObjectsByLabelMatch ( Database database , Pattern name_pattern ) { Relation < String > relation = guessLabelRepresentation ( database ) ; if ( name_pattern == null ) { return DBIDUtil . newArray ( ) ; } ArrayModifiableDBIDs ret = DBIDUtil . newArray ( ) ; for ( DBIDIter iditer = relation . iterDBIDs ( ) ; iditer . valid ( ) ; iditer . advance ( ) ) { if ( name_pattern . matcher ( relation . get ( iditer ) ) . find ( ) ) { ret . add ( iditer ) ; } } return ret ; }
|
Find object by matching their labels .
| 144
| 7
|
157,794
|
@ Override public void writeExternal ( ObjectOutput out ) throws IOException { super . writeExternal ( out ) ; out . writeObject ( conservativeApproximation ) ; }
|
Calls the super method and writes the conservative approximation of the knn distances of this entry to the specified stream .
| 36
| 23
|
157,795
|
@ Override public void readExternal ( ObjectInput in ) throws IOException , ClassNotFoundException { super . readExternal ( in ) ; conservativeApproximation = ( ApproximationLine ) in . readObject ( ) ; }
|
Calls the super method and reads the the conservative approximation of the knn distances of this entry from the specified input stream .
| 48
| 25
|
157,796
|
protected void updateDensities ( WritableDoubleDataStore rbod_score , DoubleDBIDList referenceDists ) { DoubleDBIDListIter it = referenceDists . iter ( ) ; for ( int l = 0 ; l < referenceDists . size ( ) ; l ++ ) { double density = computeDensity ( referenceDists , it , l ) ; // computeDensity modified the iterator, reset: it . seek ( l ) ; // NaN indicates the first run. if ( ! ( density > rbod_score . doubleValue ( it ) ) ) { rbod_score . putDouble ( it , density ) ; } } }
|
Update the density estimates for each object .
| 141
| 8
|
157,797
|
static void chooseRemaining ( Relation < ? extends NumberVector > relation , DBIDs ids , DistanceQuery < NumberVector > distQ , int k , List < NumberVector > means , WritableDoubleDataStore weights , double weightsum , Random random ) { while ( true ) { if ( weightsum > Double . MAX_VALUE ) { throw new IllegalStateException ( "Could not choose a reasonable mean - too many data points, too large distance sum?" ) ; } if ( weightsum < Double . MIN_NORMAL ) { LoggingUtil . warning ( "Could not choose a reasonable mean - to few data points?" ) ; } double r = random . nextDouble ( ) * weightsum ; while ( r <= 0 && weightsum > Double . MIN_NORMAL ) { r = random . nextDouble ( ) * weightsum ; // Try harder to not choose 0. } DBIDIter it = ids . iter ( ) ; while ( it . valid ( ) ) { if ( ( r -= weights . doubleValue ( it ) ) < 0 ) { break ; } it . advance ( ) ; } if ( ! it . valid ( ) ) { // Rare case, but happens due to floating math weightsum -= r ; // Decrease continue ; // Retry } // Add new mean: final NumberVector newmean = relation . get ( it ) ; means . add ( newmean ) ; if ( means . size ( ) >= k ) { break ; } // Update weights: weights . putDouble ( it , 0. ) ; weightsum = updateWeights ( weights , ids , newmean , distQ ) ; } }
|
Choose remaining means weighted by distance .
| 344
| 7
|
157,798
|
private double factor ( int dimension ) { return maxima [ dimension ] > minima [ dimension ] ? maxima [ dimension ] - minima [ dimension ] : maxima [ dimension ] > 0 ? maxima [ dimension ] : 1 ; }
|
Returns a factor for normalization in a certain dimension .
| 50
| 11
|
157,799
|
protected double derivative ( int i , NumberVector v ) { final int dim = v . getDimensionality ( ) ; if ( dim == 1 ) { return 0. ; } // Adjust for boundary conditions, as per the article: i = ( i == 0 ) ? 1 : ( i == dim - 1 ) ? dim - 2 : i ; return ( v . doubleValue ( i ) - v . doubleValue ( i - 1 ) + ( v . doubleValue ( i + 1 ) - v . doubleValue ( i - 1 ) ) * .5 ) * .5 ; }
|
Given a NumberVector and the position of an element approximates the gradient of given element .
| 122
| 18
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.