idx
int64
0
165k
question
stringlengths
73
4.15k
target
stringlengths
5
918
len_question
int64
21
890
len_target
int64
3
255
157,600
/**
 * Ensure that the given DBIDs are array-indexable.
 *
 * @param ids IDs to check
 * @return the same object if it already is an {@code ArrayDBIDs}, otherwise an array copy
 */
public static ArrayDBIDs ensureArray(DBIDs ids) {
  if(ids instanceof ArrayDBIDs) {
    return (ArrayDBIDs) ids;
  }
  return newArray(ids);
}
Ensure that the given DBIDs are array - indexable .
39
13
157,601
/**
 * Ensure that the given DBIDs support fast contains operations.
 *
 * @param ids IDs to check
 * @return the same object if it already is a {@code SetDBIDs}, otherwise a hash set copy
 */
public static SetDBIDs ensureSet(DBIDs ids) {
  if(ids instanceof SetDBIDs) {
    return (SetDBIDs) ids;
  }
  return newHashSet(ids);
}
Ensure that the given DBIDs support fast contains operations .
40
12
157,602
public static ModifiableDBIDs ensureModifiable ( DBIDs ids ) { return ids instanceof ModifiableDBIDs ? ( ModifiableDBIDs ) ids : // ids instanceof HashSetDBIDs ? newHashSet ( ids ) : newArray ( ids ) ; }
Ensure modifiable .
61
5
157,603
/**
 * Make a DBID pair.
 *
 * @param id1 first ID
 * @param id2 second ID
 * @return new pair of the two IDs
 */
public static DBIDPair newPair(DBIDRef id1, DBIDRef id2) {
  // Delegate to the configured factory singleton.
  final DBIDFactory factory = DBIDFactory.FACTORY;
  return factory.newPair(id1, id2);
}
Make a DBID pair .
44
6
157,604
/**
 * Make a DoubleDBIDPair.
 *
 * @param val double value (e.g. a distance)
 * @param id ID to pair with the value
 * @return new (double, DBID) pair
 */
public static DoubleDBIDPair newPair(double val, DBIDRef id) {
  // Delegate to the configured factory singleton.
  final DBIDFactory factory = DBIDFactory.FACTORY;
  return factory.newPair(val, id);
}
Make a DoubleDBIDPair .
39
8
157,605
/**
 * Sort the full array using the given comparator.
 *
 * @param data array of integer DBIDs to sort
 * @param comp comparator to use
 */
public static void sort(int[] data, Comparator<? super DBIDRef> comp) {
  // Sort the entire range [0, data.length).
  sort(data, 0, data.length, comp);
}
Sort the full array using the given comparator .
36
10
157,606
/**
 * Compare two elements by position, using reusable variable objects.
 *
 * Note: both variables are mutated (repointed) by this call.
 *
 * @param i1 first reusable variable
 * @param p1 position for the first variable
 * @param i2 second reusable variable
 * @param p2 position for the second variable
 * @param comp comparator to apply
 * @return comparison result
 */
private static int compare(IntegerDBIDVar i1, int p1, IntegerDBIDVar i2, int p2, Comparator<? super DBIDRef> comp) {
  // Repoint the reusable variables at the requested positions first.
  i1.internalSetIndex(p1);
  i2.internalSetIndex(p2);
  return comp.compare(i1, i2);
}
Compare two elements .
74
4
157,607
@ Override protected int computeHeight ( ) { N node = getRoot ( ) ; int tHeight = 1 ; // compute height while ( ! node . isLeaf ( ) && node . getNumEntries ( ) != 0 ) { SpatialEntry entry = node . getEntry ( 0 ) ; node = getNode ( entry ) ; tHeight ++ ; } return tHeight ; }
Computes the height of this XTree. Is called by the constructor, and should be overwritten by subclasses if necessary.
81
26
157,608
/**
 * Writes all supernodes to the end of the file. This is only supposed to be
 * used for a final saving of an XTree; adding further pages overwrites this
 * data again.
 *
 * Steps: require a {@code PersistentPageFile}; record the supernode offset
 * and element count in the XTreeHeader and rewrite the header; seek behind
 * the reserved and regular pages; then serialize each supernode into a
 * zero-padded block of whole pages (ceil(capacity / dirCapacity) pages) and
 * append it, failing if the serialized form does not fit.
 *
 * @return number of bytes written for the supernodes
 * @throws IOException on write errors
 * @throws IllegalStateException if the page file is not persistent, or a
 *         supernode does not fit into its computed number of pages
 */
public long commit ( ) throws IOException { final PageFile < N > file = super . getFile ( ) ; if ( ! ( file instanceof PersistentPageFile ) ) { throw new IllegalStateException ( "Trying to commit a non-persistent XTree" ) ; } long npid = file . getNextPageID ( ) ; XTreeHeader ph = ( XTreeHeader ) ( ( PersistentPageFile < ? > ) file ) . getHeader ( ) ; long offset = ( ph . getReservedPages ( ) + npid ) * ph . getPageSize ( ) ; ph . setSupernode_offset ( npid * ph . getPageSize ( ) ) ; ph . setNumberOfElements ( num_elements ) ; RandomAccessFile ra_file = ( ( PersistentPageFile < ? > ) file ) . getFile ( ) ; ph . writeHeader ( ra_file ) ; ra_file . seek ( offset ) ; long nBytes = 0 ; for ( Iterator < N > iterator = supernodes . values ( ) . iterator ( ) ; iterator . hasNext ( ) ; ) { N supernode = iterator . next ( ) ; ByteArrayOutputStream baos = new ByteArrayOutputStream ( ) ; ObjectOutputStream oos = new ObjectOutputStream ( baos ) ; supernode . writeSuperNode ( oos ) ; oos . close ( ) ; baos . close ( ) ; byte [ ] array = baos . toByteArray ( ) ; byte [ ] sn_array = new byte [ getPageSize ( ) * ( int ) Math . ceil ( ( double ) supernode . getCapacity ( ) / dirCapacity ) ] ; if ( array . length > sn_array . length ) { throw new IllegalStateException ( "Supernode is too large for fitting in " + ( ( int ) Math . ceil ( ( double ) supernode . getCapacity ( ) / dirCapacity ) ) + " pages of total size " + sn_array . length ) ; } System . arraycopy ( array , 0 , sn_array , 0 , array . length ) ; // file.countWrite(); ra_file . write ( sn_array ) ; nBytes += sn_array . length ; } return nBytes ; }
Writes all supernodes to the end of the file . This is only supposed to be used for a final saving of an XTree . If another page is added to this tree the supernodes written to file by this operation are over - written .
485
52
157,609
/**
 * Marks the nodes with the specified ids as expanded: records that entry2's
 * page has been expanded from entry1's page.
 *
 * @param entry1 source entry
 * @param entry2 entry that was expanded
 */
public void setExpanded(SpatialEntry entry1, SpatialEntry entry2) {
  final int pid1 = getPageID(entry1);
  IntSet known = expanded.get(pid1);
  if(known == null) {
    // First expansion recorded for this page.
    known = new IntOpenHashSet();
    expanded.put(pid1, known);
  }
  known.add(getPageID(entry2));
}
Marks the nodes with the specified ids as expanded .
88
12
157,610
/**
 * Returns the nodes which are already expanded with the specified node.
 *
 * @param entry entry to look up
 * @return set of expanded page ids, or an empty set when none were recorded
 */
public IntSet getExpanded(SpatialEntry entry) {
  final IntSet exp = expanded.get(getPageID(entry));
  if(exp == null) {
    return IntSets.EMPTY_SET;
  }
  return exp;
}
Returns the nodes which are already expanded with the specified node .
48
12
157,611
/**
 * Increment the value of a bin, growing the histogram when the coordinate
 * falls outside the current range.
 *
 * Three cases: (a) bin &lt; 0: shift the data right by -bin (reallocating
 * when the array is too small, otherwise shifting in place and zeroing the
 * head), store the value at position 0, and adjust offset and size;
 * (b) bin &gt;= data.length: reallocate a larger array, store at 'bin', grow
 * size, and reset the cached 'max' (set to Double.MAX_VALUE per the
 * "Unset max value when resizing" comment); (c) otherwise add the value in
 * place, extending 'size' when the bin lies beyond the current size.
 *
 * @param coord coordinate identifying the bin
 * @param val value to add to that bin
 */
public void increment ( double coord , double val ) { int bin = getBinNr ( coord ) ; if ( bin < 0 ) { if ( size - bin > data . length ) { // Reallocate. TODO: use an arraylist-like grow strategy! double [ ] tmpdata = new double [ growSize ( data . length , size - bin ) ] ; System . arraycopy ( data , 0 , tmpdata , - bin , size ) ; data = tmpdata ; } else { // Shift in place and clear head System . arraycopy ( data , 0 , data , - bin , size ) ; Arrays . fill ( data , 0 , - bin , ( double ) 0 ) ; } data [ 0 ] = val ; // Note that bin is negative, -bin is the shift offset! assert ( data . length >= size - bin ) ; offset -= bin ; size -= bin ; // TODO: modCounter++; and have iterators fast-fail } else if ( bin >= data . length ) { double [ ] tmpdata = new double [ growSize ( data . length , bin + 1 ) ] ; System . arraycopy ( data , 0 , tmpdata , 0 , size ) ; tmpdata [ bin ] = val ; data = tmpdata ; size = bin + 1 ; // TODO: modCounter++; and have iterators fast-fail // Unset max value when resizing max = Double . MAX_VALUE ; } else { if ( bin >= size ) { // TODO: reset bins to 0 first? size = bin + 1 ; } data [ bin ] += val ; } }
Increment the value of a bin .
338
8
157,612
/**
 * Get the value at a particular position.
 *
 * @param coord coordinate identifying the bin
 * @return stored value, or 0 when the coordinate is outside the histogram
 */
public double get(double coord) {
  final int bin = getBinNr(coord);
  if(bin < 0 || bin >= size) {
    return 0; // out of range
  }
  return data[bin];
}
Get the value at a particular position .
39
8
157,613
/**
 * Add a new border to the existing borders.
 *
 * Sorts the border array, deduplicates consecutive entries with the same
 * core in place, and checks whether the given border's core is already
 * present. If present and only one distinct entry remains, the assignment
 * collapses to that single Border ('cs' is nulled to prevent further use);
 * if present with several entries, the deduplicated array is kept and this
 * is returned. Otherwise the new border is appended.
 *
 * @param border border to add
 * @return the resulting assignment (this, or a single Border)
 */
public Assignment update ( Border border ) { Arrays . sort ( cs ) ; int j = 1 ; boolean found = ( cs [ 0 ] . core == border . core ) ; for ( int i = 1 ; i < cs . length ; i ++ ) { if ( cs [ i ] . core != cs [ i - 1 ] . core ) { cs [ j ++ ] = cs [ i ] ; } found |= ( cs [ i ] . core == border . core ) ; } if ( found ) { if ( j == 1 ) { Border r = cs [ 0 ] ; cs = null ; // Prevent further use return r ; } if ( j < cs . length ) { cs = Arrays . copyOf ( cs , j ) ; } return this ; } if ( j + 1 != cs . length ) { cs = Arrays . copyOf ( cs , j + 1 ) ; } cs [ j ] = border ; return this ; }
Add a new border to the existing borders .
199
9
157,614
public Core getCore ( ) { Core a = cs [ 0 ] . core ; for ( int i = 1 ; i < cs . length ; i ++ ) { Core v = cs [ i ] . core ; a = a . num > v . num ? a : v ; // max, of negative values } return a ; }
Get the core this is assigned to .
69
8
157,615
/**
 * Find the current cluster assignment of an object.
 *
 * @param clusters list of k clusters
 * @param id object to locate
 * @return index of the containing cluster, or -1 when not assigned
 */
protected int currentCluster(List<? extends ModifiableDBIDs> clusters, DBIDRef id) {
  for(int c = 0; c < k; c++) {
    if(clusters.get(c).contains(id)) {
      return c;
    }
  }
  return -1; // not assigned to any cluster
}
Find the current cluster assignment .
63
6
157,616
/**
 * Compute the final INFLO scores.
 *
 * Pruned objects and objects whose kNN distance is 0 receive the neutral
 * score 1. For every other object, the score is its kNN distance times the
 * mean density 1/kdist over the union of its kNN and its reverse-kNN sets
 * (skipping the object itself); a neighbor with kdist &lt;= 0 forces an
 * infinite sum. Scores are written to 'inflos' and their range is tracked
 * in 'inflominmax'. Progress is logged when verbose.
 *
 * @param relation data relation
 * @param pruned objects to skip (assigned score 1)
 * @param knnq kNN query
 * @param rNNminuskNNs precomputed reverse-kNN (minus kNN) sets per object
 * @param inflos output score store
 * @param inflominmax output score range tracker
 */
protected void computeINFLO ( Relation < O > relation , ModifiableDBIDs pruned , KNNQuery < O > knnq , WritableDataStore < ModifiableDBIDs > rNNminuskNNs , WritableDoubleDataStore inflos , DoubleMinMax inflominmax ) { FiniteProgress prog = LOG . isVerbose ( ) ? new FiniteProgress ( "Computing INFLOs" , relation . size ( ) , LOG ) : null ; HashSetModifiableDBIDs set = DBIDUtil . newHashSet ( ) ; for ( DBIDIter iter = relation . iterDBIDs ( ) ; iter . valid ( ) ; iter . advance ( ) ) { if ( pruned . contains ( iter ) ) { inflos . putDouble ( iter , 1. ) ; inflominmax . put ( 1. ) ; LOG . incrementProcessed ( prog ) ; continue ; } final KNNList knn = knnq . getKNNForDBID ( iter , kplus1 ) ; if ( knn . getKNNDistance ( ) == 0. ) { inflos . putDouble ( iter , 1. ) ; inflominmax . put ( 1. ) ; LOG . incrementProcessed ( prog ) ; continue ; } set . clear ( ) ; set . addDBIDs ( knn ) ; set . addDBIDs ( rNNminuskNNs . get ( iter ) ) ; // Compute mean density of NN \cup RNN double sum = 0. ; int c = 0 ; for ( DBIDIter niter = set . iter ( ) ; niter . valid ( ) ; niter . advance ( ) ) { if ( DBIDUtil . equal ( iter , niter ) ) { continue ; } final double kdist = knnq . getKNNForDBID ( niter , kplus1 ) . getKNNDistance ( ) ; if ( kdist <= 0 ) { sum = Double . POSITIVE_INFINITY ; c ++ ; break ; } sum += 1. / kdist ; c ++ ; } sum *= knn . getKNNDistance ( ) ; final double inflo = sum == 0 ? 1. : sum / c ; inflos . putDouble ( iter , inflo ) ; inflominmax . put ( inflo ) ; LOG . incrementProcessed ( prog ) ; } LOG . ensureCompleted ( prog ) ; }
Compute the final INFLO scores .
514
9
157,617
/**
 * Run the ODIN algorithm: score each object by its in-degree in the kNN
 * graph, i.e. how often it appears among other objects' k nearest neighbors
 * (self-pairs excluded). The counts are wrapped as an inverted outlier
 * score, so a low in-degree marks an outlier.
 *
 * @param database database to query
 * @param relation relation to process
 * @return outlier detection result
 */
public OutlierResult run ( Database database , Relation < O > relation ) { // Get the query functions: DistanceQuery < O > dq = database . getDistanceQuery ( relation , getDistanceFunction ( ) ) ; KNNQuery < O > knnq = database . getKNNQuery ( dq , k ) ; // Get the objects to process, and a data storage for counting and output: DBIDs ids = relation . getDBIDs ( ) ; WritableDoubleDataStore scores = DataStoreUtil . makeDoubleStorage ( ids , DataStoreFactory . HINT_DB , 0. ) ; // Process all objects for ( DBIDIter iter = ids . iter ( ) ; iter . valid ( ) ; iter . advance ( ) ) { // Find the nearest neighbors (using an index, if available!) KNNList neighbors = knnq . getKNNForDBID ( iter , k ) ; // For each neighbor, except ourselves, increase the in-degree: for ( DBIDIter nei = neighbors . iter ( ) ; nei . valid ( ) ; nei . advance ( ) ) { if ( DBIDUtil . equal ( iter , nei ) ) { continue ; } scores . put ( nei , scores . doubleValue ( nei ) + 1 ) ; } } // Compute maximum double min = Double . POSITIVE_INFINITY , max = 0.0 ; for ( DBIDIter iter = ids . iter ( ) ; iter . valid ( ) ; iter . advance ( ) ) { min = Math . min ( min , scores . doubleValue ( iter ) ) ; max = Math . max ( max , scores . doubleValue ( iter ) ) ; } // Wrap the result and add metadata. // By actually specifying theoretical min, max and baseline, we get a better // visualization (try it out - or see the screenshots in the tutorial)! OutlierScoreMeta meta = new InvertedOutlierScoreMeta ( min , max , 0. , ids . size ( ) - 1 , k ) ; DoubleRelation rel = new MaterializedDoubleRelation ( "ODIN In-Degree" , "odin" , scores , ids ) ; return new OutlierResult ( meta , rel ) ; }
Run the ODIN algorithm
479
5
157,618
/**
 * Get a particular stock icon, using a soft-reference cache.
 *
 * @param name icon base name (".png" is appended for resource lookup)
 * @return the icon, or null (with a warning logged) when the resource is missing
 */
public static Icon getStockIcon(String name) {
  // Cache lookup first; SoftReferences may have been cleared by the GC.
  final SoftReference<Icon> ref = iconcache.get(name);
  final Icon cached = (ref != null) ? ref.get() : null;
  if(cached != null) {
    return cached;
  }
  // Load from the classpath resources next to StockIcon:
  java.net.URL imgURL = StockIcon.class.getResource(name + ".png");
  if(imgURL == null) {
    LoggingUtil.warning("Could not find stock icon: " + name);
    return null;
  }
  final Icon icon = new ImageIcon(imgURL);
  iconcache.put(name, new SoftReference<>(icon));
  return icon;
}
Get a particular stock icon .
140
6
157,619
@ Override public void initializeFromFile ( TreeIndexHeader header , PageFile < FlatRStarTreeNode > file ) { super . initializeFromFile ( header , file ) ; // reconstruct root int nextPageID = file . getNextPageID ( ) ; dirCapacity = nextPageID ; root = createNewDirectoryNode ( ) ; for ( int i = 1 ; i < nextPageID ; i ++ ) { FlatRStarTreeNode node = getNode ( i ) ; root . addDirectoryEntry ( createNewDirectoryEntry ( node ) ) ; } if ( LOG . isDebugging ( ) ) { LOG . debugFine ( "root: " + root + " with " + nextPageID + " leafNodes." ) ; } }
Initializes the flat RTree from an existing persistent file .
157
12
157,620
/**
 * Bulk-load the cover tree.
 *
 * Chooses a scale from the maximum distance of the remaining elements
 * (capped at maxScale). Builds a leaf when points coincide (max &lt;= 0),
 * the scale reaches scaleBottom, or fewer than 'truncate' elements remain.
 * Otherwise the elements outside the cover radius are split off as
 * candidates; if none exist, construction retries at the next scale.
 * Elements inside the cover stay with the routing object (recursed at the
 * next scale). Each candidate then collects the elements it covers and
 * becomes either a singleton or a recursively built child. Finally, when
 * the routing object had no elements of its own, it is attached as a
 * regular singleton (unless the node is a leaf).
 *
 * NOTE(review): 'elems' is cleared and recycled while processing the
 * candidates; callers must not rely on its contents afterwards.
 *
 * @param cur current routing object
 * @param maxScale maximum scale to use
 * @param parentDist distance to the parent routing object
 * @param elems elements covered by cur (consumed/recycled!)
 * @return new tree node
 */
protected Node bulkConstruct ( DBIDRef cur , int maxScale , double parentDist , ModifiableDoubleDBIDList elems ) { assert ( ! elems . contains ( cur ) ) ; final double max = maxDistance ( elems ) ; final int scale = Math . min ( distToScale ( max ) - 1 , maxScale ) ; final int nextScale = scale - 1 ; // Leaf node, because points coincide, we are too deep, or have too few // elements remaining: if ( max <= 0 || scale <= scaleBottom || elems . size ( ) < truncate ) { return new Node ( cur , max , parentDist , elems ) ; } // Find neighbors in the cover of the current object: ModifiableDoubleDBIDList candidates = DBIDUtil . newDistanceDBIDList ( ) ; excludeNotCovered ( elems , scaleToDist ( scale ) , candidates ) ; // If no elements were not in the cover, build a compact tree: if ( candidates . size ( ) == 0 ) { LOG . warning ( "Scale not chosen appropriately? " + max + " " + scaleToDist ( scale ) ) ; return bulkConstruct ( cur , nextScale , parentDist , elems ) ; } // We will have at least one other child, so build the parent: Node node = new Node ( cur , max , parentDist ) ; // Routing element now is a singleton: final boolean curSingleton = elems . size ( ) == 0 ; if ( ! curSingleton ) { // Add node for the routing object: node . children . add ( bulkConstruct ( cur , nextScale , 0 , elems ) ) ; } final double fmax = scaleToDist ( nextScale ) ; // Build additional cover nodes: for ( DoubleDBIDListIter it = candidates . iter ( ) ; it . valid ( ) ; ) { assert ( it . getOffset ( ) == 0 ) ; DBID t = DBIDUtil . deref ( it ) ; elems . clear ( ) ; // Recycle. collectByCover ( it , candidates , fmax , elems ) ; assert ( DBIDUtil . equal ( t , it ) ) : "First element in candidates must not change!" ; if ( elems . size ( ) == 0 ) { // Singleton node . singletons . add ( it . doubleValue ( ) , it ) ; } else { // Build a full child node: node . children . add ( bulkConstruct ( it , nextScale , it . doubleValue ( ) , elems ) ) ; } candidates . 
removeSwap ( 0 ) ; } assert ( candidates . size ( ) == 0 ) ; // Routing object is not yet handled: if ( curSingleton ) { if ( node . isLeaf ( ) ) { node . children = null ; // First in leaf is enough. } else { node . singletons . add ( parentDist , cur ) ; // Add as regular singleton. } } // TODO: improve recycling of lists? return node ; }
Bulk - load the cover tree .
640
8
157,621
/**
 * Evaluate quadratic weight; stddev is ignored.
 *
 * @param distance query distance
 * @param max maximum distance (non-positive yields weight 1.0)
 * @param stddev ignored
 * @return weight 1.0 - 0.9 * (distance/max)^2
 */
@Override
public double getWeight(double distance, double max, double stddev) {
  if(max <= 0) {
    return 1.0;
  }
  final double rel = distance / max;
  return 1.0 - 0.9 * rel * rel;
}
Evaluate quadratic weight . stddev is ignored .
60
14
157,622
/**
 * Maximum (Euclidean) distance of two minimum bounding rectangles, handling
 * different dimensionalities: extra dimensions of the higher-dimensional
 * box are measured against the origin.
 *
 * @param mbr1 first bounding box
 * @param mbr2 second bounding box
 * @return maximum distance
 */
public double maxDist(SpatialComparable mbr1, SpatialComparable mbr2) {
  final int dim1 = mbr1.getDimensionality(), dim2 = mbr2.getDimensionality();
  final int mindim = dim1 < dim2 ? dim1 : dim2;
  double agg = 0.;
  // Shared dimensions: largest gap between opposite box faces.
  for(int d = 0; d < mindim; d++) {
    final double g1 = mbr1.getMax(d) - mbr2.getMin(d);
    final double g2 = mbr2.getMax(d) - mbr1.getMin(d);
    final double delta = g1 > g2 ? g1 : g2;
    agg += delta * delta;
  }
  // Surplus dimensions of mbr1, compared against 0:
  for(int d = mindim; d < dim1; d++) {
    final double a1 = Math.abs(mbr1.getMin(d)), a2 = Math.abs(mbr1.getMax(d));
    final double delta = a1 > a2 ? a1 : a2;
    agg += delta * delta;
  }
  // Surplus dimensions of mbr2, compared against 0:
  for(int d = mindim; d < dim2; d++) {
    final double a1 = Math.abs(mbr2.getMin(d)), a2 = Math.abs(mbr2.getMax(d));
    final double delta = a1 > a2 ? a1 : a2;
    agg += delta * delta;
  }
  return FastMath.sqrt(agg);
}
Maximum distance of two objects .
324
6
157,623
/**
 * Update the Parameter list from the collected options of an ELKI context.
 *
 * For each tracked parameter: derive its display value (defaults are
 * prefixed with STRING_USE_DEFAULT, unset flags show Flag.NOT_SET, other
 * unset parameters show ""); set status bits for optional, default-value,
 * incomplete (empty value with neither default nor optional) and invalid
 * (isValid fails or throws ParameterException); compute the nesting depth
 * by walking the parent chain (capped at 10); then append a Node.
 *
 * @param track tracked parameters collected from the ELKI context
 */
public synchronized void updateFromTrackParameters ( TrackParameters track ) { parameters . clear ( ) ; for ( TrackedParameter p : track . getAllParameters ( ) ) { Parameter < ? > option = p . getParameter ( ) ; String value = null ; if ( option . isDefined ( ) ) { if ( option . tookDefaultValue ( ) ) { value = DynamicParameters . STRING_USE_DEFAULT + option . getDefaultValueAsString ( ) ; } else { value = option . getValueAsString ( ) ; } } if ( value == null ) { value = ( option instanceof Flag ) ? Flag . NOT_SET : "" ; } int bits = 0 ; if ( option . isOptional ( ) ) { bits |= BIT_OPTIONAL ; } if ( option . hasDefaultValue ( ) && option . tookDefaultValue ( ) ) { bits |= BIT_DEFAULT_VALUE ; } if ( value . length ( ) <= 0 ) { if ( ( bits & BIT_DEFAULT_VALUE ) == 0 && ( bits & BIT_OPTIONAL ) == 0 ) { bits |= BIT_INCOMPLETE ; } } else { try { if ( ! option . tookDefaultValue ( ) && ! option . isValid ( value ) ) { bits |= BIT_INVALID ; } } catch ( ParameterException e ) { bits |= BIT_INVALID ; } } int depth = 0 ; { Object pos = track . getParent ( option ) ; while ( pos != null ) { pos = track . getParent ( pos ) ; depth += 1 ; if ( depth > 10 ) { break ; } } } parameters . add ( new Node ( option , value , bits , depth ) ) ; } }
Update the Parameter list from the collected options of an ELKI context
369
14
157,624
/**
 * Add a single parameter to the list.
 *
 * @param option the parameter
 * @param value its display value
 * @param bits status bits
 * @param depth nesting depth
 */
public synchronized void addParameter(Parameter<?> option, String value, int bits, int depth) {
  final Node entry = new Node(option, value, bits, depth);
  parameters.add(entry);
}
Add a single parameter to the list
41
7
157,625
/**
 * Collect all clustering results from a Result.
 *
 * @param r result to inspect
 * @return the result itself when it is a clustering, all clusterings in its
 *         hierarchy when it is hierarchical, otherwise an empty list
 */
public static List<Clustering<? extends Model>> getClusteringResults(Result r) {
  if(r instanceof Clustering<?>) {
    // Single clustering: wrap it in a one-element list.
    List<Clustering<?>> single = new ArrayList<>(1);
    single.add((Clustering<?>) r);
    return single;
  }
  if(r instanceof HierarchicalResult) {
    return ResultUtil.filterResults(((HierarchicalResult) r).getHierarchy(), r, Clustering.class);
  }
  return Collections.emptyList();
}
Collect all clustering results from a Result
133
8
157,626
private static double [ ] randomLatitudeLongitude ( Random r ) { // Make marginally more realistic looking data by non-uniformly sampling // latitude, since Earth is a sphere, and there is not much at the poles double lat = Math . pow ( 1. - r . nextDouble ( ) * 2. , 2 ) / 2. * 180 ; double lng = ( .5 - r . nextDouble ( ) ) * 360. ; return new double [ ] { lat , lng } ; }
Generate random coordinates .
106
5
157,627
/**
 * Computes the absolute Pearson correlation distance for two given feature
 * vectors: 1 minus the absolute correlation coefficient.
 *
 * @param v1 first vector
 * @param v2 second vector
 * @return distance in [0, 1]
 */
@Override
public double distance(NumberVector v1, NumberVector v2) {
  final double r = PearsonCorrelation.coefficient(v1, v2);
  return 1 - Math.abs(r);
}
Computes the absolute Pearson correlation distance for two given feature vectors .
40
13
157,628
private long inverse ( double current ) { // Represent to base b. short [ ] digits = new short [ maxi ] ; for ( int j = 0 ; j < maxi ; j ++ ) { current *= base ; digits [ j ] = ( short ) current ; current -= digits [ j ] ; if ( current <= 1e-10 ) { break ; } } long inv = 0 ; for ( int j = maxi - 1 ; j >= 0 ; j -- ) { inv = inv * base + digits [ j ] ; } return inv ; }
Compute the inverse with respect to the given base .
117
11
157,629
/**
 * Compute the radical inverse of i: mirror its base-'base' digits around
 * the decimal point.
 *
 * @param i index to invert
 * @return radical inverse in [0, 1)
 */
private double radicalInverse(long i) {
  final double invBase = 1.0 / (double) base;
  double scale = invBase;
  double result = 0.0;
  for(long rest = i; rest > 0; rest /= base) {
    result += scale * (double) (rest % base);
    scale *= invBase;
  }
  return result;
}
Compute the radical inverse of i .
69
8
157,630
/**
 * Compute the next radical inverse incrementally.
 *
 * Every MAXFAST steps the value is recomputed exactly via radicalInverse()
 * to bound the accumulated floating-point error of the fast path. The fast
 * path simply adds 'invbase'; when that would reach ALMOST_ONE, a carry is
 * propagated by finding the first digit position that does not overflow.
 *
 * @return the next radical inverse value (also stored in 'current')
 */
private double nextRadicalInverse ( ) { counter ++ ; // Do at most MAXFAST appromate steps if ( counter >= MAXFAST ) { counter = 0 ; inverse += MAXFAST ; current = radicalInverse ( inverse ) ; return current ; } // Fast approximation: double nextInverse = current + invbase ; if ( nextInverse < ALMOST_ONE ) { current = nextInverse ; return current ; } else { double digit1 = invbase , digit2 = invbase * invbase ; while ( current + digit2 >= ALMOST_ONE ) { digit1 = digit2 ; digit2 *= invbase ; } current += ( digit1 - 1.0 ) + digit2 ; return current ; } }
Compute the next radical inverse .
158
7
157,631
/**
 * Returns a string representation of the dimensions of this subspace:
 * the (1-based) set dimensions joined by 'sep', without a trailing
 * separator.
 *
 * NOTE(review): the ' ' character literals appended at the start and the
 * end look like garbled bracket characters from the original source -
 * TODO confirm against upstream.
 *
 * @param sep separator string between dimensions
 * @return string of dimensions
 */
public String dimensonsToString ( String sep ) { StringBuilder result = new StringBuilder ( 100 ) . append ( ' ' ) ; for ( int dim = BitsUtil . nextSetBit ( dimensions , 0 ) ; dim >= 0 ; dim = BitsUtil . nextSetBit ( dimensions , dim + 1 ) ) { result . append ( dim + 1 ) . append ( sep ) ; } if ( result . length ( ) > sep . length ( ) ) { // Un-append last separator result . setLength ( result . length ( ) - sep . length ( ) ) ; } return result . append ( ' ' ) . toString ( ) ; }
Returns a string representation of the dimensions of this subspace .
140
12
157,632
public boolean isSubspace ( Subspace subspace ) { return this . dimensionality <= subspace . dimensionality && // BitsUtil . intersectionSize ( dimensions , subspace . dimensions ) == dimensionality ; }
Returns true if this subspace is a subspace of the specified subspace i . e . if the set of dimensions building this subspace are contained in the set of dimensions building the specified subspace .
44
41
157,633
/**
 * Compute the overlap volume of a two-way split assignment: builds one
 * bounding box per partition (as indicated by the assignment bits) and
 * returns their overlap.
 *
 * @param entries entry array
 * @param getter adapter for accessing the entries
 * @param assign assignment bit set (set bit = first partition)
 * @return overlap of the two partition bounding boxes
 * @throws AbortException when one partition is empty
 */
protected <E extends SpatialComparable, A> double computeOverlap(A entries, ArrayAdapter<E, A> getter, long[] assign) {
  ModifiableHyperBoundingBox boxA = null, boxB = null;
  final int n = getter.size(entries);
  for(int i = 0; i < n; i++) {
    final E e = getter.get(entries, i);
    if(BitsUtil.get(assign, i)) {
      if(boxA == null) {
        boxA = new ModifiableHyperBoundingBox(e);
      }
      else {
        boxA.extend(e);
      }
    }
    else if(boxB == null) {
      boxB = new ModifiableHyperBoundingBox(e);
    }
    else {
      boxB.extend(e);
    }
  }
  if(boxA == null || boxB == null) {
    throw new AbortException("Invalid state in split: one of the sets is empty.");
  }
  return SpatialUtil.overlap(boxA, boxB);
}
Compute overlap of assignment
235
5
157,634
/**
 * Sort the array using a binary split in the current dimension, then
 * recurse with the next dimension.
 *
 * Uses quickselect to place the median at 'mid' for the dimension chosen
 * via 'depth' (indirected through 'dims' when non-null), then recurses on
 * both halves with the next dimension, skipping sub-ranges of fewer than
 * two elements.
 *
 * @param objs objects to sort (rearranged in place)
 * @param start interval start (inclusive)
 * @param end interval end (exclusive)
 * @param depth current dimension index
 * @param numdim number of dimensions to cycle through
 * @param dims optional dimension permutation, may be null
 * @param comp comparator (its dimension is mutated by this method!)
 */
private void binarySplitSort ( List < ? extends SpatialComparable > objs , final int start , final int end , int depth , final int numdim , int [ ] dims , Sorter comp ) { final int mid = start + ( ( end - start ) >>> 1 ) ; // Make invariant comp . setDimension ( dims != null ? dims [ depth ] : depth ) ; QuickSelect . quickSelect ( objs , comp , start , end , mid ) ; // Recurse final int nextdim = ( depth + 1 ) % numdim ; if ( start < mid - 1 ) { binarySplitSort ( objs , start , mid , nextdim , numdim , dims , comp ) ; } if ( mid + 2 < end ) { binarySplitSort ( objs , mid + 1 , end , nextdim , numdim , dims , comp ) ; } }
Sort the array using a binary split in dimension curdim then recurse with the next dimension .
189
19
157,635
/**
 * Create an SVG element in the appropriate (SVG) namespace.
 *
 * @param document document to create the element for
 * @param name element tag name
 * @return new element
 */
public static Element svgElement(Document document, String name) {
  // Always create elements in the SVG namespace.
  return document.createElementNS(SVGConstants.SVG_NAMESPACE_URI, name);
}
Create a SVG element in appropriate namespace
38
7
157,636
/**
 * Set the SVG "style" attribute of an element.
 *
 * @param el element to modify
 * @param d style string to set
 */
public static void setStyle(Element el, String d) {
  el.setAttribute(SVGConstants.SVG_STYLE_ATTRIBUTE, d);
}
Set a SVG style attribute
37
5
157,637
/**
 * Add a CSS class to an Element, avoiding duplicates.
 *
 * @param e element to modify
 * @param cssclass class name to add
 */
public static void addCSSClass(Element e, String cssclass) {
  final String oldval = e.getAttribute(SVGConstants.SVG_CLASS_ATTRIBUTE);
  if(oldval == null || oldval.length() == 0) {
    setAtt(e, SVGConstants.SVG_CLASS_ATTRIBUTE, cssclass);
    return;
  }
  // Already present? Then do nothing.
  for(String existing : oldval.split(" ")) {
    if(existing.equals(cssclass)) {
      return;
    }
  }
  setAtt(e, SVGConstants.SVG_CLASS_ATTRIBUTE, oldval + " " + cssclass);
}
Add a CSS class to an Element .
150
8
157,638
/**
 * Remove a CSS class from an Element.
 *
 * Fast paths handle attributes containing exactly one or two classes,
 * removing the attribute entirely when it would become empty; in the
 * general case (three or more classes) the class list is rebuilt without
 * the removed class. Note that whitespace is only normalized in the
 * general case.
 *
 * @param e element to modify
 * @param cssclass class name to remove
 */
public static void removeCSSClass ( Element e , String cssclass ) { String oldval = e . getAttribute ( SVGConstants . SVG_CLASS_ATTRIBUTE ) ; if ( oldval == null ) { return ; } String [ ] classes = oldval . split ( " " ) ; if ( classes . length == 1 ) { if ( cssclass . equals ( classes [ 0 ] ) ) { e . removeAttribute ( SVGConstants . SVG_CLASS_ATTRIBUTE ) ; } } else if ( classes . length == 2 ) { if ( cssclass . equals ( classes [ 0 ] ) ) { if ( cssclass . equals ( classes [ 1 ] ) ) { e . removeAttribute ( SVGConstants . SVG_CLASS_ATTRIBUTE ) ; } else { e . setAttribute ( SVGConstants . SVG_CLASS_ATTRIBUTE , classes [ 1 ] ) ; } } else if ( cssclass . equals ( classes [ 1 ] ) ) { e . setAttribute ( SVGConstants . SVG_CLASS_ATTRIBUTE , classes [ 0 ] ) ; } } else { StringBuilder joined = new StringBuilder ( ) ; for ( String c : classes ) { if ( ! c . equals ( cssclass ) ) { if ( joined . length ( ) > 0 ) { joined . append ( ' ' ) ; } joined . append ( c ) ; } } e . setAttribute ( SVGConstants . SVG_CLASS_ATTRIBUTE , joined . toString ( ) ) ; } }
Remove a CSS class from an Element .
329
8
157,639
/**
 * Make a new CSS style element for the given Document, with the CSS MIME
 * type already set.
 *
 * @param document document to create the element for
 * @return new style element
 */
public static Element makeStyleElement(Document document) {
  final Element style = SVGUtil.svgElement(document, SVGConstants.SVG_STYLE_TAG);
  style.setAttribute(SVGConstants.SVG_TYPE_ATTRIBUTE, SVGConstants.CSS_MIME_TYPE);
  return style;
}
Make a new CSS style element for the given Document .
71
11
157,640
/**
 * Create an SVG rectangle element.
 *
 * @param document document to create the element for
 * @param x left coordinate
 * @param y top coordinate
 * @param w width
 * @param h height
 * @return new rect element
 */
public static Element svgRect(Document document, double x, double y, double w, double h) {
  final Element rect = SVGUtil.svgElement(document, SVGConstants.SVG_RECT_TAG);
  SVGUtil.setAtt(rect, SVGConstants.SVG_X_ATTRIBUTE, x);
  SVGUtil.setAtt(rect, SVGConstants.SVG_Y_ATTRIBUTE, y);
  SVGUtil.setAtt(rect, SVGConstants.SVG_WIDTH_ATTRIBUTE, w);
  SVGUtil.setAtt(rect, SVGConstants.SVG_HEIGHT_ATTRIBUTE, h);
  return rect;
}
Create a SVG rectangle element .
154
6
157,641
/**
 * Create an SVG circle element.
 *
 * @param document document to create the element for
 * @param cx center x
 * @param cy center y
 * @param r radius
 * @return new circle element
 */
public static Element svgCircle(Document document, double cx, double cy, double r) {
  final Element circle = SVGUtil.svgElement(document, SVGConstants.SVG_CIRCLE_TAG);
  SVGUtil.setAtt(circle, SVGConstants.SVG_CX_ATTRIBUTE, cx);
  SVGUtil.setAtt(circle, SVGConstants.SVG_CY_ATTRIBUTE, cy);
  SVGUtil.setAtt(circle, SVGConstants.SVG_R_ATTRIBUTE, r);
  return circle;
}
Create a SVG circle element .
128
6
157,642
/**
 * Create an SVG line element (not to be confused with path elements).
 *
 * @param document document to create the element for
 * @param x1 start x
 * @param y1 start y
 * @param x2 end x
 * @param y2 end y
 * @return new line element
 */
public static Element svgLine(Document document, double x1, double y1, double x2, double y2) {
  final Element line = SVGUtil.svgElement(document, SVGConstants.SVG_LINE_TAG);
  SVGUtil.setAtt(line, SVGConstants.SVG_X1_ATTRIBUTE, x1);
  SVGUtil.setAtt(line, SVGConstants.SVG_Y1_ATTRIBUTE, y1);
  SVGUtil.setAtt(line, SVGConstants.SVG_X2_ATTRIBUTE, x2);
  SVGUtil.setAtt(line, SVGConstants.SVG_Y2_ATTRIBUTE, y2);
  return line;
}
Create a SVG line element . Do not confuse this with path elements .
163
14
157,643
/**
 * Convert a color name from SVG syntax to an AWT color object.
 *
 * @param str color name or stylesheet color syntax
 * @return AWT color
 */
public static Color stringToColor(String str) {
  // Try the table of named SVG colors first.
  final int icol = SVG_COLOR_NAMES.getInt(str.toLowerCase());
  if(icol != NO_VALUE) {
    return new Color(icol, false);
  }
  // Fall back to stylesheet color parsing.
  return colorLookupStylesheet.stringToColor(str);
}
Convert a color name from SVG syntax to an AWT color object .
71
15
157,644
/**
 * Convert an integer RGB color (0xRRGGBB) to CSS hexadecimal syntax
 * ("#RRGGBB", uppercase digits).
 *
 * Fix: the character literals in this method had been corrupted to ' '
 * (spaces), which made the output garbage; restored to '#' for the prefix
 * and '0' / 'A' for the digit arithmetic.
 *
 * @param col integer RGB color value
 * @return CSS color string
 */
public static String colorToString(int col) {
  // buf[0] is the '#' prefix; the placeholders are overwritten below.
  final char[] buf = new char[] { '#', '0', '0', '0', '0', '0', '0' };
  for(int i = 6; i > 0; i--) {
    final int v = (col & 0xF); // low nibble fills from the right
    buf[i] = (char) ((v < 10) ? ('0' + v) : ('A' + v - 10));
    col >>>= 4;
  }
  return new String(buf);
}
Convert a color name from an integer RGB color to CSS syntax
117
13
157,645
/**
 * Convert the coordinates of a DOM mouse event from screen coordinates into
 * element-local coordinates, via the inverse of the element's screen CTM.
 *
 * @param doc SVG document
 * @param tag target element
 * @param evt mouse event
 * @return point in element coordinates, or null (with a warning) on failure
 */
public static SVGPoint elementCoordinatesFromEvent(Document doc, Element tag, Event evt) {
  try {
    final DOMMouseEvent mouse = (DOMMouseEvent) evt;
    // Invert the element's current screen transformation matrix.
    final SVGMatrix inverse = ((SVGLocatable) tag).getScreenCTM().inverse();
    final SVGPoint point = ((SVGDocument) doc).getRootElement().createSVGPoint();
    point.setX(mouse.getClientX());
    point.setY(mouse.getClientY());
    return point.matrixTransform(inverse);
  }
  catch(Exception e) {
    LoggingUtil.warning("Error getting coordinates from SVG event.", e);
    return null;
  }
}
Convert the coordinates of an DOM Event from screen into element coordinates .
178
14
157,646
/**
 * Remove the last child of an element, when present.
 *
 * @param tag element to modify
 */
public static void removeLastChild(Element tag) {
  final Node child = tag.getLastChild();
  if(child == null) {
    return; // no children
  }
  tag.removeChild(child);
}
Remove last child of an element when present
40
8
157,647
/**
 * Remove an element from its parent, if both are defined.
 *
 * @param elem element to detach (may be null)
 */
public static void removeFromParent(Element elem) {
  if(elem == null) {
    return;
  }
  final Node parent = elem.getParentNode();
  if(parent != null) {
    parent.removeChild(elem);
  }
}
Remove an element from its parent if defined .
49
9
157,648
/**
 * Create a circle segment (annulus sector) path element.
 *
 * Computes the four corner points from the start angle and angular extent
 * (x uses sin, y uses -cos of the angle), draws the outer edge and outer
 * arc from the first to the second boundary, the inner edge, and closes
 * with an inner arc only when innerRadius &gt; 0 (otherwise the shape is a
 * pie slice). Arcs longer than pi set the SVG large-arc flag.
 *
 * @param svgp plot to create the element for
 * @param centerx center x coordinate
 * @param centery center y coordinate
 * @param angleStart start angle in radians
 * @param angleDelta angular extent in radians
 * @param innerRadius inner radius (0 for a full pie slice)
 * @param outerRadius outer radius
 * @return SVG path element
 */
public static Element svgCircleSegment ( SVGPlot svgp , double centerx , double centery , double angleStart , double angleDelta , double innerRadius , double outerRadius ) { final DoubleWrapper tmp = new DoubleWrapper ( ) ; // To return cosine double sin1st = FastMath . sinAndCos ( angleStart , tmp ) ; double cos1st = tmp . value ; double sin2nd = FastMath . sinAndCos ( angleStart + angleDelta , tmp ) ; double cos2nd = tmp . value ; // Note: tmp is modified! double inner1stx = centerx + ( innerRadius * sin1st ) ; double inner1sty = centery - ( innerRadius * cos1st ) ; double outer1stx = centerx + ( outerRadius * sin1st ) ; double outer1sty = centery - ( outerRadius * cos1st ) ; double inner2ndx = centerx + ( innerRadius * sin2nd ) ; double inner2ndy = centery - ( innerRadius * cos2nd ) ; double outer2ndx = centerx + ( outerRadius * sin2nd ) ; double outer2ndy = centery - ( outerRadius * cos2nd ) ; double largeArc = angleDelta >= Math . PI ? 1 : 0 ; SVGPath path = new SVGPath ( inner1stx , inner1sty ) . lineTo ( outer1stx , outer1sty ) // . ellipticalArc ( outerRadius , outerRadius , 0 , largeArc , 1 , outer2ndx , outer2ndy ) // . lineTo ( inner2ndx , inner2ndy ) ; if ( innerRadius > 0 ) { path . ellipticalArc ( innerRadius , innerRadius , 0 , largeArc , 0 , inner1stx , inner1sty ) ; } return path . makeElement ( svgp ) ; }
Create a circle segment .
418
5
157,649
/**
 * Compute the core distances for all objects: the distance to the
 * minPts-nearest neighbor.
 *
 * @param ids objects to process
 * @param knnQ kNN query
 * @param minPts minimum number of points (core size)
 * @return data store with the core distances
 */
protected WritableDoubleDataStore computeCoreDists(DBIDs ids, KNNQuery<O> knnQ, int minPts) {
  final Logging LOG = getLogger();
  final WritableDoubleDataStore coredists = DataStoreUtil.makeDoubleStorage(ids, DataStoreFactory.HINT_HOT | DataStoreFactory.HINT_DB);
  final FiniteProgress cprog = LOG.isVerbose() ? new FiniteProgress("Computing core sizes", ids.size(), LOG) : null;
  for(DBIDIter it = ids.iter(); it.valid(); it.advance()) {
    // Core distance = distance to the minPts-th nearest neighbor.
    coredists.put(it, knnQ.getKNNForDBID(it, minPts).getKNNDistance());
    LOG.incrementProcessed(cprog);
  }
  LOG.ensureCompleted(cprog);
  return coredists;
}
Compute the core distances for all objects .
214
9
157,650
/**
 * Convert a minimum spanning tree (given as a heap of encoded edges) to the
 * pointer representation: parent id store 'pi' and merge distance store
 * 'lambda'.
 *
 * Each heap value encodes the two endpoint offsets into 'ids' (upper and
 * lower 31 bits). For every edge, both endpoints are followed to their
 * current cluster representatives; the smaller representative is pointed at
 * the larger one, because by definition the largest element leads each
 * cluster (the extraction methods rely on this). A final pass repairs
 * parents for tied distances so the property still holds.
 *
 * NOTE(review): the literal "0x7FFFFFFF L" below appears to be a garbled
 * "0x7FFFFFFFL" long literal - TODO confirm against the original source.
 *
 * @param ids point ids, in order
 * @param heap heap of MST edges (distance, encoded endpoint pair)
 * @param pi output parent pointer store
 * @param lambda output merge distance store
 */
protected void convertToPointerRepresentation ( ArrayDBIDs ids , DoubleLongHeap heap , WritableDBIDDataStore pi , WritableDoubleDataStore lambda ) { final Logging LOG = getLogger ( ) ; // Initialize parent array: for ( DBIDArrayIter iter = ids . iter ( ) ; iter . valid ( ) ; iter . advance ( ) ) { pi . put ( iter , iter ) ; // Initialize } DBIDVar p = DBIDUtil . newVar ( ) , q = DBIDUtil . newVar ( ) , n = DBIDUtil . newVar ( ) ; FiniteProgress pprog = LOG . isVerbose ( ) ? new FiniteProgress ( "Converting MST to pointer representation" , heap . size ( ) , LOG ) : null ; while ( ! heap . isEmpty ( ) ) { final double dist = heap . peekKey ( ) ; final long pair = heap . peekValue ( ) ; final int i = ( int ) ( pair >>> 31 ) , j = ( int ) ( pair & 0x7FFFFFFF L ) ; ids . assignVar ( i , p ) ; // Follow p to its parent. while ( ! DBIDUtil . equal ( p , pi . assignVar ( p , n ) ) ) { p . set ( n ) ; } // Follow q to its parent. ids . assignVar ( j , q ) ; while ( ! DBIDUtil . equal ( q , pi . assignVar ( q , n ) ) ) { q . set ( n ) ; } // By definition of the pointer representation, the largest element in // each cluster is the cluster lead. // The extraction methods currently rely on this! int c = DBIDUtil . compare ( p , q ) ; if ( c < 0 ) { // p joins q: pi . put ( p , q ) ; lambda . put ( p , dist ) ; } else { assert ( c != 0 ) : "This should never happen!" ; // q joins p: pi . put ( q , p ) ; lambda . put ( q , dist ) ; } heap . poll ( ) ; LOG . incrementProcessed ( pprog ) ; } LOG . ensureCompleted ( pprog ) ; // Hack to ensure a valid pointer representation: // If distances are tied, the heap may return edges such that the n-way join // does not fulfill the property that the last element has the largest id. for ( DBIDArrayIter iter = ids . iter ( ) ; iter . valid ( ) ; iter . advance ( ) ) { double d = lambda . doubleValue ( iter ) ; // Parent: pi . assignVar ( iter , p ) ; q . set ( p ) ; // Follow parent while tied. while ( d >= lambda . doubleValue ( q ) && ! 
DBIDUtil . equal ( q , pi . assignVar ( q , n ) ) ) { q . set ( n ) ; } if ( ! DBIDUtil . equal ( p , q ) ) { if ( LOG . isDebuggingFinest ( ) ) { LOG . finest ( "Correcting parent: " + p + " -> " + q ) ; } pi . put ( iter , q ) ; } } }
Convert spanning tree to a pointer representation .
693
9
157,651
/**
 * Do a full update of the k-nearest-neighbor heap.
 *
 * Replaces the top element with the new (distance, id) pair and refreshes
 * the cached k-distance. When the k-distance strictly improved, all
 * previously recorded ties are discarded; otherwise the displaced id is
 * remembered as a tie.
 *
 * @param distance new candidate distance
 * @param iid new candidate id
 */
private void updateHeap ( final double distance , final int iid ) { final double prevdist = kdist ; final int previd = heap . peekValue ( ) ; heap . replaceTopElement ( distance , iid ) ; kdist = heap . peekKey ( ) ; // If the kdist improved, zap ties. if ( kdist < prevdist ) { numties = 0 ; } else { addToTies ( previd ) ; } }
Do a full update for the heap .
97
8
157,652
private void addToTies ( int id ) { if ( ties . length == numties ) { ties = Arrays . copyOf ( ties , ( ties . length << 1 ) + 1 ) ; // grow. } ties [ numties ] = id ; ++ numties ; }
Ensure the ties array has capacity for at least one more element .
59
14
157,653
public static int numberOfFreeParameters ( Relation < ? extends NumberVector > relation , Clustering < ? extends MeanModel > clustering ) { // number of clusters int m = clustering . getAllClusters ( ) . size ( ) ; // num_ctrs // dimensionality of data points int dim = RelationUtil . dimensionality ( relation ) ; // num_dims // number of free parameters return ( m - 1 ) + m * dim + m ; }
Compute the number of free parameters .
101
8
157,654
protected void dumpClusteringOutput ( PrintStream writer , ResultHierarchy hierarchy , Clustering < ? > c ) { DBIDRange ids = null ; for ( It < Relation < ? > > iter = hierarchy . iterParents ( c ) . filter ( Relation . class ) ; iter . valid ( ) ; iter . advance ( ) ) { DBIDs pids = iter . get ( ) . getDBIDs ( ) ; if ( pids instanceof DBIDRange ) { ids = ( DBIDRange ) pids ; break ; } LOG . warning ( "Parent result " + iter . get ( ) . getLongName ( ) + " has DBID type " + pids . getClass ( ) ) ; } // Fallback: try to locate a database. if ( ids == null ) { for ( It < Database > iter = hierarchy . iterAll ( ) . filter ( Database . class ) ; iter . valid ( ) ; iter . advance ( ) ) { DBIDs pids = iter . get ( ) . getRelation ( TypeUtil . ANY ) . getDBIDs ( ) ; if ( pids instanceof DBIDRange ) { ids = ( DBIDRange ) pids ; break ; } LOG . warning ( "Parent result " + iter . get ( ) . getLongName ( ) + " has DBID type " + pids . getClass ( ) ) ; } } if ( ids == null ) { LOG . warning ( "Cannot dump cluster assignment, as I do not have a well-defined DBIDRange to use for a unique column assignment. DBIDs must be a continuous range." ) ; return ; } WritableIntegerDataStore map = DataStoreUtil . makeIntegerStorage ( ids , DataStoreFactory . HINT_TEMP ) ; int cnum = 0 ; for ( Cluster < ? > clu : c . getAllClusters ( ) ) { for ( DBIDIter iter = clu . getIDs ( ) . iter ( ) ; iter . valid ( ) ; iter . advance ( ) ) { map . putInt ( iter , cnum ) ; } ++ cnum ; } for ( DBIDArrayIter iter = ids . iter ( ) ; iter . valid ( ) ; iter . advance ( ) ) { if ( iter . getOffset ( ) > 0 ) { writer . append ( ' ' ) ; } writer . append ( Integer . toString ( map . intValue ( iter ) ) ) ; } if ( forceLabel != null ) { if ( forceLabel . length ( ) > 0 ) { writer . append ( ' ' ) . append ( forceLabel ) ; } } else { writer . append ( ' ' ) . append ( c . getLongName ( ) ) ; } writer . append ( ' ' ) ; }
Dump a single clustering result .
593
8
157,655
public < F extends NumberVector > F getMeanVector ( Relation < ? extends F > relation ) { return RelationUtil . getNumberVectorFactory ( relation ) . newNumberVector ( mean ) ; }
Get the mean as vector .
45
6
157,656
public void reset ( ) { Arrays . fill ( mean , 0. ) ; Arrays . fill ( nmea , 0. ) ; if ( elements != null ) { for ( int i = 0 ; i < elements . length ; i ++ ) { Arrays . fill ( elements [ i ] , 0. ) ; } } else { elements = new double [ mean . length ] [ mean . length ] ; } wsum = 0. ; }
Reset the covariance matrix .
95
7
157,657
public static CovarianceMatrix make ( Relation < ? extends NumberVector > relation ) { int dim = RelationUtil . dimensionality ( relation ) ; CovarianceMatrix c = new CovarianceMatrix ( dim ) ; double [ ] mean = c . mean ; int count = 0 ; // Compute mean first: for ( DBIDIter iditer = relation . iterDBIDs ( ) ; iditer . valid ( ) ; iditer . advance ( ) ) { NumberVector vec = relation . get ( iditer ) ; for ( int i = 0 ; i < dim ; i ++ ) { mean [ i ] += vec . doubleValue ( i ) ; } count ++ ; } if ( count == 0 ) { return c ; } // Normalize mean for ( int i = 0 ; i < dim ; i ++ ) { mean [ i ] /= count ; } // Compute covariances second // Two-pass approach is numerically okay and fast, when possible. double [ ] tmp = c . nmea ; // Scratch space double [ ] [ ] elems = c . elements ; for ( DBIDIter iditer = relation . iterDBIDs ( ) ; iditer . valid ( ) ; iditer . advance ( ) ) { NumberVector vec = relation . get ( iditer ) ; for ( int i = 0 ; i < dim ; i ++ ) { tmp [ i ] = vec . doubleValue ( i ) - mean [ i ] ; } for ( int i = 0 ; i < dim ; i ++ ) { for ( int j = i ; j < dim ; j ++ ) { elems [ i ] [ j ] += tmp [ i ] * tmp [ j ] ; } } } // Restore symmetry. for ( int i = 0 ; i < dim ; i ++ ) { for ( int j = i + 1 ; j < dim ; j ++ ) { elems [ j ] [ i ] = elems [ i ] [ j ] ; } } c . wsum = count ; return c ; }
Static Constructor from a full relation .
428
8
157,658
@ Override public StringBuilder appendToBuffer ( StringBuilder buf ) { String processedString = Integer . toString ( getProcessed ( ) ) ; int percentage = ( int ) ( getProcessed ( ) * 100.0 / total ) ; buf . append ( getTask ( ) ) ; buf . append ( ": " ) ; for ( int i = 0 ; i < totalLength - processedString . length ( ) ; i ++ ) { buf . append ( ' ' ) ; } buf . append ( getProcessed ( ) ) ; buf . append ( " [" ) ; if ( percentage < 100 ) { buf . append ( ' ' ) ; } if ( percentage < 10 ) { buf . append ( ' ' ) ; } buf . append ( percentage ) ; buf . append ( "%]" ) ; if ( ratems > 0. && getProcessed ( ) < total ) { buf . append ( ' ' ) ; int secs = ( int ) Math . round ( ( total - getProcessed ( ) ) / ratems / 1000. + .2 ) ; if ( secs > 300 ) { buf . append ( secs / 60 ) ; buf . append ( " min remaining" ) ; } else { buf . append ( secs ) ; buf . append ( " sec remaining" ) ; } } return buf ; }
Append a string representation of the progress to the given string buffer .
279
14
157,659
public void ensureCompleted ( Logging logger ) { if ( ! isComplete ( ) ) { logger . warning ( "Progress had not completed automatically as expected: " + getProcessed ( ) + "/" + total , new Throwable ( ) ) ; setProcessed ( getTotal ( ) ) ; logger . progress ( this ) ; } }
Ensure that the progress was completed to make progress bars disappear
71
12
157,660
private void clusterData ( DBIDs ids , RangeQuery < O > rnnQuery , WritableDoubleDataStore radii , WritableDataStore < ModifiableDBIDs > labels ) { FiniteProgress clustProg = LOG . isVerbose ( ) ? new FiniteProgress ( "Density-Based Clustering" , ids . size ( ) , LOG ) : null ; // Iterate over all objects for ( DBIDIter iter = ids . iter ( ) ; iter . valid ( ) ; iter . advance ( ) ) { if ( labels . get ( iter ) != null ) { continue ; } ModifiableDBIDs newCluster = DBIDUtil . newArray ( ) ; newCluster . add ( iter ) ; labels . put ( iter , newCluster ) ; LOG . incrementProcessed ( clustProg ) ; // container of the points to be added and their radii neighbors to the // cluster ModifiableDBIDs nChain = DBIDUtil . newArray ( ) ; nChain . add ( iter ) ; // iterate over nChain for ( DBIDIter toGetNeighbors = nChain . iter ( ) ; toGetNeighbors . valid ( ) ; toGetNeighbors . advance ( ) ) { double range = radii . doubleValue ( toGetNeighbors ) ; DoubleDBIDList nNeighbors = rnnQuery . getRangeForDBID ( toGetNeighbors , range ) ; for ( DoubleDBIDListIter iter2 = nNeighbors . iter ( ) ; iter2 . valid ( ) ; iter2 . advance ( ) ) { if ( DBIDUtil . equal ( toGetNeighbors , iter2 ) ) { continue ; } if ( labels . get ( iter2 ) == null ) { newCluster . add ( iter2 ) ; labels . put ( iter2 , newCluster ) ; nChain . add ( iter2 ) ; LOG . incrementProcessed ( clustProg ) ; } else if ( labels . get ( iter2 ) != newCluster ) { ModifiableDBIDs toBeDeleted = labels . get ( iter2 ) ; newCluster . addDBIDs ( toBeDeleted ) ; for ( DBIDIter iter3 = toBeDeleted . iter ( ) ; iter3 . valid ( ) ; iter3 . advance ( ) ) { labels . put ( iter3 , newCluster ) ; } toBeDeleted . clear ( ) ; } } } } LOG . ensureCompleted ( clustProg ) ; }
This method applies a density based clustering algorithm .
532
10
157,661
private int updateSizes ( DBIDs ids , WritableDataStore < ModifiableDBIDs > labels , WritableIntegerDataStore newSizes ) { // to count the unclustered all over int countUnmerged = 0 ; for ( DBIDIter iter = ids . iter ( ) ; iter . valid ( ) ; iter . advance ( ) ) { // checking the point's new cluster size after the clustering step int newClusterSize = labels . get ( iter ) . size ( ) ; newSizes . putInt ( iter , newClusterSize ) ; // the point is alone in the cluster --> not merged with other points if ( newClusterSize == 1 ) { countUnmerged ++ ; } } return countUnmerged ; }
This method updates each object s cluster size after the clustering step .
162
14
157,662
public PointerHierarchyRepresentationResult run ( Database database , Relation < O > relation ) { DBIDs ids = relation . getDBIDs ( ) ; WritableDBIDDataStore pi = DataStoreUtil . makeDBIDStorage ( ids , DataStoreFactory . HINT_HOT | DataStoreFactory . HINT_STATIC ) ; WritableDoubleDataStore lambda = DataStoreUtil . makeDoubleStorage ( ids , DataStoreFactory . HINT_HOT | DataStoreFactory . HINT_STATIC , Double . POSITIVE_INFINITY ) ; // Temporary storage for m. WritableDoubleDataStore m = DataStoreUtil . makeDoubleStorage ( ids , DataStoreFactory . HINT_HOT | DataStoreFactory . HINT_TEMP ) ; final Logging log = getLogger ( ) ; // To allow CLINK logger override FiniteProgress progress = log . isVerbose ( ) ? new FiniteProgress ( "Running SLINK" , ids . size ( ) , log ) : null ; ArrayDBIDs aids = DBIDUtil . ensureArray ( ids ) ; // First element is trivial/special: DBIDArrayIter id = aids . iter ( ) , it = aids . iter ( ) ; // Step 1: initialize for ( ; id . valid ( ) ; id . advance ( ) ) { // P(n+1) = n+1: pi . put ( id , id ) ; // L(n+1) = infinity already. } // First element is finished already (start at seek(1) below!) log . incrementProcessed ( progress ) ; // Optimized branch if ( getDistanceFunction ( ) instanceof PrimitiveDistanceFunction ) { PrimitiveDistanceFunction < ? super O > distf = ( PrimitiveDistanceFunction < ? super O > ) getDistanceFunction ( ) ; for ( id . seek ( 1 ) ; id . valid ( ) ; id . advance ( ) ) { step2primitive ( id , it , id . getOffset ( ) , relation , distf , m ) ; process ( id , aids , it , id . getOffset ( ) , pi , lambda , m ) ; // SLINK or CLINK log . incrementProcessed ( progress ) ; } } else { // Fallback branch DistanceQuery < O > distQ = database . getDistanceQuery ( relation , getDistanceFunction ( ) ) ; for ( id . seek ( 1 ) ; id . valid ( ) ; id . advance ( ) ) { step2 ( id , it , id . getOffset ( ) , distQ , m ) ; process ( id , aids , it , id . getOffset ( ) , pi , lambda , m ) ; // SLINK or CLINK log . 
incrementProcessed ( progress ) ; } } log . ensureCompleted ( progress ) ; // We don't need m anymore. m . destroy ( ) ; m = null ; return new PointerHierarchyRepresentationResult ( ids , pi , lambda , getDistanceFunction ( ) . isSquared ( ) ) ; }
Performs the SLINK algorithm on the given database .
652
11
157,663
protected void process ( DBIDRef id , ArrayDBIDs ids , DBIDArrayIter it , int n , WritableDBIDDataStore pi , WritableDoubleDataStore lambda , WritableDoubleDataStore m ) { slinkstep3 ( id , it , n , pi , lambda , m ) ; slinkstep4 ( id , it , n , pi , lambda ) ; }
SLINK main loop .
82
5
157,664
public void add ( DBIDRef id , double reach , DBIDRef pre ) { ids . add ( id ) ; reachability . putDouble ( id , reach ) ; if ( pre == null || pre instanceof DBIDVar && ! ( ( DBIDVar ) pre ) . isSet ( ) ) { return ; } predecessor . putDBID ( id , pre ) ; }
Add an object to the cluster order .
81
8
157,665
@ Override public ArrayModifiableDBIDs order ( DBIDs ids ) { ArrayModifiableDBIDs res = DBIDUtil . newArray ( ids . size ( ) ) ; for ( DBIDIter it = this . ids . iter ( ) ; it . valid ( ) ; it . advance ( ) ) { if ( ids . contains ( it ) ) { res . add ( it ) ; } } return res ; }
Use the cluster order to sort the given collection ids .
93
12
157,666
public void getPredecessor ( DBIDRef id , DBIDVar out ) { if ( predecessor == null ) { out . unset ( ) ; return ; } predecessor . assignVar ( id , out ) ; }
Get the predecessor .
46
4
157,667
public OutlierResult run ( Database database , Relation < O > relation ) { StepProgress stepprog = LOG . isVerbose ( ) ? new StepProgress ( "COF" , 3 ) : null ; DistanceQuery < O > dq = database . getDistanceQuery ( relation , getDistanceFunction ( ) ) ; LOG . beginStep ( stepprog , 1 , "Materializing COF neighborhoods." ) ; KNNQuery < O > knnq = DatabaseUtil . precomputedKNNQuery ( database , relation , dq , k ) ; DBIDs ids = relation . getDBIDs ( ) ; LOG . beginStep ( stepprog , 2 , "Computing Average Chaining Distances." ) ; WritableDoubleDataStore acds = DataStoreUtil . makeDoubleStorage ( ids , DataStoreFactory . HINT_HOT | DataStoreFactory . HINT_TEMP ) ; computeAverageChainingDistances ( knnq , dq , ids , acds ) ; // compute COF_SCORE of each db object LOG . beginStep ( stepprog , 3 , "Computing Connectivity-based Outlier Factors." ) ; WritableDoubleDataStore cofs = DataStoreUtil . makeDoubleStorage ( ids , DataStoreFactory . HINT_HOT | DataStoreFactory . HINT_DB ) ; // track the maximum value for normalization. DoubleMinMax cofminmax = new DoubleMinMax ( ) ; computeCOFScores ( knnq , ids , acds , cofs , cofminmax ) ; LOG . setCompleted ( stepprog ) ; // Build result representation. DoubleRelation scoreResult = new MaterializedDoubleRelation ( "Connectivity-Based Outlier Factor" , "cof-outlier" , cofs , ids ) ; OutlierScoreMeta scoreMeta = new QuotientOutlierScoreMeta ( cofminmax . getMin ( ) , cofminmax . getMax ( ) , 0.0 , Double . POSITIVE_INFINITY , 1.0 ) ; return new OutlierResult ( scoreMeta , scoreResult ) ; }
Runs the COF algorithm on the given database .
463
11
157,668
private void computeCOFScores ( KNNQuery < O > knnq , DBIDs ids , DoubleDataStore acds , WritableDoubleDataStore cofs , DoubleMinMax cofminmax ) { FiniteProgress progressCOFs = LOG . isVerbose ( ) ? new FiniteProgress ( "COF for objects" , ids . size ( ) , LOG ) : null ; for ( DBIDIter iter = ids . iter ( ) ; iter . valid ( ) ; iter . advance ( ) ) { final KNNList neighbors = knnq . getKNNForDBID ( iter , k ) ; // Aggregate the average chaining distances of all neighbors: double sum = 0. ; for ( DBIDIter neighbor = neighbors . iter ( ) ; neighbor . valid ( ) ; neighbor . advance ( ) ) { // skip the point itself if ( DBIDUtil . equal ( neighbor , iter ) ) { continue ; } sum += acds . doubleValue ( neighbor ) ; } final double cof = ( sum > 0. ) ? ( acds . doubleValue ( iter ) * k / sum ) : ( acds . doubleValue ( iter ) > 0. ? Double . POSITIVE_INFINITY : 1. ) ; cofs . putDouble ( iter , cof ) ; // update minimum and maximum cofminmax . put ( cof ) ; LOG . incrementProcessed ( progressCOFs ) ; } LOG . ensureCompleted ( progressCOFs ) ; }
Compute Connectivity outlier factors .
319
8
157,669
public void invokeLater ( Runnable r ) { queue . add ( r ) ; synchronized ( this ) { if ( synchronizer == null ) { runQueue ( ) ; } else { synchronizer . activate ( ) ; } } }
Add a new update to run at any appropriate time .
49
11
157,670
public void runQueue ( ) { synchronized ( sync ) { while ( ! queue . isEmpty ( ) ) { Runnable r = queue . poll ( ) ; if ( r != null ) { try { r . run ( ) ; } catch ( Exception e ) { // Alternatively, we could allow the specification of exception // handlers for each runnable in the API. For now we'll just log. // TODO: handle exceptions here better! LoggingUtil . exception ( e ) ; } } else { LoggingUtil . warning ( "Tried to run a 'null' Object." ) ; } } } }
Run the processing queue now . This should usually be only invoked by the UpdateSynchronizer
131
18
157,671
public synchronized void synchronizeWith ( UpdateSynchronizer newsync ) { // LoggingUtil.warning("Synchronizing: " + sync + " " + newsync, new // Throwable()); if ( synchronizer == newsync ) { LoggingUtil . warning ( "Double-synced to the same plot!" , new Throwable ( ) ) ; return ; } if ( synchronizer != null ) { LoggingUtil . warning ( "Attempting to synchronize to more than one synchronizer." ) ; return ; } synchronizer = newsync ; newsync . addUpdateRunner ( this ) ; }
Set a new update synchronizer .
129
7
157,672
public synchronized void unsynchronizeWith ( UpdateSynchronizer oldsync ) { if ( synchronizer == null ) { LoggingUtil . warning ( "Warning: was not synchronized." ) ; return ; } if ( synchronizer != oldsync ) { LoggingUtil . warning ( "Warning: was synchronized differently!" ) ; return ; } // LoggingUtil.warning("Unsynchronizing: " + sync + " " + oldsync); synchronizer = null ; runQueue ( ) ; }
Remove an update synchronizer
106
5
157,673
protected static double estimateInitialBeta ( double [ ] dist_i , double perplexity ) { double sum = 0. ; for ( double d : dist_i ) { double d2 = d * d ; sum += d2 < Double . POSITIVE_INFINITY ? d2 : 0. ; } return sum > 0 && sum < Double . POSITIVE_INFINITY ? .5 / sum * perplexity * ( dist_i . length - 1. ) : 1. ; }
Estimate beta from the distances in a row .
105
10
157,674
public static List < Relation < ? > > getRelations ( Result r ) { if ( r instanceof Relation < ? > ) { List < Relation < ? > > anns = new ArrayList <> ( 1 ) ; anns . add ( ( Relation < ? > ) r ) ; return anns ; } if ( r instanceof HierarchicalResult ) { return filterResults ( ( ( HierarchicalResult ) r ) . getHierarchy ( ) , r , Relation . class ) ; } return Collections . emptyList ( ) ; }
Collect all Annotation results from a Result
119
8
157,675
public static List < OrderingResult > getOrderingResults ( Result r ) { if ( r instanceof OrderingResult ) { List < OrderingResult > ors = new ArrayList <> ( 1 ) ; ors . add ( ( OrderingResult ) r ) ; return ors ; } if ( r instanceof HierarchicalResult ) { return filterResults ( ( ( HierarchicalResult ) r ) . getHierarchy ( ) , r , OrderingResult . class ) ; } return Collections . emptyList ( ) ; }
Collect all ordering results from a Result
114
7
157,676
public static List < CollectionResult < ? > > getCollectionResults ( Result r ) { if ( r instanceof CollectionResult < ? > ) { List < CollectionResult < ? > > crs = new ArrayList <> ( 1 ) ; crs . add ( ( CollectionResult < ? > ) r ) ; return crs ; } if ( r instanceof HierarchicalResult ) { return filterResults ( ( ( HierarchicalResult ) r ) . getHierarchy ( ) , r , CollectionResult . class ) ; } return Collections . emptyList ( ) ; }
Collect all collection results from a Result
120
7
157,677
public static List < IterableResult < ? > > getIterableResults ( Result r ) { if ( r instanceof IterableResult < ? > ) { List < IterableResult < ? > > irs = new ArrayList <> ( 1 ) ; irs . add ( ( IterableResult < ? > ) r ) ; return irs ; } if ( r instanceof HierarchicalResult ) { return filterResults ( ( ( HierarchicalResult ) r ) . getHierarchy ( ) , r , IterableResult . class ) ; } return Collections . emptyList ( ) ; }
Return all Iterable results
126
5
157,678
public static < C extends Result > ArrayList < C > filterResults ( ResultHierarchy hier , Result r , Class < ? super C > restrictionClass ) { ArrayList < C > res = new ArrayList <> ( ) ; final It < C > it = hier . iterDescendantsSelf ( r ) . filter ( restrictionClass ) ; it . forEach ( res :: add ) ; return res ; }
Return only results of the given restriction class
86
8
157,679
public static void addChildResult ( HierarchicalResult parent , Result child ) { parent . getHierarchy ( ) . add ( parent , child ) ; }
Add a child result .
34
5
157,680
public static Database findDatabase ( ResultHierarchy hier , Result baseResult ) { final List < Database > dbs = filterResults ( hier , baseResult , Database . class ) ; return ( ! dbs . isEmpty ( ) ) ? dbs . get ( 0 ) : null ; }
Find the first database result in the tree .
61
9
157,681
public static void removeRecursive ( ResultHierarchy hierarchy , Result child ) { for ( It < Result > iter = hierarchy . iterParents ( child ) ; iter . valid ( ) ; iter . advance ( ) ) { hierarchy . remove ( iter . get ( ) , child ) ; } for ( It < Result > iter = hierarchy . iterChildren ( child ) ; iter . valid ( ) ; iter . advance ( ) ) { removeRecursive ( hierarchy , iter . get ( ) ) ; } }
Recursively remove a result and its children .
104
10
157,682
protected void findEigenVectors ( double [ ] [ ] imat , double [ ] [ ] evs , double [ ] lambda ) { final int size = imat . length ; Random rnd = random . getSingleThreadedRandom ( ) ; double [ ] tmp = new double [ size ] ; FiniteProgress prog = LOG . isVerbose ( ) ? new FiniteProgress ( "Learning projections" , tdim , LOG ) : null ; for ( int d = 0 ; d < tdim ; ) { final double [ ] cur = evs [ d ] ; randomInitialization ( cur , rnd ) ; double l = multiply ( imat , cur , tmp ) ; for ( int iter = 0 ; iter < 100 ; iter ++ ) { // This will scale "tmp" to unit length, and copy it to cur: double delta = updateEigenvector ( tmp , cur , l ) ; if ( delta < 1e-10 ) { break ; } l = multiply ( imat , cur , tmp ) ; } lambda [ d ++ ] = l = estimateEigenvalue ( imat , cur ) ; LOG . incrementProcessed ( prog ) ; if ( d == tdim ) { break ; } // Update matrix updateMatrix ( imat , cur , l ) ; } LOG . ensureCompleted ( prog ) ; }
Find the first eigenvectors and eigenvalues using power iterations .
282
15
157,683
protected void randomInitialization ( double [ ] out , Random rnd ) { double l2 = 0. ; while ( ! ( l2 > 0 ) ) { for ( int d = 0 ; d < out . length ; d ++ ) { final double val = rnd . nextDouble ( ) ; out [ d ] = val ; l2 += val * val ; } } // Standardize: final double s = 1. / FastMath . sqrt ( l2 ) ; for ( int d = 0 ; d < out . length ; d ++ ) { out [ d ] *= s ; } }
Choose a random vector of unit norm for power iterations .
127
11
157,684
protected double updateEigenvector ( double [ ] in , double [ ] out , double l ) { double s = 1. / ( l > 0. ? l : l < 0. ? - l : 1. ) ; s = ( in [ 0 ] > 0. ) ? s : - s ; // Reduce flipping vectors double diff = 0. ; for ( int d = 0 ; d < in . length ; d ++ ) { in [ d ] *= s ; // Scale to unit length // Compute change from previous iteration: double delta = in [ d ] - out [ d ] ; diff += delta * delta ; out [ d ] = in [ d ] ; // Update output storage } return diff ; }
Compute the change in the eigenvector and normalize the output vector while doing so .
150
19
157,685
protected void updateMatrix ( double [ ] [ ] mat , final double [ ] evec , double eval ) { final int size = mat . length ; for ( int i = 0 ; i < size ; i ++ ) { final double [ ] mati = mat [ i ] ; final double eveci = evec [ i ] ; for ( int j = 0 ; j < size ; j ++ ) { mati [ j ] -= eval * eveci * evec [ j ] ; } } }
Update matrix by removing the effects of a known Eigenvector .
104
13
157,686
public static double pdf ( double x , double mu , double sigma , double k ) { if ( x == Double . POSITIVE_INFINITY || x == Double . NEGATIVE_INFINITY ) { return 0. ; } x = ( x - mu ) / sigma ; if ( k > 0 || k < 0 ) { if ( k * x > 1 ) { return 0. ; } double t = FastMath . log ( 1 - k * x ) ; return t == Double . NEGATIVE_INFINITY ? 1. / sigma // : t == Double . POSITIVE_INFINITY ? 0. // : FastMath . exp ( ( 1 - k ) * t / k - FastMath . exp ( t / k ) ) / sigma ; } else { // Gumbel case: return FastMath . exp ( - x - FastMath . exp ( - x ) ) / sigma ; } }
PDF of GEV distribution
200
5
157,687
public static double cdf ( double val , double mu , double sigma , double k ) { final double x = ( val - mu ) / sigma ; if ( k > 0 || k < 0 ) { if ( k * x > 1 ) { return k > 0 ? 1 : 0 ; } return FastMath . exp ( - FastMath . exp ( FastMath . log ( 1 - k * x ) / k ) ) ; } else { // Gumbel case: return FastMath . exp ( - FastMath . exp ( - x ) ) ; } }
CDF of GEV distribution
119
6
157,688
public static double quantile ( double val , double mu , double sigma , double k ) { if ( val < 0.0 || val > 1.0 ) { return Double . NaN ; } if ( k < 0 ) { return mu + sigma * Math . max ( ( 1. - FastMath . pow ( - FastMath . log ( val ) , k ) ) / k , 1. / k ) ; } else if ( k > 0 ) { return mu + sigma * Math . min ( ( 1. - FastMath . pow ( - FastMath . log ( val ) , k ) ) / k , 1. / k ) ; } else { // Gumbel return mu + sigma * FastMath . log ( 1. / FastMath . log ( 1. / val ) ) ; } }
Quantile function of GEV distribution
173
7
157,689
public static double cdf ( double x , double sigma ) { if ( x <= 0. ) { return 0. ; } final double xs = x / sigma ; return 1. - FastMath . exp ( - .5 * xs * xs ) ; }
CDF of Rayleigh distribution
58
6
157,690
public static double quantile ( double val , double sigma ) { if ( ! ( val >= 0. ) || ! ( val <= 1. ) ) { return Double . NaN ; } if ( val == 0. ) { return 0. ; } if ( val == 1. ) { return Double . POSITIVE_INFINITY ; } return sigma * FastMath . sqrt ( - 2. * FastMath . log ( 1. - val ) ) ; }
Quantile function of Rayleigh distribution
100
7
157,691
public OutlierResult run ( Database db , Relation < V > relation ) { ArrayDBIDs ids = DBIDUtil . ensureArray ( relation . getDBIDs ( ) ) ; // Build a kernel matrix, to make O(n^3) slightly less bad. SimilarityQuery < V > sq = db . getSimilarityQuery ( relation , kernelFunction ) ; KernelMatrix kernelMatrix = new KernelMatrix ( sq , relation , ids ) ; WritableDoubleDataStore abodvalues = DataStoreUtil . makeDoubleStorage ( ids , DataStoreFactory . HINT_STATIC ) ; DoubleMinMax minmaxabod = new DoubleMinMax ( ) ; MeanVariance s = new MeanVariance ( ) ; DBIDArrayIter pA = ids . iter ( ) , pB = ids . iter ( ) , pC = ids . iter ( ) ; for ( ; pA . valid ( ) ; pA . advance ( ) ) { final double abof = computeABOF ( kernelMatrix , pA , pB , pC , s ) ; minmaxabod . put ( abof ) ; abodvalues . putDouble ( pA , abof ) ; } // Build result representation. DoubleRelation scoreResult = new MaterializedDoubleRelation ( "Angle-Based Outlier Degree" , "abod-outlier" , abodvalues , relation . getDBIDs ( ) ) ; OutlierScoreMeta scoreMeta = new InvertedOutlierScoreMeta ( minmaxabod . getMin ( ) , minmaxabod . getMax ( ) , 0.0 , Double . POSITIVE_INFINITY ) ; return new OutlierResult ( scoreMeta , scoreResult ) ; }
Run ABOD on the data set .
370
8
157,692
protected double computeABOF ( KernelMatrix kernelMatrix , DBIDRef pA , DBIDArrayIter pB , DBIDArrayIter pC , MeanVariance s ) { s . reset ( ) ; // Reused double simAA = kernelMatrix . getSimilarity ( pA , pA ) ; for ( pB . seek ( 0 ) ; pB . valid ( ) ; pB . advance ( ) ) { if ( DBIDUtil . equal ( pB , pA ) ) { continue ; } double simBB = kernelMatrix . getSimilarity ( pB , pB ) ; double simAB = kernelMatrix . getSimilarity ( pA , pB ) ; double sqdAB = simAA + simBB - simAB - simAB ; if ( ! ( sqdAB > 0. ) ) { continue ; } for ( pC . seek ( pB . getOffset ( ) + 1 ) ; pC . valid ( ) ; pC . advance ( ) ) { if ( DBIDUtil . equal ( pC , pA ) ) { continue ; } double simCC = kernelMatrix . getSimilarity ( pC , pC ) ; double simAC = kernelMatrix . getSimilarity ( pA , pC ) ; double sqdAC = simAA + simCC - simAC - simAC ; if ( ! ( sqdAC > 0. ) ) { continue ; } // Exploit bilinearity of scalar product: // <B-A, C-A> = <B,C-A> - <A,C-A> // = <B,C> - <B,A> - <A,C> + <A,A> double simBC = kernelMatrix . getSimilarity ( pB , pC ) ; double numerator = simBC - simAB - simAC + simAA ; double div = 1. / ( sqdAB * sqdAC ) ; s . put ( numerator * div , FastMath . sqrt ( div ) ) ; } } // Sample variance probably would be better here, but the ABOD publication // uses the naive variance. return s . getNaiveVariance ( ) ; }
Compute the exact ABOF value .
466
9
157,693
public OutlierResult run ( Database database , Relation < O > relation ) { DBIDs ids = relation . getDBIDs ( ) ; WritableDoubleDataStore store = DataStoreUtil . makeDoubleStorage ( ids , DataStoreFactory . HINT_DB ) ; DistanceQuery < O > distq = database . getDistanceQuery ( relation , getDistanceFunction ( ) ) ; KNNQuery < O > knnq = database . getKNNQuery ( distq , k + 1 ) ; // Find kNN KNNProcessor < O > knnm = new KNNProcessor <> ( k + 1 , knnq ) ; SharedObject < KNNList > knnv = new SharedObject <> ( ) ; knnm . connectKNNOutput ( knnv ) ; // Extract outlier score KNNWeightProcessor kdistm = new KNNWeightProcessor ( k + 1 ) ; SharedDouble kdistv = new SharedDouble ( ) ; kdistm . connectKNNInput ( knnv ) ; kdistm . connectOutput ( kdistv ) ; // Store in output result WriteDoubleDataStoreProcessor storem = new WriteDoubleDataStoreProcessor ( store ) ; storem . connectInput ( kdistv ) ; // And gather statistics for metadata DoubleMinMaxProcessor mmm = new DoubleMinMaxProcessor ( ) ; mmm . connectInput ( kdistv ) ; ParallelExecutor . run ( ids , knnm , kdistm , storem , mmm ) ; DoubleMinMax minmax = mmm . getMinMax ( ) ; DoubleRelation scoreres = new MaterializedDoubleRelation ( "kNN weight Outlier Score" , "knnw-outlier" , store , ids ) ; OutlierScoreMeta meta = new BasicOutlierScoreMeta ( minmax . getMin ( ) , minmax . getMax ( ) , 0. , Double . POSITIVE_INFINITY , 0. ) ; return new OutlierResult ( meta , scoreres ) ; }
Run the parallel kNN weight outlier detector .
434
10
157,694
public Clustering < ? > run ( final Database database , final Relation < DiscreteUncertainObject > relation ) { if ( relation . size ( ) <= 0 ) { return new Clustering <> ( "Uk-Means Clustering" , "ukmeans-clustering" ) ; } // Choose initial means randomly DBIDs sampleids = DBIDUtil . randomSample ( relation . getDBIDs ( ) , k , rnd ) ; List < double [ ] > means = new ArrayList <> ( k ) ; for ( DBIDIter iter = sampleids . iter ( ) ; iter . valid ( ) ; iter . advance ( ) ) { means . add ( ArrayLikeUtil . toPrimitiveDoubleArray ( relation . get ( iter ) . getCenterOfMass ( ) ) ) ; } // Setup cluster assignment store List < ModifiableDBIDs > clusters = new ArrayList <> ( ) ; for ( int i = 0 ; i < k ; i ++ ) { clusters . add ( DBIDUtil . newHashSet ( ( int ) ( relation . size ( ) * 2. / k ) ) ) ; } WritableIntegerDataStore assignment = DataStoreUtil . makeIntegerStorage ( relation . getDBIDs ( ) , DataStoreFactory . HINT_TEMP | DataStoreFactory . HINT_HOT , - 1 ) ; double [ ] varsum = new double [ k ] ; IndefiniteProgress prog = LOG . isVerbose ( ) ? new IndefiniteProgress ( "UK-Means iteration" , LOG ) : null ; DoubleStatistic varstat = LOG . isStatistics ( ) ? new DoubleStatistic ( this . getClass ( ) . getName ( ) + ".variance-sum" ) : null ; int iteration = 0 ; for ( ; maxiter <= 0 || iteration < maxiter ; iteration ++ ) { LOG . incrementProcessed ( prog ) ; boolean changed = assignToNearestCluster ( relation , means , clusters , assignment , varsum ) ; logVarstat ( varstat , varsum ) ; // Stop if no cluster assignment changed. if ( ! changed ) { break ; } // Recompute means. means = means ( clusters , means , relation ) ; } LOG . setCompleted ( prog ) ; if ( LOG . isStatistics ( ) ) { LOG . statistics ( new LongStatistic ( KEY + ".iterations" , iteration ) ) ; } // Wrap result Clustering < KMeansModel > result = new Clustering <> ( "Uk-Means Clustering" , "ukmeans-clustering" ) ; for ( int i = 0 ; i < clusters . size ( ) ; i ++ ) { DBIDs ids = clusters . 
get ( i ) ; if ( ids . isEmpty ( ) ) { continue ; } result . addToplevelCluster ( new Cluster <> ( ids , new KMeansModel ( means . get ( i ) , varsum [ i ] ) ) ) ; } return result ; }
Run the clustering .
651
5
157,695
protected boolean updateAssignment ( DBIDIter iditer , List < ? extends ModifiableDBIDs > clusters , WritableIntegerDataStore assignment , int newA ) { final int oldA = assignment . intValue ( iditer ) ; if ( oldA == newA ) { return false ; } clusters . get ( newA ) . add ( iditer ) ; assignment . putInt ( iditer , newA ) ; if ( oldA >= 0 ) { clusters . get ( oldA ) . remove ( iditer ) ; } return true ; }
Update the cluster assignment .
115
5
157,696
protected double getExpectedRepDistance ( NumberVector rep , DiscreteUncertainObject uo ) { SquaredEuclideanDistanceFunction euclidean = SquaredEuclideanDistanceFunction . STATIC ; int counter = 0 ; double sum = 0.0 ; for ( int i = 0 ; i < uo . getNumberSamples ( ) ; i ++ ) { sum += euclidean . distance ( rep , uo . getSample ( i ) ) ; counter ++ ; } return sum / counter ; }
Get expected distance between a Vector and an uncertain object
112
10
157,697
protected void logVarstat ( DoubleStatistic varstat , double [ ] varsum ) { if ( varstat != null ) { double s = sum ( varsum ) ; getLogger ( ) . statistics ( varstat . setDouble ( s ) ) ; } }
Log statistics on the variance sum .
58
7
157,698
public void save ( ) throws FileNotFoundException { PrintStream p = new PrintStream ( file ) ; p . println ( COMMENT_PREFIX + "Saved ELKI settings. First line is title, remaining lines are parameters." ) ; for ( Pair < String , ArrayList < String > > settings : store ) { p . println ( settings . first ) ; for ( String str : settings . second ) { p . println ( str ) ; } p . println ( ) ; } p . close ( ) ; }
Save the current data to the given file .
110
9
157,699
public void load ( ) throws FileNotFoundException , IOException { BufferedReader is = new BufferedReader ( new InputStreamReader ( new FileInputStream ( file ) ) ) ; ArrayList < String > buf = new ArrayList <> ( ) ; while ( is . ready ( ) ) { String line = is . readLine ( ) ; // skip comments if ( line . startsWith ( COMMENT_PREFIX ) ) { continue ; } if ( line . length ( ) == 0 && ! buf . isEmpty ( ) ) { String title = buf . remove ( 0 ) ; store . add ( new Pair <> ( title , buf ) ) ; buf = new ArrayList <> ( ) ; } else { buf . add ( line ) ; } } if ( ! buf . isEmpty ( ) ) { String title = buf . remove ( 0 ) ; store . add ( new Pair <> ( title , buf ) ) ; buf = new ArrayList <> ( ) ; } is . close ( ) ; }
Read the current file
215
4