idx
int64
0
165k
question
stringlengths
73
4.15k
target
stringlengths
5
918
len_question
int64
21
890
len_target
int64
3
255
157,300
/**
 * Deep-clone a DOM document into a new document created by the given
 * DOMImplementation.
 *
 * Siblings of the document element (e.g. processing instructions) are
 * preserved in order; document type nodes are skipped, since they cannot be
 * imported into the new document.
 *
 * @param domImpl DOM implementation used to create the new document
 * @param document document to clone
 * @return the cloned document
 */
public Document cloneDocument(DOMImplementation domImpl, Document document) {
  Element root = document.getDocumentElement();
  // New document
  Document result = domImpl.createDocument(root.getNamespaceURI(), root.getNodeName(), null);
  Element rroot = result.getDocumentElement();
  // Cloning the document element is a bit tricky.
  // This is adopted from DomUtilities#deepCloneDocument
  // "before" tracks whether we are still before the document element, so
  // top-level siblings end up on the correct side of the new root.
  boolean before = true;
  for(Node n = document.getFirstChild(); n != null; n = n.getNextSibling()) {
    if(n == root) {
      before = false;
      // The new root already exists; copy attributes and children into it.
      copyAttributes(result, root, rroot);
      for(Node c = root.getFirstChild(); c != null; c = c.getNextSibling()) {
        final Node cl = cloneNode(result, c);
        if(cl != null) {
          rroot.appendChild(cl);
        }
      }
    }
    else {
      // Document type nodes cannot be imported, skip them.
      if(n.getNodeType() != Node.DOCUMENT_TYPE_NODE) {
        final Node cl = cloneNode(result, n);
        if(cl != null) {
          if(before) {
            result.insertBefore(cl, rroot);
          }
          else {
            result.appendChild(cl);
          }
        }
      }
    }
  }
  return result;
}
Deep-clone a document.
289
6
157,301
/**
 * Clone an existing node into the target document.
 *
 * @param doc document that will own the clone
 * @param eold node to clone
 * @return deep import of {@code eold}, owned by {@code doc}
 */
public Node cloneNode(Document doc, Node eold) {
  // importNode with deep=true performs a recursive copy.
  final Node imported = doc.importNode(eold, true);
  return imported;
}
Clone an existing node .
26
6
157,302
/**
 * Copy the attributes from an existing element onto a new element,
 * importing them into the target document.
 *
 * @param doc document owning {@code enew}
 * @param eold element to read the attributes from
 * @param enew element to copy the attributes onto
 */
public void copyAttributes(Document doc, Element eold, Element enew) {
  if(!eold.hasAttributes()) {
    return; // Nothing to copy.
  }
  final NamedNodeMap attributes = eold.getAttributes();
  final int count = attributes.getLength();
  for(int pos = 0; pos < count; pos++) {
    // importNode re-parents the attribute into the target document.
    enew.setAttributeNode((Attr) doc.importNode(attributes.item(pos), true));
  }
}
Copy the attributes from an existing node to a new node .
99
12
157,303
/**
 * Try to find a unique file name for a result, by appending a counter to
 * the requested prefix until an unused (or already reserved by this very
 * result) name is found.
 *
 * @param result result object the name is reserved for
 * @param filenamepre desired file name prefix; null/empty falls back to "result"
 * @return unique file name
 */
protected String getFilename(Object result, String filenamepre) {
  if(filenamepre == null || filenamepre.length() == 0) {
    filenamepre = "result"; // Fallback prefix.
  }
  // Probe "prefix", "prefix-1", "prefix-2", ... until free or ours.
  for(int i = 0;; i++) {
    final String candidate = i > 0 ? filenamepre + "-" + i : filenamepre;
    final Object existing = filenames.get(candidate);
    if(existing == null || existing == result) {
      filenames.put(candidate, result);
      return candidate;
    }
  }
}
Try to find a unique file name .
108
8
157,304
/**
 * Stream output: split the result tree into the known result types, then
 * write each group with its dedicated writer.
 *
 * @param db database the results belong to
 * @param r result (sub)tree to write
 * @param streamOpener factory producing the output streams
 * @param filter optional short-name filter; non-matching results are skipped
 * @throws IOException on output errors
 */
@SuppressWarnings("unchecked")
public void output(Database db, Result r, StreamFactory streamOpener, Pattern filter) throws IOException {
  List<Relation<?>> ra = new LinkedList<>();
  List<OrderingResult> ro = new LinkedList<>();
  List<Clustering<?>> rc = new LinkedList<>();
  List<IterableResult<?>> ri = new LinkedList<>();
  List<SettingsResult> rs = new LinkedList<>();
  List<Result> otherres = new LinkedList<>();
  // Split result objects in different known types:
  {
    List<Result> results = ResultUtil.filterResults(db.getHierarchy(), r, Result.class);
    for(Result res : results) {
      if(filter != null) {
        final String nam = res.getShortName();
        // Skip results whose short name does not match the filter.
        if(nam == null || !filter.matcher(nam).find()) {
          continue;
        }
      }
      if(res instanceof Database) {
        continue; // The database itself is never written.
      }
      if(res instanceof Relation) {
        ra.add((Relation<?>) res);
        continue;
      }
      if(res instanceof OrderingResult) {
        ro.add((OrderingResult) res);
        continue;
      }
      if(res instanceof Clustering) {
        rc.add((Clustering<?>) res);
        continue;
      }
      if(res instanceof IterableResult) {
        ri.add((IterableResult<?>) res);
        continue;
      }
      if(res instanceof SettingsResult) {
        rs.add((SettingsResult) res);
        continue;
      }
      otherres.add(res);
    }
  }
  writeSettingsResult(streamOpener, rs);
  for(IterableResult<?> rii : ri) {
    writeIterableResult(streamOpener, rii);
  }
  for(Clustering<?> c : rc) {
    // One enumerated name per cluster of this clustering.
    NamingScheme naming = new SimpleEnumeratingScheme(c);
    for(Cluster<?> clus : c.getAllClusters()) {
      writeClusterResult(db, streamOpener, (Clustering<Model>) c, (Cluster<Model>) clus, ra, naming);
    }
  }
  for(OrderingResult ror : ro) {
    writeOrderingResult(db, streamOpener, ror, ra);
  }
  for(Result otherr : otherres) {
    writeOtherResult(streamOpener, otherr);
  }
}
Stream output .
568
3
157,305
/**
 * Calls the super method and writes the polynomial approximation of the
 * kNN distances of this entry to the specified stream.
 *
 * @param out output stream
 * @throws IOException on write errors
 */
@Override
public void writeExternal(ObjectOutput out) throws IOException {
  super.writeExternal(out);
  out.writeObject(approximation);
}
Calls the super method and writes the polynomiale approximation of the knn distances of this entry to the specified stream .
33
26
157,306
/**
 * Calls the super method and reads the polynomial approximation of the
 * kNN distances of this entry from the specified input stream.
 *
 * @param in input stream
 * @throws IOException on read errors
 * @throws ClassNotFoundException when the serialized class is unavailable
 */
@Override
public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException {
  super.readExternal(in);
  approximation = (PolynomialApproximation) in.readObject();
}
Calls the super method and reads the polynomial approximation of the knn distances of this entry from the specified input stream.
47
27
157,307
/**
 * Compute the size of all subtrees: every node starts with size 1, then
 * each node's current subtree size is added to its parent while walking
 * the given order.
 *
 * NOTE(review): correctness relies on children being visited before their
 * parents in {@code order} — confirm with the caller.
 *
 * @param order node processing order
 * @return per-node subtree size storage
 */
private WritableIntegerDataStore computeSubtreeSizes(DBIDs order) {
  WritableIntegerDataStore siz = DataStoreUtil.makeIntegerStorage(ids, DataStoreFactory.HINT_HOT | DataStoreFactory.HINT_TEMP, 1);
  DBIDVar v1 = DBIDUtil.newVar();
  for(DBIDIter it = order.iter(); it.valid(); it.advance()) {
    // Roots are their own parent; nothing to propagate.
    if(DBIDUtil.equal(it, parent.assignVar(it, v1))) {
      continue;
    }
    // Add this node's subtree size to its parent (held in v1).
    siz.increment(v1, siz.intValue(it));
  }
  return siz;
}
Compute the size of all subtrees .
153
9
157,308
/**
 * Compute the maximum height of nodes: for each node, the largest parent
 * distance observed at the node itself or at any of its direct children.
 *
 * @return per-node maximum height storage (default 0)
 */
private WritableDoubleDataStore computeMaxHeight() {
  WritableDoubleDataStore maxheight = DataStoreUtil.makeDoubleStorage(ids, DataStoreFactory.HINT_HOT | DataStoreFactory.HINT_TEMP, 0.);
  DBIDVar v1 = DBIDUtil.newVar();
  for(DBIDIter it = ids.iter(); it.valid(); it.advance()) {
    double d = parentDistance.doubleValue(it);
    // Raise the node's own height if needed.
    if(d > maxheight.doubleValue(it)) {
      maxheight.putDouble(it, d);
    }
    // Also raise the parent's height (parent id is assigned into v1).
    if(d > maxheight.doubleValue(parent.assignVar(it, v1))) {
      maxheight.putDouble(v1, d);
    }
  }
  return maxheight;
}
Compute the maximum height of nodes .
179
8
157,309
/**
 * Topologically sort the object IDs along the cluster merge tree, so that
 * each node is emitted only after all of its ancestors.
 *
 * @return IDs in topological order
 */
public ArrayDBIDs topologicalSort() {
  ArrayModifiableDBIDs ids = DBIDUtil.newArray(this.ids);
  if(mergeOrder != null) {
    // Prefer the recorded merge order, refined by maximum height.
    ids.sort(new DataStoreUtil.AscendingByIntegerDataStore(mergeOrder));
    WritableDoubleDataStore maxheight = computeMaxHeight();
    ids.sort(new Sorter(maxheight));
    maxheight.destroy();
  }
  else {
    ids.sort(new DataStoreUtil.DescendingByDoubleDataStoreAndId(parentDistance));
  }
  // We used to simply sort by merging distance
  // But for e.g. Median Linkage, this would lead to problems, as links are
  // not necessarily performed in ascending order anymore!
  final int size = ids.size();
  ModifiableDBIDs seen = DBIDUtil.newHashSet(size);
  ArrayModifiableDBIDs order = DBIDUtil.newArray(size);
  DBIDVar v1 = DBIDUtil.newVar(), prev = DBIDUtil.newVar();
  // Process merges in descending order
  for(DBIDIter it = ids.iter(); it.valid(); it.advance()) {
    if(!seen.add(it)) {
      continue; // Already emitted on an earlier parent chain.
    }
    final int begin = order.size();
    order.add(it);
    prev.set(it); // Copy
    // Follow parents of prev -> v1 - these need to come before prev.
    while(!DBIDUtil.equal(prev, parent.assignVar(prev, v1))) {
      if(!seen.add(v1)) {
        break; // Remainder of the chain was already emitted.
      }
      order.add(v1);
      prev.set(v1); // Copy
    }
    // Reverse the inserted path:
    for(int i = begin, j = order.size() - 1; i < j; i++, j--) {
      order.swap(i, j);
    }
  }
  // Reverse everything
  for(int i = 0, j = size - 1; i < j; i++, j--) {
    order.swap(i, j);
  }
  return order;
}
Topological sort the object IDs .
475
7
157,310
/**
 * Initialize an integer value range: {@code [start, end)}.
 *
 * @param start first value (inclusive)
 * @param end last value (exclusive)
 * @return array containing start, start+1, ..., end-1
 */
public static int[] range(int start, int end) {
  final int length = end - start;
  int[] values = new int[length];
  for(int offset = 0; offset < length; offset++) {
    values[offset] = start + offset;
  }
  return values;
}
Initialize an integer value range .
61
7
157,311
/**
 * Performs a reverse kNN query: find objects that would have the query
 * point among their k nearest neighbors, pruning the tree with the
 * conservative / progressive kNN distance approximations.
 *
 * @param k neighborhood size
 * @param q query object reference
 * @param result certain results (distance, id) are collected here
 * @param candidates ids needing exact refinement are collected here
 */
private void doReverseKNNQuery(int k, DBIDRef q, ModifiableDoubleDBIDList result, ModifiableDBIDs candidates) {
  final ComparableMinHeap<MTreeSearchCandidate> pq = new ComparableMinHeap<>();
  // push root
  pq.add(new MTreeSearchCandidate(0., getRootID(), null, Double.NaN));
  // search in tree
  while(!pq.isEmpty()) {
    MTreeSearchCandidate pqNode = pq.poll();
    // FIXME: cache the distance to the routing object in the queue node!
    MkCoPTreeNode<O> node = getNode(pqNode.nodeID);
    // directory node
    if(!node.isLeaf()) {
      for(int i = 0; i < node.getNumEntries(); i++) {
        MkCoPEntry entry = node.getEntry(i);
        double distance = distance(entry.getRoutingObjectID(), q);
        // Minimum possible distance to anything in this subtree.
        double minDist = entry.getCoveringRadius() > distance ? 0. : distance - entry.getCoveringRadius();
        double approximatedKnnDist_cons = entry.approximateConservativeKnnDistance(k);
        // Descend only when the subtree may contain reverse neighbors.
        if(minDist <= approximatedKnnDist_cons) {
          pq.add(new MTreeSearchCandidate(minDist, getPageID(entry), entry.getRoutingObjectID(), Double.NaN));
        }
      }
    }
    // data node
    else {
      for(int i = 0; i < node.getNumEntries(); i++) {
        MkCoPLeafEntry entry = (MkCoPLeafEntry) node.getEntry(i);
        double distance = distance(entry.getRoutingObjectID(), q);
        double approximatedKnnDist_prog = entry.approximateProgressiveKnnDistance(k);
        if(distance <= approximatedKnnDist_prog) {
          // Within the progressive bound: certainly a reverse neighbor.
          result.add(distance, entry.getRoutingObjectID());
        }
        else {
          double approximatedKnnDist_cons = entry.approximateConservativeKnnDistance(k);
          double diff = distance - approximatedKnnDist_cons;
          // Within the conservative bound (plus epsilon): needs refinement.
          if(diff <= 1E-10) {
            candidates.add(entry.getRoutingObjectID());
          }
        }
      }
    }
  }
}
Performs a reverse knn query .
528
8
157,312
/**
 * Write a page through to the backing storage when it was modified.
 *
 * @param page page being expired from the cache
 */
protected void expirePage(P page) {
  if(LOG.isDebuggingFine()) {
    LOG.debugFine("Write to backing:" + page.getPageID());
  }
  // Only dirty pages need to be persisted.
  if(page.isDirty()) {
    file.writePage(page);
  }
}
Write page through to disk .
63
6
157,313
/**
 * Sets the maximum size of this cache, evicting (and writing back) as many
 * pages as needed to honor the new limit.
 *
 * Fix: previously, once the cache exceeded the new size, the loop evicted
 * ALL cached pages instead of only the {@code toDelete} excess pages
 * (the computed {@code toDelete} count was never used).
 *
 * @param cacheSize new maximum number of cached pages
 */
public void setCacheSize(int cacheSize) {
  this.cacheSize = cacheSize;
  long toDelete = map.size() - this.cacheSize;
  if(toDelete <= 0) {
    return; // Already within the limit.
  }
  // Evict in reverse key order.
  List<Integer> keys = new ArrayList<>(map.keySet());
  Collections.reverse(keys);
  for(Integer id : keys) {
    if(toDelete <= 0) {
      break; // Enough pages evicted.
    }
    P page = map.remove(id);
    file.writePage(page); // Persist before dropping.
    toDelete--;
  }
}
Sets the maximum size of this cache .
100
9
157,314
/**
 * Returns the lower-case file name extension of the given file name.
 *
 * Fix: the extension separator must be {@code '.'} — the previous code
 * searched for a space character, which never matches an extension.
 *
 * @param name file name, may be null
 * @return lower-case extension without the dot, or null when there is none
 */
public static String getFilenameExtension(String name) {
  if(name == null) {
    return null;
  }
  int index = name.lastIndexOf('.');
  return index < 0 ? null : name.substring(index + 1).toLowerCase();
}
Returns the lower case extension of the selected file .
60
10
157,315
public static InputStream tryGzipInput ( InputStream in ) throws IOException { // try autodetecting gzip compression. if ( ! in . markSupported ( ) ) { PushbackInputStream pb = new PushbackInputStream ( in , 16 ) ; // read a magic from the file header, and push it back byte [ ] magic = { 0 , 0 } ; int r = pb . read ( magic ) ; pb . unread ( magic , 0 , r ) ; return ( magic [ 0 ] == 31 && magic [ 1 ] == - 117 ) ? new GZIPInputStream ( pb ) : pb ; } // Mark is supported. in . mark ( 16 ) ; boolean isgzip = ( ( in . read ( ) << 8 ) | in . read ( ) ) == GZIPInputStream . GZIP_MAGIC ; in . reset ( ) ; // Rewind return isgzip ? new GZIPInputStream ( in ) : in ; }
Try to open a stream as gzip if it starts with the gzip magic .
213
17
157,316
public static File locateFile ( String name , String basedir ) { // Try exact match first. File f = new File ( name ) ; if ( f . exists ( ) ) { return f ; } // Try with base directory if ( basedir != null ) { if ( ( f = new File ( basedir , name ) ) . exists ( ) ) { return f ; } } // try stripping whitespace String name2 ; if ( ! name . equals ( name2 = name . trim ( ) ) ) { if ( ( f = locateFile ( name2 , basedir ) ) != null ) { return f ; } } // try substituting path separators if ( ! name . equals ( name2 = name . replace ( ' ' , File . separatorChar ) ) ) { if ( ( f = locateFile ( name2 , basedir ) ) != null ) { return f ; } } if ( ! name . equals ( name2 = name . replace ( ' ' , File . separatorChar ) ) ) { if ( ( f = locateFile ( name2 , basedir ) ) != null ) { return f ; } } // try stripping extra characters, such as quotes. if ( name . length ( ) > 2 && name . charAt ( 0 ) == ' ' && name . charAt ( name . length ( ) - 1 ) == ' ' ) { if ( ( f = locateFile ( name . substring ( 1 , name . length ( ) - 1 ) , basedir ) ) != null ) { return f ; } } return null ; }
Try to locate a file in the filesystem, given a partial name and a prefix.
328
16
157,317
/**
 * Append an entry consisting of a distance and an internal object index.
 *
 * @param dist distance value
 * @param id internal index
 */
protected void addInternal(double dist, int id) {
  if(dists.length == size) {
    grow(); // Ensure capacity first.
  }
  final int pos = size++;
  dists[pos] = dist;
  ids[pos] = id;
}
Add an entry consisting of distance and internal index .
47
10
157,318
/**
 * Grow the data storage by roughly 50%, allocating the initial arrays on
 * first use.
 */
protected void grow() {
  if(dists == EMPTY_DISTS) {
    // First allocation.
    dists = new double[INITIAL_SIZE];
    ids = new int[INITIAL_SIZE];
    return;
  }
  final int len = dists.length;
  final int newlength = len + (len >> 1) + 1; // ~1.5x growth
  final double[] newdists = new double[newlength];
  System.arraycopy(dists, 0, newdists, 0, len);
  dists = newdists;
  final int[] newids = new int[newlength];
  System.arraycopy(ids, 0, newids, 0, len);
  ids = newids;
}
Grow the data storage .
152
6
157,319
/**
 * Reverse the list in place, swapping entries from both ends.
 */
protected void reverse() {
  int lo = 0, hi = size - 1;
  while(lo < hi) {
    final double d = dists[lo];
    dists[lo] = dists[hi];
    dists[hi] = d;
    final int t = ids[lo];
    ids[lo] = ids[hi];
    ids[hi] = t;
    lo++;
    hi--;
  }
}
Reverse the list .
94
6
157,320
/**
 * Truncate the list to the given size, freeing the excess memory by
 * reallocating the backing arrays.
 *
 * @param newsize new (smaller) size; larger values are a no-op
 */
public void truncate(int newsize) {
  if(newsize >= size) {
    return; // Nothing to shrink.
  }
  final double[] newdists = new double[newsize];
  System.arraycopy(dists, 0, newdists, 0, newsize);
  dists = newdists;
  final int[] newids = new int[newsize];
  System.arraycopy(ids, 0, newids, 0, newsize);
  ids = newids;
  size = newsize;
}
Truncate the list to the given size freeing the memory .
101
13
157,321
/**
 * Recompute the layout of visualizations: collect plot items from all
 * projectors and from unprojected visible tasks, and pack them into the
 * available rectangle.
 *
 * @param width available width
 * @param height available height
 * @return arranged plot map
 */
private RectangleArranger<PlotItem> arrangeVisualizations(double width, double height) {
  // NaN or nonpositive sizes cannot be arranged.
  if(!(width > 0. && height > 0.)) {
    LOG.warning("No size information during arrange()", new Throwable());
    return new RectangleArranger<>(1., 1.);
  }
  RectangleArranger<PlotItem> plotmap = new RectangleArranger<>(width, height);
  Hierarchy<Object> vistree = context.getVisHierarchy();
  // Plot items contributed by projectors:
  for(It<Projector> iter2 = vistree.iterAll().filter(Projector.class); iter2.valid(); iter2.advance()) {
    Collection<PlotItem> projs = iter2.get().arrange(context);
    for(PlotItem it : projs) {
      if(it.w <= 0.0 || it.h <= 0.0) {
        LOG.warning("Plot item with improper size information: " + it);
        continue;
      }
      plotmap.put(it.w, it.h, it);
    }
  }
  // Visible tasks that are not attached to a projector:
  nextTask: for(It<VisualizationTask> iter2 = vistree.iterAll().filter(VisualizationTask.class); iter2.valid(); iter2.advance()) {
    VisualizationTask task = iter2.get();
    if(!task.isVisible()) {
      continue;
    }
    // Tasks with a projector parent were already handled above.
    if(vistree.iterParents(task).filter(Projector.class).valid()) {
      continue nextTask;
    }
    if(task.getRequestedWidth() <= 0.0 || task.getRequestedHeight() <= 0.0) {
      LOG.warning("Task with improper size information: " + task);
      continue;
    }
    PlotItem it = new PlotItem(task.getRequestedWidth(), task.getRequestedHeight(), null);
    it.tasks.add(task);
    plotmap.put(it.w, it.h, it);
  }
  return plotmap;
}
Recompute the layout of visualizations .
461
9
157,322
/**
 * Initialize the plot with the given aspect ratio.
 *
 * @param ratio width-to-height ratio; NaN, nonpositive or infinite values
 *        fall back to 1.4
 */
public void initialize(double ratio) {
  // Negated comparison also rejects NaN.
  if(!(ratio > 0 && ratio < Double.POSITIVE_INFINITY)) {
    LOG.warning("Invalid ratio: " + ratio, new Throwable());
    ratio = 1.4;
  }
  this.ratio = ratio;
  if(plot != null) {
    // Already set up: only trigger a refresh.
    LOG.warning("Already initialized.");
    lazyRefresh();
    return;
  }
  reinitialize();
  // register context listener
  context.addResultListener(this);
  context.addVisualizationListener(this);
}
Initialize the plot .
118
5
157,323
/**
 * Initialize the SVG plot: create the background rectangle, set up the
 * selection/hover CSS classes and listener, and add the shared SVG
 * effects (shadow filter, light gradient).
 */
private void initializePlot() {
  plot = new VisualizationPlot();
  { // Add a background element:
    CSSClass cls = new CSSClass(this, "background");
    final String bgcol = context.getStyleLibrary().getBackgroundColor(StyleLibrary.PAGE);
    cls.setStatement(SVGConstants.CSS_FILL_PROPERTY, bgcol);
    plot.addCSSClassOrLogError(cls);
    // Full-size rectangle behind all other layers.
    Element background = plot.svgElement(SVGConstants.SVG_RECT_TAG);
    background.setAttribute(SVGConstants.SVG_X_ATTRIBUTE, "0");
    background.setAttribute(SVGConstants.SVG_Y_ATTRIBUTE, "0");
    background.setAttribute(SVGConstants.SVG_WIDTH_ATTRIBUTE, "100%");
    background.setAttribute(SVGConstants.SVG_HEIGHT_ATTRIBUTE, "100%");
    SVGUtil.setCSSClass(background, cls.getName());
    // Don't export a white background:
    if("white".equals(bgcol)) {
      background.setAttribute(SVGPlot.NO_EXPORT_ATTRIBUTE, SVGPlot.NO_EXPORT_ATTRIBUTE);
    }
    plot.getRoot().appendChild(background);
  }
  { // setup the hover CSS classes.
    selcss = new CSSClass(this, "s");
    if(DEBUG_LAYOUT) {
      // Visualize selection outlines while debugging the layout.
      selcss.setStatement(SVGConstants.CSS_STROKE_PROPERTY, SVGConstants.CSS_RED_VALUE);
      selcss.setStatement(SVGConstants.CSS_STROKE_WIDTH_PROPERTY, .00001 * StyleLibrary.SCALE);
      selcss.setStatement(SVGConstants.CSS_STROKE_OPACITY_PROPERTY, "0.5");
    }
    selcss.setStatement(SVGConstants.CSS_FILL_PROPERTY, SVGConstants.CSS_RED_VALUE);
    selcss.setStatement(SVGConstants.CSS_FILL_OPACITY_PROPERTY, "0");
    selcss.setStatement(SVGConstants.CSS_CURSOR_PROPERTY, SVGConstants.CSS_POINTER_VALUE);
    plot.addCSSClassOrLogError(selcss);
    CSSClass hovcss = new CSSClass(this, "h");
    hovcss.setStatement(SVGConstants.CSS_FILL_OPACITY_PROPERTY, "0.25");
    plot.addCSSClassOrLogError(hovcss);
    // Hover listener.
    hoverer = new CSSHoverClass(hovcss.getName(), null, true);
  }
  // Disable Batik default interactions (zoom, rotate, etc.)
  if(single) {
    plot.setDisableInteractions(true);
  }
  SVGEffects.addShadowFilter(plot);
  SVGEffects.addLightGradient(plot);
}
Initialize the SVG plot .
691
6
157,324
/**
 * Produce an embedded visualization (in single mode) or a thumbnail for a
 * task, and attach its layer to the given parent element.
 *
 * @param thumbsize thumbnail resolution
 * @param it plot item providing size and projection
 * @param task visualization task to render
 * @param parent SVG element to append the layer to
 * @return the created visualization (its layer may be null on failure)
 */
private Visualization embedOrThumbnail(final int thumbsize, PlotItem it, VisualizationTask task, Element parent) {
  // Single mode embeds the full visualization, otherwise use a thumbnail.
  final Visualization vis = single //
      ? task.getFactory().makeVisualization(context, task, plot, it.w, it.h, it.proj) //
      : task.getFactory().makeVisualizationOrThumbnail(context, task, plot, it.w, it.h, it.proj, thumbsize);
  if(vis == null || vis.getLayer() == null) {
    LOG.warning("Visualization returned empty layer: " + vis);
    return vis;
  }
  if(task.has(RenderFlag.NO_EXPORT)) {
    // Mark the layer so it is skipped when exporting.
    vis.getLayer().setAttribute(SVGPlot.NO_EXPORT_ATTRIBUTE, SVGPlot.NO_EXPORT_ATTRIBUTE);
  }
  parent.appendChild(vis.getLayer());
  return vis;
}
Produce thumbnail for a visualizer .
218
8
157,325
/**
 * Test whether a task should be displayed in the overview plot.
 *
 * @param task task to test
 * @return true when the task is visible and has not opted out of the
 *         current display mode
 */
protected boolean visibleInOverview(VisualizationTask task) {
  if(!task.isVisible()) {
    return false;
  }
  // In single mode NO_EMBED opts out, otherwise NO_THUMBNAIL does.
  final RenderFlag veto = single ? RenderFlag.NO_EMBED : RenderFlag.NO_THUMBNAIL;
  return !task.has(veto);
}
Test whether a task should be displayed in the overview plot .
50
12
157,326
private void recalcViewbox ( ) { final Element root = plot . getRoot ( ) ; // Reset plot attributes SVGUtil . setAtt ( root , SVGConstants . SVG_WIDTH_ATTRIBUTE , "20cm" ) ; SVGUtil . setAtt ( root , SVGConstants . SVG_HEIGHT_ATTRIBUTE , SVGUtil . fmt ( 20 * plotmap . getHeight ( ) / plotmap . getWidth ( ) ) + "cm" ) ; String vb = "0 0 " + SVGUtil . fmt ( plotmap . getWidth ( ) ) + " " + SVGUtil . fmt ( plotmap . getHeight ( ) ) ; SVGUtil . setAtt ( root , SVGConstants . SVG_VIEW_BOX_ATTRIBUTE , vb ) ; }
Recompute the view box of the plot .
178
10
157,327
protected void triggerSubplotSelectEvent ( PlotItem it ) { // forward event to all listeners. for ( ActionListener actionListener : actionListeners ) { actionListener . actionPerformed ( new DetailViewSelectedEvent ( this , ActionEvent . ACTION_PERFORMED , null , 0 , it ) ) ; } }
When a subplot was selected forward the event to listeners .
67
12
157,328
public static double cdf ( double val , double mu , double sigma , double xi ) { val = ( val - mu ) / sigma ; // Check support: if ( val < 0 ) { return 0. ; } if ( xi < 0 && val > - 1. / xi ) { return 1. ; } return 1 - FastMath . pow ( 1 + xi * val , - 1. / xi ) ; }
CDF of GPD distribution
94
6
157,329
/**
 * Quantile function (inverse CDF) of the generalized Pareto distribution.
 *
 * @param val probability in [0, 1]
 * @param mu location parameter
 * @param sigma scale parameter
 * @param xi shape parameter
 * @return quantile value, or NaN when val is outside [0, 1]
 */
public static double quantile(double val, double mu, double sigma, double xi) {
  if(val < 0.0 || val > 1.0) {
    return Double.NaN;
  }
  if(xi == 0.) {
    // Shape zero: exponential-distribution limit case.
    return mu - sigma * FastMath.log(1 - val);
  }
  return mu - sigma / xi * (1 - FastMath.pow(1 - val, -xi));
}
Quantile function of GPD distribution
97
7
157,330
/**
 * Run the algorithm, with a separate radius relation supplying the query
 * radius per sampled object; only statistics are produced.
 *
 * @param database database to query
 * @param relation data relation
 * @param radrel relation holding the per-object query radius in dimension 0
 * @return always null (results are logged as statistics)
 */
public Result run(Database database, Relation<O> relation, Relation<NumberVector> radrel) {
  if(queries != null) {
    throw new AbortException("This 'run' method will not use the given query set!");
  }
  // Get a distance and kNN query instance.
  DistanceQuery<O> distQuery = database.getDistanceQuery(relation, getDistanceFunction());
  RangeQuery<O> rangeQuery = database.getRangeQuery(distQuery);
  final DBIDs sample = DBIDUtil.randomSample(relation.getDBIDs(), sampling, random);
  FiniteProgress prog = LOG.isVeryVerbose() ? new FiniteProgress("kNN queries", sample.size(), LOG) : null;
  int hash = 0;
  MeanVariance mv = new MeanVariance();
  for(DBIDIter iditer = sample.iter(); iditer.valid(); iditer.advance()) {
    // Per-object query radius from the radius relation.
    double r = radrel.get(iditer).doubleValue(0);
    DoubleDBIDList rres = rangeQuery.getRangeForDBID(iditer, r);
    // Fold the result ids into a checksum for result validation.
    int ichecksum = 0;
    for(DBIDIter it = rres.iter(); it.valid(); it.advance()) {
      ichecksum += DBIDUtil.asInteger(it);
    }
    hash = Util.mixHashCodes(hash, ichecksum);
    mv.put(rres.size());
    LOG.incrementProcessed(prog);
  }
  LOG.ensureCompleted(prog);
  if(LOG.isStatistics()) {
    LOG.statistics("Result hashcode: " + hash);
    LOG.statistics("Mean number of results: " + mv.getMean() + " +- " + mv.getNaiveStddev());
  }
  return null;
}
Run the algorithm with separate radius relation
425
7
157,331
/**
 * Creates a new BitSet of fixed cardinality with randomly set bits.
 *
 * Depending on the density, either starts all-zero and sets bits, or
 * starts all-one and clears bits. Because random positions may collide,
 * the loop counter is re-derived from the actual cardinality on what
 * would otherwise be the last iteration.
 *
 * @param card number of bits to set
 * @param capacity total capacity in bits
 * @param random random generator
 * @return bitset with exactly card bits set
 */
public static long[] random(int card, int capacity, Random random) {
  if(card < 0 || card > capacity) {
    throw new IllegalArgumentException("Cannot set " + card + " out of " + capacity + " bits.");
  }
  // FIXME: Avoid recomputing the cardinality.
  if(card < capacity >>> 1) {
    // Sparse case: set random bits in an all-zero set.
    long[] bitset = BitsUtil.zero(capacity);
    for(int todo = card; todo > 0; //
        todo = (todo == 1) ? (card - cardinality(bitset)) : (todo - 1)) {
      setI(bitset, random.nextInt(capacity));
    }
    return bitset;
  }
  else {
    // Dense case: clear random bits in an all-one set.
    long[] bitset = BitsUtil.ones(capacity);
    for(int todo = capacity - card; todo > 0; //
        todo = (todo == 1) ? (cardinality(bitset) - card) : (todo - 1)) {
      clearI(bitset, random.nextInt(capacity));
    }
    return bitset;
  }
}
Creates a new BitSet of fixed cardinality with randomly set bits .
240
15
157,332
/**
 * Copy a bitset, sized to hold at least {@code mincap} bits.
 *
 * @param v bitset to copy
 * @param mincap minimum capacity in bits
 * @return copy, truncated or zero-padded to the required word count
 */
public static long[] copy(long[] v, int mincap) {
  final int words = ((mincap - 1) >>> LONG_LOG2_SIZE) + 1;
  // Arrays.copyOf pads with zeros or truncates as needed.
  return Arrays.copyOf(v, words);
}
Copy a bitset .
102
5
157,333
/**
 * Test whether a bitstring is entirely zero.
 *
 * @param v bitset words
 * @return true when no bit is set
 */
public static boolean isZero(long[] v) {
  for(long word : v) {
    if(word != 0) {
      return false;
    }
  }
  return true;
}
Test for the bitstring to be all - zero .
48
11
157,334
/**
 * Put o onto v in-place, i.e. v = o (words of v beyond o are untouched).
 *
 * @param v target bitset (modified)
 * @param o source bitset
 * @return v
 */
public static long[] setI(long[] v, long[] o) {
  assert (o.length <= v.length) : "Bit set sizes do not agree.";
  System.arraycopy(o, 0, v, 0, Math.min(v.length, o.length));
  return v;
}
Put o onto v in-place, i.e. v = o
86
14
157,335
/**
 * Fill a bitset with the first {@code bits} one-bits, clearing the rest.
 *
 * Fix: when {@code bits} is a multiple of Long.SIZE, the word at index
 * {@code fillWords} was left untouched (stale) instead of being cleared;
 * additionally guard against writing past the end of the array.
 *
 * @param v bitset to fill (modified)
 * @param bits number of leading one-bits
 */
public static void onesI(long[] v, int bits) {
  final int fillWords = bits >>> LONG_LOG2_SIZE;
  final int fillBits = bits & LONG_LOG2_MASK;
  Arrays.fill(v, 0, fillWords, LONG_ALL_BITS);
  if(fillWords < v.length) {
    // Partial word: low fillBits ones (0 when bits is word-aligned).
    v[fillWords] = fillBits > 0 ? (1L << fillBits) - 1 : 0L;
    Arrays.fill(v, fillWords + 1, v.length, 0L);
  }
}
Fill a vector initialized with bits ones .
126
8
157,336
/**
 * XOR o onto v in-place, i.e. v ^= o.
 *
 * @param v target bitset (modified)
 * @param o bitset to xor onto v
 * @return v
 */
public static long[] xorI(long[] v, long[] o) {
  assert (o.length <= v.length) : "Bit set sizes do not agree.";
  for(int i = o.length - 1; i >= 0; i--) {
    v[i] ^= o[i];
  }
  return v;
}
XOR o onto v in-place, i.e. v ^= o
73
16
157,337
/**
 * OR o onto v in-place, i.e. v |= o.
 *
 * @param v target bitset (modified)
 * @param o bitset to or onto v
 * @return v
 */
public static long[] orI(long[] v, long[] o) {
  assert (o.length <= v.length) : "Bit set sizes do not agree.";
  final int end = Math.min(v.length, o.length);
  int i = 0;
  while(i < end) {
    v[i] |= o[i];
    i++;
  }
  return v;
}
OR o onto v in-place, i.e. v |= o
87
15
157,338
public static long [ ] andI ( long [ ] v , long [ ] o ) { int i = 0 ; for ( ; i < o . length ; i ++ ) { v [ i ] &= o [ i ] ; } // Zero higher words Arrays . fill ( v , i , v . length , 0 ) ; return v ; }
AND o onto v in-place, i.e. v &= o
73
17
157,339
/**
 * AND-NOT o onto v in-place, i.e. v &= ~o: clears in v every bit set in o.
 *
 * @param v target bitset (modified)
 * @param o bits to clear from v
 * @return v
 */
public static long[] nandI(long[] v, long[] o) {
  for(int i = o.length - 1; i >= 0; i--) {
    v[i] &= ~o[i];
  }
  return v;
}
AND-NOT o onto v in-place, i.e. v &= ~o
55
19
157,340
/**
 * Invert v in-place, flipping every bit.
 *
 * @param v bitset to invert (modified)
 * @return v
 */
public static long[] invertI(long[] v) {
  for(int i = v.length - 1; i >= 0; i--) {
    v[i] = ~v[i];
  }
  return v;
}
Invert v in - place .
48
7
157,341
public static long cycleLeftC ( long v , int shift , int len ) { return shift == 0 ? v : shift < 0 ? cycleRightC ( v , - shift , len ) : // ( ( ( v ) << ( shift ) ) | ( ( v ) >>> ( ( len ) - ( shift ) ) ) ) & ( ( 1 << len ) - 1 ) ; }
Rotate a long to the left cyclic with length len
80
12
157,342
/**
 * Cycle a multi-word bitstring, combining a truncated shifted copy with
 * the remaining bits.
 *
 * NOTE(review): relies on a three-argument copy(v, len, shift) overload
 * declared elsewhere in this class (not visible here) — verify its
 * semantics; the older one-line summary said "right" although the method
 * is named cycleLeftI — confirm the intended direction.
 *
 * @param v bitset to cycle (modified)
 * @param shift rotation amount
 * @param len cycle length in bits
 * @return cycled bitset
 */
public static long[] cycleLeftI(long[] v, int shift, int len) {
  long[] t = copy(v, len, shift);
  return orI(shiftRightI(v, len - shift), truncateI(t, len));
}
Cycle a bitstring to the right .
61
9
157,343
/**
 * Find the number of trailing zeros of a multi-word bitstring.
 *
 * @param v bitset words, low word first
 * @return number of trailing zero bits, or -1 when all bits are zero
 */
public static int numberOfTrailingZerosSigned(long[] v) {
  int base = 0;
  for(int w = 0; w < v.length; w++, base += Long.SIZE) {
    final long word = v[w];
    if(word != 0) {
      return Long.numberOfTrailingZeros(word) + base;
    }
  }
  return -1; // Entirely zero.
}
Find the number of trailing zeros .
80
8
157,344
/**
 * Find the number of leading zeros of a multi-word bitstring.
 *
 * @param v bitset words, low word first
 * @return number of leading zero bits, or -1 when all bits are zero
 */
public static int numberOfLeadingZerosSigned(long[] v) {
  int skipped = 0;
  for(int w = v.length - 1; w >= 0; w--, skipped += Long.SIZE) {
    if(v[w] != 0) {
      return Long.numberOfLeadingZeros(v[w]) + skipped;
    }
  }
  return -1; // Entirely zero.
}
Find the number of leading zeros .
86
8
157,345
/**
 * Find the highest clear (zero) bit at or below {@code start} in a single
 * long.
 *
 * @param v bit pattern
 * @param start highest bit position to consider (clamped to 63)
 * @return position of the previous clear bit, or -1 if none
 */
public static int previousClearBit(long v, int start) {
  if(start < 0) {
    return -1;
  }
  if(start >= Long.SIZE) {
    start = Long.SIZE - 1; // Clamp to the top bit.
  }
  // Mask keeps bits [0, start]; note >>> -(start+1) == >>> (64-start-1).
  final long candidates = ~v & (LONG_ALL_BITS >>> -(start + 1));
  return candidates == 0 ? -1 : 63 - Long.numberOfLeadingZeros(candidates);
}
Find the previous clear bit .
89
6
157,346
/**
 * Find the lowest set bit at or above {@code start} in a single long.
 *
 * @param v bit pattern
 * @param start lowest bit position to consider (negative treated as 0)
 * @return position of the next set bit, or -1 if none
 */
public static int nextSetBit(long v, int start) {
  if(start >= Long.SIZE) {
    return -1;
  }
  if(start < 0) {
    start = 0;
  }
  final long masked = v & (LONG_ALL_BITS << start);
  if(masked == 0) {
    return -1;
  }
  // All-ones needs no scan; otherwise count trailing zeros.
  return masked == LONG_ALL_BITS ? 0 : Long.numberOfTrailingZeros(masked);
}
Find the next set bit .
87
6
157,347
/**
 * Test whether two bitsets intersect, i.e. share at least one set bit.
 *
 * @param x first bitset
 * @param y second bitset
 * @return true when a common bit is set
 */
public static boolean intersect(long[] x, long[] y) {
  final int shared = Math.min(x.length, y.length);
  for(int i = 0; i < shared; i++) {
    if((x[i] & y[i]) != 0L) {
      return true;
    }
  }
  return false;
}
Test whether two Bitsets intersect .
80
7
157,348
/**
 * Compute the intersection size (number of common set bits) of two
 * bitsets.
 *
 * @param x first bitset
 * @param y second bitset
 * @return popcount of the bitwise AND
 */
public static int intersectionSize(long[] x, long[] y) {
  final int shared = Math.min(x.length, y.length);
  int count = 0;
  for(int i = 0; i < shared; i++) {
    count += Long.bitCount(x[i] & y[i]);
  }
  return count;
}
Compute the intersection size of two Bitsets .
91
10
157,349
/**
 * Compute the union size (number of bits set in either) of two bitsets of
 * possibly different lengths.
 *
 * @param x first bitset
 * @param y second bitset
 * @return popcount of the bitwise OR
 */
public static int unionSize(long[] x, long[] y) {
  final int lx = x.length, ly = y.length;
  final int shared = lx < ly ? lx : ly;
  int count = 0, i = 0;
  // Shared prefix: count bits of the OR.
  while(i < shared) {
    count += Long.bitCount(x[i] | y[i]);
    i++;
  }
  // Remainder of the longer operand counts as-is.
  while(i < lx) {
    count += Long.bitCount(x[i]);
    i++;
  }
  while(i < ly) {
    count += Long.bitCount(y[i]);
    i++;
  }
  return count;
}
Compute the union size of two Bitsets .
142
10
157,350
/**
 * Test two bitsets for equality, ignoring extra zero words in the longer
 * operand; null equals only null.
 *
 * @param x first bitset (may be null)
 * @param y second bitset (may be null)
 * @return true when both represent the same bit pattern
 */
public static boolean equal(long[] x, long[] y) {
  if(x == null || y == null) {
    return x == null && y == null;
  }
  final int shared = Math.min(x.length, y.length);
  // Any nonzero word beyond the shared length means inequality.
  for(int i = x.length - 1; i >= shared; i--) {
    if(x[i] != 0L) {
      return false;
    }
  }
  for(int i = y.length - 1; i >= shared; i--) {
    if(y[i] != 0L) {
      return false;
    }
  }
  for(int i = shared - 1; i >= 0; i--) {
    if(x[i] != y[i]) {
      return false;
    }
  }
  return true;
}
Test two bitsets for equality
162
6
157,351
/**
 * Compare two bitsets as unsigned big-endian numbers, ignoring extra zero
 * words in the longer operand; null sorts before everything else.
 *
 * @param x first bitset (may be null)
 * @param y second bitset (may be null)
 * @return -1, 0 or +1
 */
public static int compare(long[] x, long[] y) {
  if(x == null) {
    return y == null ? 0 : -1;
  }
  if(y == null) {
    return +1;
  }
  final int shared = Math.min(x.length, y.length);
  // Any nonzero word above the shared length decides immediately.
  for(int i = x.length - 1; i >= shared; i--) {
    if(x[i] != 0) {
      return +1;
    }
  }
  for(int i = y.length - 1; i >= shared; i--) {
    if(y[i] != 0) {
      return -1;
    }
  }
  // Compare shared words most-significant first, as unsigned values.
  for(int p = shared - 1; p >= 0; p--) {
    final long xp = x[p], yp = y[p];
    if(xp != yp) {
      return Long.compareUnsigned(xp, yp) < 0 ? -1 : +1;
    }
  }
  return 0;
}
Compare two bitsets .
219
5
157,352
@ Override public int compareTo ( DistanceEntry < E > o ) { int comp = Double . compare ( distance , o . distance ) ; return comp ; // return comp != 0 ? comp : // entry.getEntryID().compareTo(o.entry.getEntryID()); }
Compares this object with the specified object for order .
61
11
157,353
/**
 * Compute the kNN distances of the given object.
 *
 * NOTE(review): the query requests getKmax() - 1 neighbors, yet the array
 * has getKmax() slots, so the last slot may stay 0.0 — confirm whether
 * that is intended (e.g. the query object counted separately).
 *
 * @param object query object
 * @return array of nearest-neighbor distances, length getKmax()
 */
private double[] knnDistances(O object) {
  KNNList knns = knnq.getKNNForObject(object, getKmax() - 1);
  double[] distances = new double[getKmax()];
  int i = 0;
  for(DoubleDBIDListIter iter = knns.iter(); iter.valid() && i < getKmax(); iter.advance(), i++) {
    distances[i] = iter.doubleValue();
  }
  return distances;
}
Returns the knn distance of the object with the specified id .
117
13
157,354
/**
 * Aggregate a range of bins into a single value by summing.
 *
 * @param data input bins
 * @param start first bin (inclusive)
 * @param end last bin (exclusive)
 * @param size target size hint (currently unused — TODO confirm intent)
 * @return sum of the bins in [start, end)
 */
protected double downsample(double[] data, int start, int end, int size) {
  double total = 0;
  int pos = start;
  // Forward summation order kept for bitwise-identical float results.
  while(pos < end) {
    total += data[pos++];
  }
  return total;
}
Perform downsampling on a number of bins .
52
11
157,355
/**
 * Set the camera rotation around the z axis, in radians; caches the
 * cosine and sine of the angle and notifies camera listeners.
 *
 * @param rotationZ rotation angle in radians
 */
public void setRotationZ(double rotationZ) {
  this.rotationZ = rotationZ;
  this.cosZ = FastMath.cos(rotationZ);
  this.sinZ = FastMath.sin(rotationZ);
  fireCameraChangedEvent();
}
Set the z rotation angle in radians .
56
9
157,356
/**
 * Apply the camera to a GL context: configure the perspective projection
 * and the view, then capture the viewport and matrices for later
 * coordinate (un)projection.
 *
 * @param gl GL2 context to configure
 */
public void apply(GL2 gl) {
  // 3D projection
  gl.glMatrixMode(GL2.GL_PROJECTION);
  gl.glLoadIdentity();
  // Perspective.
  glu.gluPerspective(35, ratio, 1, 1000);
  // Eye position derives from the cached rotation (sinZ/cosZ), distance
  // and height.
  glu.gluLookAt(distance * sinZ, distance * -cosZ, height, // pos
      0, 0, .5, // center
      0, 0, 1 // up
  );
  // Change back to model view matrix.
  gl.glMatrixMode(GL2.GL_MODELVIEW);
  gl.glLoadIdentity();
  // Store the matrixes for reference.
  gl.glGetIntegerv(GL.GL_VIEWPORT, viewp, 0);
  gl.glGetDoublev(GLMatrixFunc.GL_MODELVIEW_MATRIX, modelview, 0);
  gl.glGetDoublev(GLMatrixFunc.GL_PROJECTION_MATRIX, projection, 0);
}
Apply the camera to a GL context .
217
8
157,357
/**
 * Project a world coordinate to window coordinates, using the viewport
 * and matrices captured by the last {@code apply()} call.
 *
 * @param x world x
 * @param y world y
 * @param z world z
 * @param out output array receiving the window coordinates
 */
public void project(double x, double y, double z, double[] out) {
  glu.gluProject(x, y, z, modelview, 0, projection, 0, viewp, 0, out, 0);
}
Project a coordinate
52
3
157,358
/**
 * Register a camera listener; the listener list is created lazily.
 *
 * @param lis listener to add
 */
public void addCameraListener(CameraListener lis) {
  if(listeners == null) {
    listeners = new ArrayList<>(5);
  }
  listeners.add(lis);
}
Add a camera listener .
40
5
157,359
public static AffineTransformation axisProjection ( int dim , int ax1 , int ax2 ) { // setup a projection to get the data into the interval -1:+1 in each // dimension with the intended-to-see dimensions first. AffineTransformation proj = AffineTransformation . reorderAxesTransformation ( dim , ax1 , ax2 ) ; // Assuming that the data was normalized on [0:1], center it: double [ ] trans = new double [ dim ] ; for ( int i = 0 ; i < dim ; i ++ ) { trans [ i ] = - .5 ; } proj . addTranslation ( trans ) ; // mirror on the y axis, since the SVG coordinate system is screen // coordinates (y = down) and not mathematical coordinates (y = up) proj . addAxisReflection ( 2 ) ; // scale it up proj . addScaling ( SCALE ) ; return proj ; }
Compute a transformation matrix to show only axes ax1 and ax2 .
203
15
157,360
/**
 * Determines and returns the conservative approximation for the knn distances
 * of this node as the maximum of the conservative approximations of all
 * entries.
 *
 * @param k_max the maximum k supported by the approximation
 * @return conservative (upper bound) approximation line for this node
 */
protected ApproximationLine conservativeKnnDistanceApproximation(int k_max) {
  // determine k_0, y_1, y_kmax
  // First pass: k_0 is the smallest k_0 of any entry's approximation.
  int k_0 = k_max;
  double y_1 = Double.NEGATIVE_INFINITY;
  double y_kmax = Double.NEGATIVE_INFINITY;
  for(int i = 0; i < getNumEntries(); i++) {
    MkCoPEntry entry = getEntry(i);
    ApproximationLine approx = entry.getConservativeKnnDistanceApproximation();
    k_0 = Math.min(approx.getK_0(), k_0);
  }
  // Second pass: maximum of each entry's line at k_0 and k_max
  // (k_0 must be known first, hence two separate passes).
  for(int i = 0; i < getNumEntries(); i++) {
    MkCoPEntry entry = getEntry(i);
    ApproximationLine approx = entry.getConservativeKnnDistanceApproximation();
    double entry_y_1 = approx.getValueAt(k_0);
    double entry_y_kmax = approx.getValueAt(k_max);
    if(!Double.isInfinite(entry_y_1)) {
      y_1 = Math.max(entry_y_1, y_1);
    }
    if(!Double.isInfinite(entry_y_kmax)) {
      y_kmax = Math.max(entry_y_kmax, y_kmax);
    }
  }
  // determine m and t: the line through (log k_0, y_1) and (log k_max, y_kmax).
  double m = (y_kmax - y_1) / (FastMath.log(k_max) - FastMath.log(k_0));
  double t = y_1 - m * FastMath.log(k_0);
  return new ApproximationLine(k_0, m, t);
}
Determines and returns the conservative approximation for the knn distances of this node as the maximum of the conservative approximations of all entries .
393
29
157,361
protected ApproximationLine progressiveKnnDistanceApproximation ( int k_max ) { if ( ! isLeaf ( ) ) { throw new UnsupportedOperationException ( "Progressive KNN-distance approximation " + "is only vailable in leaf nodes!" ) ; } // determine k_0, y_1, y_kmax int k_0 = 0 ; double y_1 = Double . POSITIVE_INFINITY ; double y_kmax = Double . POSITIVE_INFINITY ; for ( int i = 0 ; i < getNumEntries ( ) ; i ++ ) { MkCoPLeafEntry entry = ( MkCoPLeafEntry ) getEntry ( i ) ; ApproximationLine approx = entry . getProgressiveKnnDistanceApproximation ( ) ; k_0 = Math . max ( approx . getK_0 ( ) , k_0 ) ; } for ( int i = 0 ; i < getNumEntries ( ) ; i ++ ) { MkCoPLeafEntry entry = ( MkCoPLeafEntry ) getEntry ( i ) ; ApproximationLine approx = entry . getProgressiveKnnDistanceApproximation ( ) ; y_1 = Math . min ( approx . getValueAt ( k_0 ) , y_1 ) ; y_kmax = Math . min ( approx . getValueAt ( k_max ) , y_kmax ) ; } // determine m and t double m = ( y_kmax - y_1 ) / ( FastMath . log ( k_max ) - FastMath . log ( k_0 ) ) ; double t = y_1 - m * FastMath . log ( k_0 ) ; return new ApproximationLine ( k_0 , m , t ) ; }
Determines and returns the progressive approximation for the knn distances of this node as the minimum of the progressive approximations of all entries .
385
29
157,362
/**
 * Add a single class to the map, replacing any class of the same name owned
 * by the same (or no) owner.
 *
 * @param clss class to add
 * @return the class previously stored under this name, or null
 * @throws CSSNamingConflict if a class of the same name but different owner
 *         already exists
 */
public CSSClass addClass(CSSClass clss) throws CSSNamingConflict {
  final String name = clss.getName();
  CSSClass prev = store.get(name);
  if(prev != null) {
    Object prevOwner = prev.getOwner();
    if(prevOwner != null && prevOwner != clss.getOwner()) {
      throw new CSSNamingConflict("CSS class naming conflict between " + clss.getOwner().toString() + " and " + prevOwner.toString());
    }
  }
  return store.put(name, clss);
}
Add a single class to the map .
131
8
157,363
/**
 * Remove a single CSS class from the map. Note that classes are removed by
 * reference, not by name: a different instance stored under the same name is
 * left untouched.
 *
 * @param clss class instance to remove
 */
public synchronized void removeClass(CSSClass clss) {
  // Only remove if the stored instance is the very same object.
  if(store.get(clss.getName()) == clss) {
    store.remove(clss.getName());
  }
}
Remove a single CSS class from the map . Note that classes are removed by reference not by name!
51
20
157,364
public CSSClass getClass ( String name , Object owner ) throws CSSNamingConflict { CSSClass existing = store . get ( name ) ; // Not found. if ( existing == null ) { return null ; } // Different owner if ( owner != null && existing . getOwner ( ) != owner ) { throw new CSSNamingConflict ( "CSS class naming conflict between " + owner . toString ( ) + " and " + existing . getOwner ( ) . toString ( ) ) ; } return existing ; }
Retrieve a single class by name and owner
109
9
157,365
/**
 * Serialize all managed CSS classes into the given buffer, one rule per
 * class.
 *
 * @param buf buffer to append the CSS rules to
 */
public void serialize(StringBuilder buf) {
  for(CSSClass c : store.values()) {
    c.appendCSSDefinition(buf);
  }
}
Serialize managed CSS classes to rule file .
36
9
157,366
/**
 * Merge the CSS classes of another manager into this one, e.g. when merging
 * two plots.
 *
 * @param other manager whose classes are merged in
 * @return true (always; kept for interface compatibility)
 * @throws CSSNamingConflict if any class name collides with a different owner
 */
public boolean mergeCSSFrom(CSSClassManager other) throws CSSNamingConflict {
  for(CSSClass c : other.getClasses()) {
    addClass(c);
  }
  return true;
}
Merge CSS classes , for example to merge two plots .
48
11
157,367
/**
 * Replace the text contents of an existing style element with the current
 * serialization of the managed classes.
 *
 * @param document owner document, used to create the text node
 * @param style style element to update
 */
public void updateStyleElement(Document document, Element style) {
  // Drop whatever content the style element currently has.
  while(style.hasChildNodes()) {
    style.removeChild(style.getFirstChild());
  }
  StringBuilder css = new StringBuilder();
  serialize(css);
  style.appendChild(document.createTextNode(css.toString()));
}
Update the text contents of an existing style element .
82
10
157,368
/**
 * Compute the normalized MDEF (multi-granularity deviation factor) of a
 * counting node relative to its sampling node, as used by the LOCI algorithm.
 *
 * @param sn sampling neighborhood node
 * @param cg counting neighborhood node
 * @return normalized MDEF score; 0 when no meaningful deviation can be
 *         computed
 */
private static double calculate_MDEF_norm(Node sn, Node cg) {
  // get the square sum of the counting neighborhoods box counts
  long sq = sn.getSquareSum(cg.getLevel() - sn.getLevel());
  /*
   * if the square sum is equal to box count of the sampling Neighborhood then
   * n_hat is equal one, and as cg needs to have at least one Element mdef
   * would get zero or lower than zero. This is the case when all of the
   * counting Neighborhoods contain one or zero Objects. Additionally, the
   * cubic sum, square sum and sampling Neighborhood box count are all equal,
   * which leads to sig_n_hat being zero and thus mdef_norm is either negative
   * infinite or undefined. As the distribution of the Objects seem quite
   * uniform, a mdef_norm value of zero ( = no outlier) is appropriate and
   * circumvents the problem of undefined values.
   */
  if(sq == sn.getCount()) {
    return 0.0;
  }
  // calculation of mdef according to the paper and standardization as done in
  // LOCI
  long cb = sn.getCubicSum(cg.getLevel() - sn.getLevel());
  double n_hat = (double) sq / sn.getCount();
  // NOTE(review): cb * getCount() and sq * sq are long products — confirm the
  // box counts are small enough that these cannot overflow.
  double sig_n_hat = FastMath.sqrt(cb * sn.getCount() - (sq * sq)) / sn.getCount();
  // Avoid NaN - correct result 0.0?
  if(sig_n_hat < Double.MIN_NORMAL) {
    return 0.0;
  }
  double mdef = n_hat - cg.getCount();
  return mdef / sig_n_hat;
}
Method for the MDEF calculation
377
6
157,369
/**
 * Write a bundle stream to a file output channel.
 *
 * The header (including serializers) is written lazily on the first object,
 * so meta changes are only allowed before any object has been seen.
 *
 * @param source bundle stream to serialize
 * @param output channel to write to
 * @throws IOException on write errors
 */
public void writeBundleStream(BundleStreamSource source, WritableByteChannel output) throws IOException {
  ByteBuffer buffer = ByteBuffer.allocateDirect(INITIAL_BUFFER);
  DBIDVar var = DBIDUtil.newVar();
  // serializers[0] is the (optional) DBID serializer; [1..] map to the
  // stream's meta columns. null until the header has been written.
  ByteBufferSerializer<?>[] serializers = null;
  loop: while(true) {
    BundleStreamSource.Event ev = source.nextEvent();
    switch(ev){
    case NEXT_OBJECT:
      if(serializers == null) {
        // First object: emit the file header now.
        serializers = writeHeader(source, buffer, output);
      }
      if(serializers[0] != null) {
        if(!source.assignDBID(var)) {
          throw new AbortException("An object did not have an DBID assigned.");
        }
        DBID id = DBIDUtil.deref(var);
        @SuppressWarnings("unchecked")
        ByteBufferSerializer<DBID> ser = (ByteBufferSerializer<DBID>) serializers[0];
        int size = ser.getByteSize(id);
        // ensureBuffer may flush and/or replace the buffer.
        buffer = ensureBuffer(size, buffer, output);
        ser.toByteBuffer(buffer, id);
      }
      // Serialize each data column of the object.
      for(int i = 1, j = 0; i < serializers.length; ++i, ++j) {
        @SuppressWarnings("unchecked")
        ByteBufferSerializer<Object> ser = (ByteBufferSerializer<Object>) serializers[i];
        int size = ser.getByteSize(source.data(j));
        buffer = ensureBuffer(size, buffer, output);
        ser.toByteBuffer(buffer, source.data(j));
      }
      break; // switch
    case META_CHANGED:
      if(serializers != null) {
        throw new AbortException("Meta changes are not supported, once the block header has been written.");
      }
      break; // switch
    case END_OF_STREAM:
      break loop;
    default:
      LOG.warning("Unknown bundle stream event. API inconsistent? " + ev);
      break; // switch
    }
  }
  // Flush any remaining buffered bytes.
  if(buffer.position() > 0) {
    flushBuffer(buffer, output);
  }
}
Write a bundle stream to a file output channel .
469
10
157,370
/**
 * Flush the current write buffer to the output channel and reset it to an
 * empty, writable state.
 *
 * Bug fix: {@link WritableByteChannel#write} is allowed to perform a partial
 * write; the original flip/write/flip sequence silently dropped any bytes
 * that were not written. We now loop until the buffer is fully drained.
 *
 * @param buffer buffer to flush; empty and writable on return
 * @param output channel to write to
 * @throws IOException on write errors
 */
private void flushBuffer(ByteBuffer buffer, WritableByteChannel output) throws IOException {
  buffer.flip();
  while(buffer.hasRemaining()) {
    output.write(buffer);
  }
  // Equivalent to the previous flip + limit(capacity) dance, but also safe
  // after the partial-write loop above.
  buffer.clear();
}
Flush the current write buffer to disk .
50
9
157,371
private ByteBuffer ensureBuffer ( int size , ByteBuffer buffer , WritableByteChannel output ) throws IOException { if ( buffer . remaining ( ) >= size ) { return buffer ; } flushBuffer ( buffer , output ) ; if ( buffer . remaining ( ) >= size ) { return buffer ; } // Aggressively grow the buffer return ByteBuffer . allocateDirect ( Math . max ( buffer . capacity ( ) << 1 , buffer . capacity ( ) + size ) ) ; }
Ensure the buffer is large enough .
98
8
157,372
/**
 * Write the file header for the given stream to the output buffer.
 *
 * Layout: magic int, number of type entries, then one serialized type
 * information per entry. When the source carries DBIDs, an extra DBID type is
 * prepended (for compatibility with earlier format versions).
 *
 * @param source stream whose meta information is written
 * @param buffer buffer to write into
 * @param output channel (passed through; not written here directly)
 * @return serializer array; index 0 is the DBID serializer or null, indexes
 *         1.. match the meta columns
 * @throws IOException on serialization errors
 * @throws AbortException when a column type has no serializer
 */
private ByteBufferSerializer<?>[] writeHeader(BundleStreamSource source, ByteBuffer buffer, WritableByteChannel output) throws IOException {
  final BundleMeta meta = source.getMeta();
  final int nummeta = meta.size();
  @SuppressWarnings("rawtypes")
  final ByteBufferSerializer[] serializers = new ByteBufferSerializer[1 + nummeta];
  // Write our magic ID first.
  assert (buffer.position() == 0) : "Buffer is supposed to be at 0.";
  buffer.putInt(MAGIC);
  // Write the number of metas next.
  // For compatibility with earlier versions, treat DBIDs as extra type
  if(source.hasDBIDs()) {
    buffer.putInt(1 + nummeta);
    ByteBufferSerializer<DBID> ser = DBIDFactory.FACTORY.getDBIDSerializer();
    TypeInformationSerializer.STATIC.toByteBuffer(buffer, new SimpleTypeInformation<>(DBID.class, ser));
    serializers[0] = ser;
  }
  else {
    buffer.putInt(nummeta);
  }
  // One type information record per meta column.
  for(int i = 0; i < nummeta; i++) {
    SimpleTypeInformation<?> type = meta.get(i);
    ByteBufferSerializer<?> ser = type.getSerializer();
    if(ser == null) {
      throw new AbortException("Cannot serialize - no serializer found for type: " + type.toString());
    }
    TypeInformationSerializer.STATIC.toByteBuffer(buffer, type);
    serializers[i + 1] = ser;
  }
  return serializers;
}
Write the header for the given stream to the stream .
365
11
157,373
/**
 * Run the DBSCAN algorithm: expand a cluster from every not-yet-processed
 * object, with optional progress logging.
 *
 * @param relation data relation to cluster
 * @param rangeQuery range query used for neighborhood lookups
 */
protected void runDBSCAN(Relation<O> relation, RangeQuery<O> rangeQuery) {
  final int size = relation.size();
  FiniteProgress objprog = LOG.isVerbose() ? new FiniteProgress("Processing objects", size, LOG) : null;
  IndefiniteProgress clusprog = LOG.isVerbose() ? new IndefiniteProgress("Number of clusters", LOG) : null;
  processedIDs = DBIDUtil.newHashSet(size);
  // Seed list shared across expandCluster calls to avoid reallocation.
  ArrayModifiableDBIDs seeds = DBIDUtil.newArray();
  for(DBIDIter iditer = relation.iterDBIDs(); iditer.valid(); iditer.advance()) {
    if(!processedIDs.contains(iditer)) {
      expandCluster(relation, rangeQuery, iditer, seeds, objprog, clusprog);
    }
    if(objprog != null && clusprog != null) {
      objprog.setProcessed(processedIDs.size(), LOG);
      clusprog.setProcessed(resultList.size(), LOG);
    }
    // All objects assigned: no need to scan the remainder.
    if(processedIDs.size() == size) {
      break;
    }
  }
  // Finish progress logging
  LOG.ensureCompleted(objprog);
  LOG.setCompleted(clusprog);
}
Run the DBSCAN algorithm
299
6
157,374
/**
 * DBSCAN-function expandCluster: grow a cluster from a starting object, or
 * mark it as noise when it is not a core object.
 *
 * @param relation data relation
 * @param rangeQuery range query for neighborhood lookups
 * @param startObjectID object to start the expansion from
 * @param seeds reusable (empty) seed list for the expansion frontier
 * @param objprog object progress logger, may be null
 * @param clusprog cluster progress logger, may be null
 */
protected void expandCluster(Relation<O> relation, RangeQuery<O> rangeQuery, DBIDRef startObjectID, ArrayModifiableDBIDs seeds, FiniteProgress objprog, IndefiniteProgress clusprog) {
  DoubleDBIDList neighbors = rangeQuery.getRangeForDBID(startObjectID, epsilon);
  ncounter += neighbors.size(); // statistics
  // startObject is no core-object
  if(neighbors.size() < minpts) {
    noise.add(startObjectID);
    processedIDs.add(startObjectID);
    if(objprog != null) {
      objprog.incrementProcessed(LOG);
    }
    return;
  }
  ModifiableDBIDs currentCluster = DBIDUtil.newArray();
  currentCluster.add(startObjectID);
  processedIDs.add(startObjectID);
  // try to expand the cluster
  assert (seeds.size() == 0);
  seeds.clear();
  processNeighbors(neighbors.iter(), currentCluster, seeds);
  DBIDVar o = DBIDUtil.newVar();
  // Process the frontier until it is exhausted; only core points (with at
  // least minpts neighbors) contribute new seeds.
  while(!seeds.isEmpty()) {
    neighbors = rangeQuery.getRangeForDBID(seeds.pop(o), epsilon);
    ncounter += neighbors.size();
    if(neighbors.size() >= minpts) {
      processNeighbors(neighbors.iter(), currentCluster, seeds);
    }
    if(objprog != null) {
      objprog.incrementProcessed(LOG);
    }
  }
  resultList.add(currentCluster);
  if(clusprog != null) {
    clusprog.setProcessed(resultList.size(), LOG);
  }
}
DBSCAN - function expandCluster .
380
9
157,375
/**
 * Process the neighborhood of a single core point: unseen neighbors are
 * scheduled for expansion and added to the cluster; previously seen
 * neighbors are added only when they were noise (which they then cease to
 * be).
 *
 * @param neighbor iterator over the core point's neighborhood
 * @param currentCluster cluster under construction
 * @param seeds frontier of points still to be expanded
 */
private void processNeighbors(DoubleDBIDListIter neighbor, ModifiableDBIDs currentCluster, ArrayModifiableDBIDs seeds) {
  final boolean ismetric = getDistanceFunction().isMetric();
  for(; neighbor.valid(); neighbor.advance()) {
    if(processedIDs.add(neighbor)) {
      // Unseen point. For metric distances, distance 0 means an exact
      // duplicate of the core point, whose neighborhood is identical and
      // need not be expanded again.
      if(!ismetric || neighbor.doubleValue() > 0.) {
        seeds.add(neighbor);
      }
    }
    else if(!noise.remove(neighbor)) {
      // Already assigned to a cluster: skip, do not add twice.
      continue;
    }
    currentCluster.add(neighbor);
  }
}
Process a single core point .
127
6
157,376
/**
 * Log-likelihood of the anomalous objects under a uniform distribution:
 * n * -log(n), or 0 for an empty set.
 *
 * @param anomalousObjs the set of anomalous objects
 * @return log-likelihood of the anomalous objects
 */
private double loglikelihoodAnomalous(DBIDs anomalousObjs) {
  final int n = anomalousObjs.size();
  if(n == 0) {
    return 0;
  }
  return n * -FastMath.log(n);
}
Loglikelihood anomalous objects . Uniform distribution .
56
10
157,377
/**
 * Compute the log-likelihood of all normal (non-anomalous) objects under a
 * multivariate Gaussian model given by the covariance builder.
 *
 * @param objids all object ids
 * @param anomalous subset currently flagged anomalous (excluded here)
 * @param builder covariance matrix builder holding mean and scatter
 * @param relation data relation
 * @return log-likelihood of the normal objects
 */
private double loglikelihoodNormal(DBIDs objids, SetDBIDs anomalous, CovarianceMatrix builder, Relation<V> relation) {
  double[] mean = builder.getMeanVector();
  final LUDecomposition lu = new LUDecomposition(builder.makeSampleMatrix());
  double[][] covInv = lu.inverse();
  // for each object compute probability and sum
  // Normalization constant: -n * log(sqrt((2 pi)^d * det(Sigma)))
  double prob = (objids.size() - anomalous.size()) * -FastMath.log(FastMath.sqrt(MathUtil.powi(MathUtil.TWOPI, RelationUtil.dimensionality(relation)) * lu.det()));
  for(DBIDIter iter = objids.iter(); iter.valid(); iter.advance()) {
    if(!anomalous.contains(iter)) {
      // Mahalanobis term: -1/2 * (x - mu)^T Sigma^-1 (x - mu)
      double[] xcent = minusEquals(relation.get(iter).toArray(), mean);
      prob -= .5 * transposeTimesTimes(xcent, covInv, xcent);
    }
  }
  return prob;
}
Computes the loglikelihood of all normal objects . Gaussian model
251
14
157,378
private DoubleMinMax exactMinMax ( Relation < O > relation , DistanceQuery < O > distFunc ) { final FiniteProgress progress = LOG . isVerbose ( ) ? new FiniteProgress ( "Exact fitting distance computations" , relation . size ( ) , LOG ) : null ; DoubleMinMax minmax = new DoubleMinMax ( ) ; // find exact minimum and maximum first. for ( DBIDIter iditer = relation . iterDBIDs ( ) ; iditer . valid ( ) ; iditer . advance ( ) ) { for ( DBIDIter iditer2 = relation . iterDBIDs ( ) ; iditer2 . valid ( ) ; iditer2 . advance ( ) ) { // skip the point itself. if ( DBIDUtil . equal ( iditer , iditer2 ) ) { continue ; } double d = distFunc . distance ( iditer , iditer2 ) ; minmax . put ( d ) ; } LOG . incrementProcessed ( progress ) ; } LOG . ensureCompleted ( progress ) ; return minmax ; }
Compute the exact maximum and minimum .
227
8
157,379
/**
 * Performs necessary operations before inserting the specified entry:
 * delegates to the recursive pre-insert starting at the root.
 *
 * @param entry the entry about to be inserted
 */
@Override
protected void preInsert(RdKNNEntry entry) {
  // Scratch heap for the k-nearest neighbors of the new entry.
  KNNHeap knns = DBIDUtil.newHeap(settings.k_max);
  preInsert(entry, getRootEntry(), knns);
}
Performs necessary operations before inserting the specified entry .
58
10
157,380
/**
 * Performs necessary operations after deleting the specified object: the knn
 * distances of all objects that had the deleted object among their k nearest
 * neighbors must be recomputed and propagated up the tree.
 *
 * @param entry the (leaf) entry that was deleted
 */
@Override
protected void postDelete(RdKNNEntry entry) {
  // reverse knn of o
  ModifiableDoubleDBIDList rnns = DBIDUtil.newDistanceDBIDList();
  doReverseKNN(getRoot(), ((RdKNNLeafEntry) entry).getDBID(), rnns);
  // knn of rnn
  ArrayModifiableDBIDs ids = DBIDUtil.newArray(rnns);
  ids.sort();
  List<? extends KNNList> knnLists = knnQuery.getKNNForBulkDBIDs(ids, settings.k_max);
  // adjust knn distances
  adjustKNNDistance(getRootEntry(), ids, knnLists);
}
Performs necessary operations after deleting the specified object .
175
10
157,381
private void doReverseKNN ( RdKNNNode node , DBID oid , ModifiableDoubleDBIDList result ) { if ( node . isLeaf ( ) ) { for ( int i = 0 ; i < node . getNumEntries ( ) ; i ++ ) { RdKNNLeafEntry entry = ( RdKNNLeafEntry ) node . getEntry ( i ) ; double distance = distanceQuery . distance ( entry . getDBID ( ) , oid ) ; if ( distance <= entry . getKnnDistance ( ) ) { result . add ( distance , entry . getDBID ( ) ) ; } } } // node is a inner node else { for ( int i = 0 ; i < node . getNumEntries ( ) ; i ++ ) { RdKNNDirectoryEntry entry = ( RdKNNDirectoryEntry ) node . getEntry ( i ) ; double minDist = distanceQuery . minDist ( entry , oid ) ; if ( minDist <= entry . getKnnDistance ( ) ) { doReverseKNN ( getNode ( entry ) , oid , result ) ; } } } }
Performs a reverse knn query in the specified subtree .
244
13
157,382
private void doBulkReverseKNN ( RdKNNNode node , DBIDs ids , Map < DBID , ModifiableDoubleDBIDList > result ) { if ( node . isLeaf ( ) ) { for ( int i = 0 ; i < node . getNumEntries ( ) ; i ++ ) { RdKNNLeafEntry entry = ( RdKNNLeafEntry ) node . getEntry ( i ) ; for ( DBIDIter iter = ids . iter ( ) ; iter . valid ( ) ; iter . advance ( ) ) { DBID id = DBIDUtil . deref ( iter ) ; double distance = distanceQuery . distance ( entry . getDBID ( ) , id ) ; if ( distance <= entry . getKnnDistance ( ) ) { result . get ( id ) . add ( distance , entry . getDBID ( ) ) ; } } } } // node is a inner node else { for ( int i = 0 ; i < node . getNumEntries ( ) ; i ++ ) { RdKNNDirectoryEntry entry = ( RdKNNDirectoryEntry ) node . getEntry ( i ) ; ModifiableDBIDs candidates = DBIDUtil . newArray ( ) ; for ( DBIDIter iter = ids . iter ( ) ; iter . valid ( ) ; iter . advance ( ) ) { DBID id = DBIDUtil . deref ( iter ) ; double minDist = distanceQuery . minDist ( entry , id ) ; if ( minDist <= entry . getKnnDistance ( ) ) { candidates . add ( id ) ; } if ( ! candidates . isEmpty ( ) ) { doBulkReverseKNN ( getNode ( entry ) , candidates , result ) ; } } } } }
Performs a bulk reverse knn query in the specified subtree .
377
14
157,383
/**
 * Throws an IllegalArgumentException if the specified distance function is
 * not equal to the distance function used by this index.
 *
 * Bug fix: the error message previously printed the class of the distance
 * *query* ({@code this.distanceQuery}) as the expected type, although the
 * comparison is against {@code settings.distanceFunction}. The message now
 * names the actually expected distance function class.
 *
 * @param distanceFunction the distance function to verify
 * @throws IllegalArgumentException if the distance function does not match
 */
private void checkDistanceFunction(SpatialPrimitiveDistanceFunction<? super O> distanceFunction) {
  if(!settings.distanceFunction.equals(distanceFunction)) {
    throw new IllegalArgumentException("Parameter distanceFunction must be an instance of " + settings.distanceFunction.getClass() + ", but is " + distanceFunction.getClass());
  }
}
Throws an IllegalArgumentException if the specified distance function is not an instance of the distance function used by this index .
80
25
157,384
/**
 * Inserts the specified objects into this index. If a bulk load mode is
 * implemented, the objects are inserted in one bulk.
 *
 * NOTE(review): a single-element collection is skipped exactly like an empty
 * one — verify this is intentional and not a silently lost insert.
 *
 * @param ids the objects to insert
 */
@Override
public final void insertAll(DBIDs ids) {
  if(ids.isEmpty() || (ids.size() == 1)) {
    return;
  }
  // Make an example leaf
  if(canBulkLoad()) {
    List<RdKNNEntry> leafs = new ArrayList<>(ids.size());
    for(DBIDIter iter = ids.iter(); iter.valid(); iter.advance()) {
      leafs.add(createNewLeafEntry(DBIDUtil.deref(iter)));
    }
    bulkLoad(leafs);
  }
  else {
    // No bulk load: fall back to one-by-one insertion.
    for(DBIDIter iter = ids.iter(); iter.valid(); iter.advance()) {
      insert(iter);
    }
  }
  doExtraIntegrityChecks();
}
Inserts the specified objects into this index . If a bulk load mode is implemented the objects are inserted in one bulk .
185
24
157,385
/**
 * Computes the correlation distance between the two subspaces defined by the
 * specified PCAs: the number of dimensions spanned by the union of both
 * (strong-eigenvector) subspaces.
 *
 * Each strong eigenvector of one PCA that lies further than delta from the
 * other PCA's subspace opens a new dimension there, and vice versa.
 *
 * @param pca1 first filtered PCA result
 * @param pca2 second filtered PCA result
 * @param dimensionality full data dimensionality (upper bound)
 * @return the correlation distance (joint subspace dimensionality)
 */
public int correlationDistance(PCAFilteredResult pca1, PCAFilteredResult pca2, int dimensionality) {
  // TODO: Can we delay copying the matrixes?
  // pca of rv1
  double[][] v1t = copy(pca1.getEigenvectors());
  double[][] v1t_strong = pca1.getStrongEigenvectors();
  int lambda1 = pca1.getCorrelationDimension();
  // pca of rv2
  double[][] v2t = copy(pca2.getEigenvectors());
  double[][] v2t_strong = pca2.getStrongEigenvectors();
  int lambda2 = pca2.getCorrelationDimension();
  // for all strong eigenvectors of rv2
  double[][] m1_czech = pca1.dissimilarityMatrix();
  for(int i = 0; i < v2t_strong.length; i++) {
    double[] v2_i = v2t_strong[i];
    // check, if distance of v2_i to the space of rv1 > delta
    // (i.e., if v2_i spans up a new dimension)
    double distsq = squareSum(v2_i) - transposeTimesTimes(v2_i, m1_czech, v2_i);
    // if so, insert v2_i into v1 and adjust v1
    // and compute m1_czech new, increase lambda1
    if(lambda1 < dimensionality && distsq > deltasq) {
      adjust(v1t, v2_i, lambda1++);
      // TODO: make this incremental?
      double[] e1_czech_d = new double[v1t.length];
      Arrays.fill(e1_czech_d, 0, lambda1, 1);
      m1_czech = transposeDiagonalTimes(v1t, e1_czech_d, v1t);
    }
  }
  // for all strong eigenvectors of rv1
  double[][] m2_czech = pca2.dissimilarityMatrix();
  for(int i = 0; i < v1t_strong.length; i++) {
    double[] v1_i = v1t_strong[i];
    // check, if distance of v1_i to the space of rv2 > delta
    // (i.e., if v1_i spans up a new dimension)
    double distsq = squareSum(v1_i) - transposeTimesTimes(v1_i, m2_czech, v1_i);
    // if so, insert v1_i into v2 and adjust v2
    // and compute m2_czech new , increase lambda2
    if(lambda2 < dimensionality && distsq > deltasq) {
      adjust(v2t, v1_i, lambda2++);
      // TODO: make this incremental?
      // NOTE(review): sized by v1t.length — presumably v1t and v2t have the
      // same dimensionality; confirm, otherwise this should be v2t.length.
      double[] e2_czech_d = new double[v1t.length];
      Arrays.fill(e2_czech_d, 0, lambda2, 1);
      m2_czech = transposeDiagonalTimes(v2t, e2_czech_d, v2t);
    }
  }
  return Math.max(lambda1, lambda2);
}
Computes the correlation distance between the two subspaces defined by the specified PCAs .
771
18
157,386
/**
 * Log cumulative density function for location = 0, scale = 1.
 *
 * Handles infinities, NaN and the degenerate shape parameters (shape == 0)
 * explicitly before evaluating the general form.
 *
 * @param val value to evaluate at
 * @param shape1 first shape parameter
 * @param shape2 second shape parameter
 * @return log CDF value at {@code val}
 */
public static double logcdf(double val, double shape1, double shape2) {
  if(val == Double.NEGATIVE_INFINITY) {
    return Double.NEGATIVE_INFINITY;
  }
  if(val == Double.POSITIVE_INFINITY) {
    return 0.;
  }
  if(val != val) {
    // NaN in, NaN out.
    return Double.NaN;
  }
  if(shape1 == 0.) {
    // Degenerate first shape: exponential transform.
    val = FastMath.exp(-val);
  }
  else {
    double tmp = shape1 * val;
    if(tmp == Double.NEGATIVE_INFINITY) {
      return shape2 == 0 ? 0. : Double.NEGATIVE_INFINITY;
    }
    if(tmp >= 1.) {
      // Outside the distribution's support.
      return shape2 == 0 ? Double.NEGATIVE_INFINITY : 0.;
    }
    val = FastMath.exp(FastMath.log1p(-tmp) / shape1);
  }
  if(shape2 == 0.) {
    return -val;
  }
  final double tmp = shape2 * val;
  return tmp < 1. ? FastMath.log1p(-tmp) / shape2 : Double.NEGATIVE_INFINITY;
}
Cumulative density function for location = 0 scale = 1
251
12
157,387
/**
 * Query the most similar objects, sparse vector version: accumulate dot
 * products against the inverted index, iterating only the query's nonzero
 * dimensions.
 *
 * @param obj sparse query vector
 * @param scores per-candidate accumulated dot products (updated in place)
 * @param cands set of candidate ids touched by the query (updated in place)
 * @return the Euclidean norm of the query vector, for final normalization
 */
private double naiveQuerySparse(SparseNumberVector obj, WritableDoubleDataStore scores, HashSetModifiableDBIDs cands) {
  double len = 0.; // Length of query object, for final normalization
  for(int iter = obj.iter(); obj.iterValid(iter); iter = obj.iterAdvance(iter)) {
    final int dim = obj.iterDim(iter);
    final double val = obj.iterDoubleValue(iter);
    // Skip zeros and NaNs.
    if(val == 0. || val != val) {
      continue;
    }
    len += val * val;
    // No matching documents in index:
    if(dim >= index.size()) {
      continue;
    }
    // Posting list for this dimension: add the partial products.
    ModifiableDoubleDBIDList column = index.get(dim);
    for(DoubleDBIDListIter n = column.iter(); n.valid(); n.advance()) {
      scores.increment(n, n.doubleValue() * val);
      cands.add(n);
    }
  }
  return FastMath.sqrt(len);
}
Query the most similar objects sparse version .
224
8
157,388
private double naiveQueryDense ( NumberVector obj , WritableDoubleDataStore scores , HashSetModifiableDBIDs cands ) { double len = 0. ; // Length of query object, for final normalization for ( int dim = 0 , max = obj . getDimensionality ( ) ; dim < max ; dim ++ ) { final double val = obj . doubleValue ( dim ) ; if ( val == 0. || val != val ) { continue ; } len += val * val ; // No matching documents in index: if ( dim >= index . size ( ) ) { continue ; } ModifiableDoubleDBIDList column = index . get ( dim ) ; for ( DoubleDBIDListIter n = column . iter ( ) ; n . valid ( ) ; n . advance ( ) ) { scores . increment ( n , n . doubleValue ( ) * val ) ; cands . add ( n ) ; } } return FastMath . sqrt ( len ) ; }
Query the most similar objects dense version .
204
8
157,389
/**
 * Query the most similar objects, dispatching to the sparse or dense
 * implementation depending on the vector type.
 *
 * @param obj query vector
 * @param scores per-candidate accumulated dot products (updated in place)
 * @param cands set of candidate ids touched by the query (updated in place)
 * @return the Euclidean norm of the query vector
 */
private double naiveQuery(V obj, WritableDoubleDataStore scores, HashSetModifiableDBIDs cands) {
  return (obj instanceof SparseNumberVector) //
      ? naiveQuerySparse((SparseNumberVector) obj, scores, cands) //
      : naiveQueryDense(obj, scores, cands);
}
Query the most similar objects abstract version .
75
8
157,390
/**
 * Apply the configured filters to the input, switching dynamically between
 * streaming and bundle (materialized) processing as each filter requires.
 *
 * Invariant inside the loop: exactly one of {@code stream} / {@code bundle}
 * is non-null, representing the current form of the data.
 *
 * @param stream input stream source
 * @return the filtered data as a stream source
 */
protected BundleStreamSource invokeStreamFilters(BundleStreamSource stream) {
  assert (stream != null);
  if(filters == null) {
    return stream;
  }
  // We dynamically switch between streaming and bundle operations.
  MultipleObjectsBundle bundle = null;
  for(ObjectFilter filter : filters) {
    if(filter instanceof StreamFilter) {
      // Stream filter: convert a materialized bundle back to a stream first.
      stream = ((StreamFilter) filter).init(bundle != null ? bundle.asStream() : stream);
      bundle = null;
    }
    else {
      // Bundle filter: materialize the stream first.
      bundle = filter.filter(stream != null ? stream.asMultipleObjectsBundle() : bundle);
      stream = null;
    }
  }
  return stream != null ? stream : bundle.asStream();
}
Transforms the specified list of objects and their labels into a list of objects and their associations .
148
19
157,391
/**
 * Infer the calling class and method from the current stack trace, skipping
 * logging-related frames, and store them on this log record.
 *
 * Walks forward to the logger entry point (START_TRACE_AT), then continues
 * past known helper classes (IGNORE_CLASSES) to the first real caller.
 */
private void inferCallerELKI() {
  needToInferCaller = false;
  StackTraceElement[] stack = (new Throwable()).getStackTrace();
  int ix = 0;
  // skip back to the logger.
  while(ix < stack.length) {
    StackTraceElement frame = stack[ix];
    final String cls = frame.getClassName();
    if(cls.equals(START_TRACE_AT)) {
      break;
    }
    ix++;
  }
  // skip further back through helper functions
  while(ix < stack.length) {
    StackTraceElement frame = stack[ix];
    final String cls = frame.getClassName();
    boolean ignore = false;
    for(int i = 0; i < IGNORE_CLASSES.length; i++) {
      if(cls.equals(IGNORE_CLASSES[i])) {
        ignore = true;
        break;
      }
    }
    if(!ignore) {
      // First frame not on the ignore list: record it as the caller.
      super.setSourceClassName(frame.getClassName());
      super.setSourceMethodName(frame.getMethodName());
      break;
    }
    ix++;
  }
}
Infer a caller ignoring logging - related classes .
255
10
157,392
/**
 * Get the sampling result attached to a relation, creating and attaching a
 * new one on first access.
 *
 * @param rel relation to look up
 * @return the existing or newly created sampling result
 */
public static SamplingResult getSamplingResult(final Relation<?> rel) {
  Collection<SamplingResult> prior = ResultUtil.filterResults(rel.getHierarchy(), rel, SamplingResult.class);
  if(!prior.isEmpty()) {
    return prior.iterator().next();
  }
  // None yet: create one and attach it to the relation.
  SamplingResult sample = new SamplingResult(rel);
  ResultUtil.addChildResult(rel, sample);
  return sample;
}
Get the sampling result attached to a relation
108
8
157,393
/**
 * Produce the actual SVG elements for the button: background rectangle,
 * optional light-gradient overlay and drop shadow, and the title label.
 *
 * @param svgp plot used to create the SVG elements
 * @return a group element containing the rendered button
 */
public Element render(SVGPlot svgp) {
  Element tag = svgp.svgElement(SVGConstants.SVG_G_TAG);
  Element button = svgp.svgRect(x, y, w, h);
  // Rounded corners only when a radius was configured.
  if(!Double.isNaN(r)) {
    SVGUtil.setAtt(button, SVGConstants.SVG_RX_ATTRIBUTE, r);
    SVGUtil.setAtt(button, SVGConstants.SVG_RY_ATTRIBUTE, r);
  }
  SVGUtil.setAtt(button, SVGConstants.SVG_STYLE_ATTRIBUTE, butcss.inlineCSS());
  tag.appendChild(button);
  // Add light effect (only when the gradient is defined in the plot):
  if(svgp.getIdElement(SVGEffects.LIGHT_GRADIENT_ID) != null) {
    Element light = svgp.svgRect(x, y, w, h);
    if(!Double.isNaN(r)) {
      SVGUtil.setAtt(light, SVGConstants.SVG_RX_ATTRIBUTE, r);
      SVGUtil.setAtt(light, SVGConstants.SVG_RY_ATTRIBUTE, r);
    }
    SVGUtil.setAtt(light, SVGConstants.SVG_STYLE_ATTRIBUTE, "fill:url(#" + SVGEffects.LIGHT_GRADIENT_ID + ");fill-opacity:.5");
    tag.appendChild(light);
  }
  // Add shadow effect (only when the filter is defined in the plot):
  if(svgp.getIdElement(SVGEffects.SHADOW_ID) != null) {
    //Element shadow = svgp.svgRect(x + (w * .05), y + (h * .05), w, h);
    //SVGUtil.setAtt(button, SVGConstants.SVG_STYLE_ATTRIBUTE, SVGConstants.CSS_FILL_PROPERTY + ":" + SVGConstants.CSS_BLACK_VALUE);
    button.setAttribute(SVGConstants.SVG_FILTER_ATTRIBUTE, "url(#" + SVGEffects.SHADOW_ID + ")");
    //tag.appendChild(shadow);
  }
  // Title label, centered horizontally, baseline at 70% of the height.
  if(title != null) {
    Element label = svgp.svgText(x + w * .5, y + h * .7, title);
    label.setAttribute(SVGConstants.SVG_STYLE_ATTRIBUTE, titlecss.inlineCSS());
    tag.appendChild(label);
  }
  return tag;
}
Produce the actual SVG elements for the button .
578
10
157,394
/**
 * Set the button title. The title style (anchor, color, font size) is
 * created lazily on the first call; later calls only change the text and
 * ignore the color parameter.
 *
 * @param title button title text
 * @param textcolor title color, used only when the style is first created
 */
public void setTitle(String title, String textcolor) {
  this.title = title;
  if(titlecss != null) {
    // Style already exists; only the text was updated.
    return;
  }
  CSSClass css = new CSSClass(this, "text");
  css.setStatement(SVGConstants.CSS_TEXT_ANCHOR_PROPERTY, SVGConstants.CSS_MIDDLE_VALUE);
  css.setStatement(SVGConstants.CSS_FILL_PROPERTY, textcolor);
  css.setStatement(SVGConstants.CSS_FONT_SIZE_PROPERTY, .6 * h);
  titlecss = css;
}
Set the button title
129
4
157,395
/**
 * Build the lookup key for a (plot item, task) combination.
 *
 * @param item plot item
 * @param task visualization task
 * @return key pair for the map
 */
private Pair<PlotItem, VisualizationTask> key(PlotItem item, VisualizationTask task) {
  return new Pair<>(item, task);
}
Helper function for building a key object
34
7
157,396
/**
 * Build the stored value for an (element, visualization) combination.
 *
 * @param elem SVG element
 * @param vis visualization instance
 * @return value pair for the map
 */
private Pair<Element, Visualization> value(Element elem, Visualization vis) {
  return new Pair<>(elem, vis);
}
Helper function to build a value pair
32
7
157,397
/**
 * Store a new combination in the map, keyed by plot item and task.
 *
 * @param it plot item
 * @param task visualization task
 * @param elem SVG element
 * @param vis visualization instance
 */
public void put(PlotItem it, VisualizationTask task, Element elem, Visualization vis) {
  final Pair<PlotItem, VisualizationTask> k = key(it, task);
  map.put(k, value(elem, vis));
}
Put a new combination into the map .
43
8
157,398
/**
 * Remove a combination from the map.
 *
 * @param it plot item
 * @param task visualization task
 * @return the removed (element, visualization) pair, or null if absent
 */
public Pair<Element, Visualization> remove(PlotItem it, VisualizationTask task) {
  final Pair<PlotItem, VisualizationTask> k = key(it, task);
  return map.remove(k);
}
Remove a combination .
34
4
157,399
/**
 * Store an already-built (element, visualization) pair in the map.
 *
 * @param it plot item
 * @param task visualization task
 * @param pair value pair to store
 */
public void put(PlotItem it, VisualizationTask task, Pair<Element, Visualization> pair) {
  final Pair<PlotItem, VisualizationTask> k = key(it, task);
  map.put(k, pair);
}
Put a new item into the visualizations
38
8