idx
int64 0
165k
| question
stringlengths 73
4.15k
| target
stringlengths 5
918
| len_question
int64 21
890
| len_target
int64 3
255
|
|---|---|---|---|---|
157,500
|
private void format ( NumberFormat nf , StringBuilder buffer , double value , int maxIntegerDigits ) { if ( value >= 0 ) { buffer . append ( " + " ) ; } else { buffer . append ( " - " ) ; } int digits = maxIntegerDigits - integerDigits ( value ) ; for ( int d = 0 ; d < digits ; d ++ ) { buffer . append ( ' ' ) ; } buffer . append ( nf . format ( Math . abs ( value ) ) ) ; }
|
Helper method for output of equations and solution . Appends the specified double value to the given string buffer according to the number format and the maximum number of integer digits .
| 110
| 32
|
157,501
|
protected ArrayModifiableDBIDs initialMedoids ( DistanceQuery < V > distQ , DBIDs ids ) { if ( getLogger ( ) . isStatistics ( ) ) { getLogger ( ) . statistics ( new StringStatistic ( getClass ( ) . getName ( ) + ".initialization" , initializer . toString ( ) ) ) ; } Duration initd = getLogger ( ) . newDuration ( getClass ( ) . getName ( ) + ".initialization-time" ) . begin ( ) ; ArrayModifiableDBIDs medoids = DBIDUtil . newArray ( initializer . chooseInitialMedoids ( k , ids , distQ ) ) ; getLogger ( ) . statistics ( initd . end ( ) ) ; if ( medoids . size ( ) != k ) { throw new AbortException ( "Initializer " + initializer . toString ( ) + " did not return " + k + " means, but " + medoids . size ( ) ) ; } return medoids ; }
|
Choose the initial medoids .
| 221
| 6
|
157,502
|
public int getTotalClusterCount ( ) { int clusterCount = 0 ; for ( int i = 0 ; i < numclusters . length ; i ++ ) { clusterCount += numclusters [ i ] ; } return clusterCount ; }
|
Return the sum of all clusters
| 51
| 6
|
157,503
|
public int getHighestClusterCount ( ) { int maxClusters = 0 ; for ( int i = 0 ; i < numclusters . length ; i ++ ) { maxClusters = Math . max ( maxClusters , numclusters [ i ] ) ; } return maxClusters ; }
|
Returns the highest number of Clusters in the clusterings
| 64
| 11
|
157,504
|
protected static double getMinDist ( DBIDArrayIter j , DistanceQuery < ? > distQ , DBIDArrayIter mi , WritableDoubleDataStore mindist ) { double prev = mindist . doubleValue ( j ) ; if ( Double . isNaN ( prev ) ) { // NaN = unknown prev = Double . POSITIVE_INFINITY ; for ( mi . seek ( 0 ) ; mi . valid ( ) ; mi . advance ( ) ) { double d = distQ . distance ( j , mi ) ; prev = d < prev ? d : prev ; } mindist . putDouble ( j , prev ) ; } return prev ; }
|
Get the minimum distance to previous medoids .
| 139
| 9
|
157,505
|
private static void shuffle ( ArrayModifiableDBIDs ids , int ssize , int end , Random random ) { ssize = ssize < end ? ssize : end ; // Guard for choosing from tiny sets for ( int i = 1 ; i < ssize ; i ++ ) { ids . swap ( i - 1 , i + random . nextInt ( end - i ) ) ; } }
|
Partial Fisher - Yates shuffle .
| 84
| 7
|
157,506
|
public static LinearScale [ ] calcScales ( Relation < ? extends SpatialComparable > rel ) { int dim = RelationUtil . dimensionality ( rel ) ; DoubleMinMax [ ] minmax = DoubleMinMax . newArray ( dim ) ; LinearScale [ ] scales = new LinearScale [ dim ] ; // analyze data for ( DBIDIter iditer = rel . iterDBIDs ( ) ; iditer . valid ( ) ; iditer . advance ( ) ) { SpatialComparable v = rel . get ( iditer ) ; if ( v instanceof NumberVector ) { for ( int d = 0 ; d < dim ; d ++ ) { final double mi = v . getMin ( d ) ; if ( mi != mi ) { // NaN continue ; } minmax [ d ] . put ( mi ) ; } } else { for ( int d = 0 ; d < dim ; d ++ ) { final double mi = v . getMin ( d ) ; if ( mi == mi ) { // No NaN minmax [ d ] . put ( mi ) ; } final double ma = v . getMax ( d ) ; if ( ma == ma ) { // No NaN minmax [ d ] . put ( ma ) ; } } } } // generate scales for ( int d = 0 ; d < dim ; d ++ ) { scales [ d ] = new LinearScale ( minmax [ d ] . getMin ( ) , minmax [ d ] . getMax ( ) ) ; } return scales ; }
|
Compute a linear scale for each dimension .
| 323
| 9
|
157,507
|
public FrequentItemsetsResult run ( Database db , final Relation < BitVector > relation ) { // TODO: implement with resizable arrays, to not need dim. final int dim = RelationUtil . dimensionality ( relation ) ; final VectorFieldTypeInformation < BitVector > meta = RelationUtil . assumeVectorField ( relation ) ; // Compute absolute minsupport final int minsupp = getMinimumSupport ( relation . size ( ) ) ; LOG . verbose ( "Build 1-dimensional transaction lists." ) ; Duration ctime = LOG . newDuration ( STAT + "eclat.transposition.time" ) . begin ( ) ; DBIDs [ ] idx = buildIndex ( relation , dim , minsupp ) ; LOG . statistics ( ctime . end ( ) ) ; FiniteProgress prog = LOG . isVerbose ( ) ? new FiniteProgress ( "Building frequent itemsets" , idx . length , LOG ) : null ; Duration etime = LOG . newDuration ( STAT + "eclat.extraction.time" ) . begin ( ) ; final List < Itemset > solution = new ArrayList <> ( ) ; for ( int i = 0 ; i < idx . length ; i ++ ) { LOG . incrementProcessed ( prog ) ; extractItemsets ( idx , i , minsupp , solution ) ; } LOG . ensureCompleted ( prog ) ; Collections . sort ( solution ) ; LOG . statistics ( etime . end ( ) ) ; LOG . statistics ( new LongStatistic ( STAT + "frequent-itemsets" , solution . size ( ) ) ) ; return new FrequentItemsetsResult ( "Eclat" , "eclat" , solution , meta , relation . size ( ) ) ; }
|
Run the Eclat algorithm
| 376
| 6
|
157,508
|
public static TreeNode build ( List < Class < ? > > choices , String rootpkg ) { MutableTreeNode root = new PackageNode ( rootpkg , rootpkg ) ; HashMap < String , MutableTreeNode > lookup = new HashMap <> ( ) ; if ( rootpkg != null ) { lookup . put ( rootpkg , root ) ; } lookup . put ( "de.lmu.ifi.dbs.elki" , root ) ; lookup . put ( "" , root ) ; // Use the shorthand version of class names. String prefix = rootpkg != null ? rootpkg + "." : null ; Class < ? > [ ] choic = choices . toArray ( new Class < ? > [ choices . size ( ) ] ) ; Arrays . sort ( choic , ELKIServiceScanner . SORT_BY_PRIORITY ) ; for ( Class < ? > impl : choic ) { String name = impl . getName ( ) ; name = ( prefix != null && name . startsWith ( prefix ) ) ? name . substring ( prefix . length ( ) ) : name ; int plen = ( impl . getPackage ( ) != null ) ? impl . getPackage ( ) . getName ( ) . length ( ) + 1 : 0 ; MutableTreeNode c = new ClassNode ( impl . getName ( ) . substring ( plen ) , name ) ; MutableTreeNode p = null ; int l = name . lastIndexOf ( ' ' ) ; while ( p == null ) { if ( l < 0 ) { p = root ; break ; } String pname = name . substring ( 0 , l ) ; p = lookup . get ( pname ) ; if ( p != null ) { break ; } l = pname . lastIndexOf ( ' ' ) ; MutableTreeNode tmp = new PackageNode ( l >= 0 ? pname . substring ( l + 1 ) : pname , pname ) ; tmp . insert ( c , 0 ) ; c = tmp ; lookup . put ( pname , tmp ) ; name = pname ; } p . insert ( c , p . getChildCount ( ) ) ; } // Simplify tree, except for root node for ( int i = 0 ; i < root . getChildCount ( ) ; i ++ ) { MutableTreeNode c = ( MutableTreeNode ) root . getChildAt ( i ) ; MutableTreeNode c2 = simplifyTree ( c , null ) ; if ( c != c2 ) { root . remove ( i ) ; root . insert ( c2 , i ) ; } } return root ; }
|
Build the class tree for a given set of choices .
| 563
| 11
|
157,509
|
private static MutableTreeNode simplifyTree ( MutableTreeNode cur , String prefix ) { if ( cur instanceof PackageNode ) { PackageNode node = ( PackageNode ) cur ; if ( node . getChildCount ( ) == 1 ) { String newprefix = ( prefix != null ) ? prefix + "." + ( String ) node . getUserObject ( ) : ( String ) node . getUserObject ( ) ; cur = simplifyTree ( ( MutableTreeNode ) node . getChildAt ( 0 ) , newprefix ) ; } else { if ( prefix != null ) { node . setUserObject ( prefix + "." + ( String ) node . getUserObject ( ) ) ; } for ( int i = 0 ; i < node . getChildCount ( ) ; i ++ ) { MutableTreeNode c = ( MutableTreeNode ) node . getChildAt ( i ) ; MutableTreeNode c2 = simplifyTree ( c , null ) ; if ( c != c2 ) { node . remove ( i ) ; node . insert ( c2 , i ) ; } } } } else if ( cur instanceof ClassNode ) { ClassNode node = ( ClassNode ) cur ; if ( prefix != null ) { node . setUserObject ( prefix + "." + ( String ) node . getUserObject ( ) ) ; } } return cur ; }
|
Simplify the tree .
| 288
| 6
|
157,510
|
protected String formatValue ( List < Class < ? extends C > > val ) { StringBuilder buf = new StringBuilder ( 50 + val . size ( ) * 25 ) ; String pkgname = restrictionClass . getPackage ( ) . getName ( ) ; for ( Class < ? extends C > c : val ) { if ( buf . length ( ) > 0 ) { buf . append ( LIST_SEP ) ; } String name = c . getName ( ) ; boolean stripPrefix = name . length ( ) > pkgname . length ( ) && name . startsWith ( pkgname ) && name . charAt ( pkgname . length ( ) ) == ' ' ; buf . append ( name , stripPrefix ? pkgname . length ( ) + 1 : 0 , name . length ( ) ) ; } return buf . toString ( ) ; }
|
Format as string .
| 184
| 4
|
157,511
|
protected void publish ( final LogRecord record ) { if ( record instanceof ProgressLogRecord ) { ProgressLogRecord preg = ( ProgressLogRecord ) record ; Progress prog = preg . getProgress ( ) ; JProgressBar pbar = getOrCreateProgressBar ( prog ) ; updateProgressBar ( prog , pbar ) ; if ( prog . isComplete ( ) ) { removeProgressBar ( prog , pbar ) ; } if ( prog . isComplete ( ) || prog instanceof StepProgress ) { publishTextRecord ( record ) ; } } else { publishTextRecord ( record ) ; } }
|
Publish a logging record .
| 126
| 6
|
157,512
|
private void publishTextRecord ( final LogRecord record ) { try { logpane . publish ( record ) ; } catch ( Exception e ) { throw new RuntimeException ( "Error writing a log-like message." , e ) ; } }
|
Publish a text record to the pane
| 50
| 8
|
157,513
|
private JProgressBar getOrCreateProgressBar ( Progress prog ) { JProgressBar pbar = pbarmap . get ( prog ) ; // Add a new progress bar. if ( pbar == null ) { synchronized ( pbarmap ) { if ( prog instanceof FiniteProgress ) { pbar = new JProgressBar ( 0 , ( ( FiniteProgress ) prog ) . getTotal ( ) ) ; pbar . setStringPainted ( true ) ; } else if ( prog instanceof IndefiniteProgress ) { pbar = new JProgressBar ( ) ; pbar . setIndeterminate ( true ) ; pbar . setStringPainted ( true ) ; } else if ( prog instanceof MutableProgress ) { pbar = new JProgressBar ( 0 , ( ( MutableProgress ) prog ) . getTotal ( ) ) ; pbar . setStringPainted ( true ) ; } else { throw new RuntimeException ( "Unsupported progress record" ) ; } pbarmap . put ( prog , pbar ) ; final JProgressBar pbar2 = pbar ; // Make final SwingUtilities . invokeLater ( ( ) -> addProgressBar ( pbar2 ) ) ; } } return pbar ; }
|
Get an existing or create a new progress bar .
| 263
| 10
|
157,514
|
private void updateProgressBar ( Progress prog , JProgressBar pbar ) { if ( prog instanceof FiniteProgress ) { pbar . setValue ( ( ( FiniteProgress ) prog ) . getProcessed ( ) ) ; pbar . setString ( ( ( FiniteProgress ) prog ) . toString ( ) ) ; } else if ( prog instanceof IndefiniteProgress ) { pbar . setValue ( ( ( IndefiniteProgress ) prog ) . getProcessed ( ) ) ; pbar . setString ( ( ( IndefiniteProgress ) prog ) . toString ( ) ) ; } else if ( prog instanceof MutableProgress ) { pbar . setValue ( ( ( MutableProgress ) prog ) . getProcessed ( ) ) ; pbar . setMaximum ( ( ( MutableProgress ) prog ) . getProcessed ( ) ) ; pbar . setString ( ( ( MutableProgress ) prog ) . toString ( ) ) ; } else { throw new RuntimeException ( "Unsupported progress record" ) ; } }
|
Update a progress bar
| 222
| 4
|
157,515
|
private void removeProgressBar ( Progress prog , JProgressBar pbar ) { synchronized ( pbarmap ) { pbarmap . remove ( prog ) ; SwingUtilities . invokeLater ( ( ) -> removeProgressBar ( pbar ) ) ; } }
|
Remove a progress bar
| 55
| 4
|
157,516
|
public void clear ( ) { logpane . clear ( ) ; synchronized ( pbarmap ) { for ( Entry < Progress , JProgressBar > ent : pbarmap . entrySet ( ) ) { super . remove ( ent . getValue ( ) ) ; pbarmap . remove ( ent . getKey ( ) ) ; } } }
|
Clear the current contents .
| 75
| 5
|
157,517
|
@ Override public void componentResized ( ComponentEvent e ) { if ( e . getComponent ( ) == component ) { double newRatio = getCurrentRatio ( ) ; if ( Math . abs ( newRatio - activeRatio ) > threshold ) { activeRatio = newRatio ; executeResize ( newRatio ) ; } } }
|
React to a component resize event .
| 76
| 8
|
157,518
|
@ Override public String format ( LogRecord record ) { String msg = record . getMessage ( ) ; if ( msg . length ( ) > 0 ) { if ( record instanceof ProgressLogRecord ) { return msg ; } if ( msg . endsWith ( OutputStreamLogger . NEWLINE ) ) { return msg ; } } return msg + OutputStreamLogger . NEWLINE ; }
|
Retrieves the message as it is set in the given LogRecord .
| 81
| 15
|
157,519
|
protected double [ ] alignLabels ( List < ClassLabel > l1 , double [ ] d1 , Collection < ClassLabel > l2 ) { assert ( l1 . size ( ) == d1 . length ) ; if ( l1 == l2 ) { return d1 . clone ( ) ; } double [ ] d2 = new double [ l2 . size ( ) ] ; Iterator < ClassLabel > i2 = l2 . iterator ( ) ; for ( int i = 0 ; i2 . hasNext ( ) ; ) { ClassLabel l = i2 . next ( ) ; int idx = l1 . indexOf ( l ) ; if ( idx < 0 && getLogger ( ) . isDebuggingFiner ( ) ) { getLogger ( ) . debugFiner ( "Label not found: " + l ) ; } d2 [ i ] = ( idx >= 0 ) ? d1 [ idx ] : 0. ; // Default to 0 for unknown labels! } return d2 ; }
|
Align the labels for a label query .
| 218
| 9
|
157,520
|
public void setInitialClusters ( List < ? extends Cluster < ? extends MeanModel > > initialMeans ) { double [ ] [ ] vecs = new double [ initialMeans . size ( ) ] [ ] ; for ( int i = 0 ; i < vecs . length ; i ++ ) { vecs [ i ] = initialMeans . get ( i ) . getModel ( ) . getMean ( ) ; } this . initialMeans = vecs ; }
|
Set the initial means .
| 101
| 5
|
157,521
|
public static void exception ( String message , Throwable e ) { if ( message == null && e != null ) { message = e . getMessage ( ) ; } logExpensive ( Level . SEVERE , message , e ) ; }
|
Static version to log a severe exception .
| 50
| 8
|
157,522
|
public static void warning ( String message , Throwable e ) { if ( message == null && e != null ) { message = e . getMessage ( ) ; } logExpensive ( Level . WARNING , message , e ) ; }
|
Static version to log a warning message .
| 48
| 8
|
157,523
|
public static void message ( String message , Throwable e ) { if ( message == null && e != null ) { message = e . getMessage ( ) ; } logExpensive ( Level . INFO , message , e ) ; }
|
Static version to log an info message .
| 48
| 8
|
157,524
|
private static final String [ ] inferCaller ( ) { StackTraceElement [ ] stack = ( new Throwable ( ) ) . getStackTrace ( ) ; int ix = 0 ; while ( ix < stack . length ) { StackTraceElement frame = stack [ ix ] ; if ( ! frame . getClassName ( ) . equals ( LoggingUtil . class . getCanonicalName ( ) ) ) { return new String [ ] { frame . getClassName ( ) , frame . getMethodName ( ) } ; } ix ++ ; } return null ; }
|
Infer which class has called the logging helper .
| 126
| 10
|
157,525
|
public static long binomialCoefficient ( long n , long k ) { final long m = Math . max ( k , n - k ) ; double temp = 1 ; for ( long i = n , j = 1 ; i > m ; i -- , j ++ ) { temp = temp * i / j ; } return ( long ) temp ; }
|
Binomial coefficient also known as n choose k .
| 74
| 11
|
157,526
|
public static double approximateBinomialCoefficient ( int n , int k ) { final int m = max ( k , n - k ) ; long temp = 1 ; for ( int i = n , j = 1 ; i > m ; i -- , j ++ ) { temp = temp * i / j ; } return temp ; }
|
Binomial coefficient also known as n choose k .
| 71
| 14
|
157,527
|
public static int [ ] sequence ( int start , int end ) { if ( start >= end ) { return EMPTY_INTS ; } int [ ] ret = new int [ end - start ] ; for ( int j = 0 ; start < end ; start ++ , j ++ ) { ret [ j ] = start ; } return ret ; }
|
Generate an array of integers .
| 72
| 7
|
157,528
|
public KNNDistanceOrderResult run ( Database database , Relation < O > relation ) { final DistanceQuery < O > distanceQuery = database . getDistanceQuery ( relation , getDistanceFunction ( ) ) ; final KNNQuery < O > knnQuery = database . getKNNQuery ( distanceQuery , k + 1 ) ; final int size = ( int ) ( ( sample <= 1. ) ? Math . ceil ( relation . size ( ) * sample ) : sample ) ; DBIDs sample = DBIDUtil . randomSample ( relation . getDBIDs ( ) , size , rnd ) ; FiniteProgress prog = LOG . isVerbose ( ) ? new FiniteProgress ( "Sampling kNN distances" , size , LOG ) : null ; double [ ] knnDistances = new double [ size ] ; int i = 0 ; for ( DBIDIter iditer = sample . iter ( ) ; iditer . valid ( ) ; iditer . advance ( ) , i ++ ) { final KNNList neighbors = knnQuery . getKNNForDBID ( iditer , k + 1 ) ; knnDistances [ i ] = neighbors . getKNNDistance ( ) ; LOG . incrementProcessed ( prog ) ; } LOG . ensureCompleted ( prog ) ; return new KNNDistanceOrderResult ( knnDistances , k ) ; }
|
Provides an order of the kNN - distances for all objects within the specified database .
| 289
| 18
|
157,529
|
public DataStore < M > preprocess ( Class < ? super M > modelcls , Relation < O > relation , RangeQuery < O > query ) { WritableDataStore < M > storage = DataStoreUtil . makeStorage ( relation . getDBIDs ( ) , DataStoreFactory . HINT_HOT | DataStoreFactory . HINT_TEMP , modelcls ) ; Duration time = getLogger ( ) . newDuration ( this . getClass ( ) . getName ( ) + ".preprocessing-time" ) . begin ( ) ; FiniteProgress progress = getLogger ( ) . isVerbose ( ) ? new FiniteProgress ( this . getClass ( ) . getName ( ) , relation . size ( ) , getLogger ( ) ) : null ; for ( DBIDIter iditer = relation . iterDBIDs ( ) ; iditer . valid ( ) ; iditer . advance ( ) ) { DoubleDBIDList neighbors = query . getRangeForDBID ( iditer , epsilon ) ; storage . put ( iditer , computeLocalModel ( iditer , neighbors , relation ) ) ; getLogger ( ) . incrementProcessed ( progress ) ; } getLogger ( ) . ensureCompleted ( progress ) ; getLogger ( ) . statistics ( time . end ( ) ) ; return storage ; }
|
Perform the preprocessing step .
| 288
| 7
|
157,530
|
@ Override public < NV extends NumberVector > NV projectScaledToDataSpace ( double [ ] v , NumberVector . Factory < NV > factory ) { final int dim = v . length ; double [ ] vec = new double [ dim ] ; for ( int d = 0 ; d < dim ; d ++ ) { vec [ d ] = scales [ d ] . getUnscaled ( v [ d ] ) ; } return factory . newNumberVector ( vec ) ; }
|
Project a vector from scaled space to data space .
| 100
| 10
|
157,531
|
@ Override public < NV extends NumberVector > NV projectRenderToDataSpace ( double [ ] v , NumberVector . Factory < NV > prototype ) { final int dim = v . length ; double [ ] vec = projectRenderToScaled ( v ) ; // Not calling {@link #projectScaledToDataSpace} to avoid extra copy of // vector. for ( int d = 0 ; d < dim ; d ++ ) { vec [ d ] = scales [ d ] . getUnscaled ( vec [ d ] ) ; } return prototype . newNumberVector ( vec ) ; }
|
Project a vector from rendering space to data space .
| 124
| 10
|
157,532
|
@ Override public < NV extends NumberVector > NV projectRelativeScaledToDataSpace ( double [ ] v , NumberVector . Factory < NV > prototype ) { final int dim = v . length ; double [ ] vec = new double [ dim ] ; for ( int d = 0 ; d < dim ; d ++ ) { vec [ d ] = scales [ d ] . getRelativeUnscaled ( v [ d ] ) ; } return prototype . newNumberVector ( vec ) ; }
|
Project a relative vector from scaled space to data space .
| 104
| 11
|
157,533
|
public PointerHierarchyRepresentationResult complete ( ) { if ( csize != null ) { csize . destroy ( ) ; csize = null ; } if ( mergecount != ids . size ( ) - 1 ) { LOG . warning ( mergecount + " merges were added to the hierarchy, expected " + ( ids . size ( ) - 1 ) ) ; } if ( prototypes != null ) { return new PointerPrototypeHierarchyRepresentationResult ( ids , parent , parentDistance , isSquared , order , prototypes ) ; } return new PointerHierarchyRepresentationResult ( ids , parent , parentDistance , isSquared , order ) ; }
|
Finalize the result .
| 146
| 5
|
157,534
|
public int getSize ( DBIDRef id ) { if ( csize == null ) { csize = DataStoreUtil . makeIntegerStorage ( ids , DataStoreFactory . HINT_HOT | DataStoreFactory . HINT_TEMP , 1 ) ; } return csize . intValue ( id ) ; }
|
Get the cluster size of the current object .
| 69
| 9
|
157,535
|
public void setSize ( DBIDRef id , int size ) { if ( csize == null ) { csize = DataStoreUtil . makeIntegerStorage ( ids , DataStoreFactory . HINT_HOT | DataStoreFactory . HINT_TEMP , 1 ) ; } csize . putInt ( id , size ) ; }
|
Set the cluster size of an object .
| 73
| 8
|
157,536
|
public OutlierResult run ( Database database , Relation < N > spatial , Relation < O > relation ) { final NeighborSetPredicate npred = getNeighborSetPredicateFactory ( ) . instantiate ( database , spatial ) ; DistanceQuery < O > distFunc = getNonSpatialDistanceFunction ( ) . instantiate ( relation ) ; WritableDoubleDataStore lrds = DataStoreUtil . makeDoubleStorage ( relation . getDBIDs ( ) , DataStoreFactory . HINT_TEMP | DataStoreFactory . HINT_HOT ) ; WritableDoubleDataStore lofs = DataStoreUtil . makeDoubleStorage ( relation . getDBIDs ( ) , DataStoreFactory . HINT_STATIC ) ; DoubleMinMax lofminmax = new DoubleMinMax ( ) ; // Compute densities for ( DBIDIter iditer = relation . iterDBIDs ( ) ; iditer . valid ( ) ; iditer . advance ( ) ) { DBIDs neighbors = npred . getNeighborDBIDs ( iditer ) ; double avg = 0 ; for ( DBIDIter iter = neighbors . iter ( ) ; iter . valid ( ) ; iter . advance ( ) ) { avg += distFunc . distance ( iditer , iter ) ; } double lrd = 1 / ( avg / neighbors . size ( ) ) ; if ( Double . isNaN ( lrd ) ) { lrd = 0 ; } lrds . putDouble ( iditer , lrd ) ; } // Compute density quotients for ( DBIDIter iditer = relation . iterDBIDs ( ) ; iditer . valid ( ) ; iditer . advance ( ) ) { DBIDs neighbors = npred . getNeighborDBIDs ( iditer ) ; double avg = 0 ; for ( DBIDIter iter = neighbors . iter ( ) ; iter . valid ( ) ; iter . advance ( ) ) { avg += lrds . doubleValue ( iter ) ; } final double lrd = ( avg / neighbors . size ( ) ) / lrds . doubleValue ( iditer ) ; if ( ! Double . isNaN ( lrd ) ) { lofs . putDouble ( iditer , lrd ) ; lofminmax . put ( lrd ) ; } else { lofs . putDouble ( iditer , 0.0 ) ; } } // Build result representation. DoubleRelation scoreResult = new MaterializedDoubleRelation ( "Spatial Outlier Factor" , "sof-outlier" , lofs , relation . getDBIDs ( ) ) ; OutlierScoreMeta scoreMeta = new QuotientOutlierScoreMeta ( lofminmax . getMin ( ) , lofminmax . getMax ( ) , 0.0 , Double . 
POSITIVE_INFINITY , 1.0 ) ; OutlierResult or = new OutlierResult ( scoreMeta , scoreResult ) ; or . addChildResult ( npred ) ; return or ; }
|
The main run method
| 638
| 4
|
157,537
|
public void insertHandler ( Class < ? > restrictionClass , H handler ) { // note that the handlers list is kept in a list that is traversed in // backwards order. handlers . add ( new Pair < Class < ? > , H > ( restrictionClass , handler ) ) ; }
|
Insert a handler to the beginning of the stack .
| 59
| 10
|
157,538
|
public H getHandler ( Object o ) { if ( o == null ) { return null ; } // note that we start at the end of the list. ListIterator < Pair < Class < ? > , H > > iter = handlers . listIterator ( handlers . size ( ) ) ; while ( iter . hasPrevious ( ) ) { Pair < Class < ? > , H > pair = iter . previous ( ) ; try { // if we can cast to the restriction class, use the given handler. pair . getFirst ( ) . cast ( o ) ; return pair . getSecond ( ) ; } catch ( ClassCastException e ) { // do nothing, but try previous in list } } return null ; }
|
Find a matching handler for the given object
| 147
| 8
|
157,539
|
public synchronized static Logging getLogger ( final String name ) { Logging logger = loggers . get ( name ) ; if ( logger == null ) { logger = new Logging ( Logger . getLogger ( name ) ) ; loggers . put ( name , logger ) ; } return logger ; }
|
Retrieve logging utility for a particular class .
| 65
| 9
|
157,540
|
public void log ( java . util . logging . Level level , CharSequence message ) { LogRecord rec = new ELKILogRecord ( level , message ) ; logger . log ( rec ) ; }
|
Log a log message at the given level .
| 43
| 9
|
157,541
|
public void error ( CharSequence message , Throwable e ) { log ( Level . SEVERE , message , e ) ; }
|
Log a message at the severe level .
| 28
| 8
|
157,542
|
public void warning ( CharSequence message , Throwable e ) { log ( Level . WARNING , message , e ) ; }
|
Log a message at the warning level .
| 26
| 8
|
157,543
|
public void statistics ( CharSequence message , Throwable e ) { log ( Level . STATISTICS , message , e ) ; }
|
Log a message at the STATISTICS level .
| 28
| 10
|
157,544
|
public void veryverbose ( CharSequence message , Throwable e ) { log ( Level . VERYVERBOSE , message , e ) ; }
|
Log a message at the veryverbose level .
| 31
| 10
|
157,545
|
public void exception ( CharSequence message , Throwable e ) { log ( Level . SEVERE , message , e ) ; }
|
Log a message with exception at the severe level .
| 28
| 10
|
157,546
|
public void exception ( Throwable e ) { final String msg = e . getMessage ( ) ; log ( Level . SEVERE , msg != null ? msg : "An exception occurred." , e ) ; }
|
Log an exception at the severe level .
| 44
| 8
|
157,547
|
public void statistics ( Statistic stats ) { if ( stats != null ) { log ( Level . STATISTICS , stats . getKey ( ) + ": " + stats . formatValue ( ) ) ; } }
|
Log a statistics object .
| 45
| 5
|
157,548
|
public MultipleObjectsBundle generate ( ) { // we actually need some clusters. if ( generators . isEmpty ( ) ) { throw new AbortException ( "No clusters specified." ) ; } // Assert that cluster dimensions agree. final int dim = generators . get ( 0 ) . getDim ( ) ; for ( GeneratorInterface c : generators ) { if ( c . getDim ( ) != dim ) { throw new AbortException ( "Cluster dimensions do not agree." ) ; } } // Prepare result bundle MultipleObjectsBundle bundle = new MultipleObjectsBundle ( ) ; VectorFieldTypeInformation < DoubleVector > type = new VectorFieldTypeInformation <> ( DoubleVector . FACTORY , dim ) ; bundle . appendColumn ( type , new ArrayList <> ( ) ) ; bundle . appendColumn ( TypeUtil . CLASSLABEL , new ArrayList <> ( ) ) ; bundle . appendColumn ( Model . TYPE , new ArrayList < Model > ( ) ) ; // generate clusters ClassLabel [ ] labels = new ClassLabel [ generators . size ( ) ] ; Model [ ] models = new Model [ generators . size ( ) ] ; initLabelsAndModels ( generators , labels , models , relabelClusters ) ; final AssignPoint assignment ; if ( ! testAgainstModel ) { assignment = new AssignPoint ( ) ; } else if ( relabelClusters == null ) { assignment = new TestModel ( ) ; } else if ( ! relabelDistance ) { assignment = new AssignLabelsByDensity ( labels ) ; } else { assignment = new AssignLabelsByDistance ( labels ) ; } for ( int i = 0 ; i < labels . length ; i ++ ) { final GeneratorInterface curclus = generators . get ( i ) ; assignment . newCluster ( i , curclus ) ; // Only dynamic generators allow rejection / model testing: GeneratorInterfaceDynamic cursclus = ( curclus instanceof GeneratorInterfaceDynamic ) ? ( GeneratorInterfaceDynamic ) curclus : null ; int kept = 0 ; while ( kept < curclus . getSize ( ) ) { // generate the "missing" number of points List < double [ ] > newp = curclus . generate ( curclus . getSize ( ) - kept ) ; for ( double [ ] p : newp ) { int bestc = assignment . getAssignment ( i , p ) ; if ( bestc < 0 ) { cursclus . 
incrementDiscarded ( ) ; continue ; } bundle . appendSimple ( DoubleVector . wrap ( p ) , labels [ bestc ] , models [ bestc ] ) ; ++ kept ; } } } return bundle ; }
|
Main loop to generate data set .
| 561
| 7
|
157,549
|
private void initLabelsAndModels ( ArrayList < GeneratorInterface > generators , ClassLabel [ ] labels , Model [ ] models , Pattern reassign ) { int existingclusters = 0 ; if ( reassign != null ) { for ( int i = 0 ; i < labels . length ; i ++ ) { final GeneratorInterface curclus = generators . get ( i ) ; if ( ! reassign . matcher ( curclus . getName ( ) ) . find ( ) ) { labels [ i ] = new SimpleClassLabel ( curclus . getName ( ) ) ; models [ i ] = curclus . makeModel ( ) ; ++ existingclusters ; } } if ( existingclusters == 0 ) { LOG . warning ( "All clusters matched the 'reassign' pattern. Ignoring." ) ; } if ( existingclusters == 1 ) { // No need to test - only one possible answer. for ( int i = 0 ; i < labels . length ; i ++ ) { if ( labels [ i ] != null ) { Arrays . fill ( labels , labels [ i ] ) ; Arrays . fill ( models , models [ i ] ) ; break ; } } } if ( existingclusters == labels . length ) { LOG . warning ( "No clusters matched the 'reassign' pattern." ) ; } } // Default case, every cluster has a label and model. if ( existingclusters == 0 ) { for ( int i = 0 ; i < labels . length ; i ++ ) { final GeneratorInterface curclus = generators . get ( i ) ; labels [ i ] = new SimpleClassLabel ( curclus . getName ( ) ) ; models [ i ] = curclus . makeModel ( ) ; } } }
|
Initialize cluster labels and models .
| 366
| 7
|
157,550
|
public static < V extends FeatureVector < ? > > VectorFieldTypeInformation < V > assumeVectorField ( Relation < V > relation ) { try { return ( ( VectorFieldTypeInformation < V > ) relation . getDataTypeInformation ( ) ) ; } catch ( Exception e ) { throw new UnsupportedOperationException ( "Expected a vector field, got type information: " + relation . getDataTypeInformation ( ) . toString ( ) , e ) ; } }
|
Get the vector field type information from a relation .
| 99
| 10
|
157,551
|
public static < V extends NumberVector > NumberVector . Factory < V > getNumberVectorFactory ( Relation < V > relation ) { final VectorFieldTypeInformation < V > type = assumeVectorField ( relation ) ; @ SuppressWarnings ( "unchecked" ) final NumberVector . Factory < V > factory = ( NumberVector . Factory < V > ) type . getFactory ( ) ; return factory ; }
|
Get the number vector factory of a database relation .
| 87
| 10
|
157,552
|
public static int dimensionality ( Relation < ? extends SpatialComparable > relation ) { final SimpleTypeInformation < ? extends SpatialComparable > type = relation . getDataTypeInformation ( ) ; if ( type instanceof FieldTypeInformation ) { return ( ( FieldTypeInformation ) type ) . getDimensionality ( ) ; } return - 1 ; }
|
Get the dimensionality of a database relation .
| 75
| 9
|
157,553
|
public static double [ ] [ ] computeMinMax ( Relation < ? extends NumberVector > relation ) { int dim = RelationUtil . dimensionality ( relation ) ; double [ ] mins = new double [ dim ] , maxs = new double [ dim ] ; for ( int i = 0 ; i < dim ; i ++ ) { mins [ i ] = Double . MAX_VALUE ; maxs [ i ] = - Double . MAX_VALUE ; } for ( DBIDIter iditer = relation . iterDBIDs ( ) ; iditer . valid ( ) ; iditer . advance ( ) ) { final NumberVector o = relation . get ( iditer ) ; for ( int d = 0 ; d < dim ; d ++ ) { final double v = o . doubleValue ( d ) ; mins [ d ] = ( v < mins [ d ] ) ? v : mins [ d ] ; maxs [ d ] = ( v > maxs [ d ] ) ? v : maxs [ d ] ; } } return new double [ ] [ ] { mins , maxs } ; }
|
Determines the minimum and maximum values in each dimension of all objects stored in the given database .
| 229
| 20
|
157,554
|
public static < V extends SpatialComparable > String getColumnLabel ( Relation < ? extends V > rel , int col ) { SimpleTypeInformation < ? extends V > type = rel . getDataTypeInformation ( ) ; if ( ! ( type instanceof VectorFieldTypeInformation ) ) { return "Column " + col ; } final VectorFieldTypeInformation < ? > vtype = ( VectorFieldTypeInformation < ? > ) type ; String lbl = vtype . getLabel ( col ) ; return ( lbl != null ) ? lbl : ( "Column " + col ) ; }
|
Get the column name or produce a generic label Column XY .
| 124
| 12
|
157,555
|
@ SuppressWarnings ( "unchecked" ) public static < V extends NumberVector , T extends NumberVector > Relation < V > relationUglyVectorCast ( Relation < T > database ) { return ( Relation < V > ) database ; }
|
An ugly vector type cast unavoidable in some situations due to Generics .
| 55
| 14
|
157,556
|
public KNNList get ( DBIDRef id ) { if ( storage == null ) { if ( getLogger ( ) . isDebugging ( ) ) { getLogger ( ) . debug ( "Running kNN preprocessor: " + this . getClass ( ) ) ; } preprocess ( ) ; } return storage . get ( id ) ; }
|
Get the k nearest neighbors .
| 75
| 6
|
157,557
|
public Clustering < DimensionModel > run ( Database database , Relation < V > relation ) { COPACNeighborPredicate . Instance npred = new COPACNeighborPredicate < V > ( settings ) . instantiate ( database , relation ) ; CorePredicate . Instance < DBIDs > cpred = new MinPtsCorePredicate ( settings . minpts ) . instantiate ( database ) ; Clustering < Model > dclusters = new GeneralizedDBSCAN . Instance <> ( npred , cpred , false ) . run ( ) ; // Re-wrap the detected clusters for COPAC: Clustering < DimensionModel > result = new Clustering <> ( "COPAC clustering" , "copac-clustering" ) ; // Generalized DBSCAN clusterings will be flat. for ( It < Cluster < Model > > iter = dclusters . iterToplevelClusters ( ) ; iter . valid ( ) ; iter . advance ( ) ) { Cluster < Model > clus = iter . get ( ) ; if ( clus . size ( ) > 0 ) { int dim = npred . dimensionality ( clus . getIDs ( ) . iter ( ) ) ; DimensionModel model = new DimensionModel ( dim ) ; result . addToplevelCluster ( new Cluster <> ( clus . getIDs ( ) , model ) ) ; } } return result ; }
|
Run the COPAC algorithm .
| 308
| 6
|
157,558
|
public int getUnpairedClusteringIndex ( ) { for ( int index = 0 ; index < clusterIds . length ; index ++ ) { if ( clusterIds [ index ] == UNCLUSTERED ) { return index ; } } return - 1 ; }
|
Returns the index of the first clustering having an unpaired cluster or - 1 no unpaired cluster exists .
| 58
| 22
|
157,559
|
protected static boolean isNull ( Object val ) { return ( val == null ) || STRING_NULL . equals ( val ) || DOUBLE_NULL . equals ( val ) || INTEGER_NULL . equals ( val ) ; }
|
Test a value for null .
| 50
| 6
|
157,560
|
private static String formatCause ( Throwable cause ) { if ( cause == null ) { return "" ; } String message = cause . getMessage ( ) ; return "\n" + ( message != null ? message : cause . toString ( ) ) ; }
|
Format the error cause .
| 53
| 5
|
157,561
|
public TextWriterWriterInterface < ? > getWriterFor ( Object o ) { if ( o == null ) { return null ; } TextWriterWriterInterface < ? > writer = writers . getHandler ( o ) ; if ( writer != null ) { return writer ; } try { final Class < ? > decl = o . getClass ( ) . getMethod ( "toString" ) . getDeclaringClass ( ) ; if ( decl == Object . class ) { return null ; // TODO: cache this, too } writers . insertHandler ( decl , fallbackwriter ) ; return fallbackwriter ; } catch ( NoSuchMethodException | SecurityException e ) { return null ; } }
|
Retrieve an appropriate writer from the handler list .
| 143
| 10
|
157,562
|
protected Cluster < BiclusterModel > defineBicluster ( BitSet rows , BitSet cols ) { ArrayDBIDs rowIDs = rowsBitsetToIDs ( rows ) ; int [ ] colIDs = colsBitsetToIDs ( cols ) ; return new Cluster <> ( rowIDs , new BiclusterModel ( colIDs ) ) ; }
|
Defines a Bicluster as given by the included rows and columns .
| 78
| 15
|
157,563
|
public double getSampleSkewness ( ) { if ( ! ( m2 > 0 ) || ! ( n > 2 ) ) { throw new ArithmeticException ( "Skewness not defined when variance is 0 or weight <= 2.0!" ) ; } return ( m3 * n / ( n - 1 ) / ( n - 2 ) ) / FastMath . pow ( getSampleVariance ( ) , 1.5 ) ; }
|
Get the skewness using sample variance .
| 94
| 9
|
157,564
|
public static double cosineOrHaversineDeg ( double lat1 , double lon1 , double lat2 , double lon2 ) { return cosineOrHaversineRad ( deg2rad ( lat1 ) , deg2rad ( lon1 ) , deg2rad ( lat2 ) , deg2rad ( lon2 ) ) ; }
|
Use cosine or haversine dynamically .
| 76
| 9
|
157,565
|
public static double crossTrackDistanceRad ( double lat1 , double lon1 , double lat2 , double lon2 , double latQ , double lonQ , double dist1Q ) { final double dlon12 = lon2 - lon1 ; final double dlon1Q = lonQ - lon1 ; // Compute trigonometric functions only once. final DoubleWrapper tmp = new DoubleWrapper ( ) ; // To return cosine final double slat1 = sinAndCos ( lat1 , tmp ) , clat1 = tmp . value ; final double slatQ = sinAndCos ( latQ , tmp ) , clatQ = tmp . value ; final double slat2 = sinAndCos ( lat2 , tmp ) , clat2 = tmp . value ; // / Compute the course // y = sin(dlon) * cos(lat2) final double sdlon12 = sinAndCos ( dlon12 , tmp ) , cdlon12 = tmp . value ; final double sdlon1Q = sinAndCos ( dlon1Q , tmp ) , cdlon1Q = tmp . value ; final double yE = sdlon12 * clat2 ; final double yQ = sdlon1Q * clatQ ; // x = cos(lat1) * sin(lat2) - sin(lat1) * cos(lat2) * cos(dlon) final double xE = clat1 * slat2 - slat1 * clat2 * cdlon12 ; final double xQ = clat1 * slatQ - slat1 * clatQ * cdlon1Q ; final double crs12 = atan2 ( yE , xE ) ; final double crs1Q = atan2 ( yQ , xQ ) ; // / Calculate cross-track distance return asin ( sin ( dist1Q ) * sin ( crs1Q - crs12 ) ) ; }
|
Compute the cross - track distance .
| 421
| 8
|
157,566
|
public static double alongTrackDistanceRad ( double lat1 , double lon1 , double lat2 , double lon2 , double latQ , double lonQ , double dist1Q , double ctd ) { // FIXME: optimize the sign computation! int sign = Math . abs ( bearingRad ( lat1 , lon1 , lat2 , lon2 ) - bearingRad ( lat1 , lon1 , latQ , lonQ ) ) < HALFPI ? + 1 : - 1 ; return sign * acos ( cos ( dist1Q ) / cos ( ctd ) ) ; // TODO: for short distances, use this instead? // asin(sqrt( (sin(dist_1Q))^2 - (sin(XTD))^2 )/cos(XTD)) }
|
The along track distance is the distance from S to Q along the track S to E .
| 174
| 18
|
157,567
|
private static double [ ] reversed ( double [ ] a ) { // TODO: there doesn't appear to be a nicer version in Java, unfortunately. Arrays . sort ( a ) ; for ( int i = 0 , j = a . length - 1 ; i < j ; i ++ , j -- ) { double tmp = a [ i ] ; a [ i ] = a [ j ] ; a [ j ] = tmp ; } return a ; }
|
Sort an array of doubles in descending order .
| 95
| 9
|
157,568
|
private double computeExplainedVariance ( double [ ] eigenValues , int filteredEigenPairs ) { double strongsum = 0. , weaksum = 0. ; for ( int i = 0 ; i < filteredEigenPairs ; i ++ ) { strongsum += eigenValues [ i ] ; } for ( int i = filteredEigenPairs ; i < eigenValues . length ; i ++ ) { weaksum += eigenValues [ i ] ; } return strongsum / ( strongsum + weaksum ) ; }
|
Compute the explained variance for a filtered EigenPairs .
| 116
| 13
|
157,569
|
private void assertSortedByDistance ( DoubleDBIDList results ) { // TODO: sort results instead? double dist = - 1.0 ; boolean sorted = true ; for ( DoubleDBIDListIter it = results . iter ( ) ; it . valid ( ) ; it . advance ( ) ) { double qr = it . doubleValue ( ) ; if ( qr < dist ) { sorted = false ; } dist = qr ; } if ( ! sorted ) { try { ModifiableDoubleDBIDList . class . cast ( results ) . sort ( ) ; } catch ( ClassCastException | UnsupportedOperationException e ) { LoggingUtil . warning ( "WARNING: results not sorted by distance!" , e ) ; } } }
|
Ensure that the results are sorted by distance .
| 157
| 10
|
157,570
|
public static String prefixParameterToMessage ( Parameter < ? > p , String message ) { return new StringBuilder ( 100 + message . length ( ) ) // . append ( p instanceof Flag ? "Flag '" : "Parameter '" ) // . append ( p . getOptionID ( ) . getName ( ) ) // . append ( "' " ) . append ( message ) . toString ( ) ; }
|
Prefix parameter information to error message .
| 87
| 8
|
157,571
|
public static String prefixParametersToMessage ( Parameter < ? > p , String mid , Parameter < ? > p2 , String message ) { return new StringBuilder ( 200 + mid . length ( ) + message . length ( ) ) // . append ( p instanceof Flag ? "Flag '" : "Parameter '" ) // . append ( p . getOptionID ( ) . getName ( ) ) // . append ( "' " ) . append ( mid ) // . append ( p instanceof Flag ? " Flag '" : " Parameter '" ) // . append ( p . getOptionID ( ) . getName ( ) ) // . append ( message . length ( ) > 0 ? "' " : "'." ) . append ( message ) . toString ( ) ; }
|
Prefix parameters to error message .
| 164
| 7
|
157,572
|
@ Override protected int computeHeight ( ) { N node = getRoot ( ) ; int height = 1 ; // compute height while ( ! node . isLeaf ( ) && node . getNumEntries ( ) != 0 ) { E entry = node . getEntry ( 0 ) ; node = getNode ( entry ) ; height ++ ; } return height ; }
|
Computes the height of this RTree . Is called by the constructor . and should be overwritten by subclasses if necessary .
| 76
| 26
|
157,573
|
private List < E > createBulkDirectoryNodes ( List < E > nodes ) { int minEntries = dirMinimum ; int maxEntries = dirCapacity - 1 ; ArrayList < E > result = new ArrayList <> ( ) ; List < List < E > > partitions = settings . bulkSplitter . partition ( nodes , minEntries , maxEntries ) ; for ( List < E > partition : partitions ) { // create node N dirNode = createNewDirectoryNode ( ) ; // insert nodes for ( E o : partition ) { dirNode . addDirectoryEntry ( o ) ; } // write to file writeNode ( dirNode ) ; result . add ( createNewDirectoryEntry ( dirNode ) ) ; if ( getLogger ( ) . isDebuggingFiner ( ) ) { getLogger ( ) . debugFiner ( "Directory page no: " + dirNode . getPageID ( ) ) ; } } return result ; }
|
Creates and returns the directory nodes for bulk load .
| 203
| 11
|
157,574
|
private N createRoot ( N root , List < E > objects ) { // insert data for ( E entry : objects ) { if ( entry instanceof LeafEntry ) { root . addLeafEntry ( entry ) ; } else { root . addDirectoryEntry ( entry ) ; } } // set root mbr ( ( SpatialDirectoryEntry ) getRootEntry ( ) ) . setMBR ( root . computeMBR ( ) ) ; // write to file writeNode ( root ) ; if ( getLogger ( ) . isDebuggingFiner ( ) ) { StringBuilder msg = new StringBuilder ( ) ; msg . append ( "pageNo " ) . append ( root . getPageID ( ) ) ; getLogger ( ) . debugFiner ( msg . toString ( ) ) ; } return root ; }
|
Returns a root node for bulk load . If the objects are data objects a leaf node will be returned if the objects are nodes a directory node will be returned .
| 172
| 32
|
157,575
|
private int tailingNonNewline ( char [ ] cbuf , int off , int len ) { for ( int cnt = 0 ; cnt < len ; cnt ++ ) { final int pos = off + ( len - 1 ) - cnt ; if ( cbuf [ pos ] == UNIX_NEWLINE ) { return cnt ; } if ( cbuf [ pos ] == CARRIAGE_RETURN ) { return cnt ; } // TODO: need to compare to NEWLINEC, too? } return len ; }
|
Count the tailing non - newline characters .
| 115
| 10
|
157,576
|
@ Override public void write ( char [ ] cbuf , int off , int len ) throws IOException { if ( len <= 0 ) { return ; } // if we havn't last seen a newline, and don't get a CR, insert a newline. if ( charsSinceNewline > 0 ) { if ( cbuf [ off ] != CARRIAGE_RETURN ) { super . write ( NEWLINEC , 0 , NEWLINEC . length ) ; charsSinceNewline = 0 ; } else { // length of this line: int nonnl = countNonNewline ( cbuf , off + 1 , len - 1 ) ; // clear the existing chars. if ( nonnl < charsSinceNewline ) { super . write ( CARRIAGE_RETURN ) ; while ( charsSinceNewline > 0 ) { final int n = Math . min ( charsSinceNewline , WHITESPACE . length ( ) ) ; super . write ( WHITESPACE , 0 , n ) ; charsSinceNewline -= n ; } } else { charsSinceNewline = 0 ; } } } charsSinceNewline = tailingNonNewline ( cbuf , off , len ) ; super . write ( cbuf , off , len ) ; flush ( ) ; }
|
Writer that keeps track of when it hasn t seen a newline yet will auto - insert newlines except when lines start with a carriage return .
| 275
| 29
|
157,577
|
protected DoubleDataStore computeIDs ( DBIDs ids , KNNQuery < O > knnQ ) { WritableDoubleDataStore intDims = DataStoreUtil . makeDoubleStorage ( ids , DataStoreFactory . HINT_HOT | DataStoreFactory . HINT_TEMP ) ; FiniteProgress prog = LOG . isVerbose ( ) ? new FiniteProgress ( "Intrinsic dimensionality" , ids . size ( ) , LOG ) : null ; for ( DBIDIter iter = ids . iter ( ) ; iter . valid ( ) ; iter . advance ( ) ) { double id = 0. ; try { id = estimator . estimate ( knnQ , iter , k_c + 1 ) ; } catch ( ArithmeticException e ) { id = 0 ; // Too many duplicates, etc. } intDims . putDouble ( iter , id ) ; LOG . incrementProcessed ( prog ) ; } LOG . ensureCompleted ( prog ) ; return intDims ; }
|
Computes all IDs
| 218
| 4
|
157,578
|
protected DoubleDataStore computeIDOS ( DBIDs ids , KNNQuery < O > knnQ , DoubleDataStore intDims , DoubleMinMax idosminmax ) { WritableDoubleDataStore ldms = DataStoreUtil . makeDoubleStorage ( ids , DataStoreFactory . HINT_STATIC ) ; FiniteProgress prog = LOG . isVerbose ( ) ? new FiniteProgress ( "ID Outlier Scores for objects" , ids . size ( ) , LOG ) : null ; for ( DBIDIter iter = ids . iter ( ) ; iter . valid ( ) ; iter . advance ( ) ) { final KNNList neighbors = knnQ . getKNNForDBID ( iter , k_r ) ; double sum = 0. ; int cnt = 0 ; for ( DoubleDBIDListIter neighbor = neighbors . iter ( ) ; neighbor . valid ( ) ; neighbor . advance ( ) ) { if ( DBIDUtil . equal ( iter , neighbor ) ) { continue ; } final double id = intDims . doubleValue ( neighbor ) ; sum += id > 0 ? 1.0 / id : 0. ; if ( ++ cnt == k_r ) { // Always stop after at most k_r elements. break ; } } final double id_q = intDims . doubleValue ( iter ) ; final double idos = id_q > 0 ? id_q * sum / cnt : 0. ; ldms . putDouble ( iter , idos ) ; idosminmax . put ( idos ) ; LOG . incrementProcessed ( prog ) ; } LOG . ensureCompleted ( prog ) ; return ldms ; }
|
Computes all IDOS scores .
| 362
| 7
|
157,579
|
public OutlierResult run ( Database database , Relation < V > relation ) { final int dbsize = relation . size ( ) ; ArrayList < ArrayList < DBIDs > > ranges = buildRanges ( relation ) ; Heap < Individuum > . UnorderedIter individuums = ( new EvolutionarySearch ( relation , ranges , m , rnd . getSingleThreadedRandom ( ) ) ) . run ( ) ; WritableDoubleDataStore outlierScore = DataStoreUtil . makeDoubleStorage ( relation . getDBIDs ( ) , DataStoreFactory . HINT_HOT | DataStoreFactory . HINT_STATIC ) ; for ( ; individuums . valid ( ) ; individuums . advance ( ) ) { DBIDs ids = computeSubspaceForGene ( individuums . get ( ) . getGene ( ) , ranges ) ; double sparsityC = sparsity ( ids . size ( ) , dbsize , k , phi ) ; for ( DBIDIter iter = ids . iter ( ) ; iter . valid ( ) ; iter . advance ( ) ) { double prev = outlierScore . doubleValue ( iter ) ; if ( Double . isNaN ( prev ) || sparsityC < prev ) { outlierScore . putDouble ( iter , sparsityC ) ; } } } DoubleMinMax minmax = new DoubleMinMax ( ) ; for ( DBIDIter iditer = relation . iterDBIDs ( ) ; iditer . valid ( ) ; iditer . advance ( ) ) { double val = outlierScore . doubleValue ( iditer ) ; if ( Double . isNaN ( val ) ) { outlierScore . putDouble ( iditer , val = 0. ) ; } minmax . put ( val ) ; } DoubleRelation scoreResult = new MaterializedDoubleRelation ( "AggarwalYuEvolutionary" , "aggarwal-yu-outlier" , outlierScore , relation . getDBIDs ( ) ) ; OutlierScoreMeta meta = new InvertedOutlierScoreMeta ( minmax . getMin ( ) , minmax . getMax ( ) , Double . NEGATIVE_INFINITY , 0.0 ) ; return new OutlierResult ( meta , scoreResult ) ; }
|
Performs the evolutionary algorithm on the given database .
| 491
| 10
|
157,580
|
protected double [ ] [ ] buildDistanceMatrix ( ArrayDBIDs ids , DistanceQuery < ? > dq ) { final int size = ids . size ( ) ; double [ ] [ ] dmat = new double [ size ] [ size ] ; final boolean square = ! dq . getDistanceFunction ( ) . isSquared ( ) ; FiniteProgress prog = LOG . isVerbose ( ) ? new FiniteProgress ( "Computing distance matrix" , ( size * ( size - 1 ) ) >>> 1 , LOG ) : null ; Duration timer = LOG . isStatistics ( ) ? LOG . newDuration ( this . getClass ( ) . getName ( ) + ".runtime.distancematrix" ) . begin ( ) : null ; DBIDArrayIter ix = ids . iter ( ) , iy = ids . iter ( ) ; for ( ix . seek ( 0 ) ; ix . valid ( ) ; ix . advance ( ) ) { double [ ] dmat_x = dmat [ ix . getOffset ( ) ] ; for ( iy . seek ( ix . getOffset ( ) + 1 ) ; iy . valid ( ) ; iy . advance ( ) ) { final double dist = dq . distance ( ix , iy ) ; dmat [ iy . getOffset ( ) ] [ ix . getOffset ( ) ] = dmat_x [ iy . getOffset ( ) ] = square ? ( dist * dist ) : dist ; } if ( prog != null ) { int row = ix . getOffset ( ) + 1 ; prog . setProcessed ( row * size - ( ( row * ( row + 1 ) ) >>> 1 ) , LOG ) ; } } LOG . ensureCompleted ( prog ) ; if ( timer != null ) { LOG . statistics ( timer . end ( ) ) ; } return dmat ; }
|
Build a distance matrix of squared distances .
| 403
| 8
|
157,581
|
@ Override public Clustering < M > run ( Database database , Relation < V > relation ) { MutableProgress prog = LOG . isVerbose ( ) ? new MutableProgress ( "X-means number of clusters" , k_max , LOG ) : null ; // Run initial k-means to find at least k_min clusters innerKMeans . setK ( k_min ) ; LOG . statistics ( new StringStatistic ( KEY + ".initialization" , initializer . toString ( ) ) ) ; splitInitializer . setInitialMeans ( initializer . chooseInitialMeans ( database , relation , k_min , getDistanceFunction ( ) ) ) ; Clustering < M > clustering = innerKMeans . run ( database , relation ) ; if ( prog != null ) { prog . setProcessed ( k_min , LOG ) ; } ArrayList < Cluster < M > > clusters = new ArrayList <> ( clustering . getAllClusters ( ) ) ; while ( clusters . size ( ) <= k_max ) { // Improve-Structure: ArrayList < Cluster < M >> nextClusters = new ArrayList <> ( ) ; for ( Cluster < M > cluster : clusters ) { // Try to split this cluster: List < Cluster < M >> childClusterList = splitCluster ( cluster , database , relation ) ; nextClusters . addAll ( childClusterList ) ; if ( childClusterList . size ( ) > 1 ) { k += childClusterList . size ( ) - 1 ; if ( prog != null ) { if ( k >= k_max ) { prog . setTotal ( k + 1 ) ; } prog . setProcessed ( k , LOG ) ; } } } if ( clusters . size ( ) == nextClusters . size ( ) ) { break ; } // Improve-Params: splitInitializer . setInitialClusters ( nextClusters ) ; innerKMeans . setK ( nextClusters . size ( ) ) ; clustering = innerKMeans . run ( database , relation ) ; clusters . clear ( ) ; clusters . addAll ( clustering . getAllClusters ( ) ) ; } // Ensure that the progress bar finished. if ( prog != null ) { prog . setTotal ( k ) ; prog . setProcessed ( k , LOG ) ; } return new Clustering <> ( "X-Means Result" , "X-Means" , clusters ) ; }
|
Run the algorithm on a database and relation .
| 533
| 9
|
157,582
|
protected List < Cluster < M > > splitCluster ( Cluster < M > parentCluster , Database database , Relation < V > relation ) { // Transform parent cluster into a clustering ArrayList < Cluster < M >> parentClusterList = new ArrayList < Cluster < M > > ( 1 ) ; parentClusterList . add ( parentCluster ) ; if ( parentCluster . size ( ) <= 1 ) { // Split is not possbile return parentClusterList ; } Clustering < M > parentClustering = new Clustering <> ( parentCluster . getName ( ) , parentCluster . getName ( ) , parentClusterList ) ; ProxyDatabase proxyDB = new ProxyDatabase ( parentCluster . getIDs ( ) , database ) ; splitInitializer . setInitialMeans ( splitCentroid ( parentCluster , relation ) ) ; innerKMeans . setK ( 2 ) ; Clustering < M > childClustering = innerKMeans . run ( proxyDB ) ; double parentEvaluation = informationCriterion . quality ( parentClustering , getDistanceFunction ( ) , relation ) ; double childrenEvaluation = informationCriterion . quality ( childClustering , getDistanceFunction ( ) , relation ) ; if ( LOG . isDebugging ( ) ) { LOG . debug ( "parentEvaluation: " + parentEvaluation ) ; LOG . debug ( "childrenEvaluation: " + childrenEvaluation ) ; } // Check if split is an improvement: return informationCriterion . isBetter ( parentEvaluation , childrenEvaluation ) ? parentClusterList : childClustering . getAllClusters ( ) ; }
|
Conditionally splits the clusters based on the information criterion .
| 365
| 11
|
157,583
|
protected double [ ] [ ] splitCentroid ( Cluster < ? extends MeanModel > parentCluster , Relation < V > relation ) { double [ ] parentCentroid = parentCluster . getModel ( ) . getMean ( ) ; // Compute size of cluster/region double radius = 0. ; for ( DBIDIter it = parentCluster . getIDs ( ) . iter ( ) ; it . valid ( ) ; it . advance ( ) ) { double d = getDistanceFunction ( ) . distance ( relation . get ( it ) , DoubleVector . wrap ( parentCentroid ) ) ; radius = ( d > radius ) ? d : radius ; } // Choose random vector Random random = rnd . getSingleThreadedRandom ( ) ; final int dim = RelationUtil . dimensionality ( relation ) ; double [ ] randomVector = normalize ( MathUtil . randomDoubleArray ( dim , random ) ) ; timesEquals ( randomVector , ( .4 + random . nextDouble ( ) * .5 ) * radius ) ; // Get the new centroids for ( int d = 0 ; d < dim ; d ++ ) { double a = parentCentroid [ d ] , b = randomVector [ d ] ; parentCentroid [ d ] = a - b ; randomVector [ d ] = a + b ; } return new double [ ] [ ] { parentCentroid , randomVector } ; }
|
Split an existing centroid into two initial centers .
| 300
| 10
|
157,584
|
private void scan ( HilbertFeatures hf , int k0 ) { final int mink0 = Math . min ( 2 * k0 , capital_n - 1 ) ; if ( LOG . isDebuggingFine ( ) ) { LOG . debugFine ( "Scanning with k0=" + k0 + " (" + mink0 + ")" + " N*=" + capital_n_star ) ; } for ( int i = 0 ; i < hf . pf . length ; i ++ ) { if ( hf . pf [ i ] . ubound < omega_star ) { continue ; } if ( hf . pf [ i ] . lbound < hf . pf [ i ] . ubound ) { double omega = hf . fastUpperBound ( i ) ; if ( omega < omega_star ) { hf . pf [ i ] . ubound = omega ; } else { int maxcount ; // capital_n-1 instead of capital_n: all, except self if ( hf . top . contains ( hf . pf [ i ] ) ) { maxcount = capital_n - 1 ; } else { maxcount = mink0 ; } innerScan ( hf , i , maxcount ) ; } } if ( hf . pf [ i ] . ubound > 0 ) { hf . updateOUT ( i ) ; } if ( hf . pf [ i ] . lbound > 0 ) { hf . updateWLB ( i ) ; } if ( hf . wlb . size ( ) >= n ) { omega_star = Math . max ( omega_star , hf . wlb . peek ( ) . lbound ) ; } } }
|
Scan function performs a squential scan over the data .
| 370
| 11
|
157,585
|
private void trueOutliers ( HilbertFeatures h ) { n_star = 0 ; for ( ObjectHeap . UnsortedIter < HilFeature > iter = h . out . unsortedIter ( ) ; iter . valid ( ) ; iter . advance ( ) ) { HilFeature entry = iter . get ( ) ; if ( entry . ubound >= omega_star && ( entry . ubound - entry . lbound < 1E-10 ) ) { n_star ++ ; } } }
|
trueOutliers function updates n_star
| 104
| 9
|
157,586
|
public static void load ( Class < ? > restrictionClass ) { if ( MASTER_CACHE == null ) { initialize ( ) ; } if ( MASTER_CACHE . isEmpty ( ) ) { return ; } Iterator < Class < ? > > iter = MASTER_CACHE . iterator ( ) ; while ( iter . hasNext ( ) ) { Class < ? > clazz = iter . next ( ) ; // Skip other classes. if ( ! restrictionClass . isAssignableFrom ( clazz ) ) { continue ; } // skip abstract / private classes. if ( Modifier . isInterface ( clazz . getModifiers ( ) ) || Modifier . isAbstract ( clazz . getModifiers ( ) ) || Modifier . isPrivate ( clazz . getModifiers ( ) ) ) { continue ; } boolean instantiable = false ; try { instantiable = clazz . getConstructor ( ) != null ; } catch ( Exception | Error e ) { // ignore } try { instantiable = instantiable || ClassGenericsUtil . getParameterizer ( clazz ) != null ; } catch ( Exception | Error e ) { // ignore } if ( ! instantiable ) { continue ; } ELKIServiceRegistry . register ( restrictionClass , clazz ) ; } }
|
Load classes via linear scanning .
| 277
| 6
|
157,587
|
private static int comparePackageClass ( Class < ? > o1 , Class < ? > o2 ) { return o1 . getPackage ( ) == o2 . getPackage ( ) ? // o1 . getCanonicalName ( ) . compareTo ( o2 . getCanonicalName ( ) ) // : o1 . getPackage ( ) == null ? - 1 : o2 . getPackage ( ) == null ? + 1 // : o1 . getPackage ( ) . getName ( ) . compareTo ( o2 . getPackage ( ) . getName ( ) ) ; }
|
Compare two classes by package name first .
| 125
| 8
|
157,588
|
private static int classPriority ( Class < ? > o1 ) { Priority p = o1 . getAnnotation ( Priority . class ) ; if ( p == null ) { Class < ? > pa = o1 . getDeclaringClass ( ) ; p = ( pa != null ) ? pa . getAnnotation ( Priority . class ) : null ; } return p != null ? p . value ( ) : Priority . DEFAULT ; }
|
Get the priority of a class or its outer class .
| 92
| 11
|
157,589
|
public int nextIndex ( int weight ) { if ( used == parent . length ) { int nsize = used + ( used >> 1 ) ; this . weight = Arrays . copyOf ( this . weight , nsize ) ; this . parent = Arrays . copyOf ( this . parent , nsize ) ; } this . weight [ used ] = weight ; this . parent [ used ] = used ; return used ++ ; }
|
Occupy the next unused index .
| 90
| 7
|
157,590
|
public int find ( int cur ) { assert ( cur >= 0 && cur < parent . length ) ; int p = parent [ cur ] , tmp ; while ( cur != p ) { tmp = p ; p = parent [ cur ] = parent [ p ] ; // Perform simple path compression. cur = tmp ; } return cur ; }
|
Find the parent of an object .
| 69
| 7
|
157,591
|
public int union ( int first , int second ) { int firstComponent = find ( first ) , secondComponent = find ( second ) ; if ( firstComponent == secondComponent ) { return firstComponent ; } final int w1 = weight [ firstComponent ] , w2 = weight [ secondComponent ] ; if ( w1 > w2 ) { parent [ secondComponent ] = firstComponent ; weight [ firstComponent ] += w2 ; return firstComponent ; } else { parent [ firstComponent ] = secondComponent ; weight [ secondComponent ] += w1 ; return secondComponent ; } }
|
Join the components of elements p and q .
| 120
| 9
|
157,592
|
public IntList getRoots ( ) { IntList roots = new IntArrayList ( ) ; for ( int i = 0 ; i < used ; i ++ ) { // roots or one element in component if ( parent [ i ] == i ) { roots . add ( i ) ; } } return roots ; }
|
Collect all component root elements .
| 65
| 6
|
157,593
|
public int growSuperNode ( ) { if ( getNumEntries ( ) < getCapacity ( ) ) { throw new IllegalStateException ( "This node is not yet overflowing (only " + getNumEntries ( ) + " of " + getCapacity ( ) + " entries)" ) ; } Entry [ ] old_nodes = super . entries . clone ( ) ; assert old_nodes [ old_nodes . length - 1 ] != null ; super . entries = ( Entry [ ] ) java . util . Arrays . copyOfRange ( old_nodes , 0 , getCapacity ( ) * 2 - 1 , entries . getClass ( ) ) ; assert super . entries . length == old_nodes . length * 2 - 1 ; return getCapacity ( ) ; }
|
Grows the supernode by duplicating its capacity .
| 169
| 11
|
157,594
|
public < T extends AbstractXTree < N > > void readSuperNode ( ObjectInput in , T tree ) throws IOException , ClassNotFoundException { readExternal ( in ) ; if ( capacity_to_be_filled <= 0 || ! isSuperNode ( ) ) { throw new IllegalStateException ( "This node does not appear to be a supernode" ) ; } if ( isLeaf ) { throw new IllegalStateException ( "A supernode is cannot be a leaf" ) ; } // TODO: verify entries = new Entry [ capacity_to_be_filled ] ; // old way: // entries = (E[]) new XDirectoryEntry[capacity_to_be_filled]; capacity_to_be_filled = 0 ; for ( int i = 0 ; i < numEntries ; i ++ ) { SpatialEntry s = new SpatialDirectoryEntry ( ) ; s . readExternal ( in ) ; entries [ i ] = s ; } N n = tree . getSupernodes ( ) . put ( ( long ) getPageID ( ) , ( N ) this ) ; if ( n != null ) { Logging . getLogger ( this . getClass ( ) ) . fine ( "Warning: this supernode should only be read once. Now a node of size " + entries . length + " has replaced a node of size " + n . entries . length + " for id " + getPageID ( ) ) ; } }
|
Reads the id of this supernode the numEntries and the entries array from the specified stream .
| 309
| 21
|
157,595
|
public static int compare ( DBIDRef id1 , DBIDRef id2 ) { return DBIDFactory . FACTORY . compare ( id1 , id2 ) ; }
|
Compare two DBIDs .
| 37
| 5
|
157,596
|
public static boolean equal ( DBIDRef id1 , DBIDRef id2 ) { return DBIDFactory . FACTORY . equal ( id1 , id2 ) ; }
|
Test two DBIDs for equality .
| 37
| 7
|
157,597
|
public static DBID deref ( DBIDRef ref ) { return ref instanceof DBID ? ( DBID ) ref : importInteger ( ref . internalGetIndex ( ) ) ; }
|
Dereference a DBID reference .
| 39
| 8
|
157,598
|
public static ModifiableDBIDs union ( DBIDs ids1 , DBIDs ids2 ) { ModifiableDBIDs result = DBIDUtil . newHashSet ( Math . max ( ids1 . size ( ) , ids2 . size ( ) ) ) ; result . addDBIDs ( ids1 ) ; result . addDBIDs ( ids2 ) ; return result ; }
|
Returns the union of the two specified collection of IDs .
| 84
| 11
|
157,599
|
public static ModifiableDBIDs difference ( DBIDs ids1 , DBIDs ids2 ) { ModifiableDBIDs result = DBIDUtil . newHashSet ( ids1 ) ; result . removeDBIDs ( ids2 ) ; return result ; }
|
Returns the difference of the two specified collection of IDs .
| 56
| 11
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.