idx
int64
0
165k
question
stringlengths
73
4.15k
target
stringlengths
5
918
len_question
int64
21
890
len_target
int64
3
255
157,400
public double coveringRadiusFromEntries ( DBID routingObjectID , AbstractMTree < O , N , E , ? > mTree ) { double coveringRadius = 0. ; for ( int i = 0 ; i < getNumEntries ( ) ; i ++ ) { E entry = getEntry ( i ) ; final double cover = entry . getParentDistance ( ) + entry . getCoveringRadius ( ) ; coveringRadius = coveringRadius < cover ? cover : coveringRadius ; } return coveringRadius ; }
Determines and returns the covering radius of this node .
113
12
157,401
public static double quadraticEuclidean ( double [ ] v1 , double [ ] v2 ) { final double d1 = v1 [ 0 ] - v2 [ 0 ] , d2 = v1 [ 1 ] - v2 [ 1 ] ; return ( d1 * d1 ) + ( d2 * d2 ) ; }
Squared euclidean distance . 2d .
74
11
157,402
protected void aggregateSpecial ( T value , int bin ) { final T exist = getSpecial ( bin ) ; // Note: do not inline above accessor, as getSpecial will initialize the // special variable used below! special [ bin ] = aggregate ( exist , value ) ; }
Aggregate for a special value .
57
7
157,403
protected void removePreviousRelation ( Relation < ? > relation ) { if ( keep ) { return ; } boolean first = true ; for ( It < Index > it = relation . getHierarchy ( ) . iterDescendants ( relation ) . filter ( Index . class ) ; it . valid ( ) ; it . advance ( ) ) { if ( first ) { Logging . getLogger ( getClass ( ) ) . statistics ( "Index statistics when removing initial data relation." ) ; first = false ; } it . get ( ) . logStatistics ( ) ; } ResultUtil . removeRecursive ( relation . getHierarchy ( ) , relation ) ; }
Remove the previous relation .
141
5
157,404
protected double [ ] kNNDistances ( ) { int k = getEntry ( 0 ) . getKnnDistances ( ) . length ; double [ ] result = new double [ k ] ; for ( int i = 0 ; i < getNumEntries ( ) ; i ++ ) { for ( int j = 0 ; j < k ; j ++ ) { MkTabEntry entry = getEntry ( i ) ; result [ j ] = Math . max ( result [ j ] , entry . getKnnDistance ( j + 1 ) ) ; } } return result ; }
Determines and returns the knn distance of this node as the maximum knn distance of all entries .
120
22
157,405
public OutlierResult run ( Database database , Relation < O > relation ) { StepProgress stepprog = LOG . isVerbose ( ) ? new StepProgress ( "VOV" , 3 ) : null ; DBIDs ids = relation . getDBIDs ( ) ; int dim = RelationUtil . dimensionality ( relation ) ; LOG . beginStep ( stepprog , 1 , "Materializing nearest-neighbor sets." ) ; KNNQuery < O > knnq = DatabaseUtil . precomputedKNNQuery ( database , relation , getDistanceFunction ( ) , k ) ; // Compute Volumes LOG . beginStep ( stepprog , 2 , "Computing Volumes." ) ; WritableDoubleDataStore vols = DataStoreUtil . makeDoubleStorage ( ids , DataStoreFactory . HINT_HOT | DataStoreFactory . HINT_TEMP ) ; computeVolumes ( knnq , dim , ids , vols ) ; // compute VOV of each object LOG . beginStep ( stepprog , 3 , "Computing Variance of Volumes (VOV)." ) ; WritableDoubleDataStore vovs = DataStoreUtil . makeDoubleStorage ( ids , DataStoreFactory . HINT_HOT | DataStoreFactory . HINT_DB ) ; // track the maximum value for normalization. DoubleMinMax vovminmax = new DoubleMinMax ( ) ; computeVOVs ( knnq , ids , vols , vovs , vovminmax ) ; LOG . setCompleted ( stepprog ) ; // Build result representation. DoubleRelation scoreResult = new MaterializedDoubleRelation ( "Variance of Volume" , "vov-outlier" , vovs , ids ) ; OutlierScoreMeta scoreMeta = new BasicOutlierScoreMeta ( vovminmax . getMin ( ) , vovminmax . getMax ( ) , 0.0 , Double . POSITIVE_INFINITY , 0.0 ) ; return new OutlierResult ( scoreMeta , scoreResult ) ; }
Runs the VOV algorithm on the given database .
452
11
157,406
private void computeVOVs ( KNNQuery < O > knnq , DBIDs ids , DoubleDataStore vols , WritableDoubleDataStore vovs , DoubleMinMax vovminmax ) { FiniteProgress prog = LOG . isVerbose ( ) ? new FiniteProgress ( "Variance of Volume" , ids . size ( ) , LOG ) : null ; boolean warned = false ; for ( DBIDIter iter = ids . iter ( ) ; iter . valid ( ) ; iter . advance ( ) ) { KNNList knns = knnq . getKNNForDBID ( iter , k ) ; DoubleDBIDListIter it = knns . iter ( ) ; double vbar = 0. ; for ( ; it . valid ( ) ; it . advance ( ) ) { vbar += vols . doubleValue ( it ) ; } vbar /= knns . size ( ) ; // Average double vov = 0. ; for ( it . seek ( 0 ) ; it . valid ( ) ; it . advance ( ) ) { double v = vols . doubleValue ( it ) - vbar ; vov += v * v ; } if ( ! ( vov < Double . POSITIVE_INFINITY ) && ! warned ) { LOG . warning ( "Variance of Volumes has hit double precision limits, results are not reliable." ) ; warned = true ; } vov = ( vov < Double . POSITIVE_INFINITY ) ? vov / ( knns . size ( ) - 1 ) : Double . POSITIVE_INFINITY ; vovs . putDouble ( iter , vov ) ; // update minimum and maximum vovminmax . put ( vov ) ; LOG . incrementProcessed ( prog ) ; } LOG . ensureCompleted ( prog ) ; }
Compute variance of volumes .
393
6
157,407
private void boundSize ( HashSetModifiableDBIDs set , int items ) { if ( set . size ( ) > items ) { DBIDs sample = DBIDUtil . randomSample ( set , items , rnd ) ; set . clear ( ) ; set . addDBIDs ( sample ) ; } }
Bound the size of a set by random sampling .
65
10
157,408
private boolean add ( DBIDRef cur , DBIDRef cand , double distance ) { KNNHeap neighbors = store . get ( cur ) ; if ( neighbors . contains ( cand ) ) { return false ; } double newKDistance = neighbors . insert ( distance , cand ) ; return ( distance <= newKDistance ) ; }
Add cand to cur s heap neighbors with distance
69
9
157,409
private int sampleNew ( DBIDs ids , WritableDataStore < HashSetModifiableDBIDs > sampleNewNeighbors , WritableDataStore < HashSetModifiableDBIDs > newNeighborHash , int items ) { int t = 0 ; for ( DBIDIter iditer = ids . iter ( ) ; iditer . valid ( ) ; iditer . advance ( ) ) { KNNHeap realNeighbors = store . get ( iditer ) ; HashSetModifiableDBIDs newNeighbors = newNeighborHash . get ( iditer ) ; HashSetModifiableDBIDs realNewNeighbors = sampleNewNeighbors . get ( iditer ) ; realNewNeighbors . clear ( ) ; // Reuse for ( DoubleDBIDListIter heapiter = realNeighbors . unorderedIterator ( ) ; heapiter . valid ( ) ; heapiter . advance ( ) ) { if ( newNeighbors . contains ( heapiter ) ) { realNewNeighbors . add ( heapiter ) ; t ++ ; } } boundSize ( realNewNeighbors , items ) ; newNeighbors . removeDBIDs ( realNewNeighbors ) ; newNeighborHash . put ( iditer , newNeighbors ) ; } return t ; }
samples newNeighbors for every object
263
8
157,410
private void reverse ( WritableDataStore < HashSetModifiableDBIDs > sampleNewHash , WritableDataStore < HashSetModifiableDBIDs > newReverseNeighbors , WritableDataStore < HashSetModifiableDBIDs > oldReverseNeighbors ) { for ( DBIDIter iditer = relation . iterDBIDs ( ) ; iditer . valid ( ) ; iditer . advance ( ) ) { KNNHeap heap = store . get ( iditer ) ; HashSetDBIDs newNeighbors = sampleNewHash . get ( iditer ) ; for ( DoubleDBIDListIter heapiter = heap . unorderedIterator ( ) ; heapiter . valid ( ) ; heapiter . advance ( ) ) { ( newNeighbors . contains ( heapiter ) ? newReverseNeighbors : oldReverseNeighbors ) . get ( heapiter ) . add ( iditer ) ; } } }
calculates new and old neighbors for database
196
9
157,411
public static double similarityNumberVector ( NumberVector o1 , NumberVector o2 ) { final int d1 = o1 . getDimensionality ( ) , d2 = o2 . getDimensionality ( ) ; int intersection = 0 , union = 0 ; int d = 0 ; for ( ; d < d1 && d < d2 ; d ++ ) { double v1 = o1 . doubleValue ( d ) , v2 = o2 . doubleValue ( d ) ; if ( v1 != v1 || v2 != v2 ) { // Skip NaNs. continue ; } if ( v1 != 0. || v2 != 0 ) { ++ union ; if ( v1 == v2 ) { ++ intersection ; } } } for ( ; d < d1 ; d ++ ) { if ( o1 . doubleValue ( d ) != 0 ) { ++ union ; } } for ( ; d < d2 ; d ++ ) { if ( o2 . doubleValue ( d ) != 0 ) { ++ union ; } } return intersection / ( double ) union ; }
Compute Jaccard similarity for two number vectors .
228
11
157,412
@ Deprecated protected final Map < DBID , KNNList > batchNN ( N node , DBIDs ids , int kmax ) { Map < DBID , KNNList > res = new HashMap <> ( ids . size ( ) ) ; for ( DBIDIter iter = ids . iter ( ) ; iter . valid ( ) ; iter . advance ( ) ) { DBID id = DBIDUtil . deref ( iter ) ; res . put ( id , knnq . getKNNForDBID ( id , kmax ) ) ; } return res ; }
Performs a batch k - nearest neighbor query for a list of query objects .
126
16
157,413
void writeResult ( PrintStream out , DBIDs ids , OutlierResult result , ScalingFunction scaling , String label ) { if ( scaling instanceof OutlierScaling ) { ( ( OutlierScaling ) scaling ) . prepare ( result ) ; } out . append ( label ) ; DoubleRelation scores = result . getScores ( ) ; for ( DBIDIter iter = ids . iter ( ) ; iter . valid ( ) ; iter . advance ( ) ) { double value = scores . doubleValue ( iter ) ; value = scaling != null ? scaling . getScaled ( value ) : value ; out . append ( ' ' ) . append ( Double . toString ( value ) ) ; } out . append ( FormatUtil . NEWLINE ) ; }
Write a single output line .
162
6
157,414
private void runForEachK ( String prefix , int mink , int maxk , IntFunction < OutlierResult > runner , BiConsumer < String , OutlierResult > out ) { if ( isDisabled ( prefix ) ) { LOG . verbose ( "Skipping (disabled): " + prefix ) ; return ; // Disabled } LOG . verbose ( "Running " + prefix ) ; final int digits = ( int ) FastMath . ceil ( FastMath . log10 ( krange . getMax ( ) + 1 ) ) ; final String format = "%s-%0" + digits + "d" ; krange . forEach ( k -> { if ( k >= mink && k <= maxk ) { Duration time = LOG . newDuration ( this . getClass ( ) . getCanonicalName ( ) + "." + prefix + ".k" + k + ".runtime" ) . begin ( ) ; OutlierResult result = runner . apply ( k ) ; LOG . statistics ( time . end ( ) ) ; if ( result != null ) { out . accept ( String . format ( Locale . ROOT , format , prefix , k ) , result ) ; result . getHierarchy ( ) . removeSubtree ( result ) ; } } } ) ; }
Iterate over the k range .
272
7
157,415
public double [ ] getCoefficients ( ) { double [ ] result = new double [ b . length ] ; System . arraycopy ( b , 0 , result , 0 , b . length ) ; return result ; }
Returns a copy of the the array of coefficients b0 ... bp .
47
15
157,416
public double getValueAt ( int k ) { double result = 0. ; double log_k = FastMath . log ( k ) , acc = 1. ; for ( int p = 0 ; p < b . length ; p ++ ) { result += b [ p ] * acc ; acc *= log_k ; } return result ; }
Returns the function value of the polynomial approximation at the specified k .
72
15
157,417
@ SuppressWarnings ( "unchecked" ) private static < V extends FeatureVector < F > , F > ArrayAdapter < F , ? super V > getAdapter ( Factory < V , F > factory ) { if ( factory instanceof NumberVector . Factory ) { return ( ArrayAdapter < F , ? super V > ) NumberVectorAdapter . STATIC ; } return ( ArrayAdapter < F , ? super V > ) FeatureVectorAdapter . STATIC ; }
Choose the best adapter for this .
97
7
157,418
protected void expandClusterOrder ( DBID ipt , ClusterOrder order , DistanceQuery < V > dq , FiniteProgress prog ) { UpdatableHeap < OPTICSHeapEntry > heap = new UpdatableHeap <> ( ) ; heap . add ( new OPTICSHeapEntry ( ipt , null , Double . POSITIVE_INFINITY ) ) ; while ( ! heap . isEmpty ( ) ) { final OPTICSHeapEntry current = heap . poll ( ) ; DBID currPt = current . objectID ; order . add ( currPt , current . reachability , current . predecessorID ) ; processed . add ( currPt ) ; double coredist = inverseDensities . doubleValue ( currPt ) ; for ( DBIDIter it = neighs . get ( currPt ) . iter ( ) ; it . valid ( ) ; it . advance ( ) ) { if ( processed . contains ( it ) ) { continue ; } double nrdist = dq . distance ( currPt , it ) ; if ( coredist > nrdist ) { nrdist = coredist ; } if ( reachDist . doubleValue ( it ) == UNDEFINED_DISTANCE ) { reachDist . put ( it , nrdist ) ; } else if ( nrdist < reachDist . doubleValue ( it ) ) { reachDist . put ( it , nrdist ) ; } heap . add ( new OPTICSHeapEntry ( DBIDUtil . deref ( it ) , currPt , nrdist ) ) ; } LOG . incrementProcessed ( prog ) ; } }
OPTICS algorithm for processing a point but with different density estimates
364
13
157,419
public synchronized void resizeMatrix ( int newsize ) throws IOException { if ( newsize >= 0xFFFF ) { throw new RuntimeException ( "Matrix size is too big and will overflow the integer datatype." ) ; } if ( ! array . isWritable ( ) ) { throw new IOException ( "Can't resize a read-only array." ) ; } array . resizeFile ( arraysize ( newsize ) ) ; this . matrixsize = newsize ; ByteBuffer header = array . getExtraHeader ( ) ; header . putInt ( this . matrixsize ) ; }
Resize the matrix to cover newsize x newsize .
122
12
157,420
private int computeOffset ( int x , int y ) { if ( y > x ) { return computeOffset ( y , x ) ; } return ( ( x * ( x + 1 ) ) >> 1 ) + y ; }
Compute the offset within the file .
47
8
157,421
private void validateHeader ( boolean validateRecordSize ) throws IOException { int readmagic = file . readInt ( ) ; // Validate magic number if ( readmagic != this . magic ) { file . close ( ) ; throw new IOException ( "Magic in LinearDiskCache does not match: " + readmagic + " instead of " + this . magic ) ; } // Validate header size if ( file . readInt ( ) != this . headersize ) { file . close ( ) ; throw new IOException ( "Header size in LinearDiskCache does not match." ) ; } if ( validateRecordSize ) { // Validate record size if ( file . readInt ( ) != this . recordsize ) { file . close ( ) ; throw new IOException ( "Recordsize in LinearDiskCache does not match." ) ; } } else { // or just read it from file this . recordsize = file . readInt ( ) ; } // read the number of records and validate with file size. if ( file . getFilePointer ( ) != HEADER_POS_SIZE ) { throw new IOException ( "Incorrect file position when reading header." ) ; } this . numrecs = file . readInt ( ) ; if ( numrecs < 0 || file . length ( ) != indexToFileposition ( numrecs ) ) { throw new IOException ( "File size and number of records do not agree." ) ; } // yet another sanity check. We should have read all of our internal header // now. if ( file . getFilePointer ( ) != INTERNAL_HEADER_SIZE ) { throw new IOException ( "Incorrect file position after reading header." ) ; } }
Validates the header and throws an IOException if the header is invalid . If validateRecordSize is set to true the record size must match exactly the stored record size within the files header else the record size is read from the header and used .
360
49
157,422
public synchronized void resizeFile ( int newsize ) throws IOException { if ( ! writable ) { throw new IOException ( "File is not writeable!" ) ; } // update the number of records this . numrecs = newsize ; file . seek ( HEADER_POS_SIZE ) ; file . writeInt ( numrecs ) ; // resize file file . setLength ( indexToFileposition ( numrecs ) ) ; mapArray ( ) ; }
Resize file to the intended size
98
7
157,423
public synchronized ByteBuffer getExtraHeader ( ) throws IOException { final int size = headersize - INTERNAL_HEADER_SIZE ; final MapMode mode = writable ? MapMode . READ_WRITE : MapMode . READ_ONLY ; return file . getChannel ( ) . map ( mode , INTERNAL_HEADER_SIZE , size ) ; }
Read the extra header data .
78
6
157,424
public PointerPrototypeHierarchyRepresentationResult run ( Database db , Relation < O > relation ) { DistanceQuery < O > dq = DatabaseUtil . precomputedDistanceQuery ( db , relation , getDistanceFunction ( ) , LOG ) ; final DBIDs ids = relation . getDBIDs ( ) ; final int size = ids . size ( ) ; // Initialize space for result: PointerHierarchyRepresentationBuilder builder = new PointerHierarchyRepresentationBuilder ( ids , dq . getDistanceFunction ( ) . isSquared ( ) ) ; Int2ObjectOpenHashMap < ModifiableDBIDs > clusters = new Int2ObjectOpenHashMap <> ( size ) ; // Allocate working space: MatrixParadigm mat = new MatrixParadigm ( ids ) ; ArrayModifiableDBIDs prots = DBIDUtil . newArray ( MatrixParadigm . triangleSize ( size ) ) ; initializeMatrices ( mat , prots , dq ) ; DBIDArrayMIter protiter = prots . iter ( ) ; FiniteProgress progress = LOG . isVerbose ( ) ? new FiniteProgress ( "MiniMax clustering" , size - 1 , LOG ) : null ; DBIDArrayIter ix = mat . ix ; for ( int i = 1 , end = size ; i < size ; i ++ ) { end = AGNES . shrinkActiveSet ( ix , builder , end , // findMerge ( end , mat , protiter , builder , clusters , dq ) ) ; LOG . incrementProcessed ( progress ) ; } LOG . ensureCompleted ( progress ) ; return ( PointerPrototypeHierarchyRepresentationResult ) builder . complete ( ) ; }
Run the algorithm on a database .
372
7
157,425
protected static < O > void initializeMatrices ( MatrixParadigm mat , ArrayModifiableDBIDs prots , DistanceQuery < O > dq ) { final DBIDArrayIter ix = mat . ix , iy = mat . iy ; final double [ ] distances = mat . matrix ; int pos = 0 ; for ( ix . seek ( 0 ) ; ix . valid ( ) ; ix . advance ( ) ) { for ( iy . seek ( 0 ) ; iy . getOffset ( ) < ix . getOffset ( ) ; iy . advance ( ) ) { distances [ pos ] = dq . distance ( ix , iy ) ; prots . add ( iy ) ; pos ++ ; } } assert ( prots . size ( ) == pos ) ; }
Initializes the inter - cluster distance matrix of possible merges
172
12
157,426
protected static int findMerge ( int end , MatrixParadigm mat , DBIDArrayMIter prots , PointerHierarchyRepresentationBuilder builder , Int2ObjectOpenHashMap < ModifiableDBIDs > clusters , DistanceQuery < ? > dq ) { final DBIDArrayIter ix = mat . ix , iy = mat . iy ; final double [ ] distances = mat . matrix ; double mindist = Double . POSITIVE_INFINITY ; int x = - 1 , y = - 1 ; for ( int dx = 0 ; dx < end ; dx ++ ) { // Skip if object is already linked if ( builder . isLinked ( ix . seek ( dx ) ) ) { continue ; } final int xoffset = MatrixParadigm . triangleSize ( dx ) ; for ( int dy = 0 ; dy < dx ; dy ++ ) { // Skip if object is already linked if ( builder . isLinked ( iy . seek ( dy ) ) ) { continue ; } double dist = distances [ xoffset + dy ] ; if ( dist < mindist ) { mindist = dist ; x = dx ; y = dy ; } } } assert ( y < x ) ; merge ( end , mat , prots , builder , clusters , dq , x , y ) ; return x ; }
Find the best merge .
282
5
157,427
protected static void merge ( int size , MatrixParadigm mat , DBIDArrayMIter prots , PointerHierarchyRepresentationBuilder builder , Int2ObjectOpenHashMap < ModifiableDBIDs > clusters , DistanceQuery < ? > dq , int x , int y ) { assert ( y < x ) ; final DBIDArrayIter ix = mat . ix . seek ( x ) , iy = mat . iy . seek ( y ) ; final double [ ] distances = mat . matrix ; int offset = MatrixParadigm . triangleSize ( x ) + y ; if ( LOG . isDebuggingFine ( ) ) { LOG . debugFine ( "Merging: " + DBIDUtil . toString ( ix ) + " -> " + DBIDUtil . toString ( iy ) + " " + distances [ offset ] ) ; } ModifiableDBIDs cx = clusters . get ( x ) , cy = clusters . get ( y ) ; // Keep y if ( cy == null ) { cy = DBIDUtil . newHashSet ( ) ; cy . add ( iy ) ; } if ( cx == null ) { cy . add ( ix ) ; } else { cy . addDBIDs ( cx ) ; clusters . remove ( x ) ; } clusters . put ( y , cy ) ; // parent of x is set to y builder . add ( ix , distances [ offset ] , iy , prots . seek ( offset ) ) ; updateMatrices ( size , mat , prots , builder , clusters , dq , y ) ; }
Merges two clusters given by x y their points with smallest IDs and y to keep
338
17
157,428
protected static < O > void updateMatrices ( int size , MatrixParadigm mat , DBIDArrayMIter prots , PointerHierarchyRepresentationBuilder builder , Int2ObjectOpenHashMap < ModifiableDBIDs > clusters , DistanceQuery < O > dq , int c ) { final DBIDArrayIter ix = mat . ix , iy = mat . iy ; // c is the new cluster. // Update entries (at (x,y) with x > y) in the matrix where x = c or y = c // Update entries at (c,y) with y < c ix . seek ( c ) ; for ( iy . seek ( 0 ) ; iy . getOffset ( ) < c ; iy . advance ( ) ) { // Skip entry if already merged if ( builder . isLinked ( iy ) ) { continue ; } updateEntry ( mat , prots , clusters , dq , c , iy . getOffset ( ) ) ; } // Update entries at (x,c) with x > c iy . seek ( c ) ; for ( ix . seek ( c + 1 ) ; ix . valid ( ) ; ix . advance ( ) ) { // Skip entry if already merged if ( builder . isLinked ( ix ) ) { continue ; } updateEntry ( mat , prots , clusters , dq , ix . getOffset ( ) , c ) ; } }
Update the entries of the matrices that contain a distance to c the newly merged cluster .
309
18
157,429
protected static void updateEntry ( MatrixParadigm mat , DBIDArrayMIter prots , Int2ObjectOpenHashMap < ModifiableDBIDs > clusters , DistanceQuery < ? > dq , int x , int y ) { assert ( y < x ) ; final DBIDArrayIter ix = mat . ix , iy = mat . iy ; final double [ ] distances = mat . matrix ; ModifiableDBIDs cx = clusters . get ( x ) , cy = clusters . get ( y ) ; DBIDVar prototype = DBIDUtil . newVar ( ix . seek ( x ) ) ; // Default prototype double minMaxDist ; // Two "real" clusters: if ( cx != null && cy != null ) { minMaxDist = findPrototype ( dq , cx , cy , prototype , Double . POSITIVE_INFINITY ) ; minMaxDist = findPrototype ( dq , cy , cx , prototype , minMaxDist ) ; } else if ( cx != null ) { // cy is singleton. minMaxDist = findPrototypeSingleton ( dq , cx , iy . seek ( y ) , prototype ) ; } else if ( cy != null ) { // cx is singleton. minMaxDist = findPrototypeSingleton ( dq , cy , ix . seek ( x ) , prototype ) ; } else { minMaxDist = dq . distance ( ix . seek ( x ) , iy . seek ( y ) ) ; prototype . set ( ix ) ; } final int offset = MatrixParadigm . triangleSize ( x ) + y ; distances [ offset ] = minMaxDist ; prots . seek ( offset ) . setDBID ( prototype ) ; }
Update entry at x y for distance matrix distances
369
9
157,430
private static double findMax ( DistanceQuery < ? > dq , DBIDIter i , DBIDs cy , double maxDist , double minMaxDist ) { for ( DBIDIter j = cy . iter ( ) ; j . valid ( ) ; j . advance ( ) ) { double dist = dq . distance ( i , j ) ; if ( dist > maxDist ) { // Stop early, if we already know a better candidate. if ( dist >= minMaxDist ) { return dist ; } maxDist = dist ; } } return maxDist ; }
Find the maximum distance of one object to a set .
117
11
157,431
@ Override public void writeExternal ( ObjectOutput out ) throws IOException { out . writeInt ( DBIDUtil . asInteger ( id ) ) ; out . writeInt ( values . length ) ; for ( double v : values ) { out . writeDouble ( v ) ; } }
Calls the super method and writes the values of this entry to the specified stream .
61
17
157,432
@ Override public void readExternal ( ObjectInput in ) throws IOException , ClassNotFoundException { id = DBIDUtil . importInteger ( in . read ( ) ) ; values = new double [ in . readInt ( ) ] ; for ( int d = 0 ; d < values . length ; d ++ ) { values [ d ] = in . readDouble ( ) ; } }
Calls the super method and reads the values of this entry from the specified input stream .
82
18
157,433
@ Override public StringBuilder appendToBuffer ( StringBuilder buf ) { buf . append ( getTask ( ) ) ; buf . append ( ": " ) ; buf . append ( getProcessed ( ) ) ; return buf ; }
Serialize indefinite progress .
49
5
157,434
private TypeInformation getInputTypeRestriction ( ) { // Find maximum dimension requested int m = dims [ 0 ] ; for ( int i = 1 ; i < dims . length ; i ++ ) { m = Math . max ( dims [ i ] , m ) ; } return VectorFieldTypeInformation . typeRequest ( NumberVector . class , m , Integer . MAX_VALUE ) ; }
The input type we use .
83
6
157,435
private boolean isLocalMaximum ( double kdist , DBIDs neighbors , WritableDoubleDataStore kdists ) { for ( DBIDIter it = neighbors . iter ( ) ; it . valid ( ) ; it . advance ( ) ) { if ( kdists . doubleValue ( it ) < kdist ) { return false ; } } return true ; }
Test if a point is a local density maximum .
75
10
157,436
protected int expandCluster ( final int clusterid , final WritableIntegerDataStore clusterids , final KNNQuery < O > knnq , final DBIDs neighbors , final double maxkdist , final FiniteProgress progress ) { int clustersize = 1 ; // initial seed! final ArrayModifiableDBIDs activeSet = DBIDUtil . newArray ( ) ; activeSet . addDBIDs ( neighbors ) ; // run expandCluster as long as this set is non-empty (non-recursive // implementation) DBIDVar id = DBIDUtil . newVar ( ) ; while ( ! activeSet . isEmpty ( ) ) { activeSet . pop ( id ) ; // Assign object to cluster final int oldclus = clusterids . intValue ( id ) ; if ( oldclus == NOISE ) { clustersize += 1 ; // Non core point cluster member: clusterids . putInt ( id , - clusterid ) ; } else if ( oldclus == UNPROCESSED ) { clustersize += 1 ; // expandCluster again: // Evaluate Neighborhood predicate final KNNList newneighbors = knnq . getKNNForDBID ( id , k ) ; // Evaluate Core-Point predicate if ( newneighbors . getKNNDistance ( ) <= maxkdist ) { activeSet . addDBIDs ( newneighbors ) ; } clusterids . putInt ( id , clusterid ) ; LOG . incrementProcessed ( progress ) ; } } return clustersize ; }
Set - based expand cluster implementation .
322
7
157,437
private void fillDensities ( KNNQuery < O > knnq , DBIDs ids , WritableDoubleDataStore dens ) { FiniteProgress prog = LOG . isVerbose ( ) ? new FiniteProgress ( "Densities" , ids . size ( ) , LOG ) : null ; for ( DBIDIter iter = ids . iter ( ) ; iter . valid ( ) ; iter . advance ( ) ) { final KNNList neighbors = knnq . getKNNForDBID ( iter , k ) ; dens . putDouble ( iter , neighbors . getKNNDistance ( ) ) ; LOG . incrementProcessed ( prog ) ; } LOG . ensureCompleted ( prog ) ; }
Collect all densities into an array for sorting .
152
10
157,438
public Clustering < SubspaceModel > run ( Relation < ? extends NumberVector > relation ) { final int dimensionality = RelationUtil . dimensionality ( relation ) ; StepProgress step = new StepProgress ( 2 ) ; // 1. Identification of subspaces that contain clusters step . beginStep ( 1 , "Identification of subspaces that contain clusters" , LOG ) ; ArrayList < List < CLIQUESubspace > > dimensionToDenseSubspaces = new ArrayList <> ( dimensionality ) ; List < CLIQUESubspace > denseSubspaces = findOneDimensionalDenseSubspaces ( relation ) ; dimensionToDenseSubspaces . add ( denseSubspaces ) ; if ( LOG . isVerbose ( ) ) { LOG . verbose ( "1-dimensional dense subspaces: " + denseSubspaces . size ( ) ) ; } if ( LOG . isDebugging ( ) ) { for ( CLIQUESubspace s : denseSubspaces ) { LOG . debug ( s . toString ( ) ) ; } } for ( int k = 2 ; k <= dimensionality && ! denseSubspaces . isEmpty ( ) ; k ++ ) { denseSubspaces = findDenseSubspaces ( relation , denseSubspaces ) ; assert ( dimensionToDenseSubspaces . size ( ) == k - 1 ) ; dimensionToDenseSubspaces . add ( denseSubspaces ) ; if ( LOG . isVerbose ( ) ) { LOG . verbose ( k + "-dimensional dense subspaces: " + denseSubspaces . size ( ) ) ; } if ( LOG . isDebugging ( ) ) { for ( CLIQUESubspace s : denseSubspaces ) { LOG . debug ( s . toString ( ) ) ; } } } // 2. Identification of clusters step . beginStep ( 2 , "Identification of clusters" , LOG ) ; // build result Clustering < SubspaceModel > result = new Clustering <> ( "CLIQUE clustering" , "clique-clustering" ) ; for ( int dim = 0 ; dim < dimensionToDenseSubspaces . size ( ) ; dim ++ ) { List < CLIQUESubspace > subspaces = dimensionToDenseSubspaces . get ( dim ) ; List < Pair < Subspace , ModifiableDBIDs > > modelsAndClusters = determineClusters ( subspaces ) ; if ( LOG . isVerbose ( ) ) { LOG . verbose ( ( dim + 1 ) + "-dimensional clusters: " + modelsAndClusters . 
size ( ) ) ; } for ( Pair < Subspace , ModifiableDBIDs > modelAndCluster : modelsAndClusters ) { Cluster < SubspaceModel > newCluster = new Cluster <> ( modelAndCluster . second ) ; newCluster . setModel ( new SubspaceModel ( modelAndCluster . first , Centroid . make ( relation , modelAndCluster . second ) . getArrayRef ( ) ) ) ; result . addToplevelCluster ( newCluster ) ; } } return result ; }
Performs the CLIQUE algorithm on the given database .
681
12
157,439
private List < Pair < Subspace , ModifiableDBIDs > > determineClusters ( List < CLIQUESubspace > denseSubspaces ) { List < Pair < Subspace , ModifiableDBIDs > > clusters = new ArrayList <> ( ) ; for ( CLIQUESubspace subspace : denseSubspaces ) { List < Pair < Subspace , ModifiableDBIDs > > clustersInSubspace = subspace . determineClusters ( ) ; if ( LOG . isDebugging ( ) ) { LOG . debugFine ( "Subspace " + subspace + " clusters " + clustersInSubspace . size ( ) ) ; } clusters . addAll ( clustersInSubspace ) ; } return clusters ; }
Determines the clusters in the specified dense subspaces .
153
13
157,440
private List < CLIQUESubspace > findOneDimensionalDenseSubspaces ( Relation < ? extends NumberVector > database ) { List < CLIQUESubspace > denseSubspaceCandidates = findOneDimensionalDenseSubspaceCandidates ( database ) ; return prune ? pruneDenseSubspaces ( denseSubspaceCandidates ) : denseSubspaceCandidates ; }
Determines the one dimensional dense subspaces and performs a pruning if this option is chosen .
84
21
157,441
private void updateMinMax ( NumberVector featureVector , double [ ] minima , double [ ] maxima ) { assert ( minima . length == featureVector . getDimensionality ( ) ) ; for ( int d = 0 ; d < featureVector . getDimensionality ( ) ; d ++ ) { double v = featureVector . doubleValue ( d ) ; if ( v == v ) { // Avoid NaN. maxima [ d ] = MathUtil . max ( v , maxima [ d ] ) ; minima [ d ] = MathUtil . min ( v , minima [ d ] ) ; } } }
Updates the minima and maxima array according to the specified feature vector .
134
16
157,442
private List < CLIQUESubspace > findOneDimensionalDenseSubspaceCandidates ( Relation < ? extends NumberVector > database ) { Collection < CLIQUEUnit > units = initOneDimensionalUnits ( database ) ; // identify dense units double total = database . size ( ) ; for ( DBIDIter it = database . iterDBIDs ( ) ; it . valid ( ) ; it . advance ( ) ) { NumberVector featureVector = database . get ( it ) ; // FIXME: rather than repeatedly testing, use a clever data structure? for ( CLIQUEUnit unit : units ) { unit . addFeatureVector ( it , featureVector ) ; } } int dimensionality = RelationUtil . dimensionality ( database ) ; Collection < CLIQUEUnit > denseUnits = new ArrayList <> ( ) ; CLIQUESubspace [ ] denseSubspaces = new CLIQUESubspace [ dimensionality ] ; for ( CLIQUEUnit unit : units ) { // unit is a dense unit if ( unit . selectivity ( total ) >= tau ) { denseUnits . add ( unit ) ; // add the one-dimensional dense unit to its subspace int dim = unit . getDimension ( 0 ) ; CLIQUESubspace subspace_d = denseSubspaces [ dim ] ; if ( subspace_d == null ) { denseSubspaces [ dim ] = subspace_d = new CLIQUESubspace ( dim ) ; } subspace_d . addDenseUnit ( unit ) ; } } // Omit null values where no dense unit was found: List < CLIQUESubspace > subspaceCandidates = new ArrayList <> ( dimensionality ) ; for ( CLIQUESubspace s : denseSubspaces ) { if ( s != null ) { subspaceCandidates . add ( s ) ; } } Collections . sort ( subspaceCandidates , CLIQUESubspace . BY_COVERAGE ) ; if ( LOG . isDebugging ( ) ) { LOG . debugFine ( new StringBuilder ( ) . append ( " number of 1-dim dense units: " ) . append ( denseUnits . size ( ) ) // . append ( "\n number of 1-dim dense subspace candidates: " ) . append ( subspaceCandidates . size ( ) ) . toString ( ) ) ; } return subspaceCandidates ; }
Determines the one - dimensional dense subspace candidates by making a pass over the database .
513
19
157,443
private List < CLIQUESubspace > pruneDenseSubspaces ( List < CLIQUESubspace > denseSubspaces ) { int [ ] [ ] means = computeMeans ( denseSubspaces ) ; double [ ] [ ] diffs = computeDiffs ( denseSubspaces , means [ 0 ] , means [ 1 ] ) ; double [ ] codeLength = new double [ denseSubspaces . size ( ) ] ; double minCL = Double . MAX_VALUE ; int min_i = - 1 ; for ( int i = 0 ; i < denseSubspaces . size ( ) ; i ++ ) { int mi = means [ 0 ] [ i ] , mp = means [ 1 ] [ i ] ; double cl = codeLength [ i ] = log2OrZero ( mi ) + diffs [ 0 ] [ i ] + log2OrZero ( mp ) + diffs [ 1 ] [ i ] ; if ( cl <= minCL ) { minCL = cl ; min_i = i ; } } return denseSubspaces . subList ( 0 , min_i + 1 ) ; }
Performs a MDL - based pruning of the specified dense subspaces as described in the CLIQUE algorithm .
239
25
157,444
/**
 * The specified sorted list of dense subspaces is divided into the selected
 * set I and the pruned set P at every possible cut position. For each cut,
 * the mean of the cover fractions of both sets is computed.
 *
 * @param denseSubspaces dense subspaces, sorted by coverage
 * @return two arrays: mi[i] = ceil of the mean coverage of the prefix
 *         0..i (set I), mp[i] = ceil of the mean coverage of the suffix
 *         i+1..n (set P)
 */
private int[][] computeMeans(List<CLIQUESubspace> denseSubspaces) {
  int n = denseSubspaces.size() - 1;
  int[] mi = new int[n + 1], mp = new int[n + 1];
  double resultMI = 0, resultMP = 0;
  for(int i = 0; i < denseSubspaces.size(); i++) {
    // Running coverage sums from the front (I) and from the back (P).
    resultMI += denseSubspaces.get(i).getCoverage();
    resultMP += denseSubspaces.get(n - i).getCoverage();
    mi[i] = (int) FastMath.ceil(resultMI / (i + 1));
    // The suffix mean for the last cut position is empty, hence skipped.
    if(i != n) {
      mp[n - 1 - i] = (int) FastMath.ceil(resultMP / (i + 1));
    }
  }
  return new int[][] { mi, mp };
}
The specified sorted list of dense subspaces is divided into the selected set I and the pruned set P . For each set the mean of the cover fractions is computed .
209
35
157,445
/**
 * The specified sorted list of dense subspaces is divided into the selected
 * set I and the pruned set P at every cut position. For each set the
 * accumulated logarithmic difference of the coverages from the respective
 * mean values is computed.
 *
 * @param denseSubspaces dense subspaces, sorted by coverage
 * @param mi means of the selected sets I (prefix means)
 * @param mp means of the pruned sets P (suffix means)
 * @return two arrays of accumulated log-differences, aligned with mi and mp
 */
private double[][] computeDiffs(List<CLIQUESubspace> denseSubspaces, int[] mi, int[] mp) {
  int n = denseSubspaces.size() - 1;
  double[] diff_mi = new double[n + 1], diff_mp = new double[n + 1];
  double resultMI = 0, resultMP = 0;
  for(int i = 0; i < denseSubspaces.size(); i++) {
    double diffMI = Math.abs(denseSubspaces.get(i).getCoverage() - mi[i]);
    resultMI += log2OrZero(diffMI);
    // Suffix set is empty at the last cut; contribute 0 in that case.
    double diffMP = (i != n) ? Math.abs(denseSubspaces.get(n - i).getCoverage() - mp[n - 1 - i]) : 0;
    resultMP += log2OrZero(diffMP);
    diff_mi[i] = resultMI;
    if(i != n) {
      diff_mp[n - 1 - i] = resultMP;
    }
  }
  return new double[][] { diff_mi, diff_mp };
}
The specified sorted list of dense subspaces is divided into the selected set I and the pruned set P . For each set the difference from the specified mean values is computed .
257
36
157,446
/**
 * Append one additional representation to this object: the type information
 * and the corresponding data are stored at the same position in their
 * respective lists.
 *
 * @param meta type information of the new representation
 * @param data the representation data itself
 */
public void append(SimpleTypeInformation<?> meta, Object data) {
  // Keep both lists in lock step: index i of meta describes contents[i].
  this.meta.add(meta);
  this.contents.add(data);
}
Append a single representation to the object .
35
9
157,447
/**
 * Returns whether this BitVector contains all bits that are set to true in
 * the specified bit string, i.e. whether {@code bitset} is a subset of the
 * stored bits.
 *
 * @param bitset bits to check for containment, as a long[] bit string
 * @return true iff every set bit of {@code bitset} is also set here
 */
public boolean contains(long[] bitset) {
  for(int i = 0; i < bitset.length; i++) {
    final long b = bitset[i];
    if(i >= bits.length) {
      // Our vector has no more words: any set bit in the remainder fails.
      // (Previously this fell through and read bits[i] out of bounds when
      // the extra word was all zero.)
      if(b != 0L) {
        return false;
      }
      continue;
    }
    // All bits of b must also be present in our word.
    if((b & bits[i]) != b) {
      return false;
    }
  }
  return true;
}
Returns whether this BitVector contains all bits that are set to true in the specified BitSet .
80
19
157,448
/**
 * Compute the Jaccard coefficient of two bit vectors: size of the
 * intersection divided by the size of the union.
 *
 * @param v2 the other bit vector
 * @return Jaccard similarity, in [0, 1]
 */
public double jaccardSimilarity(BitVector v2) {
  final double inter = BitsUtil.intersectionSize(bits, v2.bits);
  return inter / BitsUtil.unionSize(bits, v2.bits);
}
Compute the Jaccard similarity of two bit vectors .
49
12
157,449
/**
 * Write a short in big-endian byte order to the byte array at the given
 * offset.
 *
 * @param array target array
 * @param offset write position
 * @param v value to write (only the low 16 bits are used)
 * @return number of bytes written ({@code SIZE_SHORT})
 */
public static int writeShort(byte[] array, int offset, int v) {
  array[offset] = (byte) (v >>> 8);
  array[offset + 1] = (byte) v;
  return SIZE_SHORT;
}
Write a short to the byte array at the given offset .
58
12
157,450
/**
 * Write an integer in big-endian byte order to the byte array at the given
 * offset.
 *
 * @param array target array
 * @param offset write position
 * @param v value to write
 * @return number of bytes written ({@code SIZE_INT})
 */
public static int writeInt(byte[] array, int offset, int v) {
  // Emit most significant byte first.
  for(int shift = 24, i = 0; shift >= 0; shift -= 8, i++) {
    array[offset + i] = (byte) (v >>> shift);
  }
  return SIZE_INT;
}
Write an integer to the byte array at the given offset .
89
12
157,451
/**
 * Write a long in big-endian byte order to the byte array at the given
 * offset.
 *
 * @param array target array
 * @param offset write position
 * @param v value to write
 * @return number of bytes written ({@code SIZE_LONG})
 */
public static int writeLong(byte[] array, int offset, long v) {
  // Emit most significant byte first.
  for(int shift = 56, i = 0; shift >= 0; shift -= 8, i++) {
    array[offset + i] = (byte) (v >>> shift);
  }
  return SIZE_LONG;
}
Write a long to the byte array at the given offset .
154
12
157,452
/**
 * Write a float to the byte array at the given offset, using the IEEE 754
 * bit layout in big-endian order.
 *
 * @param array target array
 * @param offset write position
 * @param v value to write
 * @return number of bytes written
 */
public static int writeFloat(byte[] array, int offset, float v) {
  final int raw = Float.floatToIntBits(v);
  return writeInt(array, offset, raw);
}
Write a float to the byte array at the given offset .
39
12
157,453
/**
 * Write a double to the byte array at the given offset, using the IEEE 754
 * bit layout in big-endian order.
 *
 * @param array target array
 * @param offset write position
 * @param v value to write
 * @return number of bytes written
 */
public static int writeDouble(byte[] array, int offset, double v) {
  final long raw = Double.doubleToLongBits(v);
  return writeLong(array, offset, raw);
}
Write a double to the byte array at the given offset .
39
12
157,454
public static short readShort ( byte [ ] array , int offset ) { // First make integers to resolve signed vs. unsigned issues. int b0 = array [ offset + 0 ] & 0xFF ; int b1 = array [ offset + 1 ] & 0xFF ; return ( short ) ( ( b0 << 8 ) + ( b1 << 0 ) ) ; }
Read a short from the byte array at the given offset .
78
12
157,455
public static int readUnsignedShort ( byte [ ] array , int offset ) { // First make integers to resolve signed vs. unsigned issues. int b0 = array [ offset + 0 ] & 0xFF ; int b1 = array [ offset + 1 ] & 0xFF ; return ( ( b0 << 8 ) + ( b1 << 0 ) ) ; }
Read an unsigned short from the byte array at the given offset .
77
13
157,456
public static int readInt ( byte [ ] array , int offset ) { // First make integers to resolve signed vs. unsigned issues. int b0 = array [ offset + 0 ] & 0xFF ; int b1 = array [ offset + 1 ] & 0xFF ; int b2 = array [ offset + 2 ] & 0xFF ; int b3 = array [ offset + 3 ] & 0xFF ; return ( ( b0 << 24 ) + ( b1 << 16 ) + ( b2 << 8 ) + ( b3 << 0 ) ) ; }
Read an integer from the byte array at the given offset .
119
12
157,457
public static long readLong ( byte [ ] array , int offset ) { // First make integers to resolve signed vs. unsigned issues. long b0 = array [ offset + 0 ] ; long b1 = array [ offset + 1 ] & 0xFF ; long b2 = array [ offset + 2 ] & 0xFF ; long b3 = array [ offset + 3 ] & 0xFF ; long b4 = array [ offset + 4 ] & 0xFF ; int b5 = array [ offset + 5 ] & 0xFF ; int b6 = array [ offset + 6 ] & 0xFF ; int b7 = array [ offset + 7 ] & 0xFF ; return ( ( b0 << 56 ) + ( b1 << 48 ) + ( b2 << 40 ) + ( b3 << 32 ) + ( b4 << 24 ) + ( b5 << 16 ) + ( b6 << 8 ) + ( b7 << 0 ) ) ; }
Read a long from the byte array at the given offset .
203
12
157,458
public static void writeUnsignedVarint ( ByteBuffer buffer , int val ) { // Extra bytes have the high bit set while ( ( val & 0x7F ) != val ) { buffer . put ( ( byte ) ( ( val & 0x7F ) | 0x80 ) ) ; val >>>= 7 ; } // Last byte doesn't have high bit set buffer . put ( ( byte ) ( val & 0x7F ) ) ; }
Write an unsigned integer using a variable - length encoding .
95
11
157,459
public static void writeUnsignedVarintLong ( ByteBuffer buffer , long val ) { // Extra bytes have the high bit set while ( ( val & 0x7F ) != val ) { buffer . put ( ( byte ) ( ( val & 0x7F ) | 0x80 ) ) ; val >>>= 7 ; } // Last byte doesn't have high bit set buffer . put ( ( byte ) ( val & 0x7F ) ) ; }
Write an unsigned long using a variable - length encoding .
96
11
157,460
public static void writeString ( ByteBuffer buffer , String s ) throws IOException { if ( s == null ) { s = "" ; // Which will be written as Varint 0 = single byte 0. } ByteArrayUtil . STRING_SERIALIZER . toByteBuffer ( buffer , s ) ; }
Write a string to the buffer .
66
7
157,461
/**
 * Read an unsigned integer in 7-bit variable-length ("varint") encoding.
 *
 * @param buffer source buffer
 * @return decoded value
 * @throws IOException when the encoding runs longer than an integer permits
 */
public static int readUnsignedVarint(ByteBuffer buffer) throws IOException {
  int val = 0;
  for(int shift = 0;; shift += 7) {
    // After six continuation bytes, the quantity cannot fit an int anymore.
    if(shift > 35) {
      throw new IOException("Variable length quantity is too long for expected integer.");
    }
    final int data = buffer.get();
    val |= (data & 0x7F) << shift;
    // High bit clear marks the final byte.
    if((data & 0x80) == 0) {
      return val;
    }
  }
}
Read an unsigned integer .
105
5
157,462
/**
 * Unmap a memory-mapped byte buffer: flush it to disk and, on Java versions
 * before 9 (detected via the absence of {@code Runtime.version()}), attempt
 * to release the mapping eagerly via the internal sun.misc Cleaner.
 *
 * @param map the buffer to unmap; null is tolerated and ignored
 */
public static void unmapByteBuffer(final MappedByteBuffer map) {
  if(map == null) {
    return;
  }
  map.force();
  try {
    // Runtime.version() exists since Java 9; if present, bail out.
    if(Runtime.class.getDeclaredMethod("version") != null)
      return; // At later Java, the hack below will not work anymore.
  }
  catch(NoSuchMethodException e) {
    // This is an ugly hack, but all that Java <8 offers to help freeing
    // memory allocated using such buffers.
    // See also: http://bugs.sun.com/view_bug.do?bug_id=4724038
    AccessController.doPrivileged(new PrivilegedAction<Object>() {
      @Override
      public Object run() {
        try {
          // Reflectively invoke map.cleaner().clean().
          Method getCleanerMethod = map.getClass().getMethod("cleaner", new Class[0]);
          if(getCleanerMethod == null) {
            return null;
          }
          getCleanerMethod.setAccessible(true);
          Object cleaner = getCleanerMethod.invoke(map, new Object[0]);
          Method cleanMethod = cleaner.getClass().getMethod("clean");
          if(cleanMethod == null) {
            return null;
          }
          cleanMethod.invoke(cleaner);
        }
        catch(Exception e) {
          LoggingUtil.exception(e);
        }
        return null;
      }
    });
  }
  catch(SecurityException e1) {
    // Ignore.
  }
}
Unmap a byte buffer .
305
6
157,463
/**
 * Depth-sort the axes by their squared distance from the camera (farthest
 * first, via negation) and rebuild the dimension-to-order index.
 */
private void sortAxes() {
  for(int d = 0; d < shared.dim; d++) {
    final double sqdist = shared.camera.squaredDistanceFromCamera(shared.layout.getNode(d).getX(), shared.layout.getNode(d).getY());
    // Negate so that the most distant axis sorts first.
    axes[d].first = -sqdist;
    axes[d].second = d;
  }
  Arrays.sort(axes);
  // Invert the permutation: dindex[dimension] = render order.
  for(int i = 0; i < shared.dim; i++) {
    dindex[axes[i].second] = i;
  }
}
Depth - sort the axes .
128
6
157,464
/**
 * Sort the edges for rendering, keyed by the smaller depth index of their
 * two endpoint axes.
 *
 * @param dindex depth order of each dimension
 * @return pairs of (minimum axis order, edge number), sorted ascending
 */
private IntIntPair[] sortEdges(int[] dindex) {
  final IntIntPair[] edgesort = new IntIntPair[shared.layout.edges.size()];
  int e = 0;
  for(Layout.Edge edge : shared.layout.edges) {
    final int o1 = dindex[edge.dim1], o2 = dindex[edge.dim2];
    edgesort[e] = new IntIntPair(o1 < o2 ? o1 : o2, e);
    e++;
  }
  Arrays.sort(edgesort);
  return edgesort;
}
Sort the edges for rendering .
132
6
157,465
/**
 * Finish the computation of the mean: divide the accumulated weighted sums
 * by the total weight.
 */
@Override
public void finalizeFirstPassE() {
  final double inv = 1. / wsum;
  for(int i = 0; i < mean.length; i++) {
    mean[i] *= inv;
  }
}
Finish computation of the mean .
49
6
157,466
/**
 * Restore (de-normalize) a single dimension by the stored mean.
 *
 * @param d dimension index; ignored when only a single shared mean exists
 * @param val normalized value
 * @return the value scaled back by the mean of dimension d
 */
private double restore(int d, double val) {
  // A one-element mean array applies to every dimension.
  return val * mean[mean.length == 1 ? 0 : d];
}
Restore a single dimension .
34
6
157,467
/**
 * Main loop for OUTRES: compute an outlier score for every object via the
 * recursive subspace search, and package the scores as an outlier result.
 *
 * @param relation the data relation to process
 * @return outlier detection result (inverted: low scores are outliers)
 */
public OutlierResult run(Relation<? extends NumberVector> relation) {
  final DBIDs ids = relation.getDBIDs();
  WritableDoubleDataStore ranks = DataStoreUtil.makeDoubleStorage(ids, DataStoreFactory.HINT_STATIC);
  DoubleMinMax minmax = new DoubleMinMax();
  KernelDensityEstimator kernel = new KernelDensityEstimator(relation, eps);
  long[] subspace = BitsUtil.zero(kernel.dim);
  FiniteProgress progress = LOG.isVerbose() ? new FiniteProgress("OUTRES scores", ids.size(), LOG) : null;
  for(DBIDIter iditer = ids.iter(); iditer.valid(); iditer.advance()) {
    // Reuse the subspace bitmask across objects: start empty each time.
    BitsUtil.zeroI(subspace);
    double score = outresScore(0, subspace, iditer, kernel, ids);
    ranks.putDouble(iditer, score);
    minmax.put(score);
    LOG.incrementProcessed(progress);
  }
  LOG.ensureCompleted(progress);
  // Inverted meta: smaller scores indicate stronger outliers.
  OutlierScoreMeta meta = new InvertedOutlierScoreMeta(minmax.getMin(), minmax.getMax(), 0., 1., 1.);
  return new OutlierResult(meta, new MaterializedDoubleRelation("OUTRES", "outres-score", ranks, ids));
}
Main loop for OUTRES
326
5
157,468
/**
 * Main loop of OUTRES, run for each object: recursively extend the current
 * subspace by each remaining dimension, test the extended subspace for
 * relevance, and multiply density-deviation factors into the score.
 *
 * @param s first dimension index to consider for extension
 * @param subspace current subspace bitmask (modified and restored in place)
 * @param id object being scored
 * @param kernel kernel density estimation helper
 * @param cands candidate neighbors to search within
 * @return accumulated outlier score factor (1.0 = inlier)
 */
public double outresScore(final int s, long[] subspace, DBIDRef id, KernelDensityEstimator kernel, DBIDs cands) {
  double score = 1.0; // Initial score is 1.0
  final SubspaceEuclideanDistanceFunction df = new SubspaceEuclideanDistanceFunction(subspace);
  MeanVariance meanv = new MeanVariance();
  ModifiableDoubleDBIDList neighcand = DBIDUtil.newDistanceDBIDList(cands.size());
  ModifiableDoubleDBIDList nn = DBIDUtil.newDistanceDBIDList(cands.size());
  for(int i = s; i < kernel.dim; i++) {
    assert !BitsUtil.get(subspace, i);
    // Temporarily add dimension i to the subspace.
    BitsUtil.setI(subspace, i);
    df.setSelectedDimensions(subspace);
    final double adjustedEps = kernel.adjustedEps(kernel.dim);
    DoubleDBIDList neigh = initialRange(id, cands, df, adjustedEps * 2, kernel, neighcand);
    // Relevance test
    if(neigh.size() > 2) {
      if(relevantSubspace(subspace, neigh, kernel)) {
        final double density = kernel.subspaceDensity(subspace, neigh);
        // Compute mean and standard deviation for densities of neighbors.
        meanv.reset();
        for(DoubleDBIDListIter neighbor = neigh.iter(); neighbor.valid(); neighbor.advance()) {
          subsetNeighborhoodQuery(neighcand, neighbor, df, adjustedEps, kernel, nn);
          meanv.put(kernel.subspaceDensity(subspace, nn));
        }
        final double deviation = (meanv.getMean() - density) / (2. * meanv.getSampleStddev());
        // High deviation:
        if(deviation >= 1) {
          score *= density / deviation;
        }
        // Recursion
        score *= outresScore(i + 1, subspace, id, kernel, neighcand);
      }
    }
    // Remove dimension i again before trying the next one.
    BitsUtil.clearI(subspace, i);
  }
  return score;
}
Main loop of OUTRES . Run for each object
481
10
157,469
/**
 * Initial range query: collect all candidates within twice the query radius
 * (kept sorted in {@code n} for later subset refinement), but return only
 * the prefix of those within the radius itself.
 *
 * @param obj query object
 * @param cands candidate ids to scan
 * @param df distance function to use
 * @param eps query radius; candidates up to 2*eps are retained in n
 * @param kernel provides access to the data relation
 * @param n output buffer, cleared and refilled (sorted by distance)
 * @return the matches within eps, as a slice of n
 */
private DoubleDBIDList initialRange(DBIDRef obj, DBIDs cands, PrimitiveDistanceFunction<? super NumberVector> df, double eps, KernelDensityEstimator kernel, ModifiableDoubleDBIDList n) {
  n.clear();
  NumberVector o = kernel.relation.get(obj);
  final double twoeps = eps * 2;
  int matches = 0;
  for(DBIDIter cand = cands.iter(); cand.valid(); cand.advance()) {
    final double dist = df.distance(o, kernel.relation.get(cand));
    if(dist <= twoeps) {
      n.add(dist, cand);
      // Count the true matches within eps; they form the sorted prefix.
      if(dist <= eps) {
        ++matches;
      }
    }
  }
  n.sort();
  return n.slice(0, matches);
}
Initial range query .
180
4
157,470
private DoubleDBIDList subsetNeighborhoodQuery ( DoubleDBIDList neighc , DBIDRef dbid , PrimitiveDistanceFunction < ? super NumberVector > df , double adjustedEps , KernelDensityEstimator kernel , ModifiableDoubleDBIDList n ) { n . clear ( ) ; NumberVector query = kernel . relation . get ( dbid ) ; for ( DoubleDBIDListIter neighbor = neighc . iter ( ) ; neighbor . valid ( ) ; neighbor . advance ( ) ) { // TODO: use triangle inequality for pruning double dist = df . distance ( query , kernel . relation . get ( neighbor ) ) ; if ( dist <= adjustedEps ) { n . add ( dist , neighbor ) ; } } return n ; }
Refine neighbors within a subset .
160
7
157,471
/**
 * Subspace relevance test: a subspace is considered relevant iff, in every
 * selected dimension, the neighbor values deviate significantly from a
 * uniform distribution (one-sample Kolmogorov-Smirnov test at the 0.001
 * level).
 *
 * @param subspace bitmask of the dimensions to test
 * @param neigh neighbor set to examine
 * @param kernel provides access to the data relation
 * @return true iff every dimension rejects the uniform hypothesis
 */
protected boolean relevantSubspace(long[] subspace, DoubleDBIDList neigh, KernelDensityEstimator kernel) {
  final double crit = K_S_CRITICAL001 / FastMath.sqrt(neigh.size() - 2);
  double[] data = new double[neigh.size()];
  Relation<? extends NumberVector> relation = kernel.relation;
  for(int dim = BitsUtil.nextSetBit(subspace, 0); dim >= 0; dim = BitsUtil.nextSetBit(subspace, dim + 1)) {
    // TODO: can/should we save this copy?
    int count = 0;
    for(DBIDIter neighbor = neigh.iter(); neighbor.valid(); neighbor.advance()) {
      data[count++] = relation.get(neighbor).doubleValue(dim);
    }
    assert (count == neigh.size());
    Arrays.sort(data);
    final double min = data[0], norm = data[data.length - 1] - min;
    // Kolmogorow-Smirnow-Test against uniform distribution:
    boolean flag = false;
    for(int j = 1, end = data.length - 1; j < end; j++) {
      // Compare the empirical CDF with the uniform CDF on [min, max].
      if(Math.abs(j / (data.length - 2.) - (data[j] - min) / norm) > crit) {
        flag = true;
        break;
      }
    }
    // No significant deviation in this dimension: subspace is irrelevant.
    if(!flag) {
      return false;
    }
  }
  return true;
}
Subspace relevance test .
329
5
157,472
/**
 * Static helper: arithmetic mean of the given values.
 *
 * @param data values to average (an empty array yields NaN)
 * @return the sample mean
 */
public static double of(double... data) {
  double total = 0.;
  for(int i = 0; i < data.length; i++) {
    total += data[i];
  }
  return total / data.length;
}
Static helper function .
37
4
157,473
@ Reference ( authors = "P. M. Neely" , // title = "Comparison of Several Algorithms for Computation of Means, Standard Deviations and Correlation Coefficients" , // booktitle = "Communications of the ACM 9(7), 1966" , // url = "https://doi.org/10.1145/365719.365958" , // bibkey = "doi:10.1145/365719.365958" ) public static double highPrecision ( double ... data ) { double sum = 0. ; for ( double v : data ) { sum += v ; } sum /= data . length ; // Perform a second pass to increase precision // In ideal math, this would sum to 0. double err = 0 ; for ( double v : data ) { err += v - sum ; } return sum + err / data . length ; }
Static helper function with extra precision
192
6
157,474
/**
 * Bulk insert: initializes the structure from the first entry when needed,
 * then inserts all entries one by one.
 *
 * @param entries entries to insert; an empty list is a no-op
 */
public void insertAll(List<E> entries) {
  if(entries.isEmpty()) {
    return;
  }
  if(!initialized) {
    initialize(entries.get(0));
  }
  for(E entry : entries) {
    insert(entry, false);
  }
}
Bulk insert .
54
4
157,475
/**
 * Sorts the entries of the specified node according to their minimum
 * distance to the specified object (distance to the routing object minus
 * the covering radius, clamped to zero).
 *
 * @param node node whose entries to sort
 * @param q id of the query object
 * @return pairs of (minimum distance, entry index), ascending by distance
 */
protected final List<DoubleIntPair> getSortedEntries(N node, DBID q) {
  final int count = node.getNumEntries();
  List<DoubleIntPair> result = new ArrayList<>(count);
  for(int i = 0; i < count; i++) {
    E entry = node.getEntry(i);
    double dist = distance(entry.getRoutingObjectID(), q);
    double radius = entry.getCoveringRadius();
    // Inside the covering ball the minimum distance is zero.
    result.add(new DoubleIntPair(dist > radius ? dist - radius : 0.0, i));
  }
  Collections.sort(result);
  return result;
}
Sorts the entries of the specified node according to their minimum distance to the specified object .
151
18
157,476
/**
 * Returns the distance between the routing objects of two entries.
 *
 * @param e1 first entry
 * @param e2 second entry
 * @return distance between the two routing objects
 */
public final double distance(E e1, E e2) {
  // Delegate to the id-based distance on the routing objects.
  return distance(e1.getRoutingObjectID(), e2.getRoutingObjectID());
}
Returns the distance between the routing object of two entries .
41
11
157,477
/**
 * Compute the alpha_r factors using the method of probability-weighted
 * moments, on data sorted ascending.
 *
 * @param data sorted input data
 * @param adapter array adapter for the data
 * @param nmom number of moments to compute
 * @return the alpha_0 .. alpha_{nmom-1} estimates
 */
public static <A> double[] alphaPWM(A data, NumberArrayAdapter<?, A> adapter, final int nmom) {
  final int n = adapter.size(data);
  final double[] xmom = new double[nmom];
  double weight = 1. / n;
  for(int i = 0; i < n; i++) {
    final double val = adapter.getDouble(data, i);
    xmom[0] += weight * val;
    for(int j = 1; j < nmom; j++) {
      // Use floating-point division: the previous integer division
      // truncated the weight factor to 0 or 1, zeroing all higher moments.
      weight *= (n - i - j + 1) / (double) (n - j + 1);
      xmom[j] += weight * val;
    }
  }
  return xmom;
}
Compute the alpha_r factors using the method of probability - weighted moments .
157
16
157,478
/**
 * Compute the alpha_r and beta_r factors in parallel using the method of
 * probability-weighted moments; usually cheaper than computing them
 * separately. Results are interleaved: even slots hold alpha, odd slots
 * hold beta.
 *
 * @param data sorted input data
 * @param adapter array adapter for the data
 * @param nmom number of moments to compute
 * @return interleaved alpha/beta estimates of length 2*nmom
 */
public static <A> double[] alphaBetaPWM(A data, NumberArrayAdapter<?, A> adapter, final int nmom) {
  final int n = adapter.size(data);
  final double[] xmom = new double[nmom << 1];
  double aweight = 1. / n, bweight = aweight;
  for(int i = 0; i < n; i++) {
    final double val = adapter.getDouble(data, i);
    xmom[0] += aweight * val;
    xmom[1] += bweight * val;
    for(int j = 1, k = 2; j < nmom; j++, k += 2) {
      // Use floating-point division: the previous integer division
      // truncated these weight factors to 0 or 1.
      aweight *= (n - i - j + 1) / (double) (n - j + 1);
      bweight *= (i - j + 1) / (double) (n - j + 1);
      // Alpha goes to the even slot, beta to the odd slot; previously both
      // terms were accumulated into xmom[k + 1], losing the alpha moments.
      xmom[k] += aweight * val;
      xmom[k + 1] += bweight * val;
    }
  }
  return xmom;
}
Compute the alpha_r and beta_r factors in parallel using the method of probability - weighted moments . Usually cheaper than computing them separately .
224
29
157,479
/**
 * Compute the sample L-moments using probability-weighted moments, on a
 * pre-sorted sample. Slots 0 and 1 hold the L-location and L-scale; slots
 * 2..nmom-1 hold the L-moment ratios tau_3..tau_nmom.
 *
 * @param sorted data, sorted ascending
 * @param adapter array adapter for the data
 * @param nmom number of moments to compute (capped by the sample size)
 * @return L-moments and L-moment ratios
 */
public static <A> double[] samLMR(A sorted, NumberArrayAdapter<?, A> adapter, int nmom) {
  final int n = adapter.size(sorted);
  final double[] sum = new double[nmom];
  nmom = n < nmom ? n : nmom;
  // Estimate probability weighted moments (unbiased)
  for(int i = 0; i < n; i++) {
    double term = adapter.getDouble(sorted, i);
    // Robustness: skip bad values
    if(Double.isInfinite(term) || Double.isNaN(term)) {
      continue;
    }
    sum[0] += term;
    // Higher moments weight each value by the falling factorial of its rank.
    for(int j = 1, z = i; j < nmom; j++, z--) {
      term *= z;
      sum[j] += term;
    }
  }
  // Normalize by "n choose (j + 1)"
  sum[0] /= n;
  double z = n;
  for(int j = 1; j < nmom; j++) {
    z *= n - j;
    sum[j] /= z;
  }
  normalizeLMR(sum, nmom);
  // Handle case when lambda2 == 0, by setting tau3...tauN = 0:
  if(sum[1] == 0) {
    for(int i = 2; i < nmom; i++) {
      sum[i] = 0.; // tau3...tauN = 0.
    }
    return sum;
  }
  // Map lambda3...lambdaN to tau3...tauN
  for(int i = 2; i < nmom; i++) {
    sum[i] /= sum[1];
  }
  return sum;
}
Compute the sample L - Moments using probability weighted moments .
367
12
157,480
/**
 * Normalize the moments: convert probability-weighted moment sums into
 * L-moments in place, via the recurrence for the shifted Legendre
 * polynomial coefficients.
 *
 * @param sum PWM sums on input, L-moments on output
 * @param nmom number of moments
 */
private static void normalizeLMR(double[] sum, int nmom) {
  for(int k = nmom - 1; k > 0; k--) {
    // Leading coefficient alternates in sign with k.
    double coef = ((k & 1) == 0) ? 1 : -1;
    double acc = coef * sum[0];
    for(int i = 1; i <= k; i++) {
      // Next shifted-Legendre coefficient from the previous one.
      coef *= -(k + i) * (k - i + 1.) / ((double) i * i);
      acc += coef * sum[i];
    }
    sum[k] = acc;
  }
}
Normalize the moments
135
4
157,481
private int [ ] countItemSupport ( final Relation < BitVector > relation , final int dim ) { final int [ ] counts = new int [ dim ] ; FiniteProgress prog = LOG . isVerbose ( ) ? new FiniteProgress ( "Finding frequent 1-items" , relation . size ( ) , LOG ) : null ; for ( DBIDIter iditer = relation . iterDBIDs ( ) ; iditer . valid ( ) ; iditer . advance ( ) ) { SparseFeatureVector < ? > bv = relation . get ( iditer ) ; // TODO: only count those which satisfy minlength? for ( int it = bv . iter ( ) ; bv . iterValid ( it ) ; it = bv . iterAdvance ( it ) ) { counts [ bv . iterDim ( it ) ] ++ ; } LOG . incrementProcessed ( prog ) ; } LOG . ensureCompleted ( prog ) ; return counts ; }
Count the support of each 1 - item .
203
9
157,482
/**
 * Build the actual FP-tree structure: each transaction is mapped to the
 * frequent-item index space, sorted, and inserted into the tree.
 *
 * @param relation transaction database as bit vectors
 * @param iidx mapping from item to frequent-item index; negative = infrequent
 * @param items number of frequent items
 * @return the populated FP-tree
 */
private FPTree buildFPTree(final Relation<BitVector> relation, int[] iidx, final int items) {
  FPTree tree = new FPTree(items);
  FiniteProgress prog = LOG.isVerbose() ? new FiniteProgress("Building FP-tree", relation.size(), LOG) : null;
  // Reusable buffer for the remapped transaction.
  int[] buf = new int[items];
  for(DBIDIter iditer = relation.iterDBIDs(); iditer.valid(); iditer.advance()) {
    // Convert item to index representation:
    int l = 0;
    SparseFeatureVector<?> bv = relation.get(iditer);
    for(int it = bv.iter(); bv.iterValid(it); it = bv.iterAdvance(it)) {
      int i = iidx[bv.iterDim(it)];
      if(i < 0) {
        continue; // Skip non-frequent items
      }
      buf[l++] = i;
    }
    // Skip too short entries
    if(l >= minlength) {
      Arrays.sort(buf, 0, l); // Sort ascending
      tree.insert(buf, 0, l, 1);
    }
    LOG.incrementProcessed(prog);
  }
  LOG.ensureCompleted(prog);
  return tree;
}
Build the actual FP - tree structure .
292
8
157,483
/**
 * Append a textual representation of this association rule to a string
 * buffer, in the form "antecedent --> consequent: support : measure".
 *
 * @param buf buffer to append to
 * @param meta field metadata used to label the items
 * @return the same buffer, for chaining
 */
public StringBuilder appendTo(StringBuilder buf, VectorFieldTypeInformation<BitVector> meta) {
  antecedent.appendTo(buf, meta);
  buf.append(" --> ");
  consequent.appendItemsTo(buf, meta);
  return buf.append(": ").append(union.getSupport()).append(" : ").append(measure);
}
Append to a string buffer .
101
7
157,484
/**
 * Process two clustering results into a contingency table. The table has
 * two extra rows and columns: index size1/size2 holds row/column sums, and
 * index size1+1/size2+1 holds the cluster sizes themselves. Noise clusters
 * are flagged in the noise1/noise2 bitmasks.
 *
 * @param result1 first clustering
 * @param result2 second clustering
 */
public void process(Clustering<?> result1, Clustering<?> result2) {
  // Get the clusters
  final List<? extends Cluster<?>> cs1 = result1.getAllClusters();
  final List<? extends Cluster<?>> cs2 = result2.getAllClusters();
  // Initialize
  size1 = cs1.size();
  size2 = cs2.size();
  contingency = new int[size1 + 2][size2 + 2];
  noise1 = BitsUtil.zero(size1);
  noise2 = BitsUtil.zero(size2);
  // Fill main part of matrix
  {
    // First record the second clustering's cluster sizes and noise flags.
    final Iterator<? extends Cluster<?>> it2 = cs2.iterator();
    for(int i2 = 0; it2.hasNext(); i2++) {
      final Cluster<?> c2 = it2.next();
      if(c2.isNoise()) {
        BitsUtil.setI(noise2, i2);
      }
      contingency[size1 + 1][i2] = c2.size();
      contingency[size1 + 1][size2] += c2.size();
    }
  }
  final Iterator<? extends Cluster<?>> it1 = cs1.iterator();
  for(int i1 = 0; it1.hasNext(); i1++) {
    final Cluster<?> c1 = it1.next();
    if(c1.isNoise()) {
      BitsUtil.setI(noise1, i1);
    }
    // Set representation for fast intersection counting below.
    final DBIDs ids = DBIDUtil.ensureSet(c1.getIDs());
    contingency[i1][size2 + 1] = c1.size();
    contingency[size1][size2 + 1] += c1.size();
    final Iterator<? extends Cluster<?>> it2 = cs2.iterator();
    for(int i2 = 0; it2.hasNext(); i2++) {
      final Cluster<?> c2 = it2.next();
      // Cell value: objects shared by cluster i1 and cluster i2.
      int count = DBIDUtil.intersectionSize(ids, c2.getIDs());
      contingency[i1][i2] = count;
      contingency[i1][size2] += count;
      contingency[size1][i2] += count;
      contingency[size1][size2] += count;
    }
  }
}
Process two clustering results .
546
6
157,485
/**
 * Choose a random subspace.
 *
 * NOTE(review): the target dimensionality subdim is drawn from
 * [mindim, maxdim), but the loop below sets exactly alldim - subdim bits in
 * the returned mask (a partial Fisher-Yates selection without replacement).
 * Whether the set bits denote the chosen dimensions or the discarded ones
 * cannot be told from this method alone - confirm against the caller before
 * relying on either reading.
 *
 * @param alldim total number of dimensions
 * @param mindim minimum subspace dimensionality (inclusive)
 * @param maxdim maximum subspace dimensionality (exclusive)
 * @param rand random generator
 * @return dimension bitmask
 */
private long[] randomSubspace(final int alldim, final int mindim, final int maxdim, final Random rand) {
  long[] dimset = BitsUtil.zero(alldim);
  // Fill with all dimensions
  int[] dims = new int[alldim];
  for(int d = 0; d < alldim; d++) {
    dims[d] = d;
  }
  // Target dimensionality:
  int subdim = mindim + rand.nextInt(maxdim - mindim);
  // Shrink the subspace to the destination size
  for(int d = 0; d < alldim - subdim; d++) {
    // Draw a remaining dimension uniformly, then swap it out of the pool.
    int s = rand.nextInt(alldim - d);
    BitsUtil.setI(dimset, dims[s]);
    dims[s] = dims[alldim - d - 1];
  }
  return dimset;
}
Choose a random subspace .
195
6
157,486
/**
 * Render the SVG checkbox to a plot: a box, a (conditionally hidden)
 * checkmark, an optional text label, and a click handler that toggles the
 * checked state and notifies listeners.
 *
 * @param svgp the plot to create elements in
 * @param x horizontal position
 * @param y vertical position
 * @param size edge length of the box
 * @return the assembled checkbox group element
 */
public Element renderCheckBox(SVGPlot svgp, double x, double y, double size) {
  // create check
  final Element checkmark = SVGEffects.makeCheckmark(svgp);
  checkmark.setAttribute(SVGConstants.SVG_TRANSFORM_ATTRIBUTE, "scale(" + (size / 12) + ") translate(" + x + " " + y + ")");
  // Hide the checkmark while unchecked.
  if(!checked) {
    checkmark.setAttribute(SVGConstants.SVG_STYLE_ATTRIBUTE, SVGConstants.CSS_DISPLAY_PROPERTY + ":" + SVGConstants.CSS_NONE_VALUE);
  }
  // create box
  Element checkbox_box = SVGUtil.svgRect(svgp.getDocument(), x, y, size, size);
  checkbox_box.setAttribute(SVGConstants.SVG_FILL_ATTRIBUTE, "#d4e4f1");
  checkbox_box.setAttribute(SVGConstants.SVG_STROKE_ATTRIBUTE, "#a0a0a0");
  checkbox_box.setAttribute(SVGConstants.SVG_STROKE_WIDTH_ATTRIBUTE, "0.5");
  // create checkbox
  final Element checkbox = svgp.svgElement(SVGConstants.SVG_G_TAG);
  checkbox.appendChild(checkbox_box);
  checkbox.appendChild(checkmark);
  // create Label
  if(label != null) {
    Element labele = svgp.svgText(x + 2 * size, y + size, label);
    // TODO: font size!
    checkbox.appendChild(labele);
  }
  // add click event listener
  EventTarget targ = (EventTarget) checkbox;
  targ.addEventListener(SVGConstants.SVG_CLICK_EVENT_TYPE, new EventListener() {
    @Override
    public void handleEvent(Event evt) {
      // Toggle the state, then show/hide the checkmark accordingly.
      if(checked ^= true) {
        checkmark.removeAttribute(SVGConstants.SVG_STYLE_ATTRIBUTE);
      }
      else {
        checkmark.setAttribute(SVGConstants.SVG_STYLE_ATTRIBUTE, SVGConstants.CSS_DISPLAY_PROPERTY + ":" + SVGConstants.CSS_NONE_VALUE);
      }
      fireSwitchEvent(new ChangeEvent(SVGCheckbox.this));
    }
  }, false);
  return checkbox;
}
Render the SVG checkbox to a plot
544
8
157,487
/**
 * Fire the given change event to all registered ChangeListeners.
 *
 * @param evt the event to deliver
 */
protected void fireSwitchEvent(ChangeEvent evt) {
  // The listener list stores (class, instance) pairs flattened into one array.
  final Object[] entries = listenerList.getListenerList();
  for(int i = 0; i + 1 < entries.length; i += 2) {
    if(entries[i] == ChangeListener.class) {
      ((ChangeListener) entries[i + 1]).stateChanged(evt);
    }
  }
}
Fire the event to listeners
80
5
157,488
/**
 * Calculate selectivity coefficients: for each approximation file, the
 * number of partitions spanned by the query range [q - epsilon, q + epsilon]
 * is stored into the pair's first component.
 *
 * @param daFiles approximation files, one per dimension considered
 * @param query the query vector
 * @param epsilon query range radius
 */
protected static void calculateSelectivityCoeffs(List<DoubleObjPair<DAFile>> daFiles, NumberVector query, double epsilon) {
  final int dimensions = query.getDimensionality();
  double[] lowerVals = new double[dimensions];
  double[] upperVals = new double[dimensions];
  VectorApproximation queryApprox = calculatePartialApproximation(null, query, daFiles);
  // Build the epsilon-shifted corner vectors.
  for(int i = 0; i < dimensions; i++) {
    final double val = query.doubleValue(i);
    lowerVals[i] = val - epsilon;
    upperVals[i] = val + epsilon;
  }
  DoubleVector lowerEpsilon = DoubleVector.wrap(lowerVals);
  VectorApproximation lowerEpsilonPartitions = calculatePartialApproximation(null, lowerEpsilon, daFiles);
  DoubleVector upperEpsilon = DoubleVector.wrap(upperVals);
  VectorApproximation upperEpsilonPartitions = calculatePartialApproximation(null, upperEpsilon, daFiles);
  for(int i = 0; i < daFiles.size(); i++) {
    // Cells from the lower corner to the query plus from the query to the
    // upper corner, plus the query cell itself.
    int coeff = (queryApprox.getApproximation(i) - lowerEpsilonPartitions.getApproximation(i)) + (upperEpsilonPartitions.getApproximation(i) - queryApprox.getApproximation(i)) + 1;
    daFiles.get(i).first = coeff;
  }
}
Calculate selectivity coefficients .
343
7
157,489
/**
 * Calculate a partial vector approximation: map each dimension's value to
 * the index of the partition (border interval) that contains it.
 *
 * NOTE(review): the array is sized by the vector dimensionality but indexed
 * by daFiles.size() - this assumes both agree; confirm against callers.
 *
 * @param id object id to attach, may be null
 * @param dv the vector to approximate
 * @param daFiles approximation files providing the split positions
 * @return the approximation cell indices wrapped with the id
 */
protected static VectorApproximation calculatePartialApproximation(DBID id, NumberVector dv, List<DoubleObjPair<DAFile>> daFiles) {
  int[] approximation = new int[dv.getDimensionality()];
  for(int i = 0; i < daFiles.size(); i++) {
    double val = dv.doubleValue(i);
    double[] borders = daFiles.get(i).second.getSplitPositions();
    assert borders != null : "borders are null";
    int lastBorderIndex = borders.length - 1;
    // value is lower outlier
    if(val < borders[0]) {
      approximation[i] = 0;
    } // value is upper outlier
    else if(val > borders[lastBorderIndex]) {
      approximation[i] = lastBorderIndex - 1;
    } // normal case
    else {
      // Linear scan for the interval [borders[s], borders[s+1]) holding val.
      for(int s = 0; s < lastBorderIndex; s++) {
        // NOTE(review): approximation[i] != -1 is always true here, since
        // Java int arrays are zero-initialized; the guard is dead code.
        if(val >= borders[s] && val < borders[s + 1] && approximation[i] != -1) {
          approximation[i] = s;
        }
      }
    }
  }
  return new VectorApproximation(id, approximation);
}
Calculate partial vector approximation .
262
7
157,490
/**
 * Returns a string representation of the solution of this equation system:
 * each solution coordinate x_0[i], followed by the free-parameter terms
 * "+ a_j * u[i][j]" (written out only on the middle row, padded with
 * spaces elsewhere).
 *
 * @param fractionDigits number of fraction digits for the number format
 * @return formatted solution string
 * @throws IllegalStateException if the system is not solvable
 */
public String solutionToString(int fractionDigits) {
  if(!isSolvable()) {
    throw new IllegalStateException("System is not solvable!");
  }
  // US locale, fixed fraction digits, no sign prefixes.
  DecimalFormat nf = new DecimalFormat();
  nf.setMinimumFractionDigits(fractionDigits);
  nf.setMaximumFractionDigits(fractionDigits);
  nf.setDecimalFormatSymbols(new DecimalFormatSymbols(Locale.US));
  nf.setNegativePrefix("");
  nf.setPositivePrefix("");
  // Row on which the "+ a_j *" labels are printed.
  int row = coeff[0].length >> 1;
  int params = u.length;
  int paramsDigits = integerDigits(params);
  int x0Digits = maxIntegerDigits(x_0);
  int[] uDigits = maxIntegerDigits(u);
  StringBuilder buffer = new StringBuilder();
  for(int i = 0; i < x_0.length; i++) {
    double value = x_0[i];
    format(nf, buffer, value, x0Digits);
    for(int j = 0; j < u[0].length; j++) {
      if(i == row) {
        buffer.append(" + a_").append(j).append(" * ");
      }
      else {
        // Pad other rows to keep the columns aligned.
        buffer.append(" ");
        for(int d = 0; d < paramsDigits; d++) {
          buffer.append(' ');
        }
      }
      format(nf, buffer, u[i][j], uDigits[j]);
    }
    buffer.append(' ');
  }
  return buffer.toString();
}
Returns a string representation of the solution of this equation system .
378
12
157,491
/**
 * Brings this linear equation system into reduced row echelon form with
 * choice of pivot method: repeatedly search a pivot in the remaining
 * submatrix, permute it onto the diagonal, and eliminate its column,
 * until the pivot vanishes or rows/columns are exhausted.
 *
 * @param method pivot search strategy, {@code TRIVAL_PIVOT_SEARCH} or
 *        {@code TOTAL_PIVOT_SEARCH}
 */
private void reducedRowEchelonForm(int method) {
  final int rows = coeff.length;
  final int cols = coeff[0].length;
  int k = -1; // denotes current position on diagonal
  int pivotRow; // row index of pivot element
  int pivotCol; // column index of pivot element
  double pivot; // value of pivot element
  // main loop, transformation to reduced row echelon form
  boolean exitLoop = false;
  while(!exitLoop) {
    k++;
    // pivot search for entry in remaining matrix
    // (depends on chosen method in switch)
    // store position in pivotRow, pivotCol
    // TODO: Note that we're using "row, col", whereas "col, row" would be
    // more common?
    IntIntPair pivotPos = new IntIntPair(0, 0);
    IntIntPair currPos = new IntIntPair(k, k);
    switch(method) {
    case TRIVAL_PIVOT_SEARCH:
      pivotPos = nonZeroPivotSearch(k);
      break;
    case TOTAL_PIVOT_SEARCH:
      pivotPos = totalPivotSearch(k);
      break;
    }
    pivotRow = pivotPos.first;
    pivotCol = pivotPos.second;
    // Access the matrix through the row/col permutation vectors.
    pivot = coeff[this.row[pivotRow]][col[pivotCol]];
    if(LOG.isDebugging()) {
      StringBuilder msg = new StringBuilder();
      msg.append("equations ").append(equationsToString(4));
      msg.append(" *** pivot at (").append(pivotRow).append(' ').append(pivotCol).append(") = ").append(pivot).append(' ');
      LOG.debugFine(msg.toString());
    }
    // permute rows and columns to get this entry onto
    // the diagonal
    permutePivot(pivotPos, currPos);
    // test conditions for exiting loop
    // after this iteration
    // reasons are: Math.abs(pivot) == 0
    if((Math.abs(pivot) <= DELTA)) {
      exitLoop = true;
    }
    // pivoting only if Math.abs(pivot) > 0
    // and k <= m - 1
    if((Math.abs(pivot) > DELTA)) {
      rank++;
      pivotOperation(k);
    }
    // test conditions for exiting loop
    // after this iteration
    // reasons are: k == rows-1 : no more rows
    // k == cols-1 : no more columns
    if(k == rows - 1 || k == cols - 1) {
      exitLoop = true;
    }
  } // end while
  reducedRowEchelonForm = true;
}
Brings this linear equation system into reduced row echelon form with choice of pivot method .
575
19
157,492
private IntIntPair nonZeroPivotSearch ( int k ) { int i , j ; double absValue ; for ( i = k ; i < coeff . length ; i ++ ) { for ( j = k ; j < coeff [ 0 ] . length ; j ++ ) { // compute absolute value of // current entry in absValue absValue = Math . abs ( coeff [ row [ i ] ] [ col [ j ] ] ) ; // check if absValue is non-zero if ( absValue > 0 ) { // found a pivot element return new IntIntPair ( i , j ) ; } // end if } // end for j } // end for k return new IntIntPair ( k , k ) ; }
Method for trivial pivot search; searches for the first non-zero entry.
156
12
157,493
private void permutePivot ( IntIntPair pos1 , IntIntPair pos2 ) { int r1 = pos1 . first ; int c1 = pos1 . second ; int r2 = pos2 . first ; int c2 = pos2 . second ; int index ; index = row [ r2 ] ; row [ r2 ] = row [ r1 ] ; row [ r1 ] = index ; index = col [ c2 ] ; col [ c2 ] = col [ c1 ] ; col [ c1 ] = index ; }
Permutes two matrix rows and two matrix columns.
118
9
157,494
private void pivotOperation ( int k ) { double pivot = coeff [ row [ k ] ] [ col [ k ] ] ; // pivot row: set pivot to 1 coeff [ row [ k ] ] [ col [ k ] ] = 1 ; for ( int i = k + 1 ; i < coeff [ k ] . length ; i ++ ) { coeff [ row [ k ] ] [ col [ i ] ] /= pivot ; } rhs [ row [ k ] ] /= pivot ; if ( LOG . isDebugging ( ) ) { StringBuilder msg = new StringBuilder ( ) ; msg . append ( "set pivot element to 1 " ) . append ( equationsToString ( 4 ) ) ; LOG . debugFine ( msg . toString ( ) ) ; } // for (int i = k + 1; i < coeff.length; i++) { for ( int i = 0 ; i < coeff . length ; i ++ ) { if ( i == k ) { continue ; } // compute factor double q = coeff [ row [ i ] ] [ col [ k ] ] ; // modify entry a[i,k], i <> k coeff [ row [ i ] ] [ col [ k ] ] = 0 ; // modify entries a[i,j], i > k fixed, j = k+1...n-1 for ( int j = k + 1 ; j < coeff [ 0 ] . length ; j ++ ) { coeff [ row [ i ] ] [ col [ j ] ] = coeff [ row [ i ] ] [ col [ j ] ] - coeff [ row [ k ] ] [ col [ j ] ] * q ; } // end for j // modify right-hand-side rhs [ row [ i ] ] = rhs [ row [ i ] ] - rhs [ row [ k ] ] * q ; } // end for k if ( LOG . isDebugging ( ) ) { StringBuilder msg = new StringBuilder ( ) ; msg . append ( "after pivot operation " ) . append ( equationsToString ( 4 ) ) ; LOG . debugFine ( msg . toString ( ) ) ; } }
Performs a pivot operation.
459
5
157,495
/**
 * Solves this linear equation system with the chosen pivot search method.
 * <p>
 * After bringing the system into reduced row echelon form (if not already
 * done) and checking solvability, this computes one special solution
 * {@code x_0} and a basis {@code u} of the solution space of the associated
 * homogeneous system, then sets the {@code solved} flag. If the system is
 * not solvable the method returns without setting {@code solved}.
 *
 * @param method pivot search strategy, forwarded to
 *        {@link #reducedRowEchelonForm(int)}
 * @throws NullPointerException declared by the original interface; no
 *         explicit throw is visible in this method body
 */
private void solve(int method) throws NullPointerException {
  // solution exists
  if(solved) {
    return;
  }
  // bring in reduced row echelon form
  if(!reducedRowEchelonForm) {
    reducedRowEchelonForm(method);
  }
  if(!isSolvable(method)) {
    if(LOG.isDebugging()) {
      LOG.debugFine("Equation system is not solvable!");
    }
    return;
  }
  // compute one special solution:
  // a row whose leading entry is 1 binds the corresponding variable to the
  // right-hand side; rows without a leading 1 leave a free variable.
  final int cols = coeff[0].length;
  int numbound = 0, numfree = 0;
  int[] boundIndices = new int[cols], freeIndices = new int[cols];
  x_0 = new double[cols];
  outer: for(int i = 0; i < coeff.length; i++) {
    for(int j = i; j < coeff[row[i]].length; j++) {
      // NOTE(review): exact comparison with 1 relies on pivotOperation
      // having set the pivot to exactly 1 — confirm this holds after the
      // floating-point divisions of the rest of the row.
      if(coeff[row[i]][col[j]] == 1) {
        x_0[col[i]] = rhs[row[i]];
        boundIndices[numbound++] = col[i];
        continue outer;
      }
    }
    freeIndices[numfree++] = i;
  }
  StringBuilder msg = new StringBuilder();
  if(LOG.isDebugging()) {
    msg.append("\nSpecial solution x_0 = [").append(FormatUtil.format(x_0, ",", FormatUtil.NF4)).append(' ') //
        .append("\nbound Indices ").append(FormatUtil.format(boundIndices, ",")) //
        .append("\nfree Indices ").append(FormatUtil.format(freeIndices, ","));
  }
  // compute solution space of homogeneous linear equation system:
  // one basis vector per free variable; the free variable itself gets 1,
  // each bound variable gets the negated coefficient of that free column.
  Arrays.sort(boundIndices, 0, numbound);
  int freeIndex = 0;
  int boundIndex = 0;
  u = new double[cols][numfree];
  for(int j = 0; j < u[0].length; j++) {
    for(int i = 0; i < u.length; i++) {
      if(freeIndex < numfree && i == freeIndices[freeIndex]) {
        u[i][j] = 1;
      }
      else if(boundIndex < numbound && i == boundIndices[boundIndex]) {
        // NOTE(review): this indexing looks suspicious — it uses boundIndex
        // directly as a row index (instead of a position found via the
        // bound variable's pivot row) and indexes the column without the
        // col[] permutation used everywhere else. Verify against a
        // reference implementation / textbook null-space construction.
        u[i][j] = -coeff[row[boundIndex]][freeIndices[freeIndex]];
        boundIndex++;
      }
    }
    freeIndex++;
    boundIndex = 0; // Restart
  }
  if(LOG.isDebugging()) {
    msg.append("\nU");
    for(double[] anU : u) {
      msg.append(' ').append(FormatUtil.format(anU, ",", FormatUtil.NF4));
    }
    LOG.debugFine(msg.toString());
  }
  solved = true;
}
Solves the linear equation system with the chosen pivot search method.
666
8
157,496
private boolean isSolvable ( int method ) throws NullPointerException { if ( solved ) { return solvable ; } if ( ! reducedRowEchelonForm ) { reducedRowEchelonForm ( method ) ; } // test if rank(coeff) == rank(coeff|rhs) for ( int i = rank ; i < rhs . length ; i ++ ) { if ( Math . abs ( rhs [ row [ i ] ] ) > DELTA ) { solvable = false ; return false ; // not solvable } } solvable = true ; return true ; }
Checks solvability of this linear equation system with the chosen method .
125
15
157,497
private int [ ] maxIntegerDigits ( double [ ] [ ] values ) { int [ ] digits = new int [ values [ 0 ] . length ] ; for ( int j = 0 ; j < values [ 0 ] . length ; j ++ ) { for ( double [ ] value : values ) { digits [ j ] = Math . max ( digits [ j ] , integerDigits ( value [ j ] ) ) ; } } return digits ; }
Returns the maximum integer digits in each column of the specified values .
94
13
157,498
private int maxIntegerDigits ( double [ ] values ) { int digits = 0 ; for ( double value : values ) { digits = Math . max ( digits , integerDigits ( value ) ) ; } return digits ; }
Returns the maximum integer digits of the specified values .
47
10
157,499
private int integerDigits ( double d ) { double value = Math . abs ( d ) ; if ( value < 10 ) { return 1 ; } return ( int ) FastMath . log10 ( value ) + 1 ; }
Returns the integer digits of the specified double value .
47
10