idx
int64 0
165k
| question
stringlengths 73
4.15k
| target
stringlengths 5
918
| len_question
int64 21
890
| len_target
int64 3
255
|
|---|---|---|---|---|
156,900
|
/**
 * Load the XML specification file and build the generator.
 *
 * @return the configured GeneratorMain
 * @throws AbortException on I/O, parser, or document-structure errors
 */
private GeneratorMain loadXMLSpecification() {
  try {
    DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
    // Do not fetch external DTDs (no network access; partial XXE hardening —
    // NOTE(review): doctype declarations themselves are still allowed).
    dbf.setFeature("http://apache.org/xml/features/nonvalidating/load-external-dtd", false);
    URL url = ClassLoader.getSystemResource(GENERATOR_SCHEMA_FILE);
    if(url != null) {
      try {
        // Validate against the bundled XML schema when it is available.
        Schema schema = SchemaFactory.newInstance(XMLConstants.W3C_XML_SCHEMA_NS_URI).newSchema(url);
        dbf.setSchema(schema);
        dbf.setIgnoringElementContentWhitespace(true);
      }
      catch(Exception e) {
        // Validation is best-effort: log and continue without a schema.
        LOG.warning("Could not set up XML Schema validation for specification file.", e);
      }
    }
    else {
      LOG.warning("Could not set up XML Schema validation for specification file.");
    }
    Document doc = dbf.newDocumentBuilder().parse(specfile);
    Node root = doc.getDocumentElement();
    if(TAG_DATASET.equals(root.getNodeName())) {
      GeneratorMain gen = new GeneratorMain();
      processElementDataset(gen, root);
      return gen;
    }
    else {
      throw new AbortException("Experiment specification has incorrect document element: " + root.getNodeName());
    }
  }
  catch(FileNotFoundException e) {
    throw new AbortException("Can't open specification file.", e);
  }
  catch(SAXException e) {
    throw new AbortException("Error parsing specification file.", e);
  }
  catch(IOException e) {
    throw new AbortException("IO Exception loading specification file.", e);
  }
  catch(ParserConfigurationException e) {
    throw new AbortException("Parser Configuration Error", e);
  }
}
|
Load the XML configuration file .
| 413
| 6
|
156,901
|
private void processElementDataset ( GeneratorMain gen , Node cur ) { // *** get parameters String seedstr = ( ( Element ) cur ) . getAttribute ( ATTR_SEED ) ; if ( clusterRandom != RandomFactory . DEFAULT && seedstr != null && seedstr . length ( ) > 0 ) { clusterRandom = new RandomFactory ( ( long ) ( ParseUtil . parseIntBase10 ( seedstr ) * sizescale ) ) ; } String testmod = ( ( Element ) cur ) . getAttribute ( ATTR_TEST ) ; if ( testmod != null && testmod . length ( ) > 0 ) { testAgainstModel = Boolean . valueOf ( ParseUtil . parseIntBase10 ( testmod ) != 0 ) ; } // TODO: check for unknown attributes. XMLNodeIterator iter = new XMLNodeIterator ( cur . getFirstChild ( ) ) ; while ( iter . hasNext ( ) ) { Node child = iter . next ( ) ; if ( TAG_CLUSTER . equals ( child . getNodeName ( ) ) ) { processElementCluster ( gen , child ) ; } else if ( TAG_STATIC . equals ( child . getNodeName ( ) ) ) { processElementStatic ( gen , child ) ; } else if ( child . getNodeType ( ) == Node . ELEMENT_NODE ) { LOG . warning ( "Unknown element in XML specification file: " + child . getNodeName ( ) ) ; } } }
|
Process a dataset Element in the XML stream .
| 319
| 9
|
156,902
|
private void processElementCluster ( GeneratorMain gen , Node cur ) { int size = - 1 ; double overweight = 1.0 ; String sizestr = ( ( Element ) cur ) . getAttribute ( ATTR_SIZE ) ; if ( sizestr != null && sizestr . length ( ) > 0 ) { size = ( int ) ( ParseUtil . parseIntBase10 ( sizestr ) * sizescale ) ; } String name = ( ( Element ) cur ) . getAttribute ( ATTR_NAME ) ; String dcostr = ( ( Element ) cur ) . getAttribute ( ATTR_DENSITY ) ; if ( dcostr != null && dcostr . length ( ) > 0 ) { overweight = ParseUtil . parseDouble ( dcostr ) ; } if ( size < 0 ) { throw new AbortException ( "No valid cluster size given in specification file." ) ; } if ( name == null || name . length ( ) == 0 ) { throw new AbortException ( "No cluster name given in specification file." ) ; } // *** add new cluster object Random newRand = clusterRandom . getSingleThreadedRandom ( ) ; GeneratorSingleCluster cluster = new GeneratorSingleCluster ( name , size , overweight , newRand ) ; // TODO: check for unknown attributes. XMLNodeIterator iter = new XMLNodeIterator ( cur . getFirstChild ( ) ) ; while ( iter . hasNext ( ) ) { Node child = iter . next ( ) ; if ( TAG_UNIFORM . equals ( child . getNodeName ( ) ) ) { processElementUniform ( cluster , child ) ; } else if ( TAG_NORMAL . equals ( child . getNodeName ( ) ) ) { processElementNormal ( cluster , child ) ; } else if ( TAG_GAMMA . equals ( child . getNodeName ( ) ) ) { processElementGamma ( cluster , child ) ; } else if ( TAG_HALTON . equals ( child . getNodeName ( ) ) ) { processElementHalton ( cluster , child ) ; } else if ( TAG_ROTATE . equals ( child . getNodeName ( ) ) ) { processElementRotate ( cluster , child ) ; } else if ( TAG_TRANSLATE . equals ( child . getNodeName ( ) ) ) { processElementTranslate ( cluster , child ) ; } else if ( TAG_CLIP . equals ( child . getNodeName ( ) ) ) { processElementClipping ( cluster , child ) ; } else if ( child . getNodeType ( ) == Node . ELEMENT_NODE ) { LOG . 
warning ( "Unknown element in XML specification file: " + child . getNodeName ( ) ) ; } } gen . addCluster ( cluster ) ; }
|
Process a cluster Element in the XML stream .
| 603
| 9
|
156,903
|
private void processElementUniform ( GeneratorSingleCluster cluster , Node cur ) { double min = 0.0 ; double max = 1.0 ; String minstr = ( ( Element ) cur ) . getAttribute ( ATTR_MIN ) ; if ( minstr != null && minstr . length ( ) > 0 ) { min = ParseUtil . parseDouble ( minstr ) ; } String maxstr = ( ( Element ) cur ) . getAttribute ( ATTR_MAX ) ; if ( maxstr != null && maxstr . length ( ) > 0 ) { max = ParseUtil . parseDouble ( maxstr ) ; } // *** new uniform generator Random random = cluster . getNewRandomGenerator ( ) ; Distribution generator = new UniformDistribution ( min , max , random ) ; cluster . addGenerator ( generator ) ; // TODO: check for unknown attributes. XMLNodeIterator iter = new XMLNodeIterator ( cur . getFirstChild ( ) ) ; while ( iter . hasNext ( ) ) { Node child = iter . next ( ) ; if ( child . getNodeType ( ) == Node . ELEMENT_NODE ) { LOG . warning ( "Unknown element in XML specification file: " + child . getNodeName ( ) ) ; } } }
|
Process a uniform Element in the XML stream .
| 269
| 9
|
156,904
|
private void processElementNormal ( GeneratorSingleCluster cluster , Node cur ) { double mean = 0.0 ; double stddev = 1.0 ; String meanstr = ( ( Element ) cur ) . getAttribute ( ATTR_MEAN ) ; if ( meanstr != null && meanstr . length ( ) > 0 ) { mean = ParseUtil . parseDouble ( meanstr ) ; } String stddevstr = ( ( Element ) cur ) . getAttribute ( ATTR_STDDEV ) ; if ( stddevstr != null && stddevstr . length ( ) > 0 ) { stddev = ParseUtil . parseDouble ( stddevstr ) ; } // *** New normal distribution generator Random random = cluster . getNewRandomGenerator ( ) ; Distribution generator = new NormalDistribution ( mean , stddev , random ) ; cluster . addGenerator ( generator ) ; // TODO: check for unknown attributes. XMLNodeIterator iter = new XMLNodeIterator ( cur . getFirstChild ( ) ) ; while ( iter . hasNext ( ) ) { Node child = iter . next ( ) ; if ( child . getNodeType ( ) == Node . ELEMENT_NODE ) { LOG . warning ( "Unknown element in XML specification file: " + child . getNodeName ( ) ) ; } } }
|
Process a normal Element in the XML stream .
| 285
| 9
|
156,905
|
private void processElementGamma ( GeneratorSingleCluster cluster , Node cur ) { double k = 1.0 ; double theta = 1.0 ; String kstr = ( ( Element ) cur ) . getAttribute ( ATTR_K ) ; if ( kstr != null && kstr . length ( ) > 0 ) { k = ParseUtil . parseDouble ( kstr ) ; } String thetastr = ( ( Element ) cur ) . getAttribute ( ATTR_THETA ) ; if ( thetastr != null && thetastr . length ( ) > 0 ) { theta = ParseUtil . parseDouble ( thetastr ) ; } // *** New normal distribution generator Random random = cluster . getNewRandomGenerator ( ) ; Distribution generator = new GammaDistribution ( k , theta , random ) ; cluster . addGenerator ( generator ) ; // TODO: check for unknown attributes. XMLNodeIterator iter = new XMLNodeIterator ( cur . getFirstChild ( ) ) ; while ( iter . hasNext ( ) ) { Node child = iter . next ( ) ; if ( child . getNodeType ( ) == Node . ELEMENT_NODE ) { LOG . warning ( "Unknown element in XML specification file: " + child . getNodeName ( ) ) ; } } }
|
Process a gamma Element in the XML stream .
| 282
| 9
|
156,906
|
private void processElementRotate ( GeneratorSingleCluster cluster , Node cur ) { int axis1 = 0 ; int axis2 = 0 ; double angle = 0.0 ; String a1str = ( ( Element ) cur ) . getAttribute ( ATTR_AXIS1 ) ; if ( a1str != null && a1str . length ( ) > 0 ) { axis1 = ParseUtil . parseIntBase10 ( a1str ) ; } String a2str = ( ( Element ) cur ) . getAttribute ( ATTR_AXIS2 ) ; if ( a2str != null && a2str . length ( ) > 0 ) { axis2 = ParseUtil . parseIntBase10 ( a2str ) ; } String anstr = ( ( Element ) cur ) . getAttribute ( ATTR_ANGLE ) ; if ( anstr != null && anstr . length ( ) > 0 ) { angle = ParseUtil . parseDouble ( anstr ) ; } if ( axis1 <= 0 || axis1 > cluster . getDim ( ) ) { throw new AbortException ( "Invalid axis1 number given in specification file." ) ; } if ( axis2 <= 0 || axis2 > cluster . getDim ( ) ) { throw new AbortException ( "Invalid axis2 number given in specification file." ) ; } if ( axis1 == axis2 ) { throw new AbortException ( "Invalid axis numbers given in specification file." ) ; } // Add rotation to cluster. cluster . addRotation ( axis1 - 1 , axis2 - 1 , Math . toRadians ( angle ) ) ; // TODO: check for unknown attributes. XMLNodeIterator iter = new XMLNodeIterator ( cur . getFirstChild ( ) ) ; while ( iter . hasNext ( ) ) { Node child = iter . next ( ) ; if ( child . getNodeType ( ) == Node . ELEMENT_NODE ) { LOG . warning ( "Unknown element in XML specification file: " + child . getNodeName ( ) ) ; } } }
|
Process a rotate Element in the XML stream .
| 436
| 9
|
156,907
|
private void processElementTranslate ( GeneratorSingleCluster cluster , Node cur ) { double [ ] offset = null ; String vstr = ( ( Element ) cur ) . getAttribute ( ATTR_VECTOR ) ; if ( vstr != null && vstr . length ( ) > 0 ) { offset = parseVector ( vstr ) ; } if ( offset == null ) { throw new AbortException ( "No translation vector given." ) ; } // *** add new translation cluster . addTranslation ( offset ) ; // TODO: check for unknown attributes. XMLNodeIterator iter = new XMLNodeIterator ( cur . getFirstChild ( ) ) ; while ( iter . hasNext ( ) ) { Node child = iter . next ( ) ; if ( child . getNodeType ( ) == Node . ELEMENT_NODE ) { LOG . warning ( "Unknown element in XML specification file: " + child . getNodeName ( ) ) ; } } }
|
Process a translate Element in the XML stream .
| 200
| 9
|
156,908
|
private void processElementClipping ( GeneratorSingleCluster cluster , Node cur ) { double [ ] cmin = null , cmax = null ; String minstr = ( ( Element ) cur ) . getAttribute ( ATTR_MIN ) ; if ( minstr != null && minstr . length ( ) > 0 ) { cmin = parseVector ( minstr ) ; } String maxstr = ( ( Element ) cur ) . getAttribute ( ATTR_MAX ) ; if ( maxstr != null && maxstr . length ( ) > 0 ) { cmax = parseVector ( maxstr ) ; } if ( cmin == null || cmax == null ) { throw new AbortException ( "No or incomplete clipping vectors given." ) ; } // *** set clipping cluster . setClipping ( cmin , cmax ) ; // TODO: check for unknown attributes. XMLNodeIterator iter = new XMLNodeIterator ( cur . getFirstChild ( ) ) ; while ( iter . hasNext ( ) ) { Node child = iter . next ( ) ; if ( child . getNodeType ( ) == Node . ELEMENT_NODE ) { LOG . warning ( "Unknown element in XML specification file: " + child . getNodeName ( ) ) ; } } }
|
Process a clipping Element in the XML stream .
| 265
| 9
|
156,909
|
private void processElementStatic ( GeneratorMain gen , Node cur ) { String name = ( ( Element ) cur ) . getAttribute ( ATTR_NAME ) ; if ( name == null ) { throw new AbortException ( "No cluster name given in specification file." ) ; } ArrayList < double [ ] > points = new ArrayList <> ( ) ; // TODO: check for unknown attributes. XMLNodeIterator iter = new XMLNodeIterator ( cur . getFirstChild ( ) ) ; while ( iter . hasNext ( ) ) { Node child = iter . next ( ) ; if ( TAG_POINT . equals ( child . getNodeName ( ) ) ) { processElementPoint ( points , child ) ; } else if ( child . getNodeType ( ) == Node . ELEMENT_NODE ) { LOG . warning ( "Unknown element in XML specification file: " + child . getNodeName ( ) ) ; } } // *** add new cluster object GeneratorStatic cluster = new GeneratorStatic ( name , points ) ; gen . addCluster ( cluster ) ; if ( LOG . isVerbose ( ) ) { LOG . verbose ( "Loaded cluster " + cluster . name + " from specification." ) ; } }
|
Process a static cluster Element in the XML stream .
| 259
| 10
|
156,910
|
/**
 * Parse a whitespace-separated string into a vector of doubles.
 *
 * @param s input string
 * @return the parsed vector
 * @throws AbortException if any component cannot be parsed
 */
private double[] parseVector(String s) {
  String[] entries = WHITESPACE_PATTERN.split(s);
  double[] d = new double[entries.length];
  for(int i = 0; i < entries.length; i++) {
    try {
      d[i] = ParseUtil.parseDouble(entries[i]);
    }
    catch(NumberFormatException e) {
      // Fix: preserve the cause and name the offending token (the original
      // discarded the exception, hiding which component failed).
      throw new AbortException("Could not parse vector: " + entries[i], e);
    }
  }
  return d;
}
|
Parse a string into a vector .
| 111
| 8
|
156,911
|
public static NumberVector getPrototype ( Model model , Relation < ? extends NumberVector > relation ) { // Mean model contains a numeric Vector if ( model instanceof MeanModel ) { return DoubleVector . wrap ( ( ( MeanModel ) model ) . getMean ( ) ) ; } // Handle medoid models if ( model instanceof MedoidModel ) { return relation . get ( ( ( MedoidModel ) model ) . getMedoid ( ) ) ; } if ( model instanceof PrototypeModel ) { Object p = ( ( PrototypeModel < ? > ) model ) . getPrototype ( ) ; if ( p instanceof NumberVector ) { return ( NumberVector ) p ; } return null ; // Inconvertible } return null ; }
|
Get the representative vector for a cluster model .
| 156
| 9
|
156,912
|
/**
 * Determines the preference vector with the apriori strategy.
 *
 * @param relation the data relation
 * @param neighborIDs per-dimension neighborhood ID sets
 * @param msg message buffer for debug output, or null
 * @return the preference vector as a bitmask
 */
private long[] determinePreferenceVectorByApriori(Relation<V> relation, ModifiableDBIDs[] neighborIDs, StringBuilder msg) {
  int dimensionality = neighborIDs.length;
  // database for apriori
  UpdatableDatabase apriori_db = new HashmapDatabase();
  SimpleTypeInformation<?> bitmeta = VectorFieldTypeInformation.typeRequest(BitVector.class, dimensionality, dimensionality);
  // Encode each object as a bit vector: bit d set iff the object is in the
  // neighborhood of dimension d. Objects in no neighborhood are skipped.
  for(DBIDIter it = relation.iterDBIDs(); it.valid(); it.advance()) {
    long[] bits = BitsUtil.zero(dimensionality);
    boolean allFalse = true;
    for(int d = 0; d < dimensionality; d++) {
      if(neighborIDs[d].contains(it)) {
        BitsUtil.setI(bits, d);
        allFalse = false;
      }
    }
    if(!allFalse) {
      SingleObjectBundle oaa = new SingleObjectBundle();
      oaa.append(bitmeta, new BitVector(bits, dimensionality));
      apriori_db.insert(oaa);
    }
  }
  APRIORI apriori = new APRIORI(minpts);
  FrequentItemsetsResult aprioriResult = apriori.run(apriori_db);
  // result of apriori
  List<Itemset> frequentItemsets = aprioriResult.getItemsets();
  if(msg != null) {
    msg.append("\n Frequent itemsets: ").append(frequentItemsets);
  }
  // Pick the itemset with maximum cardinality (ties see note below).
  int maxSupport = 0, maxCardinality = 0;
  long[] preferenceVector = BitsUtil.zero(dimensionality);
  for(Itemset itemset : frequentItemsets) {
    // NOTE(review): the second clause compares support with '==', which
    // replaces the winner only on exactly equal support — presumably '<'
    // (prefer higher support) was intended; confirm against upstream.
    if((maxCardinality < itemset.length()) || (maxCardinality == itemset.length() && maxSupport == itemset.getSupport())) {
      preferenceVector = Itemset.toBitset(itemset, BitsUtil.zero(dimensionality));
      maxCardinality = itemset.length();
      maxSupport = itemset.getSupport();
    }
  }
  if(msg != null) {
    msg.append("\n preference ") //
        .append(BitsUtil.toStringLow(preferenceVector, dimensionality)) //
        .append(' ');
    LOG.debugFine(msg.toString());
  }
  return preferenceVector;
}
|
Determines the preference vector with the apriori strategy .
| 540
| 13
|
156,913
|
/**
 * Determines the preference vector with the max intersection strategy.
 *
 * @param neighborIDs per-dimension neighborhood ID sets
 * @param msg message buffer for debug output, or null
 * @return the preference vector as a bitmask
 */
private long[] determinePreferenceVectorByMaxIntersection(ModifiableDBIDs[] neighborIDs, StringBuilder msg) {
  int dimensionality = neighborIDs.length;
  long[] preferenceVector = BitsUtil.zero(dimensionality);
  // Candidate dimensions: those whose neighborhood exceeds minpts.
  Map<Integer, ModifiableDBIDs> candidates = new HashMap<>(dimensionality);
  for(int i = 0; i < dimensionality; i++) {
    ModifiableDBIDs s_i = neighborIDs[i];
    if(s_i.size() > minpts) {
      candidates.put(i, s_i);
    }
  }
  if(msg != null) {
    msg.append("\n candidates ").append(candidates.keySet());
  }
  if(!candidates.isEmpty()) {
    // Greedily start with the dimension having the largest neighborhood.
    int i = max(candidates);
    ModifiableDBIDs intersection = candidates.remove(i);
    BitsUtil.setI(preferenceVector, i);
    while(!candidates.isEmpty()) {
      // maxIntersection() picks the best remaining dimension AND replaces
      // 'intersection' with the new common neighborhood (side effect).
      i = maxIntersection(candidates, intersection);
      candidates.remove(i);
      // Stop once the common neighborhood is too small; the bit for the
      // last chosen dimension is then intentionally NOT set.
      if(intersection.size() < minpts) {
        break;
      }
      BitsUtil.setI(preferenceVector, i);
    }
  }
  if(msg != null) {
    msg.append("\n preference ").append(BitsUtil.toStringLow(preferenceVector, dimensionality));
    LOG.debug(msg.toString());
  }
  return preferenceVector;
}
|
Determines the preference vector with the max intersection strategy .
| 317
| 12
|
156,914
|
/**
 * Returns the key of the largest set contained in the specified map.
 *
 * @param candidates map of dimension index to ID set; should be non-empty
 * @return key of the largest set, or -1 if the map is empty
 */
private int max(Map<Integer, ModifiableDBIDs> candidates) {
  int maxDim = -1, size = -1;
  // Iterate entries directly instead of keySet()+get() (single lookup).
  for(Map.Entry<Integer, ModifiableDBIDs> entry : candidates.entrySet()) {
    final int nextSize = entry.getValue().size();
    if(size < nextSize) {
      size = nextSize;
      maxDim = entry.getKey();
    }
  }
  return maxDim;
}
|
Returns the set with the maximum size contained in the specified map .
| 84
| 13
|
156,915
|
/**
 * Returns the key of the set having the maximum intersection with the
 * specified set; as a side effect, replaces the contents of {@code set}
 * with that maximum intersection.
 *
 * @param candidates map of dimension index to ID set
 * @param set the set to intersect with; overwritten with the best intersection
 * @return key of the best candidate, or -1 if the map is empty
 */
private int maxIntersection(Map<Integer, ModifiableDBIDs> candidates, ModifiableDBIDs set) {
  int maxDim = -1;
  ModifiableDBIDs maxIntersection = null;
  // Iterate entries directly instead of keySet()+get() (single lookup).
  for(Map.Entry<Integer, ModifiableDBIDs> entry : candidates.entrySet()) {
    ModifiableDBIDs nextIntersection = DBIDUtil.intersection(set, entry.getValue());
    // First candidate always wins (maxDim < 0 guards the null dereference).
    if(maxDim < 0 || maxIntersection.size() < nextIntersection.size()) {
      maxIntersection = nextIntersection;
      maxDim = entry.getKey();
    }
  }
  if(maxDim >= 0) {
    set.clear();
    set.addDBIDs(maxIntersection);
  }
  return maxDim;
}
|
Returns the index of the set having the maximum intersection set with the specified set contained in the specified map .
| 160
| 21
|
156,916
|
/**
 * Initializes one range query per dimension, each using a one-dimensional
 * distance function, to determine the preference vectors.
 *
 * @param relation the data relation
 * @param dimensionality number of dimensions
 * @return one range query per dimension
 */
private RangeQuery<V>[] initRangeQueries(Relation<V> relation, int dimensionality) {
  @SuppressWarnings("unchecked")
  RangeQuery<V>[] queries = (RangeQuery<V>[]) new RangeQuery[dimensionality];
  for(int d = 0; d < dimensionality; d++) {
    queries[d] = relation.getRangeQuery(new PrimitiveDistanceQuery<>(relation, new OnedimensionalDistanceFunction(d)));
  }
  return queries;
}
|
Initializes the dimension selecting distancefunctions to determine the preference vectors .
| 123
| 14
|
156,917
|
/**
 * Compute the Tau correlation measure.
 *
 * @param c number of concordant pairs — presumably; TODO confirm caller semantics
 * @param d number of discordant pairs — presumably; TODO confirm caller semantics
 * @param m total number of pairs
 * @param wd within-group tie count — assumption; TODO confirm
 * @param bd between-group tie count — assumption; TODO confirm
 * @return the Tau value
 */
@Reference(authors = "F. J. Rohlf", title = "Methods of comparing classifications", //
    booktitle = "Annual Review of Ecology and Systematics", //
    url = "https://doi.org/10.1146/annurev.es.05.110174.000533", //
    bibkey = "doi:10.1146/annurev.es.05.110174.000533")
public double computeTau(long c, long d, double m, long wd, long bd) {
  // Tied pairs: wd*(wd-1)/2 + bd*(bd-1)/2; ">>> 1" halves the non-negative sum.
  double tie = (wd * (wd - 1) + bd * (bd - 1)) >>> 1;
  return (c - d) / FastMath.sqrt((m - tie) * m);
  // return (4. * c - m) / m;
}
|
Compute the Tau correlation measure
| 186
| 6
|
156,918
|
/**
 * Retrieve all service properties (from jars and plain directories on the
 * classpath) and check each one.
 *
 * @param update passed through to checkService() — semantics defined there
 * @throws AbortException if the service folders cannot be enumerated
 */
public void checkServices(String update) {
  // Sorted, de-duplicated set of service file names found on the classpath.
  TreeSet<String> props = new TreeSet<>();
  Enumeration<URL> us;
  try {
    us = getClass().getClassLoader().getResources(ELKIServiceLoader.RESOURCE_PREFIX);
  }
  catch(IOException e) {
    throw new AbortException("Error enumerating service folders.", e);
  }
  while(us.hasMoreElements()) {
    URL u = us.nextElement();
    try {
      if(("jar".equals(u.getProtocol()))) {
        // Scan every jar entry for service files under either prefix.
        JarURLConnection con = (JarURLConnection) u.openConnection();
        try (JarFile jar = con.getJarFile()) {
          Enumeration<JarEntry> entries = jar.entries();
          while(entries.hasMoreElements()) {
            String prop = entries.nextElement().getName();
            if(prop.startsWith(ELKIServiceLoader.RESOURCE_PREFIX)) {
              props.add(prop.substring(ELKIServiceLoader.RESOURCE_PREFIX.length()));
            }
            else if(prop.startsWith(ELKIServiceLoader.FILENAME_PREFIX)) {
              props.add(prop.substring(ELKIServiceLoader.FILENAME_PREFIX.length()));
            }
          }
        }
        continue;
      }
      if("file".equals(u.getProtocol())) {
        // NOTE(review): File.list() can return null (I/O error or not a
        // directory), which would NPE in Arrays.asList — confirm the URL
        // always denotes a readable directory here.
        props.addAll(Arrays.asList(new File(u.toURI()).list()));
      }
    }
    catch(IOException | URISyntaxException e) {
      throw new AbortException("Error enumerating service folders.", e);
    }
  }
  for(String prop : props) {
    if(LOG.isVerbose()) {
      LOG.verbose("Checking property: " + prop);
    }
    checkService(prop, update);
  }
}
|
Retrieve all properties and check them .
| 449
| 8
|
156,919
|
/**
 * Check if the aliases of a class are listed completely: every alias in the
 * {@code @Alias} annotation must appear in the service file, and vice versa.
 *
 * @param parent the service interface the class implements
 * @param classname name of the implementation class
 * @param parts service-file tokens; parts[0] is the class name, the rest
 *          are the aliases listed there
 */
@SuppressWarnings("unchecked")
private void checkAliases(Class<?> parent, String classname, String[] parts) {
  Class<?> c = ELKIServiceRegistry.findImplementation((Class<Object>) parent, classname);
  if(c == null) {
    return;
  }
  Alias ann = c.getAnnotation(Alias.class);
  if(ann == null) {
    // No @Alias annotation: any alias in the service file is extraneous.
    if(parts.length > 1) {
      StringBuilder buf = new StringBuilder(100) //
          .append("Class ").append(classname) //
          .append(" in ").append(parent.getCanonicalName()) //
          .append(" has the following extraneous aliases:");
      for(int i = 1; i < parts.length; i++) {
        buf.append(' ').append(parts[i]);
      }
      LOG.warning(buf);
    }
    return;
  }
  // Collect the aliases declared in the service file.
  HashSet<String> aliases = new HashSet<String>();
  for(int i = 1; i < parts.length; i++) {
    aliases.add(parts[i]);
  }
  StringBuilder buf = null;
  // Annotated aliases missing from the service file.
  for(String a : ann.value()) {
    if(!aliases.remove(a)) {
      if(buf == null) {
        buf = new StringBuilder(100) //
            .append("Class ").append(classname) //
            .append(" in ").append(parent.getCanonicalName()) //
            .append(" is missing the following aliases:");
      }
      buf.append(' ').append(a);
    }
  }
  // Service-file aliases not present in the annotation.
  if(!aliases.isEmpty()) {
    buf = (buf == null ? new StringBuilder() : buf.append(FormatUtil.NEWLINE)) //
        .append("Class ").append(classname) //
        .append(" in ").append(parent.getCanonicalName()) //
        .append(" has the following extraneous aliases:");
    for(String a : aliases) {
      buf.append(' ').append(a);
    }
  }
  if(buf != null) {
    LOG.warning(buf);
  }
}
|
Check if aliases are listed completely .
| 486
| 7
|
156,920
|
/**
 * Returns the linear equation system for printing purposes. If the given
 * normalization is null, the stored system is returned unchanged; otherwise
 * it is transformed by the normalization and solved.
 *
 * @param normalization the normalization to apply, may be null
 * @return the (possibly transformed and solved) linear equation system
 * @throws NonNumericFeaturesException if the transformation fails
 */
public LinearEquationSystem getNormalizedLinearEquationSystem(Normalization<?> normalization) throws NonNumericFeaturesException {
  if(normalization == null) {
    return linearEquationSystem;
  }
  LinearEquationSystem lq = normalization.transform(linearEquationSystem);
  lq.solveByTotalPivotSearch();
  return lq;
}
|
Returns the linear equation system for printing purposes . If normalization is null the linear equation system is returned otherwise the linear equation system will be transformed according to the normalization .
| 82
| 34
|
156,921
|
/**
 * Returns the squared distance of NumberVector p from the affine subspace
 * underlying this solution.
 *
 * @param p the query vector
 * @return squared distance of p to the hyperplane
 */
public double squaredDistance(V p) {
  // V_affin = V + a
  // dist(p, V_affin) = d(p-a, V) = ||p - a - proj_V(p-a) ||
  double[] p_minus_a = minusEquals(p.toArray(), centroid);
  // Residual after projecting onto the strong eigenvectors, squared norm.
  return squareSum(minusEquals(p_minus_a, times(strongEigenvectors, transposeTimes(strongEigenvectors, p_minus_a))));
}
|
Returns the distance of NumberVector p from the hyperplane underlying this solution .
| 120
| 15
|
156,922
|
/**
 * Returns the error vector after projection, i.e. the component of
 * (p - centroid) spanned by the weak eigenvectors.
 *
 * @param p the query vector
 * @return error vector, or the empty vector if there are no weak eigenvectors
 */
public double[] errorVector(V p) {
  if(weakEigenvectors.length == 0) {
    return EMPTY_VECTOR;
  }
  final double[] centered = minusEquals(p.toArray(), centroid);
  return times(weakEigenvectors, transposeTimes(weakEigenvectors, centered));
}
|
Returns the error vectors after projection .
| 65
| 7
|
156,923
|
/**
 * Returns the data vector after projection, i.e. the component of
 * (p - centroid) spanned by the strong eigenvectors.
 *
 * @param p the query vector
 * @return projected vector, or the empty vector if there are no strong eigenvectors
 */
public double[] dataVector(V p) {
  if(strongEigenvectors.length == 0) {
    return EMPTY_VECTOR;
  }
  final double[] centered = minusEquals(p.toArray(), centroid);
  return times(strongEigenvectors, transposeTimes(strongEigenvectors, centered));
}
|
Returns the data vectors after projection .
| 65
| 7
|
156,924
|
/**
 * Text output of the equation system.
 *
 * @param out the output stream
 * @param label optional label printed as a leading comment, may be null
 */
@Override
public void writeToText(TextWriterStream out, String label) {
  if(label != null) {
    out.commentPrintLn(label);
  }
  out.commentPrintLn("Model class: " + this.getClass().getName());
  try {
    if(getNormalizedLinearEquationSystem(null) != null) {
      // TODO: more elegant way of doing normalization here?
      /*
       * if(out instanceof TextWriterStreamNormalizing) {
       * TextWriterStreamNormalizing<V> nout = (TextWriterStreamNormalizing<V>)
       * out; LinearEquationSystem lq =
       * getNormalizedLinearEquationSystem(nout.getNormalization());
       * out.commentPrint("Linear Equation System: ");
       * out.commentPrintLn(lq.equationsToString(nf)); } else {
       */
      // Called again with null normalization: returns the stored system.
      LinearEquationSystem lq = getNormalizedLinearEquationSystem(null);
      out.commentPrint("Linear Equation System: ");
      out.commentPrintLn(lq.equationsToString(nf));
      // }
    }
  }
  catch(NonNumericFeaturesException e) {
    LoggingUtil.exception(e);
  }
}
|
Text output of the equation system
| 272
| 6
|
156,925
|
protected int getBinNr ( double coord ) { if ( Double . isInfinite ( coord ) || Double . isNaN ( coord ) ) { throw new UnsupportedOperationException ( "Encountered non-finite value in Histogram: " + coord ) ; } if ( coord == max ) { // System.err.println("Triggered special case: "+ (Math.floor((coord - // base) / binsize) + offset) + " vs. " + (size - 1)); return size - 1 ; } return ( int ) Math . floor ( ( coord - base ) / binsize ) + offset ; }
|
Compute the bin number . Has a special case for rounding max down to the last bin .
| 135
| 19
|
156,926
|
protected static int growSize ( int current , int requiredSize ) { // Double until 64, then increase by 50% each time. int newCapacity = ( ( current < 64 ) ? ( ( current + 1 ) << 1 ) : ( ( current >> 1 ) * 3 ) ) ; // overflow? if ( newCapacity < 0 ) { throw new OutOfMemoryError ( ) ; } if ( requiredSize > newCapacity ) { newCapacity = requiredSize ; } return requiredSize ; }
|
Compute the size to grow to .
| 105
| 8
|
156,927
|
/**
 * Expands the spatial nodes of the specified pair and marks the pair as
 * expanded in the index.
 *
 * @param index the DeLiClu tree
 * @param distFunction spatial distance function
 * @param nodePair pair of directory entries to expand
 * @param knns precomputed kNN lists (used at leaf level)
 */
private void expandNodes(DeLiCluTree index, SpatialPrimitiveDistanceFunction<V> distFunction, SpatialObjectPair nodePair, DataStore<KNNList> knns) {
  final int pid1 = ((SpatialDirectoryEntry) nodePair.entry1).getPageID();
  final int pid2 = ((SpatialDirectoryEntry) nodePair.entry2).getPageID();
  final DeLiCluNode node1 = index.getNode(pid1);
  final DeLiCluNode node2 = index.getNode(pid2);
  if(node1.isLeaf()) {
    expandLeafNodes(distFunction, node1, node2, knns);
  }
  else {
    expandDirNodes(distFunction, node1, node2);
  }
  index.setExpanded(nodePair.entry2, nodePair.entry1);
}
|
Expands the spatial nodes of the specified pair .
| 187
| 10
|
156,928
|
private void expandDirNodes ( SpatialPrimitiveDistanceFunction < V > distFunction , DeLiCluNode node1 , DeLiCluNode node2 ) { if ( LOG . isDebuggingFinest ( ) ) { LOG . debugFinest ( "ExpandDirNodes: " + node1 . getPageID ( ) + " + " + node2 . getPageID ( ) ) ; } int numEntries_1 = node1 . getNumEntries ( ) ; int numEntries_2 = node2 . getNumEntries ( ) ; // insert all combinations of unhandled - handled children of // node1-node2 into pq for ( int i = 0 ; i < numEntries_1 ; i ++ ) { DeLiCluEntry entry1 = node1 . getEntry ( i ) ; if ( ! entry1 . hasUnhandled ( ) ) { continue ; } for ( int j = 0 ; j < numEntries_2 ; j ++ ) { DeLiCluEntry entry2 = node2 . getEntry ( j ) ; if ( ! entry2 . hasHandled ( ) ) { continue ; } double distance = distFunction . minDist ( entry1 , entry2 ) ; heap . add ( new SpatialObjectPair ( distance , entry1 , entry2 , true ) ) ; } } }
|
Expands the specified directory nodes .
| 288
| 7
|
156,929
|
private void expandLeafNodes ( SpatialPrimitiveDistanceFunction < V > distFunction , DeLiCluNode node1 , DeLiCluNode node2 , DataStore < KNNList > knns ) { if ( LOG . isDebuggingFinest ( ) ) { LOG . debugFinest ( "ExpandLeafNodes: " + node1 . getPageID ( ) + " + " + node2 . getPageID ( ) ) ; } int numEntries_1 = node1 . getNumEntries ( ) ; int numEntries_2 = node2 . getNumEntries ( ) ; // insert all combinations of unhandled - handled children of // node1-node2 into pq for ( int i = 0 ; i < numEntries_1 ; i ++ ) { DeLiCluEntry entry1 = node1 . getEntry ( i ) ; if ( ! entry1 . hasUnhandled ( ) ) { continue ; } for ( int j = 0 ; j < numEntries_2 ; j ++ ) { DeLiCluEntry entry2 = node2 . getEntry ( j ) ; if ( ! entry2 . hasHandled ( ) ) { continue ; } double distance = distFunction . minDist ( entry1 , entry2 ) ; double reach = MathUtil . max ( distance , knns . get ( ( ( LeafEntry ) entry2 ) . getDBID ( ) ) . getKNNDistance ( ) ) ; heap . add ( new SpatialObjectPair ( reach , entry1 , entry2 , false ) ) ; } } }
|
Expands the specified leaf nodes .
| 340
| 7
|
156,930
|
/**
 * Reinserts the objects of the already expanded nodes along the given path.
 *
 * @param distFunction spatial distance function
 * @param index the DeLiClu tree
 * @param path path from a node up to the root
 * @param knns precomputed kNN lists
 */
private void reinsertExpanded(SpatialPrimitiveDistanceFunction<V> distFunction, DeLiCluTree index, IndexTreePath<DeLiCluEntry> path, DataStore<KNNList> knns) {
  int l = 0;
  // Count the number of components.
  for(IndexTreePath<DeLiCluEntry> it = path; it != null; it = it.getParentPath()) {
    l++;
  }
  ArrayList<IndexTreePath<DeLiCluEntry>> p = new ArrayList<>(l - 1);
  // All except the last (= root).
  IndexTreePath<DeLiCluEntry> it = path;
  for(; it.getParentPath() != null; it = it.getParentPath()) {
    p.add(it);
  }
  assert (p.size() == l - 1);
  // After the loop, 'it' is the root path component.
  DeLiCluEntry rootEntry = it.getEntry();
  // Recurse with the path components (leaf first) and the root entry;
  // l - 2 is the index of the component just below the root.
  reinsertExpanded(distFunction, index, p, l - 2, rootEntry, knns);
}
|
Reinserts the objects of the already expanded nodes .
| 229
| 11
|
156,931
|
/**
 * Check that two spatial objects have the same dimensionality.
 *
 * @param box1 first object
 * @param box2 second object
 * @return the common dimensionality
 * @throws IllegalArgumentException if the dimensionalities differ
 */
public static int assertSameDimensionality(SpatialComparable box1, SpatialComparable box2) {
  final int dim = box1.getDimensionality();
  if(box2.getDimensionality() != dim) {
    throw new IllegalArgumentException("The spatial objects do not have the same dimensionality!");
  }
  return dim;
}
|
Check that two spatial objects have the same dimensionality .
| 79
| 11
|
156,932
|
/**
 * Returns a copy of the minimum hyper point.
 *
 * @param box the spatial object
 * @return array of per-dimension minima
 */
public static double[] getMin(SpatialComparable box) {
  final double[] min = new double[box.getDimensionality()];
  for(int i = 0; i < min.length; i++) {
    min[i] = box.getMin(i);
  }
  return min;
}
|
Returns a clone of the minimum hyper point .
| 72
| 9
|
156,933
|
/**
 * Returns a copy of the maximum hyper point.
 *
 * @param box the spatial object
 * @return array of per-dimension maxima
 */
public static double[] getMax(SpatialComparable box) {
  final double[] max = new double[box.getDimensionality()];
  for(int i = 0; i < max.length; i++) {
    max[i] = box.getMax(i);
  }
  return max;
}
|
Returns a clone of the maximum hyper point .
| 72
| 9
|
156,934
|
/**
 * Returns true if the two SpatialComparables intersect, false otherwise.
 *
 * @param box1 first object
 * @param box2 second object
 * @return whether the boxes intersect
 */
public static boolean intersects(SpatialComparable box1, SpatialComparable box2) {
  final int dim = assertSameDimensionality(box1, box2);
  boolean intersecting = true;
  for(int i = 0; i < dim && intersecting; i++) {
    // Disjoint iff separated in this dimension (negated comparison kept
    // exactly as-is to preserve NaN behavior).
    intersecting = !(box2.getMax(i) < box1.getMin(i) || box1.getMax(i) < box2.getMin(i));
  }
  return intersecting;
}
|
Returns true if the two SpatialComparables intersect false otherwise .
| 101
| 13
|
156,935
|
/**
 * Returns true if the SpatialComparable contains the given point,
 * false otherwise.
 *
 * @param box the bounding box
 * @param point the point to test
 * @return whether the point lies inside the box
 * @throws IllegalArgumentException if the dimensionalities differ
 */
public static boolean contains(SpatialComparable box, double[] point) {
  final int dim = box.getDimensionality();
  if(dim != point.length) {
    throw new IllegalArgumentException("This HyperBoundingBox and the given point need same dimensionality");
  }
  for(int i = 0; i < dim; i++) {
    final double v = point[i];
    if(box.getMin(i) > v || box.getMax(i) < v) {
      return false;
    }
  }
  return true;
}
|
Returns true if this SpatialComparable contains the given point false otherwise .
| 117
| 15
|
156,936
|
/**
 * Compute the enlargement obtained by adding an object to an existing object.
 *
 * NOTE(review): this returns the existing volume minus the union volume
 * (i.e. a value that is 0 or negative) — kept as in the original; confirm
 * that callers expect this sign convention.
 *
 * @param exist the existing object
 * @param addit the object to be added
 * @return existing volume minus the combined volume
 */
public static double enlargement(SpatialComparable exist, SpatialComparable addit) {
  final int dim = assertSameDimensionality(exist, addit);
  double volUnion = 1., volExist = 1.;
  for(int i = 0; i < dim; i++) {
    final double emin = exist.getMin(i), emax = exist.getMax(i);
    final double amin = addit.getMin(i), amax = addit.getMax(i);
    volUnion *= Math.max(emax, amax) - Math.min(emin, amin);
    volExist *= emax - emin;
  }
  return volExist - volUnion;
}
|
Compute the enlargement obtained by adding an object to an existing object .
| 180
| 15
|
156,937
|
public static double perimeter ( SpatialComparable box ) { final int dim = box . getDimensionality ( ) ; double perimeter = 0. ; for ( int i = 0 ; i < dim ; i ++ ) { perimeter += box . getMax ( i ) - box . getMin ( i ) ; } return perimeter ; }
|
Computes the perimeter of this SpatialComparable .
| 69
| 11
|
156,938
|
public static double overlap ( SpatialComparable box1 , SpatialComparable box2 ) { final int dim = assertSameDimensionality ( box1 , box2 ) ; // the maximal and minimal value of the overlap box. double omax , omin ; // the overlap volume double overlap = 1. ; for ( int i = 0 ; i < dim ; i ++ ) { // The maximal value of that overlap box in the current // dimension is the minimum of the max values. omax = Math . min ( box1 . getMax ( i ) , box2 . getMax ( i ) ) ; // The minimal value is the maximum of the min values. omin = Math . max ( box1 . getMin ( i ) , box2 . getMin ( i ) ) ; // if omax <= omin in any dimension, the overlap box has a volume of zero if ( omax <= omin ) { return 0. ; } overlap *= omax - omin ; } return overlap ; }
|
Computes the volume of the overlapping box between two SpatialComparables .
| 207
| 15
|
156,939
|
public static double relativeOverlap ( SpatialComparable box1 , SpatialComparable box2 ) { final int dim = assertSameDimensionality ( box1 , box2 ) ; // the overlap volume double overlap = 1. ; double vol1 = 1. ; double vol2 = 1. ; for ( int i = 0 ; i < dim ; i ++ ) { final double box1min = box1 . getMin ( i ) ; final double box1max = box1 . getMax ( i ) ; final double box2min = box2 . getMin ( i ) ; final double box2max = box2 . getMax ( i ) ; final double omax = Math . min ( box1max , box2max ) ; final double omin = Math . max ( box1min , box2min ) ; // if omax <= omin in any dimension, the overlap box has a volume of zero if ( omax <= omin ) { return 0. ; } overlap *= omax - omin ; vol1 *= box1max - box1min ; vol2 *= box2max - box2min ; } return overlap / ( vol1 + vol2 ) ; }
|
Computes the volume of the overlapping box between two SpatialComparables and return the relation between the volume of the overlapping box and the volume of both SpatialComparable .
| 249
| 35
|
156,940
|
public static ModifiableHyperBoundingBox unionTolerant ( SpatialComparable mbr1 , SpatialComparable mbr2 ) { if ( mbr1 == null && mbr2 == null ) { return null ; } if ( mbr1 == null ) { // Clone - intentionally return new ModifiableHyperBoundingBox ( mbr2 ) ; } if ( mbr2 == null ) { // Clone - intentionally return new ModifiableHyperBoundingBox ( mbr1 ) ; } return union ( mbr1 , mbr2 ) ; }
|
Returns the union of the two specified MBRs . Tolerant of null values .
| 118
| 17
|
156,941
|
public static double [ ] centroid ( SpatialComparable obj ) { final int dim = obj . getDimensionality ( ) ; double [ ] centroid = new double [ dim ] ; for ( int d = 0 ; d < dim ; d ++ ) { centroid [ d ] = ( obj . getMax ( d ) + obj . getMin ( d ) ) * .5 ; } return centroid ; }
|
Returns the centroid of this SpatialComparable .
| 88
| 11
|
156,942
|
public static LinearScaling fromMinMax ( double min , double max ) { double zoom = 1.0 / ( max - min ) ; return new LinearScaling ( zoom , - min * zoom ) ; }
|
Make a linear scaling from a given minimum and maximum . The minimum will be mapped to zero the maximum to one .
| 44
| 23
|
156,943
|
public double fMeasure ( double beta ) { final double beta2 = beta * beta ; double fmeasure = ( ( 1 + beta2 ) * pairconfuse [ 0 ] ) / ( ( 1 + beta2 ) * pairconfuse [ 0 ] + beta2 * pairconfuse [ 1 ] + pairconfuse [ 2 ] ) ; return fmeasure ; }
|
Get the pair - counting F - Measure
| 78
| 8
|
156,944
|
@ Override public void write ( TextWriterStream out , String label , Object object ) { StringBuilder buf = new StringBuilder ( 100 ) ; if ( label != null ) { buf . append ( label ) . append ( ' ' ) ; } if ( object != null ) { buf . append ( object . toString ( ) ) ; } out . commentPrintLn ( buf ) ; }
|
Put an object into the comment section
| 82
| 7
|
156,945
|
protected static < I > double [ ] [ ] computeSquaredDistanceMatrix ( final List < I > col , PrimitiveDistanceFunction < ? super I > dist ) { final int size = col . size ( ) ; double [ ] [ ] imat = new double [ size ] [ size ] ; boolean squared = dist . isSquared ( ) ; FiniteProgress dprog = LOG . isVerbose ( ) ? new FiniteProgress ( "Computing distance matrix" , ( size * ( size - 1 ) ) >>> 1 , LOG ) : null ; for ( int x = 0 ; x < size ; x ++ ) { final I ox = col . get ( x ) ; for ( int y = x + 1 ; y < size ; y ++ ) { final I oy = col . get ( y ) ; double distance = dist . distance ( ox , oy ) ; distance *= squared ? - .5 : ( - .5 * distance ) ; imat [ x ] [ y ] = imat [ y ] [ x ] = distance ; } if ( dprog != null ) { dprog . setProcessed ( dprog . getProcessed ( ) + size - x - 1 , LOG ) ; } } LOG . ensureCompleted ( dprog ) ; return imat ; }
|
Compute the squared distance matrix .
| 275
| 7
|
156,946
|
private OutlierResult getOutlierResult ( ResultHierarchy hier , Result result ) { List < OutlierResult > ors = ResultUtil . filterResults ( hier , result , OutlierResult . class ) ; if ( ! ors . isEmpty ( ) ) { return ors . get ( 0 ) ; } throw new IllegalStateException ( "Comparison algorithm expected at least one outlier result." ) ; }
|
Find an OutlierResult to work with .
| 89
| 9
|
156,947
|
protected PolynomialApproximation knnDistanceApproximation ( ) { int p_max = 0 ; double [ ] b = null ; for ( int i = 0 ; i < getNumEntries ( ) ; i ++ ) { MkAppEntry entry = getEntry ( i ) ; PolynomialApproximation approximation = entry . getKnnDistanceApproximation ( ) ; if ( b == null ) { p_max = approximation . getPolynomialOrder ( ) ; b = new double [ p_max ] ; } for ( int p = 0 ; p < p_max ; p ++ ) { b [ p ] += approximation . getB ( p ) ; } } for ( int p = 0 ; p < p_max ; p ++ ) { b [ p ] /= p_max ; } if ( LoggingConfiguration . DEBUG ) { StringBuilder msg = new StringBuilder ( ) ; msg . append ( "b " + FormatUtil . format ( b , FormatUtil . NF4 ) ) ; Logger . getLogger ( this . getClass ( ) . getName ( ) ) . fine ( msg . toString ( ) ) ; } return new PolynomialApproximation ( b ) ; }
|
Determines and returns the polynomial approximation for the knn distances of this node as the maximum of the polynomial approximations of all entries .
| 261
| 33
|
156,948
|
public void setDBIDs ( DBIDs ids ) { this . idrep . setDBIDs ( ids ) ; // Update relations. for ( Relation < ? > orel : this . relations ) { if ( orel instanceof ProxyView ) { ( ( ProxyView < ? > ) orel ) . setDBIDs ( this . idrep . getDBIDs ( ) ) ; } } }
|
Set the DBIDs to use .
| 85
| 7
|
156,949
|
public synchronized Collection < Progress > getProgresses ( ) { List < Progress > list = new ArrayList <> ( progresses . size ( ) ) ; Iterator < WeakReference < Progress > > iter = progresses . iterator ( ) ; while ( iter . hasNext ( ) ) { WeakReference < Progress > ref = iter . next ( ) ; if ( ref . get ( ) == null ) { iter . remove ( ) ; } else { list . add ( ref . get ( ) ) ; } } return list ; }
|
Get a list of progresses tracked .
| 108
| 7
|
156,950
|
public synchronized void addProgress ( Progress p ) { // Don't add more than once. Iterator < WeakReference < Progress >> iter = progresses . iterator ( ) ; while ( iter . hasNext ( ) ) { WeakReference < Progress > ref = iter . next ( ) ; // since we are at it anyway, remove old links. if ( ref . get ( ) == null ) { iter . remove ( ) ; } else { if ( ref . get ( ) == p ) { return ; } } } progresses . add ( new WeakReference <> ( p ) ) ; }
|
Add a new Progress to the tracker .
| 120
| 8
|
156,951
|
public synchronized IndexTreePath < DeLiCluEntry > setHandled ( DBID id , O obj ) { if ( LOG . isDebugging ( ) ) { LOG . debugFine ( "setHandled " + id + ", " + obj + "\n" ) ; } // find the leaf node containing o IndexTreePath < DeLiCluEntry > pathToObject = findPathToObject ( getRootPath ( ) , obj , id ) ; if ( pathToObject == null ) { throw new AbortException ( "Object not found in setHandled." ) ; } // set o handled DeLiCluEntry entry = pathToObject . getEntry ( ) ; entry . setHasHandled ( true ) ; entry . setHasUnhandled ( false ) ; for ( IndexTreePath < DeLiCluEntry > path = pathToObject ; path . getParentPath ( ) != null ; path = path . getParentPath ( ) ) { DeLiCluEntry parentEntry = path . getParentPath ( ) . getEntry ( ) ; DeLiCluNode node = getNode ( parentEntry ) ; boolean hasHandled = false ; boolean hasUnhandled = false ; for ( int i = 0 ; i < node . getNumEntries ( ) ; i ++ ) { final DeLiCluEntry nodeEntry = node . getEntry ( i ) ; hasHandled = hasHandled || nodeEntry . hasHandled ( ) ; hasUnhandled = hasUnhandled || nodeEntry . hasUnhandled ( ) ; } parentEntry . setHasUnhandled ( hasUnhandled ) ; parentEntry . setHasHandled ( hasHandled ) ; } return pathToObject ; }
|
Marks the specified object as handled and returns the path of node ids from the root to the objects s parent .
| 358
| 24
|
156,952
|
@ Override public final boolean delete ( DBIDRef id ) { // find the leaf node containing o O obj = relation . get ( id ) ; IndexTreePath < DeLiCluEntry > deletionPath = findPathToObject ( getRootPath ( ) , obj , id ) ; if ( deletionPath == null ) { return false ; } deletePath ( deletionPath ) ; return true ; }
|
Deletes the specified object from this index .
| 83
| 9
|
156,953
|
protected double [ ] makeSample ( int maxk ) { final Random rnd = this . rnd . getSingleThreadedRandom ( ) ; double [ ] dists = new double [ maxk + 1 ] ; final double e = 1. / dim ; for ( int i = 0 ; i <= maxk ; i ++ ) { dists [ i ] = FastMath . pow ( rnd . nextDouble ( ) , e ) ; } Arrays . sort ( dists ) ; return dists ; }
|
Generate a data sample .
| 107
| 6
|
156,954
|
@ Override public final boolean setRoutingObjectID ( DBID objectID ) { if ( objectID == routingObjectID || DBIDUtil . equal ( objectID , routingObjectID ) ) { return false ; } this . routingObjectID = objectID ; return true ; }
|
Sets the id of the routing object of this entry .
| 60
| 12
|
156,955
|
@ Override public void writeExternal ( ObjectOutput out ) throws IOException { out . writeInt ( id ) ; out . writeInt ( DBIDUtil . asInteger ( routingObjectID ) ) ; out . writeDouble ( parentDistance ) ; out . writeDouble ( coveringRadius ) ; }
|
Calls the super method and writes the routingObjectID the parentDistance and the coveringRadius of this entry to the specified stream .
| 63
| 27
|
156,956
|
@ Override public void write ( TextWriterStream out , String label , TextWriteable obj ) { obj . writeToText ( out , label ) ; }
|
Use the objects own text serialization .
| 33
| 8
|
156,957
|
protected void evaluteResult ( Database db , Clustering < ? > c , Clustering < ? > refc ) { ClusterContingencyTable contmat = new ClusterContingencyTable ( selfPairing , noiseSpecialHandling ) ; contmat . process ( refc , c ) ; ScoreResult sr = new ScoreResult ( contmat ) ; sr . addHeader ( c . getLongName ( ) ) ; db . getHierarchy ( ) . add ( c , sr ) ; }
|
Evaluate a clustering result .
| 107
| 8
|
156,958
|
private boolean isReferenceResult ( Clustering < ? > t ) { // FIXME: don't hard-code strings return "bylabel-clustering" . equals ( t . getShortName ( ) ) // || "bymodel-clustering" . equals ( t . getShortName ( ) ) // || "allinone-clustering" . equals ( t . getShortName ( ) ) // || "allinnoise-clustering" . equals ( t . getShortName ( ) ) ; }
|
Test if a clustering result is a valid reference result .
| 112
| 12
|
156,959
|
@ Override public final synchronized int writePage ( P page ) { int pageid = setPageID ( page ) ; writePage ( pageid , page ) ; return pageid ; }
|
Writes a page into this file . The method tests if the page has already an id otherwise a new id is assigned and returned .
| 39
| 27
|
156,960
|
public static Element drawManhattan ( SVGPlot svgp , Projection2D proj , NumberVector mid , double radius ) { final double [ ] v_mid = mid . toArray ( ) ; // a copy final long [ ] dims = proj . getVisibleDimensions2D ( ) ; SVGPath path = new SVGPath ( ) ; for ( int dim = BitsUtil . nextSetBit ( dims , 0 ) ; dim >= 0 ; dim = BitsUtil . nextSetBit ( dims , dim + 1 ) ) { v_mid [ dim ] += radius ; double [ ] p1 = proj . fastProjectDataToRenderSpace ( v_mid ) ; v_mid [ dim ] -= radius * 2 ; double [ ] p2 = proj . fastProjectDataToRenderSpace ( v_mid ) ; v_mid [ dim ] += radius ; for ( int dim2 = BitsUtil . nextSetBit ( dims , 0 ) ; dim2 >= 0 ; dim2 = BitsUtil . nextSetBit ( dims , dim2 + 1 ) ) { if ( dim < dim2 ) { v_mid [ dim2 ] += radius ; double [ ] p3 = proj . fastProjectDataToRenderSpace ( v_mid ) ; v_mid [ dim2 ] -= radius * 2 ; double [ ] p4 = proj . fastProjectDataToRenderSpace ( v_mid ) ; v_mid [ dim2 ] += radius ; path . moveTo ( p1 [ 0 ] , p1 [ 1 ] ) . drawTo ( p3 [ 0 ] , p3 [ 1 ] ) // . moveTo ( p1 [ 0 ] , p1 [ 1 ] ) . drawTo ( p4 [ 0 ] , p4 [ 1 ] ) // . moveTo ( p2 [ 0 ] , p2 [ 1 ] ) . drawTo ( p3 [ 0 ] , p3 [ 1 ] ) // . moveTo ( p2 [ 0 ] , p2 [ 1 ] ) . drawTo ( p4 [ 0 ] , p4 [ 1 ] ) // . close ( ) ; } } } return path . makeElement ( svgp ) ; }
|
Wireframe manhattan hypersphere
| 468
| 7
|
156,961
|
public static Element drawCross ( SVGPlot svgp , Projection2D proj , NumberVector mid , double radius ) { final double [ ] v_mid = mid . toArray ( ) ; final long [ ] dims = proj . getVisibleDimensions2D ( ) ; SVGPath path = new SVGPath ( ) ; for ( int dim = BitsUtil . nextSetBit ( dims , 0 ) ; dim >= 0 ; dim = BitsUtil . nextSetBit ( dims , dim + 1 ) ) { v_mid [ dim ] += radius ; double [ ] p1 = proj . fastProjectDataToRenderSpace ( v_mid ) ; v_mid [ dim ] -= radius * 2 ; double [ ] p2 = proj . fastProjectDataToRenderSpace ( v_mid ) ; v_mid [ dim ] += radius ; path . moveTo ( p1 [ 0 ] , p1 [ 1 ] ) . drawTo ( p2 [ 0 ] , p2 [ 1 ] ) . close ( ) ; } return path . makeElement ( svgp ) ; }
|
Wireframe cross hypersphere
| 234
| 6
|
156,962
|
@ SuppressWarnings ( { "cast" , "unchecked" } ) public static < O extends SpatialComparable > RangeQuery < O > getRangeQuery ( AbstractRStarTree < ? , ? , ? > tree , SpatialDistanceQuery < O > distanceQuery , Object ... hints ) { // Can we support this distance function - spatial distances only! SpatialPrimitiveDistanceFunction < ? super O > df = distanceQuery . getDistanceFunction ( ) ; if ( EuclideanDistanceFunction . STATIC . equals ( df ) ) { return ( RangeQuery < O > ) new EuclideanRStarTreeRangeQuery <> ( tree , ( Relation < NumberVector > ) distanceQuery . getRelation ( ) ) ; } return new RStarTreeRangeQuery <> ( tree , distanceQuery . getRelation ( ) , df ) ; }
|
Get an RTree range query using an optimized double implementation when possible .
| 182
| 14
|
156,963
|
@ SuppressWarnings ( { "cast" , "unchecked" } ) public static < O extends SpatialComparable > KNNQuery < O > getKNNQuery ( AbstractRStarTree < ? , ? , ? > tree , SpatialDistanceQuery < O > distanceQuery , Object ... hints ) { // Can we support this distance function - spatial distances only! SpatialPrimitiveDistanceFunction < ? super O > df = distanceQuery . getDistanceFunction ( ) ; if ( EuclideanDistanceFunction . STATIC . equals ( df ) ) { return ( KNNQuery < O > ) new EuclideanRStarTreeKNNQuery <> ( tree , ( Relation < NumberVector > ) distanceQuery . getRelation ( ) ) ; } return new RStarTreeKNNQuery <> ( tree , distanceQuery . getRelation ( ) , df ) ; }
|
Get an RTree knn query using an optimized double implementation when possible .
| 187
| 15
|
156,964
|
private void privateReconfigureLogging ( String pkg , final String name ) { LogManager logManager = LogManager . getLogManager ( ) ; Logger logger = Logger . getLogger ( LoggingConfiguration . class . getName ( ) ) ; // allow null as package name. if ( pkg == null ) { pkg = "" ; } // Load logging configuration from current directory String cfgfile = name ; if ( new File ( name ) . exists ( ) ) { cfgfile = name ; } else { // Fall back to full path / resources. cfgfile = pkg . replace ( ' ' , File . separatorChar ) + File . separatorChar + name ; } try { InputStream cfgdata = openSystemFile ( cfgfile ) ; logManager . readConfiguration ( cfgdata ) ; // also load as properties for us, to get debug flag. InputStream cfgdata2 = openSystemFile ( cfgfile ) ; Properties cfgprop = new Properties ( ) ; cfgprop . load ( cfgdata2 ) ; DEBUG = Boolean . parseBoolean ( cfgprop . getProperty ( "debug" ) ) ; logger . info ( "Logging configuration read." ) ; } catch ( FileNotFoundException e ) { logger . log ( Level . SEVERE , "Could not find logging configuration file: " + cfgfile , e ) ; } catch ( Exception e ) { logger . log ( Level . SEVERE , "Failed to configure logging from file: " + cfgfile , e ) ; } }
|
Reconfigure logging .
| 335
| 5
|
156,965
|
private static InputStream openSystemFile ( String filename ) throws FileNotFoundException { try { return new FileInputStream ( filename ) ; } catch ( FileNotFoundException e ) { // try with classloader String resname = File . separatorChar != ' ' ? filename . replace ( File . separatorChar , ' ' ) : filename ; ClassLoader cl = LoggingConfiguration . class . getClassLoader ( ) ; InputStream result = cl . getResourceAsStream ( resname ) ; if ( result != null ) { return result ; } // Sometimes, URLClassLoader does not work right. Try harder: URL u = cl . getResource ( resname ) ; if ( u == null ) { throw e ; } try { URLConnection conn = u . openConnection ( ) ; conn . setUseCaches ( false ) ; result = conn . getInputStream ( ) ; if ( result != null ) { return result ; } } catch ( IOException x ) { throw e ; // Throw original error instead. } throw e ; } }
|
Private copy from FileUtil to avoid cross - dependencies . Try to open a file first trying the file system then falling back to the classpath .
| 218
| 30
|
156,966
|
public static void setVerbose ( java . util . logging . Level verbose ) { if ( verbose . intValue ( ) <= Level . VERBOSE . intValue ( ) ) { // decrease to VERBOSE if it was higher, otherwise further to // VERYVERBOSE if ( LOGGER_GLOBAL_TOP . getLevel ( ) == null || LOGGER_GLOBAL_TOP . getLevel ( ) . intValue ( ) > verbose . intValue ( ) ) { LOGGER_GLOBAL_TOP . setLevel ( verbose ) ; } if ( LOGGER_ELKI_TOP . getLevel ( ) == null || LOGGER_ELKI_TOP . getLevel ( ) . intValue ( ) > verbose . intValue ( ) ) { LOGGER_ELKI_TOP . setLevel ( verbose ) ; } } else { // re-increase to given level if it was verbose or "very verbose". if ( LOGGER_GLOBAL_TOP . getLevel ( ) != null && ( // Level . VERBOSE . equals ( LOGGER_GLOBAL_TOP . getLevel ( ) ) || // Level . VERYVERBOSE . equals ( LOGGER_GLOBAL_TOP . getLevel ( ) ) // ) ) { LOGGER_GLOBAL_TOP . setLevel ( verbose ) ; } if ( LOGGER_ELKI_TOP . getLevel ( ) != null && ( // Level . VERBOSE . equals ( LOGGER_ELKI_TOP . getLevel ( ) ) || // Level . VERYVERBOSE . equals ( LOGGER_ELKI_TOP . getLevel ( ) ) // ) ) { LOGGER_ELKI_TOP . setLevel ( verbose ) ; } } }
|
Reconfigure logging to enable verbose logging at the top level .
| 379
| 14
|
156,967
|
public static void setStatistics ( ) { // decrease to INFO if it was higher if ( LOGGER_GLOBAL_TOP . getLevel ( ) == null || LOGGER_GLOBAL_TOP . getLevel ( ) . intValue ( ) > Level . STATISTICS . intValue ( ) ) { LOGGER_GLOBAL_TOP . setLevel ( Level . STATISTICS ) ; } if ( LOGGER_ELKI_TOP . getLevel ( ) == null || LOGGER_ELKI_TOP . getLevel ( ) . intValue ( ) > Level . STATISTICS . intValue ( ) ) { LOGGER_ELKI_TOP . setLevel ( Level . STATISTICS ) ; } if ( LOGGER_TIME_TOP . getLevel ( ) == null || LOGGER_TIME_TOP . getLevel ( ) . intValue ( ) > Level . STATISTICS . intValue ( ) ) { LOGGER_TIME_TOP . setLevel ( Level . STATISTICS ) ; } }
|
Enable runtime performance logging .
| 215
| 5
|
156,968
|
public static void replaceDefaultHandler ( Handler handler ) { Logger rootlogger = LogManager . getLogManager ( ) . getLogger ( "" ) ; for ( Handler h : rootlogger . getHandlers ( ) ) { if ( h instanceof CLISmartHandler ) { rootlogger . removeHandler ( h ) ; } } addHandler ( handler ) ; }
|
Replace the default log handler with the given log handler .
| 79
| 12
|
156,969
|
public static void setDefaultLevel ( java . util . logging . Level level ) { Logger . getLogger ( TOPLEVEL_PACKAGE ) . setLevel ( level ) ; }
|
Set the default level .
| 40
| 5
|
156,970
|
public static double [ ] getRelativeCoordinates ( Event evt , Element reference ) { if ( evt instanceof DOMMouseEvent && reference instanceof SVGLocatable && reference instanceof SVGElement ) { // Get the screen (pixel!) coordinates DOMMouseEvent gnme = ( DOMMouseEvent ) evt ; SVGMatrix mat = ( ( SVGLocatable ) reference ) . getScreenCTM ( ) ; SVGMatrix imat = mat . inverse ( ) ; SVGPoint cPt = ( ( SVGElement ) reference ) . getOwnerSVGElement ( ) . createSVGPoint ( ) ; cPt . setX ( gnme . getClientX ( ) ) ; cPt . setY ( gnme . getClientY ( ) ) ; // Have Batik transform the screen (pixel!) coordinates into SVG element // coordinates cPt = cPt . matrixTransform ( imat ) ; return new double [ ] { cPt . getX ( ) , cPt . getY ( ) } ; } return null ; }
|
Get the relative coordinates of a point within the coordinate system of a particular SVG Element .
| 230
| 17
|
156,971
|
public Clustering < MeanModel > run ( Relation < NumberVector > relation ) { final int dim = RelationUtil . dimensionality ( relation ) ; CFTree tree = cffactory . newTree ( relation . getDBIDs ( ) , relation ) ; // The CFTree does not store points. We have to reassign them (and the // quality is better than if we used the initial assignment, because centers // move in particular in the beginning, so we always had many outliers. Map < ClusteringFeature , ModifiableDBIDs > idmap = new HashMap < ClusteringFeature , ModifiableDBIDs > ( tree . leaves ) ; for ( DBIDIter iter = relation . iterDBIDs ( ) ; iter . valid ( ) ; iter . advance ( ) ) { ClusteringFeature cf = tree . findLeaf ( relation . get ( iter ) ) ; ModifiableDBIDs ids = idmap . get ( cf ) ; if ( ids == null ) { idmap . put ( cf , ids = DBIDUtil . newArray ( cf . n ) ) ; } ids . add ( iter ) ; } Clustering < MeanModel > result = new Clustering <> ( "BIRCH-leaves" , "BIRCH leaves" ) ; for ( Map . Entry < ClusteringFeature , ModifiableDBIDs > ent : idmap . entrySet ( ) ) { ClusteringFeature leaf = ent . getKey ( ) ; double [ ] center = new double [ dim ] ; for ( int i = 0 ; i < dim ; i ++ ) { center [ i ] = leaf . centroid ( i ) ; } result . addToplevelCluster ( new Cluster <> ( ent . getValue ( ) , new MeanModel ( center ) ) ) ; } return result ; }
|
Run the clustering algorithm .
| 394
| 6
|
156,972
|
public OutlierResult run ( Database database , Relation < O > relation ) { // Get a nearest neighbor query on the relation. KNNQuery < O > knnq = QueryUtil . getKNNQuery ( relation , getDistanceFunction ( ) , k ) ; // Output data storage WritableDoubleDataStore scores = DataStoreUtil . makeDoubleStorage ( relation . getDBIDs ( ) , DataStoreFactory . HINT_DB ) ; // Track minimum and maximum scores DoubleMinMax minmax = new DoubleMinMax ( ) ; // Iterate over all objects for ( DBIDIter iter = relation . iterDBIDs ( ) ; iter . valid ( ) ; iter . advance ( ) ) { KNNList neighbors = knnq . getKNNForDBID ( iter , k ) ; // Aggregate distances MeanVariance mv = new MeanVariance ( ) ; for ( DoubleDBIDListIter neighbor = neighbors . iter ( ) ; neighbor . valid ( ) ; neighbor . advance ( ) ) { // Skip the object itself. The 0 is not very informative. if ( DBIDUtil . equal ( iter , neighbor ) ) { continue ; } mv . put ( neighbor . doubleValue ( ) ) ; } // Store score scores . putDouble ( iter , mv . getSampleStddev ( ) ) ; } // Wrap the result in the standard containers // Actual min-max, theoretical min-max! OutlierScoreMeta meta = new BasicOutlierScoreMeta ( minmax . getMin ( ) , minmax . getMax ( ) , 0 , Double . POSITIVE_INFINITY ) ; DoubleRelation rel = new MaterializedDoubleRelation ( relation . getDBIDs ( ) , "stddev-outlier" , scores ) ; return new OutlierResult ( meta , rel ) ; }
|
Run the outlier detection algorithm
| 387
| 6
|
156,973
|
public static void main ( String [ ] args ) { if ( args . length > 0 && args [ 0 ] . charAt ( 0 ) != ' ' ) { Class < ? > cls = ELKIServiceRegistry . findImplementation ( AbstractApplication . class , args [ 0 ] ) ; if ( cls != null ) { try { Method m = cls . getMethod ( "main" , String [ ] . class ) ; Object a = Arrays . copyOfRange ( args , 1 , args . length ) ; m . invoke ( null , a ) ; } catch ( InvocationTargetException e ) { LoggingUtil . exception ( e . getCause ( ) ) ; } catch ( Exception e ) { LoggingUtil . exception ( e ) ; } return ; } } try { Method m = DEFAULT_APPLICATION . getMethod ( "main" , String [ ] . class ) ; m . invoke ( null , ( Object ) args ) ; } catch ( Exception e ) { LoggingUtil . exception ( e ) ; } }
|
Launch ELKI .
| 226
| 4
|
156,974
|
public static Parameterizer getParameterizer ( Class < ? > c ) { for ( Class < ? > inner : c . getDeclaredClasses ( ) ) { if ( Parameterizer . class . isAssignableFrom ( inner ) ) { try { return inner . asSubclass ( Parameterizer . class ) . newInstance ( ) ; } catch ( Exception e ) { LOG . warning ( "Non-usable Parameterizer in class: " + c . getName ( ) ) ; } } } return null ; }
|
Get a parameterizer for the given class .
| 112
| 9
|
156,975
|
private void putRec ( O obj , Rec < O > rec ) { graph . put ( obj , rec ) ; for ( int i = 0 ; i < numelems ; ++ i ) { if ( obj == elems [ i ] ) { return ; } } if ( elems . length == numelems ) { elems = Arrays . copyOf ( elems , ( elems . length << 1 ) + 1 ) ; } elems [ numelems ++ ] = obj ; }
|
Put a record .
| 105
| 4
|
156,976
|
private void removeRec ( O obj ) { graph . remove ( obj ) ; for ( int i = 0 ; i < numelems ; ++ i ) { if ( obj == elems [ i ] ) { System . arraycopy ( elems , i + 1 , elems , i , -- numelems - i ) ; elems [ numelems ] = null ; return ; } } }
|
Remove a record .
| 84
| 4
|
156,977
|
protected JTree createTree ( ) { JTree tree = new JTree ( model ) ; tree . setName ( "TreePopup.tree" ) ; tree . setFont ( getFont ( ) ) ; tree . setForeground ( getForeground ( ) ) ; tree . setBackground ( getBackground ( ) ) ; tree . setBorder ( null ) ; tree . setFocusable ( true ) ; tree . addMouseListener ( handler ) ; tree . addKeyListener ( handler ) ; tree . setCellRenderer ( new Renderer ( ) ) ; return tree ; }
|
Creates the JList used in the popup to display the items in the combo box model . This method is called when the UI class is created .
| 122
| 30
|
156,978
|
protected JScrollPane createScroller ( ) { JScrollPane sp = new JScrollPane ( tree , ScrollPaneConstants . VERTICAL_SCROLLBAR_AS_NEEDED , ScrollPaneConstants . HORIZONTAL_SCROLLBAR_NEVER ) ; sp . setHorizontalScrollBar ( null ) ; sp . setName ( "TreePopup.scrollPane" ) ; sp . setFocusable ( false ) ; sp . getVerticalScrollBar ( ) . setFocusable ( false ) ; sp . setBorder ( null ) ; return sp ; }
|
Creates the scroll pane which houses the scrollable tree .
| 133
| 12
|
156,979
|
public void show ( Component parent ) { Dimension parentSize = parent . getSize ( ) ; Insets insets = getInsets ( ) ; // reduce the width of the scrollpane by the insets so that the popup // is the same width as the combo box. parentSize . setSize ( parentSize . width - ( insets . right + insets . left ) , 10 * parentSize . height ) ; Dimension scrollSize = computePopupBounds ( parent , 0 , getBounds ( ) . height , parentSize . width , parentSize . height ) . getSize ( ) ; scroller . setMaximumSize ( scrollSize ) ; scroller . setPreferredSize ( scrollSize ) ; scroller . setMinimumSize ( scrollSize ) ; super . show ( parent , 0 , parent . getHeight ( ) ) ; tree . requestFocusInWindow ( ) ; }
|
Display the popup attached to the given component .
| 186
| 9
|
156,980
|
protected void fireActionPerformed ( ActionEvent event ) { Object [ ] listeners = listenerList . getListenerList ( ) ; for ( int i = listeners . length - 2 ; i >= 0 ; i -= 2 ) { if ( listeners [ i ] == ActionListener . class ) { ( ( ActionListener ) listeners [ i + 1 ] ) . actionPerformed ( event ) ; } } }
|
Notify action listeners .
| 82
| 5
|
156,981
|
private < T > void setCached ( String prefix , String postfix , T data ) { cache . put ( prefix + ' ' + postfix , data ) ; }
|
Set a cache value
| 36
| 4
|
156,982
|
protected String getPropertyValue ( String prefix , String postfix ) { String ret = properties . getProperty ( prefix + "." + postfix ) ; if ( ret != null ) { // logger.debugFine("Found property: "+prefix + "." + // postfix+" for "+prefix); return ret ; } int pos = prefix . length ( ) ; while ( pos > 0 ) { pos = prefix . lastIndexOf ( ' ' , pos - 1 ) ; if ( pos <= 0 ) { break ; } ret = properties . getProperty ( prefix . substring ( 0 , pos ) + ' ' + postfix ) ; if ( ret != null ) { // logger.debugFine("Found property: "+prefix.substring(0, pos) + "." + // postfix+" for "+prefix); return ret ; } } ret = properties . getProperty ( postfix ) ; if ( ret != null ) { // logger.debugFine("Found property: "+postfix+" for "+prefix); return ret ; } return null ; }
|
Retrieve the property value for a particular path + type pair .
| 220
| 13
|
156,983
|
@ SuppressWarnings ( "unchecked" ) public static < V extends NumberVector > NumberVector . Factory < V > guessFactory ( SimpleTypeInformation < V > in ) { NumberVector . Factory < V > factory = null ; if ( in instanceof VectorTypeInformation ) { factory = ( NumberVector . Factory < V > ) ( ( VectorTypeInformation < V > ) in ) . getFactory ( ) ; } if ( factory == null ) { // FIXME: hack. Add factories to simple type information, too? try { Field f = in . getRestrictionClass ( ) . getField ( "FACTORY" ) ; factory = ( NumberVector . Factory < V > ) f . get ( null ) ; } catch ( Exception e ) { LoggingUtil . warning ( "Cannot determine factory for type " + in . getRestrictionClass ( ) , e ) ; } } return factory ; }
|
Try to guess the appropriate factory .
| 194
| 7
|
156,984
|
public void appendSimple ( Object ... data ) { if ( data . length != meta . size ( ) ) { throw new AbortException ( "Invalid number of attributes in 'append'." ) ; } for ( int i = 0 ; i < data . length ; i ++ ) { @ SuppressWarnings ( "unchecked" ) final List < Object > col = ( List < Object > ) columns . get ( i ) ; col . add ( data [ i ] ) ; } }
|
Append a new record to the data set . Pay attention to having the right number of values!
| 102
| 20
|
156,985
|
public static MultipleObjectsBundle fromStream ( BundleStreamSource source ) { MultipleObjectsBundle bundle = new MultipleObjectsBundle ( ) ; boolean stop = false ; DBIDVar var = null ; ArrayModifiableDBIDs ids = null ; int size = 0 ; while ( ! stop ) { BundleStreamSource . Event ev = source . nextEvent ( ) ; switch ( ev ) { case END_OF_STREAM : stop = true ; break ; case META_CHANGED : BundleMeta smeta = source . getMeta ( ) ; // rebuild bundle meta bundle . meta = new BundleMeta ( ) ; for ( int i = 0 ; i < bundle . columns . size ( ) ; i ++ ) { bundle . meta . add ( smeta . get ( i ) ) ; } for ( int i = bundle . metaLength ( ) ; i < smeta . size ( ) ; i ++ ) { List < Object > data = new ArrayList <> ( bundle . dataLength ( ) + 1 ) ; bundle . appendColumn ( smeta . get ( i ) , data ) ; } if ( var == null && source . hasDBIDs ( ) ) { var = DBIDUtil . newVar ( ) ; ids = DBIDUtil . newArray ( ) ; } continue ; case NEXT_OBJECT : if ( var != null && source . assignDBID ( var ) ) { ids . add ( var ) ; } for ( int i = 0 ; i < bundle . metaLength ( ) ; i ++ ) { @ SuppressWarnings ( "unchecked" ) final List < Object > col = ( List < Object > ) bundle . columns . get ( i ) ; col . add ( source . data ( i ) ) ; } ++ size ; continue ; default : LoggingUtil . warning ( "Unknown event: " + ev ) ; continue ; } } if ( ids != null ) { if ( size != ids . size ( ) ) { LOG . warning ( "Not every object had an DBID - discarding DBIDs: " + size + " != " + ids . size ( ) ) ; } else { bundle . setDBIDs ( ids ) ; } } return bundle ; }
|
Convert an object stream to a bundle
| 474
| 8
|
156,986
|
public Object [ ] getRow ( int row ) { Object [ ] ret = new Object [ columns . size ( ) ] ; for ( int c = 0 ; c < columns . size ( ) ; c ++ ) { ret [ c ] = data ( row , c ) ; } return ret ; }
|
Get an object row .
| 62
| 5
|
156,987
|
/**
 * Performs a batch k-nearest-neighbor search for all query objects in
 * {@code knnLists} at once, recursively traversing the subtree rooted at
 * {@code node}.
 *
 * At leaves every stored entry is compared against every query heap; at
 * directory nodes, children are visited in order of minimum distance and a
 * child is descended into as soon as any query's current kNN distance could
 * still be improved within it.
 *
 * @param node current subtree root
 * @param knnLists per-query kNN heaps, updated in place
 */
protected void batchNN(AbstractRStarTreeNode<?, ?> node, Map<DBID, KNNHeap> knnLists) {
  if(node.isLeaf()) {
    // Leaf: test every entry against every query's heap.
    for(int i = 0; i < node.getNumEntries(); i++) {
      SpatialEntry p = node.getEntry(i);
      for(Entry<DBID, KNNHeap> ent : knnLists.entrySet()) {
        final DBID q = ent.getKey();
        final KNNHeap knns_q = ent.getValue();
        double knn_q_maxDist = knns_q.getKNNDistance();
        DBID pid = ((LeafEntry) p).getDBID();
        // FIXME: objects are NOT accessible by DBID in a plain R-tree context!
        double dist_pq = distanceFunction.distance(relation.get(pid), relation.get(q));
        tree.statistics.countDistanceCalculation();
        // Only insert if it improves (or ties) the current kNN distance.
        if(dist_pq <= knn_q_maxDist) {
          knns_q.insert(dist_pq, pid);
        }
      }
    }
  }
  else {
    // Directory node: sort children by minimum distance to the query set.
    ModifiableDBIDs ids = DBIDUtil.newArray(knnLists.size());
    for(DBID id : knnLists.keySet()) {
      ids.add(id);
    }
    List<DoubleDistanceEntry> entries = getSortedEntries(node, ids);
    for(DoubleDistanceEntry distEntry : entries) {
      double minDist = distEntry.distance;
      for(Entry<DBID, KNNHeap> ent : knnLists.entrySet()) {
        final KNNHeap knns_q = ent.getValue();
        double knn_q_maxDist = knns_q.getKNNDistance();
        // Descend if at least one query could still be improved here;
        // break so each child is visited at most once.
        if(minDist <= knn_q_maxDist) {
          SpatialEntry entry = distEntry.entry;
          AbstractRStarTreeNode<?, ?> child = tree.getNode(((DirectoryEntry) entry).getPageID());
          batchNN(child, knnLists);
          break;
        }
      }
    }
  }
}
|
Performs a batch k-nearest-neighbor (kNN) query.
| 494
| 8
|
156,988
|
/**
 * Sort the entries of a node by their minimum distance to any of the given
 * query objects.
 *
 * @param node the node whose entries are scored
 * @param ids the query object ids
 * @return the node's entries, ascending by minimum distance over all queries
 */
protected List<DoubleDistanceEntry> getSortedEntries(AbstractRStarTreeNode<?, ?> node, DBIDs ids) {
  final int numEntries = node.getNumEntries();
  List<DoubleDistanceEntry> scored = new ArrayList<>();
  for(int pos = 0; pos < numEntries; pos++) {
    SpatialEntry entry = node.getEntry(pos);
    // Aggregate the smallest minDist over all query objects.
    double best = Double.MAX_VALUE;
    for(DBIDIter it = ids.iter(); it.valid(); it.advance()) {
      double candidate = distanceFunction.minDist(entry, relation.get(it));
      tree.statistics.countDistanceCalculation();
      best = Math.min(candidate, best);
    }
    scored.add(new DoubleDistanceEntry(entry, best));
  }
  Collections.sort(scored);
  return scored;
}
|
Sorts the entries of the specified node according to their minimum distance to the specified objects .
| 197
| 18
|
156,989
|
/**
 * Test whether a 2d point falls outside the current certainty region (a
 * non-regular diamond spanned by four extremal corner points) and, if so,
 * update the corner candidates and the derived axis-aligned bounds.
 *
 * @param point 2d point {x, y}
 * @return true if the point is a candidate (outside the pruning region, or
 *         the very first point); false if it can be pruned
 */
private boolean checkCandidateUpdate(double[] point) {
  final double x = point[0], y = point[1];
  // First point seen: initialize all bounds and corner candidates to it.
  if(points.isEmpty()) {
    leftx = rightx = x;
    topy = bottomy = y;
    topleft = topright = bottomleft = bottomright = point;
    return true;
  }
  // A non-regular diamond spanned by left, top, right, and bottom.
  if(x <= leftx || x >= rightx || y <= bottomy || y >= topy) {
    // xpy/xmy are the point's positions along the two diagonal directions.
    double xpy = x + y, xmy = x - y;
    // Update bounds:
    boolean changed = false;
    if(xpy < bottomleft[0] + bottomleft[1]) {
      bottomleft = point;
      changed = true;
    }
    else if(xpy > topright[0] + topright[1]) {
      topright = point;
      changed = true;
    }
    if(xmy < topleft[0] - topleft[1]) {
      topleft = point;
      changed = true;
    }
    else if(xmy > bottomright[0] - bottomright[1]) {
      bottomright = point;
      changed = true;
    }
    if(changed) {
      // Recompute the conservative axis-aligned bounds from the corners.
      leftx = Math.max(bottomleft[0], topleft[0]);
      rightx = Math.min(bottomright[0], topright[0]);
      topy = Math.min(topleft[1], topright[1]);
      bottomy = Math.max(bottomleft[1], bottomright[1]);
    }
    return true;
  }
  return false;
}
|
Check whether a point is inside the current bounds and update the bounds
| 358
| 13
|
156,990
|
static DBIDs randomSample ( DBIDs ids , int samplesize , Random rnd , DBIDs previous ) { if ( previous == null ) { return DBIDUtil . randomSample ( ids , samplesize , rnd ) ; } ModifiableDBIDs sample = DBIDUtil . newHashSet ( samplesize ) ; sample . addDBIDs ( previous ) ; sample . addDBIDs ( DBIDUtil . randomSample ( ids , samplesize - previous . size ( ) , rnd ) ) ; // If these two were not disjoint, we can be short of the desired size! if ( sample . size ( ) < samplesize ) { // Draw a large enough sample to make sure to be able to fill it now. // This can be less random though, because the iterator may impose an // order; but this is a rare code path. for ( DBIDIter it = DBIDUtil . randomSample ( ids , samplesize , rnd ) . iter ( ) ; sample . size ( ) < samplesize && it . valid ( ) ; it . advance ( ) ) { sample . add ( it ) ; } } return sample ; }
|
Draw a random sample of the desired size .
| 247
| 9
|
156,991
|
@ Override public void actionPerformed ( ActionEvent e ) { // Use a new JFileChooser. Inconsistent behaviour otherwise! final JFileChooser fc = new JFileChooser ( new File ( "." ) ) ; if ( param . isDefined ( ) ) { fc . setSelectedFile ( param . getValue ( ) ) ; } if ( e . getSource ( ) == button ) { int returnVal = fc . showOpenDialog ( button ) ; if ( returnVal == JFileChooser . APPROVE_OPTION ) { textfield . setText ( fc . getSelectedFile ( ) . getPath ( ) ) ; fireValueChanged ( ) ; } // else: do nothing on cancel. } else if ( e . getSource ( ) == textfield ) { fireValueChanged ( ) ; } else { LoggingUtil . warning ( "actionPerformed triggered by unknown source: " + e . getSource ( ) ) ; } }
|
Button callback to show the file selector
| 212
| 7
|
156,992
|
/**
 * Inline a referenced thumbnail as a base64-encoded PNG data URI.
 *
 * @param doc document for cloning
 * @param urldata thumbnail URL to resolve via the registry
 * @param eold element to clone and rewrite
 * @return cloned element with the image inlined, or null on failure
 */
protected Node inlineThumbnail(Document doc, ParsedURL urldata, Node eold) {
  RenderableImage img = ThumbnailRegistryEntry.handleURL(urldata);
  if(img == null) {
    LoggingUtil.warning("Image not found in registry: " + urldata.toString());
    return null;
  }
  // Render the image into a base64 "data:" PNG buffer.
  ByteArrayOutputStream buffer = new ByteArrayOutputStream();
  try {
    buffer.write(SVGSyntax.DATA_PROTOCOL_PNG_PREFIX.getBytes());
    Base64EncoderStream b64 = new Base64EncoderStream(buffer);
    ImageIO.write(img.createDefaultRendering(), "png", b64);
    b64.close();
  }
  catch(IOException e) {
    LoggingUtil.exception("Exception serializing image to png", e);
    return null;
  }
  Element clone = (Element) super.cloneNode(doc, eold);
  // Strip line breaks the base64 encoder may have inserted.
  String href = buffer.toString().replaceAll("\\s*[\\r\\n]+\\s*", "");
  clone.setAttributeNS(SVGConstants.XLINK_NAMESPACE_URI, SVGConstants.XLINK_HREF_ATTRIBUTE, href);
  return clone;
}
|
Inline a referenced thumbnail .
| 285
| 6
|
156,993
|
/**
 * Open the output stream, wrapping it in GZIP compression when the file name
 * ends with the GZIP postfix.
 *
 * @param out output file
 * @return print stream writing to the (possibly compressed) file
 * @throws IOException on I/O errors opening the file or writing the GZIP header
 */
private static PrintStream openStream(File out) throws IOException {
  OutputStream os = new FileOutputStream(out);
  if(out.getName().endsWith(GZIP_POSTFIX)) {
    try {
      os = new GZIPOutputStream(os);
    }
    catch(IOException e) {
      // The GZIPOutputStream constructor writes the GZIP header and can
      // throw; close the underlying file stream to avoid leaking the handle.
      os.close();
      throw e;
    }
  }
  return new PrintStream(os);
}
|
Open the output stream using gzip if necessary .
| 67
| 10
|
156,994
|
/**
 * Set the dimensionality to a new value.
 *
 * @param dimensionality the new dimensionality
 * @throws IllegalArgumentException if the value is smaller than the maximum
 *         dimension already present in the stored values
 */
@Override
public void setDimensionality(int dimensionality) throws IllegalArgumentException {
  final int maxdim = getMaxDim();
  // Refuse to shrink below already-occurring dimensions.
  if(maxdim > dimensionality) {
    throw new IllegalArgumentException("Given dimensionality " + dimensionality + " is too small w.r.t. the given values (occurring maximum: " + maxdim + ").");
  }
  this.dimensionality = dimensionality;
}
|
Sets the dimensionality to the new value .
| 93
| 10
|
156,995
|
/**
 * Find the path to the leaf entry in the given subtree that stores the data
 * object with the specified MBR and id.
 *
 * Directory children are only descended into when their MBR intersects the
 * object's MBR, pruning irrelevant branches.
 *
 * @param subtree current subtree path
 * @param mbr the object's minimum bounding rectangle
 * @param id the object's database id
 * @return path to the matching leaf entry, or null if not found
 */
protected IndexTreePath<E> findPathToObject(IndexTreePath<E> subtree, SpatialComparable mbr, DBIDRef id) {
  N node = getNode(subtree.getEntry());
  if(node.isLeaf()) {
    // Leaf: scan entries for the matching DBID.
    for(int i = 0; i < node.getNumEntries(); i++) {
      if(DBIDUtil.equal(((LeafEntry) node.getEntry(i)).getDBID(), id)) {
        return new IndexTreePath<>(subtree, node.getEntry(i), i);
      }
    }
  }
  // directory node
  else {
    for(int i = 0; i < node.getNumEntries(); i++) {
      // Only descend where the child's MBR can contain the object.
      if(SpatialUtil.intersects(node.getEntry(i), mbr)) {
        IndexTreePath<E> childSubtree = new IndexTreePath<>(subtree, node.getEntry(i), i);
        IndexTreePath<E> path = findPathToObject(childSubtree, mbr, id);
        if(path != null) {
          return path;
        }
      }
    }
  }
  return null;
}
|
Returns the path to the leaf entry in the specified subtree that represents the data object with the specified MBR and id.
| 262
| 25
|
156,996
|
/**
 * Delete the leaf entry at the given path. Deletions for non-leaf entries
 * are not supported.
 *
 * After removing the entry, the tree is condensed: underflowing nodes are
 * collected and their entries reinserted, then the empty nodes are deleted.
 *
 * @param deletionPath path to the leaf entry to remove
 */
protected void deletePath(IndexTreePath<E> deletionPath) {
  N leaf = getNode(deletionPath.getParentPath().getEntry());
  int index = deletionPath.getIndex();
  // delete o
  E entry = leaf.getEntry(index);
  leaf.deleteEntry(index);
  writeNode(leaf);
  // condense the tree
  Stack<N> stack = new Stack<>();
  condenseTree(deletionPath.getParentPath(), stack);
  // reinsert underflow nodes
  while(!stack.empty()) {
    N node = stack.pop();
    if(node.isLeaf()) {
      for(int i = 0; i < node.getNumEntries(); i++) {
        // Reset overflow treatment before each reinsertion. Intended?
        settings.getOverflowTreatment().reinitialize();
        this.insertLeafEntry(node.getEntry(i));
      }
    }
    else {
      // Directory node: queue its children for processing instead.
      for(int i = 0; i < node.getNumEntries(); i++) {
        stack.push(getNode(node.getEntry(i)));
      }
    }
    deleteNode(node);
  }
  postDelete(entry);
  doExtraIntegrityChecks();
}
|
Delete a leaf at a given path — deletions for non-leaf entries are not supported!
| 271
| 18
|
156,997
|
protected List < E > createBulkLeafNodes ( List < E > objects ) { int minEntries = leafMinimum ; int maxEntries = leafCapacity ; ArrayList < E > result = new ArrayList <> ( ) ; List < List < E > > partitions = settings . bulkSplitter . partition ( objects , minEntries , maxEntries ) ; for ( List < E > partition : partitions ) { // create leaf node N leafNode = createNewLeafNode ( ) ; // insert data for ( E o : partition ) { leafNode . addLeafEntry ( o ) ; } // write to file writeNode ( leafNode ) ; result . add ( createNewDirectoryEntry ( leafNode ) ) ; if ( getLogger ( ) . isDebugging ( ) ) { getLogger ( ) . debugFine ( "Created leaf page " + leafNode . getPageID ( ) ) ; } } if ( getLogger ( ) . isDebugging ( ) ) { getLogger ( ) . debugFine ( "numDataPages = " + result . size ( ) ) ; } return result ; }
|
Creates and returns the leaf nodes for bulk load .
| 240
| 11
|
156,998
|
/**
 * Choose the best path in the given subtree for inserting an entry with the
 * given MBR at the specified depth.
 *
 * Containment is tried first; otherwise the configured insertion strategy
 * picks a child. Recursion stops at leaves or when the target depth is
 * reached.
 *
 * @param subtree current subtree path
 * @param mbr MBR of the entry to insert
 * @param depth target depth for the insertion
 * @param cur current depth in the recursion
 * @return path to the chosen insertion node
 */
protected IndexTreePath<E> choosePath(IndexTreePath<E> subtree, SpatialComparable mbr, int depth, int cur) {
  if(getLogger().isDebuggingFiner()) {
    getLogger().debugFiner("node " + subtree + ", depth " + depth);
  }
  N node = getNode(subtree.getEntry());
  if(node == null) {
    throw new RuntimeException("Page file did not return node for node id: " + getPageID(subtree.getEntry()));
  }
  if(node.isLeaf()) {
    return subtree;
  }
  // first test on containment
  IndexTreePath<E> newSubtree = containedTest(subtree, node, mbr);
  if(newSubtree != null) {
    return (++cur == depth) ? newSubtree : choosePath(newSubtree, mbr, depth, cur);
  }
  // No containing child; ask the insertion strategy to pick one.
  // NOTE(review): child 0 is fetched here only to test leafness below — in a
  // balanced tree all children are on the same level, so this stands in for
  // the chosen child.
  N childNode = getNode(node.getEntry(0));
  int num = settings.insertionStrategy.choose(node, NodeArrayAdapter.STATIC, mbr, height, cur);
  newSubtree = new IndexTreePath<>(subtree, node.getEntry(num), num);
  ++cur;
  if(cur == depth) {
    return newSubtree;
  }
  // children are leafs
  if(childNode.isLeaf()) {
    assert cur == newSubtree.getPathCount(); // Check for programming errors
    throw new IllegalArgumentException("childNode is leaf, but currentDepth != depth: " + cur + " != " + depth);
  }
  // children are directory nodes
  return choosePath(newSubtree, mbr, depth, cur);
}
|
Chooses the best path of the specified subtree for insertion of the given mbr at the specified level .
| 383
| 22
|
156,999
|
private N split ( N node ) { // choose the split dimension and the split point int minimum = node . isLeaf ( ) ? leafMinimum : dirMinimum ; long [ ] split = settings . nodeSplitter . split ( node , NodeArrayAdapter . STATIC , minimum ) ; // New node final N newNode = node . isLeaf ( ) ? createNewLeafNode ( ) : createNewDirectoryNode ( ) ; // do the split node . splitByMask ( newNode , split ) ; // write changes to file writeNode ( node ) ; writeNode ( newNode ) ; return newNode ; }
|
Splits the specified node and returns the newly created split node .
| 129
| 13
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.