idx int64 0 41.2k | question stringlengths 74 4.21k | target stringlengths 5 888 |
|---|---|---|
21,400 | public static double mean ( double [ ] vector ) { double sum = 0 ; if ( vector . length == 0 ) { return 0 ; } for ( int i = 0 ; i < vector . length ; i ++ ) { sum += vector [ i ] ; } return sum / ( double ) vector . length ; } | Computes the mean for an array of doubles . |
21,401 | public static int minIndex ( int [ ] ints ) { int minimum = 0 ; int minIndex = 0 ; for ( int i = 0 ; i < ints . length ; i ++ ) { if ( ( i == 0 ) || ( ints [ i ] < minimum ) ) { minIndex = i ; minimum = ints [ i ] ; } } return minIndex ; } | Returns index of minimum element in a given array of integers . First minimum is returned . |
21,402 | public static int minIndex ( double [ ] doubles ) { double minimum = 0 ; int minIndex = 0 ; for ( int i = 0 ; i < doubles . length ; i ++ ) { if ( ( i == 0 ) || ( doubles [ i ] < minimum ) ) { minIndex = i ; minimum = doubles [ i ] ; } } return minIndex ; } | Returns index of minimum element in a given array of doubles . First minimum is returned . |
21,403 | public static void normalize ( double [ ] doubles ) { double sum = 0 ; for ( int i = 0 ; i < doubles . length ; i ++ ) { sum += doubles [ i ] ; } normalize ( doubles , sum ) ; } | Normalizes the doubles in the array by their sum . |
21,404 | public static void normalize ( double [ ] doubles , double sum ) { if ( Double . isNaN ( sum ) ) { throw new IllegalArgumentException ( "Can't normalize array. Sum is NaN." ) ; } if ( sum == 0 ) { throw new IllegalArgumentException ( "Can't normalize array. Sum is zero." ) ; } for ( int i = 0 ; i < doubles . length ; i ++ ) { doubles [ i ] /= sum ; } } | Normalizes the doubles in the array using the given value . |
21,405 | public static double [ ] logs2probs ( double [ ] a ) { double max = a [ maxIndex ( a ) ] ; double sum = 0.0 ; double [ ] result = new double [ a . length ] ; for ( int i = 0 ; i < a . length ; i ++ ) { result [ i ] = Math . exp ( a [ i ] - max ) ; sum += result [ i ] ; } normalize ( result , sum ) ; return result ; } | Converts an array containing the natural logarithms of probabilities stored in a vector back into probabilities . The probabilities are assumed to sum to one . |
21,406 | public static double probToLogOdds ( double prob ) { if ( gr ( prob , 1 ) || ( sm ( prob , 0 ) ) ) { throw new IllegalArgumentException ( "probToLogOdds: probability must " + "be in [0,1] " + prob ) ; } double p = SMALL + ( 1.0 - 2 * SMALL ) * prob ; return Math . log ( p / ( 1 - p ) ) ; } | Returns the log - odds for a given probability . |
21,407 | public static double roundDouble ( double value , int afterDecimalPoint ) { double mask = Math . pow ( 10.0 , ( double ) afterDecimalPoint ) ; return ( double ) ( Math . round ( value * mask ) ) / mask ; } | Rounds a double to the given number of decimal places . |
21,408 | public static double variance ( double [ ] vector ) { double sum = 0 , sumSquared = 0 ; if ( vector . length <= 1 ) { return 0 ; } for ( int i = 0 ; i < vector . length ; i ++ ) { sum += vector [ i ] ; sumSquared += ( vector [ i ] * vector [ i ] ) ; } double result = ( sumSquared - ( sum * sum / ( double ) vector . length ) ) / ( double ) ( vector . length - 1 ) ; if ( result < 0 ) { return 0 ; } else { return result ; } } | Computes the variance for an array of doubles . |
21,409 | public static int sum ( int [ ] ints ) { int sum = 0 ; for ( int i = 0 ; i < ints . length ; i ++ ) { sum += ints [ i ] ; } return sum ; } | Computes the sum of the elements of an array of integers . |
21,410 | private static void quickSort ( double [ ] array , int [ ] index , int left , int right ) { if ( left < right ) { int middle = partition ( array , index , left , right ) ; quickSort ( array , index , left , middle ) ; quickSort ( array , index , middle + 1 , right ) ; } } | assignable index ; |
21,411 | public static String [ ] breakUp ( String s , int columns ) { Vector < String > result ; String line ; BreakIterator boundary ; int boundaryStart ; int boundaryEnd ; String word ; String punctuation ; int i ; String [ ] lines ; result = new Vector < String > ( ) ; punctuation = " .,;:!?'\"" ; lines = s . split ( "\n" ) ; for ( i = 0 ; i < lines . length ; i ++ ) { boundary = BreakIterator . getWordInstance ( ) ; boundary . setText ( lines [ i ] ) ; boundaryStart = boundary . first ( ) ; boundaryEnd = boundary . next ( ) ; line = "" ; while ( boundaryEnd != BreakIterator . DONE ) { word = lines [ i ] . substring ( boundaryStart , boundaryEnd ) ; if ( line . length ( ) >= columns ) { if ( word . length ( ) == 1 ) { if ( punctuation . indexOf ( word . charAt ( 0 ) ) > - 1 ) { line += word ; word = "" ; } } result . add ( line ) ; line = "" ; } line += word ; boundaryStart = boundaryEnd ; boundaryEnd = boundary . next ( ) ; } if ( line . length ( ) > 0 ) result . add ( line ) ; } return result . toArray ( new String [ result . size ( ) ] ) ; } | Breaks up the string if wider than columns characters . |
21,412 | public boolean isMetBy ( Class < ? > klass ) { if ( ! CapabilitiesHandler . class . isAssignableFrom ( klass ) ) return isMetBy ( NON_HANDLER_CAPABILITIES ) ; CapabilitiesHandler instance ; try { instance = ( CapabilitiesHandler ) klass . newInstance ( ) ; } catch ( InstantiationException | IllegalAccessException e ) { throw new RuntimeException ( "Couldn't instantiate CapabilitiesHandler " + klass . getSimpleName ( ) , e ) ; } return isMetBy ( instance ) ; } | Tests if the requirement is met by the given class . |
21,413 | public static CapabilityRequirement hasAll ( Capability ... capabilities ) { return new CapabilityRequirement ( c -> { for ( Capability capability : capabilities ) { if ( ! c . hasCapability ( capability ) ) return false ; } return true ; } ) ; } | Creates a requirement that a given set of capabilities have all of the specified capabilities . |
21,414 | protected void setGraph ( MeasureCollection [ ] measures , MeasureCollection [ ] measureStds , int [ ] processFrequencies , Color [ ] colors ) { super . setGraph ( measures , measureStds , colors ) ; } | Updates the measure collection information and repaints the curves . |
21,415 | private void paintFullCurve ( Graphics g , int i ) { if ( this . measures [ i ] . getNumberOfValues ( this . measureSelected ) == 0 ) { return ; } g . setColor ( this . colors [ i ] ) ; int height = getHeight ( ) ; int n = this . measures [ i ] . getNumberOfValues ( this . measureSelected ) ; int [ ] x = new int [ n ] ; int [ ] y = new int [ n ] ; for ( int j = 0 ; j < n ; j ++ ) { x [ j ] = ( int ) ( j * x_resolution ) ; y [ j ] = ( int ) ( height - ( this . measures [ i ] . getValue ( this . measureSelected , j ) / this . upper_y_value ) * height ) ; if ( this . isStandardDeviationPainted ) { int len = ( int ) ( ( this . measureStds [ i ] . getValue ( this . measureSelected , j ) / this . upper_y_value ) * height ) ; paintStandardDeviation ( g , len , x [ j ] , y [ j ] ) ; } } g . drawPolyline ( x , y , n ) ; } | Draws a single curve on the canvas . |
21,416 | public void computeValue ( DoubleVector values ) { if ( this . isType ( ) ) { setValues ( values ) ; double sumDif = 0.0 ; this . value = this . values . sumOfValues ( ) / ( double ) values . numValues ( ) ; for ( int i = 0 ; i < this . values . numValues ( ) ; i ++ ) { double dif = this . values . getValue ( i ) - this . value ; sumDif += Math . pow ( dif , 2 ) ; } sumDif = sumDif / this . values . numValues ( ) ; this . std = Math . sqrt ( sumDif ) ; } } | Calculates the value of measure |
21,417 | protected void xAxis ( Graphics g ) { g . setColor ( Color . BLACK ) ; g . drawLine ( X_OFFSET_LEFT , calcY ( 0 ) , width + X_OFFSET_LEFT , calcY ( 0 ) ) ; drawXLabels ( g ) ; } | Draws the x axis containing of the axis line and the labels . |
21,418 | private void yAxis ( Graphics g ) { g . setColor ( Color . BLACK ) ; g . drawLine ( X_OFFSET_LEFT , calcY ( 0 ) , X_OFFSET_LEFT , Y_OFFSET_TOP ) ; g . setColor ( new Color ( 220 , 220 , 220 ) ) ; g . drawLine ( X_OFFSET_LEFT , height / 2 + Y_OFFSET_TOP , getWidth ( ) , height / 2 + Y_OFFSET_TOP ) ; g . setColor ( Color . BLACK ) ; DecimalFormat d = new DecimalFormat ( "0.00" ) ; double numLabels = Math . min ( Math . pow ( 2 , y_resolution ) , 32 ) ; for ( int i = 0 ; i <= numLabels ; i ++ ) { double fraction = i / numLabels ; double value = fraction * upper_y_value ; g . drawString ( d . format ( value ) , 1 , ( int ) ( ( 1 - fraction ) * height ) + Y_OFFSET_TOP + 5 ) ; g . drawLine ( X_OFFSET_LEFT - 5 , ( int ) ( ( 1 - fraction ) * height ) + Y_OFFSET_TOP , X_OFFSET_LEFT , ( int ) ( ( 1 - fraction ) * height ) + Y_OFFSET_TOP ) ; } } | Draws the y axis containing of the axis line the horizontal helping line and the labels . |
21,419 | private void constructMeanStdPreviewsForParam ( int numEntriesPerPreview , int numParamValues , int paramValue ) { List < double [ ] > meanParamMeasurements = calculateMeanMeasurementsForParam ( numEntriesPerPreview , numParamValues , paramValue ) ; List < double [ ] > stdParamMeasurements = calculateStdMeasurementsForParam ( numEntriesPerPreview , numParamValues , paramValue , meanParamMeasurements ) ; String [ ] meanMeasurementNames = this . origMultiRunPreviews . getMeasurementNames ( ) ; meanMeasurementNames = Arrays . copyOfRange ( meanMeasurementNames , 4 , meanMeasurementNames . length ) ; String [ ] stdMeasurementNames = new String [ meanMeasurementNames . length ] ; stdMeasurementNames [ 0 ] = meanMeasurementNames [ 0 ] ; for ( int m = 1 ; m < meanMeasurementNames . length ; m ++ ) { stdMeasurementNames [ m ] = "[std] " + meanMeasurementNames [ m ] ; } LearningCurve meanLearningCurve = new LearningCurve ( meanMeasurementNames [ 0 ] ) ; meanLearningCurve . setData ( Arrays . asList ( meanMeasurementNames ) , meanParamMeasurements ) ; LearningCurve stdLearningCurve = new LearningCurve ( stdMeasurementNames [ 0 ] ) ; stdLearningCurve . setData ( Arrays . asList ( stdMeasurementNames ) , stdParamMeasurements ) ; Preview meanParamValuePreview = new PreviewCollectionLearningCurveWrapper ( meanLearningCurve , this . origMultiRunPreviews . taskClass ) ; Preview stdParamValuePreview = new PreviewCollectionLearningCurveWrapper ( stdLearningCurve , this . origMultiRunPreviews . taskClass ) ; this . meanPreviews . setPreview ( paramValue , meanParamValuePreview ) ; this . stdPreviews . setPreview ( paramValue , stdParamValuePreview ) ; } | Construct the mean and standard deviation Previews for one specific parameter value . |
21,420 | private List < double [ ] > calculateMeanMeasurementsForParam ( int numEntriesPerPreview , int numParamValues , int paramValue ) { List < double [ ] > paramMeasurementsSum = new ArrayList < double [ ] > ( numEntriesPerPreview ) ; List < double [ ] > meanParamMeasurements = new ArrayList < double [ ] > ( numEntriesPerPreview ) ; int numCompleteFolds = 0 ; for ( PreviewCollection < Preview > foldPreview : this . origMultiRunPreviews . subPreviews ) { if ( foldPreview . getPreviews ( ) . size ( ) == numParamValues ) { numCompleteFolds ++ ; Preview foldParamPreview = foldPreview . getPreviews ( ) . get ( paramValue ) ; this . addPreviewMeasurementsToSum ( paramMeasurementsSum , foldParamPreview , numEntriesPerPreview ) ; } } for ( int entryIdx = 0 ; entryIdx < numEntriesPerPreview ; entryIdx ++ ) { double [ ] sumEntry = paramMeasurementsSum . get ( entryIdx ) ; double [ ] meanEntry = new double [ sumEntry . length ] ; meanEntry [ 0 ] = sumEntry [ 0 ] ; for ( int m = 1 ; m < sumEntry . length ; m ++ ) { meanEntry [ m ] = sumEntry [ m ] / numCompleteFolds ; } meanParamMeasurements . add ( meanEntry ) ; } return meanParamMeasurements ; } | Calculate the mean measurements for the given parameter value . |
21,421 | private List < double [ ] > calculateStdMeasurementsForParam ( int numEntriesPerPreview , int numParamValues , int paramValue , List < double [ ] > meanParamMeasurements ) { List < double [ ] > paramMeasurementsSquaredDiffSum = new ArrayList < double [ ] > ( numEntriesPerPreview ) ; List < double [ ] > paramMeasurementsStd = new ArrayList < double [ ] > ( numEntriesPerPreview ) ; int numCompleteFolds = 0 ; for ( PreviewCollection < Preview > foldPreview : this . origMultiRunPreviews . subPreviews ) { if ( foldPreview . getPreviews ( ) . size ( ) == numParamValues ) { numCompleteFolds ++ ; Preview foldParamPreview = foldPreview . getPreviews ( ) . get ( paramValue ) ; this . addPreviewMeasurementSquaredDiffsToSum ( meanParamMeasurements , paramMeasurementsSquaredDiffSum , foldParamPreview , numEntriesPerPreview ) ; } } for ( int entryIdx = 0 ; entryIdx < numEntriesPerPreview ; entryIdx ++ ) { double [ ] sumEntry = paramMeasurementsSquaredDiffSum . get ( entryIdx ) ; double [ ] stdEntry = new double [ sumEntry . length ] ; stdEntry [ 0 ] = sumEntry [ 0 ] ; for ( int m = 1 ; m < sumEntry . length ; m ++ ) { if ( numCompleteFolds > 1 ) { stdEntry [ m ] = Math . sqrt ( sumEntry [ m ] / ( numCompleteFolds - 1 ) ) ; } else { stdEntry [ m ] = Math . sqrt ( sumEntry [ m ] ) ; } } paramMeasurementsStd . add ( stdEntry ) ; } return paramMeasurementsStd ; } | Calculate the standard deviation measurements for the given parameter value . |
21,422 | private void addPreviewMeasurementsToSum ( List < double [ ] > measurementsSum , Preview preview , int numEntriesPerPreview ) { List < double [ ] > previewMeasurements = preview . getData ( ) ; for ( int entryIdx = 0 ; entryIdx < numEntriesPerPreview ; entryIdx ++ ) { double [ ] previewEntry = previewMeasurements . get ( entryIdx ) ; double [ ] sumEntry ; if ( measurementsSum . size ( ) > entryIdx ) { sumEntry = measurementsSum . get ( entryIdx ) ; } else { sumEntry = new double [ previewEntry . length ] ; measurementsSum . add ( sumEntry ) ; } if ( sumEntry [ 0 ] == 0.0 ) { sumEntry [ 0 ] = previewEntry [ 0 ] ; } for ( int measure = 1 ; measure < sumEntry . length ; measure ++ ) { sumEntry [ measure ] += previewEntry [ measure ] ; } } } | Add measurements from the given Preview to the overall sum . |
21,423 | private void addPreviewMeasurementSquaredDiffsToSum ( List < double [ ] > meanMeasurements , List < double [ ] > measurementsSquaredDiffSum , Preview preview , int numEntriesPerPreview ) { List < double [ ] > previewMeasurements = preview . getData ( ) ; for ( int entryIdx = 0 ; entryIdx < numEntriesPerPreview ; entryIdx ++ ) { double [ ] meanEntry = meanMeasurements . get ( entryIdx ) ; double [ ] previewEntry = previewMeasurements . get ( entryIdx ) ; double [ ] squaredDiffSumEntry ; if ( measurementsSquaredDiffSum . size ( ) > entryIdx ) { squaredDiffSumEntry = measurementsSquaredDiffSum . get ( entryIdx ) ; } else { squaredDiffSumEntry = new double [ previewEntry . length ] ; measurementsSquaredDiffSum . add ( squaredDiffSumEntry ) ; } if ( squaredDiffSumEntry [ 0 ] == 0.0 ) { squaredDiffSumEntry [ 0 ] = previewEntry [ 0 ] ; } for ( int m = 1 ; m < previewEntry . length ; m ++ ) { double diff = ( meanEntry [ m ] - previewEntry [ m ] ) ; double squaredDiff = diff * diff ; squaredDiffSumEntry [ m ] += squaredDiff ; } } } | Add squared deviations from the mean value from the given Preview to the overall sum . |
21,424 | public static double [ ] doNaiveBayesPredictionLog ( Instance inst , DoubleVector observedClassDistribution , AutoExpandVector < AttributeClassObserver > observers , AutoExpandVector < AttributeClassObserver > observers2 ) { AttributeClassObserver obs ; double [ ] votes = new double [ observedClassDistribution . numValues ( ) ] ; double observedClassSum = observedClassDistribution . sumOfValues ( ) ; for ( int classIndex = 0 ; classIndex < votes . length ; classIndex ++ ) { votes [ classIndex ] = Math . log10 ( observedClassDistribution . getValue ( classIndex ) / observedClassSum ) ; for ( int attIndex = 0 ; attIndex < inst . numAttributes ( ) - 1 ; attIndex ++ ) { int instAttIndex = modelAttIndexToInstanceAttIndex ( attIndex , inst ) ; if ( inst . attribute ( instAttIndex ) . isNominal ( ) ) { obs = observers . get ( attIndex ) ; } else { obs = observers2 . get ( attIndex ) ; } if ( ( obs != null ) && ! inst . isMissing ( instAttIndex ) ) { votes [ classIndex ] += Math . log10 ( obs . probabilityOfAttributeValueGivenClass ( inst . value ( instAttIndex ) , classIndex ) ) ; } } } return votes ; } | Naive Bayes Prediction using log10 for VFDR rules |
21,425 | private double [ ] generatePriors ( Random r , int L , double z , boolean skew ) { double P [ ] = new double [ L ] ; for ( int i = 0 ; i < L ; i ++ ) { P [ i ] = r . nextDouble ( ) ; } do { double c = Utils . sum ( P ) / z ; for ( int i = 0 ; i < L ; i ++ ) { P [ i ] = Math . min ( 1.0 , P [ i ] / c ) ; } } while ( Utils . sum ( P ) < z ) ; return P ; } | Generate Priors . Generate the label priors . |
21,426 | private HashSet generateSet ( ) { int y [ ] = new int [ m_L ] ; int k = samplePMF ( priors_norm ) ; y [ k ] = 1 ; ArrayList < Integer > indices = getShuffledListToLWithoutK ( m_L , k ) ; for ( int j : indices ) { y [ j ] = ( joint ( j , y ) > m_MetaRandom . nextDouble ( ) ) ? 1 : 0 ; } return vector2set ( y ) ; } | Generate Set . |
21,427 | protected double [ ] modifyPriorVector ( double P [ ] , double u , Random r , boolean skew ) { for ( int j = 0 ; j < P . length ; j ++ ) { if ( r . nextDouble ( ) < u ) { P [ j ] = r . nextDouble ( ) ; } } return P ; } | ModifyPriorVector . A certain number of values will be altered . |
21,428 | private HashSet [ ] getTopCombinations ( int n ) { final HashMap < HashSet , Integer > count = new HashMap < HashSet , Integer > ( ) ; HashMap < HashSet , Integer > isets = new HashMap < HashSet , Integer > ( ) ; int N = 100000 ; double lc = 0.0 ; for ( int i = 0 ; i < N ; i ++ ) { HashSet Y = generateSet ( ) ; lc += Y . size ( ) ; count . put ( Y , count . get ( Y ) != null ? count . get ( Y ) + 1 : 1 ) ; } lc = lc / N ; List < HashSet > top_set = new ArrayList < HashSet > ( count . keySet ( ) ) ; Collections . sort ( top_set , new Comparator < HashSet > ( ) { public int compare ( HashSet Y1 , HashSet Y2 ) { return count . get ( Y2 ) . compareTo ( count . get ( Y1 ) ) ; } } ) ; System . err . println ( "The most common labelsets (from which we will build the map) will likely be: " ) ; HashSet map_set [ ] = new HashSet [ n ] ; double weights [ ] = new double [ n ] ; int idx = 0 ; for ( HashSet Y : top_set ) { System . err . println ( " " + Y + " : " + ( count . get ( Y ) * 100.0 / N ) + "%" ) ; weights [ idx ++ ] = count . get ( Y ) ; if ( idx == weights . length ) { break ; } } double sum = Utils . sum ( weights ) ; System . err . println ( "Estimated Label Cardinality: " + lc + "\n\n" ) ; System . err . println ( "Estimated % Unique Labelsets: " + ( count . size ( ) * 100.0 / N ) + "%\n\n" ) ; Utils . normalize ( weights ) ; for ( int i = 0 , k = 0 ; i < top_set . size ( ) && k < map_set . length ; i ++ ) { int num = ( int ) Math . round ( Math . max ( weights [ i ] * map_set . length , 1.0 ) ) ; for ( int j = 0 ; j < num && k < map_set . length ; j ++ ) { map_set [ k ++ ] = top_set . get ( i ) ; } } Collections . shuffle ( Arrays . asList ( map_set ) ) ; return map_set ; } | GetTopCombinations . Calculating the full joint probability distribution is too complex . - sample from the approximate joint many times - record the n most commonly occurring Y and their frequencies - create a map based on these frequencies |
21,429 | private int [ ] set2vector ( HashSet < Integer > Y , int L ) { int y [ ] = new int [ L ] ; for ( int j : Y ) { y [ j ] = 1 ; } return y ; } | convert set Y to an L - length vector y |
21,430 | private HashSet < Integer > vector2set ( int y [ ] ) { HashSet < Integer > Y = new HashSet < Integer > ( ) ; for ( int j = 0 ; j < y . length ; j ++ ) { if ( y [ j ] > 0 ) { Y . add ( j ) ; } } return Y ; } | convert L - length vector y to set Y |
21,431 | public Option [ ] discoverOptionsViaReflection ( ) { Class c = this . handler . getClass ( ) ; Field [ ] fields = c . getFields ( ) ; List < Option > optList = new LinkedList < Option > ( ) ; for ( Field field : fields ) { String fName = field . getName ( ) ; Class < ? > fType = field . getType ( ) ; if ( fType . getName ( ) . endsWith ( "Option" ) ) { if ( Option . class . isAssignableFrom ( fType ) ) { Option oVal = null ; try { field . setAccessible ( true ) ; oVal = ( Option ) field . get ( this . handler ) ; } catch ( IllegalAccessException ignored ) { } if ( oVal != null ) { optList . add ( oVal ) ; } } } } return optList . toArray ( new Option [ optList . size ( ) ] ) ; } | Gets the options of this class via reflection . |
21,432 | public Instances kNearestNeighbours ( Instance target , int kNN ) throws Exception { boolean print = false ; MyHeap heap = new MyHeap ( kNN ) ; double distance ; int firstkNN = 0 ; for ( int i = 0 ; i < m_Instances . numInstances ( ) ; i ++ ) { if ( target == m_Instances . instance ( i ) ) continue ; if ( firstkNN < kNN ) { if ( print ) System . out . println ( "K(a): " + ( heap . size ( ) + heap . noOfKthNearest ( ) ) ) ; distance = m_DistanceFunction . distance ( target , m_Instances . instance ( i ) , Double . POSITIVE_INFINITY ) ; if ( distance == 0.0 && m_SkipIdentical ) if ( i < m_Instances . numInstances ( ) - 1 ) continue ; else heap . put ( i , distance ) ; heap . put ( i , distance ) ; firstkNN ++ ; } else { MyHeapElement temp = heap . peek ( ) ; if ( print ) System . out . println ( "K(b): " + ( heap . size ( ) + heap . noOfKthNearest ( ) ) ) ; distance = m_DistanceFunction . distance ( target , m_Instances . instance ( i ) , temp . distance ) ; if ( distance == 0.0 && m_SkipIdentical ) continue ; if ( distance < temp . distance ) { heap . putBySubstitute ( i , distance ) ; } else if ( distance == temp . distance ) { heap . putKthNearest ( i , distance ) ; } } } Instances neighbours = new Instances ( m_Instances , ( heap . size ( ) + heap . noOfKthNearest ( ) ) ) ; m_Distances = new double [ heap . size ( ) + heap . noOfKthNearest ( ) ] ; int [ ] indices = new int [ heap . size ( ) + heap . noOfKthNearest ( ) ] ; int i = 1 ; MyHeapElement h ; while ( heap . noOfKthNearest ( ) > 0 ) { h = heap . getKthNearest ( ) ; indices [ indices . length - i ] = h . index ; m_Distances [ indices . length - i ] = h . distance ; i ++ ; } while ( heap . size ( ) > 0 ) { h = heap . get ( ) ; indices [ indices . length - i ] = h . index ; m_Distances [ indices . length - i ] = h . distance ; i ++ ; } m_DistanceFunction . postProcessDistances ( m_Distances ) ; for ( int k = 0 ; k < indices . length ; k ++ ) { neighbours . add ( m_Instances . instance ( indices [ k ] ) ) ; } return neighbours ; } | Returns k nearest instances in the current neighbourhood to the supplied instance . |
21,433 | public void update ( Instance ins ) throws Exception { if ( m_Instances == null ) throw new Exception ( "No instances supplied yet. Cannot update without" + "supplying a set of instances first." ) ; m_DistanceFunction . update ( ins ) ; } | Updates the LinearNNSearch to cater for the new added instance . This implementation only updates the ranges of the DistanceFunction class since our set of instances is passed by reference and should already have the newly added instance . |
21,434 | public void addInstanceInfo ( Instance ins ) { if ( m_Instances != null ) try { update ( ins ) ; } catch ( Exception ex ) { ex . printStackTrace ( ) ; } } | Adds the given instance info . This implementation updates the range datastructures of the DistanceFunction class . |
21,435 | public static boolean nameIsLegal ( String optionName ) { for ( char illegalChar : illegalNameCharacters ) { if ( optionName . indexOf ( illegalChar ) >= 0 ) { return false ; } } return true ; } | Gets whether the name is valid or not . |
21,436 | public void setGraph ( MeasureCollection [ ] measures , MeasureCollection [ ] measureStds , int [ ] processFrequencies , int min_processFrequency , Color [ ] colors ) { this . measures = measures ; this . processFrequencies = processFrequencies ; this . min_processFrequency = min_processFrequency ; ( ( ProcessGraphAxes ) this . axesPanel ) . setProcessFrequency ( min_processFrequency ) ; ( ( GraphMultiCurve ) this . plotPanel ) . setProcessFrequency ( min_processFrequency ) ; ( ( GraphMultiCurve ) this . plotPanel ) . setGraph ( measures , measureStds , processFrequencies , colors ) ; updateCanvas ( false ) ; } | Sets the graph containing multiple curves . |
21,437 | public void splitNode ( KDTreeNode node , int numNodesCreated , double [ ] [ ] nodeRanges , double [ ] [ ] universe ) throws Exception { correctlyInitialized ( ) ; int splitDim = widestDim ( nodeRanges , universe ) ; double splitVal = m_EuclideanDistance . getMiddle ( nodeRanges [ splitDim ] ) ; int rightStart = rearrangePoints ( m_InstList , node . m_Start , node . m_End , splitDim , splitVal ) ; if ( rightStart == node . m_Start || rightStart > node . m_End ) { if ( rightStart == node . m_Start ) throw new Exception ( "Left child is empty in node " + node . m_NodeNumber + ". Not possible with " + "MidPointofWidestDim splitting method. Please " + "check code." ) ; else throw new Exception ( "Right child is empty in node " + node . m_NodeNumber + ". Not possible with " + "MidPointofWidestDim splitting method. Please " + "check code." ) ; } node . m_SplitDim = splitDim ; node . m_SplitValue = splitVal ; node . m_Left = new KDTreeNode ( numNodesCreated + 1 , node . m_Start , rightStart - 1 , m_EuclideanDistance . initializeRanges ( m_InstList , node . m_Start , rightStart - 1 ) ) ; node . m_Right = new KDTreeNode ( numNodesCreated + 2 , rightStart , node . m_End , m_EuclideanDistance . initializeRanges ( m_InstList , rightStart , node . m_End ) ) ; } | Splits a node into two based on the midpoint value of the dimension in which the points have the widest spread . After splitting two new nodes are created and correctly initialised . And node . left and node . right are set appropriately . |
21,438 | private double computeStabilityIndex ( ) { int m = ( int ) Math . floor ( ( this . ensemble . length - MAXPERMANENT ) / 2 ) ; int [ ] [ ] votes = new int [ m ] [ tau_size ] ; double errors = 0 ; int count = 0 ; Pair [ ] arr = getHalf ( true ) ; for ( int i = 0 ; i < m ; i ++ ) { for ( int j = 0 ; j < tau_size ; j ++ ) { votes [ i ] [ j ] = Utils . maxIndex ( this . ensemble [ arr [ i ] . index ] . getVotesForInstance ( recentChunk . get ( j ) ) ) ; errors += ( votes [ i ] [ j ] == ( int ) this . recentChunk . get ( j ) . classValue ( ) ) ? 0 : 1 ; count ++ ; } } errors = errors / count ; double res = 0 ; count = 0 ; for ( int i = 0 ; i < m ; i ++ ) for ( int j = i + 1 ; j < m ; j ++ ) if ( i != j ) { res += computeKappa ( votes [ i ] , votes [ j ] ) ; count ++ ; } return res / count - errors ; } | Returns the stability index of the adaptive ensemble of classifiers . The ensemble is considered stable here if its diversity level and error rates are low . |
21,439 | private Classifier getBestAdaptiveClassifier ( ) { Pair [ ] newEnsembleWeights = new Pair [ ensembleWeights . length - MAXPERMANENT ] ; for ( int i = 0 ; i < newEnsembleWeights . length ; i ++ ) newEnsembleWeights [ i ] = ensembleWeights [ i ] ; Arrays . sort ( newEnsembleWeights , Collections . reverseOrder ( ) ) ; return this . ensemble [ newEnsembleWeights [ 0 ] . index ] . copy ( ) ; } | Returns the adaptive classifier with the highest weight |
21,440 | public void scaleYResolution ( double factor ) { this . y_resolution = Math . max ( 1.0 , this . y_resolution * factor ) ; updateYResolution ( ) ; updateUpperYValue ( ) ; updateCanvas ( true ) ; } | Scales the resolution on the y - axis by the given factor and updates the canvas . The y - resolution must not be lower than 1 . |
21,441 | private double getMaxSelectedValue ( ) { double max = Double . MIN_VALUE ; for ( int i = 0 ; i < this . measures . length ; i ++ ) { if ( this . measures [ i ] . getMaxValue ( this . measureSelected ) > max ) { max = this . measures [ i ] . getMaxValue ( this . measureSelected ) ; } } return max ; } | Computes the maximum value of the underlying measures at the currently selected measure . |
21,442 | private boolean updateMinMaxValues ( ) { double min_x_value_new ; double max_x_value_new ; double max_y_value_new ; if ( this . measures == null ) { min_x_value_new = 0 ; max_x_value_new = 1 ; max_y_value_new = 1 ; } else { min_x_value_new = getMinXValue ( ) ; max_x_value_new = getMaxXValue ( ) ; max_y_value_new = getMaxSelectedValue ( ) ; } if ( min_x_value_new != this . min_x_value || max_x_value_new != this . max_x_value || max_y_value_new != this . max_y_value ) { this . min_x_value = min_x_value_new ; this . max_x_value = max_x_value_new ; this . max_y_value = max_y_value_new ; updateMinXValue ( ) ; updateMaxXValue ( ) ; updateMaxYValue ( ) ; updateLowerXValue ( ) ; updateUpperXValue ( ) ; updateUpperYValue ( ) ; return true ; } return false ; } | Computes the minimum and maximum values for the x and y - axis and updates the children if necessary . |
21,443 | private void updateLowerXValue ( ) { double lower = 0.0 ; if ( this . measures != null ) { lower = this . min_x_value * ( 1 - ( 0.1 / x_resolution ) ) ; } this . axesPanel . setLowerXValue ( lower ) ; this . plotPanel . setLowerXValue ( lower ) ; } | Updates the lower value on the x - axis . |
21,444 | private void updateUpperXValue ( ) { double upper = 1.0 ; if ( this . measures != null ) { upper = max_x_value * ( 1 + ( 0.1 / x_resolution ) ) ; } this . axesPanel . setUpperXValue ( upper ) ; this . plotPanel . setUpperXValue ( upper ) ; } | Updates the upper value on the x - axis . |
21,445 | private void updateUpperYValue ( ) { double upper = 1.0 ; if ( this . measures != null ) { upper = max_y_value * ( 1 + ( 0.1 / y_resolution ) ) ; } this . axesPanel . setUpperYValue ( upper ) ; this . plotPanel . setUpperYValue ( upper ) ; } | Updates the upper value on the y - axis . |
21,446 | private void updateChildren ( ) { axesPanel . setSize ( getWidth ( ) , getHeight ( ) ) ; plotPanel . setSize ( getWidth ( ) - X_OFFSET_LEFT - X_OFFSET_RIGHT , getHeight ( ) - Y_OFFSET_BOTTOM - Y_OFFSET_TOP ) ; } | Updates the size of the axes curve and event panel . Recomputes the event locations if necessary . |
21,447 | public void setOptions ( String [ ] labels , String [ ] descriptions , int defaultIndex ) { if ( labels . length != descriptions . length ) { throw new IllegalArgumentException ( "Labels/descriptions mismatch." ) ; } if ( labels . length > 0 ) { this . optionLabels = labels . clone ( ) ; this . optionDescriptions = descriptions . clone ( ) ; this . defaultOptionIndex = defaultIndex ; } else { this . optionLabels = new String [ ] { NO_CHOICES } ; this . optionDescriptions = new String [ ] { NO_CHOICES_DESCRIPTION } ; this . defaultOptionIndex = 0 ; } resetToDefault ( ) ; if ( this . editComponent != null ) { this . editComponent . refresh ( ) ; } } | Set new options for this MultiChoiceOption and refresh the edit component . |
21,448 | public void put ( long key , T element ) { Entry < T > entry = new Entry < T > ( key , element ) ; elements . add ( entry ) ; this . numElements ++ ; fileElement ( entry , true ) ; } | Adds an element to the hash table . |
21,449 | private void fileElement ( Entry < T > currentElement , boolean rehash ) { int maxFailures = Math . max ( ( int ) Math . log ( this . numElements ) , this . numTables * 2 ) ; int currentTable = 0 ; for ( int i = 0 ; i < maxFailures ; i ++ ) { int hash = this . hashfunctions . get ( currentTable ) . hash ( currentElement . getKey ( ) ) ; currentElement = this . tables . get ( currentTable ) . set ( hash , currentElement ) ; if ( currentElement == null ) { break ; } currentTable = ( currentTable + 1 ) % this . numTables ; } if ( currentElement != null ) { this . stash . add ( currentElement ) ; this . stashSize ++ ; } while ( rehash && this . stashSize > this . maxStashSize ) { reset ( ) ; if ( this . stashSize > this . maxStashSize ) { increaseAndReset ( ) ; } } } | Adds an element to one of the tables for Cuckoo Hashing . |
21,450 | private void increaseAndReset ( ) { if ( this . hashSize < 30 ) { this . hashSize += 1 ; this . hashfunctions . clear ( ) ; for ( List < Entry < T > > table : this . tables ) { this . hashfunctions . add ( new DietzfelbingerHash ( this . hashSize , this . random ) ) ; ( ( ArrayList < Entry < T > > ) table ) . ensureCapacity ( 1 << this . hashSize ) ; } } else { this . hashfunctions . add ( new DietzfelbingerHash ( this . hashSize , this . random ) ) ; this . tables . add ( new ArrayList < Entry < T > > ( 1 << this . hashSize ) ) ; this . numTables ++ ; } reset ( ) ; } | Adds a new table and rebuild the hash table . |
21,451 | public T get ( long key ) { for ( int i = 0 ; i < this . numTables ; i ++ ) { Entry < T > entry = this . tables . get ( i ) . get ( this . hashfunctions . get ( i ) . hash ( key ) ) ; if ( entry != null && entry . getKey ( ) == key ) { return entry . getValue ( ) ; } } for ( Entry < T > entry : this . stash ) { if ( entry . getKey ( ) == key ) { return entry . getValue ( ) ; } } return null ; } | Gets an element of the hash table . |
21,452 | private void reset ( ) { for ( DietzfelbingerHash hashfunction : this . hashfunctions ) { hashfunction . nextHashFunction ( ) ; } int sizeTables = 1 << this . hashSize ; for ( List < Entry < T > > table : this . tables ) { table . clear ( ) ; for ( int j = 0 ; j < sizeTables ; j ++ ) { table . add ( null ) ; } } this . stash . clear ( ) ; this . stashSize = 0 ; for ( Entry < T > entry : this . elements ) { fileElement ( entry , false ) ; } } | Rebuilds the hash table . |
21,453 | public void clear ( ) { this . hashSize = this . startHashSize ; this . numTables = this . startNumTables ; this . numElements = 0 ; this . hashfunctions . clear ( ) ; this . tables . clear ( ) ; int sizeTables = 1 << this . startHashSize ; for ( int i = 0 ; i < this . startNumTables ; i ++ ) { this . hashfunctions . add ( new DietzfelbingerHash ( this . startHashSize , this . random ) ) ; List < Entry < T > > table = new ArrayList < Entry < T > > ( sizeTables ) ; for ( int j = 0 ; j < sizeTables ; j ++ ) { table . add ( null ) ; } this . tables . add ( table ) ; } this . stash . clear ( ) ; this . stashSize = 0 ; } | Removes all of the elements from this hash table . The hash table will be empty after this call returns . |
21,454 | public void splitNode ( KDTreeNode node , int numNodesCreated , double [ ] [ ] nodeRanges , double [ ] [ ] universe ) throws Exception { correctlyInitialized ( ) ; int splitDim = widestDim ( nodeRanges , universe ) ; int medianIdxIdx = node . m_Start + ( node . m_End - node . m_Start ) / 2 ; int medianIdx = select ( splitDim , m_InstList , node . m_Start , node . m_End , ( node . m_End - node . m_Start ) / 2 + 1 ) ; node . m_SplitDim = splitDim ; node . m_SplitValue = m_Instances . instance ( m_InstList [ medianIdx ] ) . value ( splitDim ) ; node . m_Left = new KDTreeNode ( numNodesCreated + 1 , node . m_Start , medianIdxIdx , m_EuclideanDistance . initializeRanges ( m_InstList , node . m_Start , medianIdxIdx ) ) ; node . m_Right = new KDTreeNode ( numNodesCreated + 2 , medianIdxIdx + 1 , node . m_End , m_EuclideanDistance . initializeRanges ( m_InstList , medianIdxIdx + 1 , node . m_End ) ) ; } | Splits a node into two based on the median value of the dimension in which the points have the widest spread . After splitting two new nodes are created and correctly initialised . And node . left and node . right are set appropriately . |
21,455 | public int select ( int attIdx , int [ ] indices , int left , int right , int k ) { if ( left == right ) { return left ; } else { int middle = partition ( attIdx , indices , left , right ) ; if ( ( middle - left + 1 ) >= k ) { return select ( attIdx , indices , left , middle , k ) ; } else { return select ( attIdx , indices , middle + 1 , right , k - ( middle - left + 1 ) ) ; } } } | Implements computation of the kth - smallest element according to Manber s Introduction to Algorithms . |
21,456 | public void deleteAttributeAt ( Integer integer ) { this . instanceInformation . deleteAttributeAt ( integer ) ; for ( int i = 0 ; i < numInstances ( ) ; i ++ ) { instance ( i ) . setDataset ( null ) ; instance ( i ) . deleteAttributeAt ( integer ) ; instance ( i ) . setDataset ( this ) ; } } | Delete attribute at . |
21,457 | public void insertAttributeAt ( Attribute attribute , int position ) { if ( this . instanceInformation == null ) { this . instanceInformation = new InstanceInformation ( ) ; } this . instanceInformation . insertAttributeAt ( attribute , position ) ; for ( int i = 0 ; i < numInstances ( ) ; i ++ ) { instance ( i ) . setDataset ( null ) ; instance ( i ) . insertAttributeAt ( i ) ; instance ( i ) . setDataset ( this ) ; } } | Insert attribute at . |
21,458 | public Instances trainCV ( int numFolds , int numFold , Random random ) { Instances train = trainCV ( numFolds , numFold ) ; train . randomize ( random ) ; return train ; } | Train cv . |
21,459 | public boolean readInstance ( Reader fileReader ) { Instance inst = arff . readInstance ( ) ; if ( inst != null ) { inst . setDataset ( this ) ; add ( inst ) ; return true ; } else { return false ; } } | Read instance . |
21,460 | protected String stringWithoutHeader ( ) { StringBuffer text = new StringBuffer ( ) ; for ( int i = 0 ; i < numInstances ( ) ; i ++ ) { text . append ( instance ( i ) ) ; if ( i < numInstances ( ) - 1 ) { text . append ( '\n' ) ; } } return text . toString ( ) ; } | Returns the instances in the dataset as a string in ARFF format . Strings are quoted if they contain whitespace characters or if they are a question mark . |
21,461 | private void computeAttributesIndices ( ) { this . hsAttributesIndices = new HashMap < String , Integer > ( ) ; for ( int i = 0 ; i < this . numAttributes ( ) ; i ++ ) { hsAttributesIndices . put ( this . attribute ( i ) . name ( ) , i ) ; } } | Populates the map of attribute names to their indices .
21,462 | public void setIndicesRelevants ( int [ ] indicesRelevants ) { this . indicesRelevants = indicesRelevants ; int numIrrelevantFeatures = this . numAttributes ( ) - this . indicesRelevants . length - 1 ; this . indicesIrrelevants = new int [ numIrrelevantFeatures ] ; int index = 0 ; int indexRel = 0 ; for ( int i = 0 ; i < numAttributes ( ) ; i ++ ) { if ( i != classIndex ( ) ) { while ( indexRel < indicesRelevants . length - 1 && i > indicesRelevants [ indexRel ] ) indexRel ++ ; if ( indicesRelevants [ indexRel ] != i ) { indicesIrrelevants [ index ] = i ; index ++ ; } } } } | Sets the indices of relevant features . This method also sets the irrelevant ones since it is the set complement . |
21,463 | public Instance sample ( Random random ) { double [ ] center = getCenter ( ) ; final int dimensions = center . length ; final double sin [ ] = new double [ dimensions - 1 ] ; final double cos [ ] = new double [ dimensions - 1 ] ; final double length = random . nextDouble ( ) * getRadius ( ) ; double lastValue = 1.0 ; for ( int i = 0 ; i < dimensions - 1 ; i ++ ) { double angle = random . nextDouble ( ) * 2 * Math . PI ; sin [ i ] = lastValue * Math . sin ( angle ) ; cos [ i ] = Math . cos ( angle ) ; lastValue = sin [ i ] ; } double res [ ] = new double [ dimensions ] ; res [ 0 ] = center [ 0 ] + length * cos [ 0 ] ; for ( int i = 1 ; i < dimensions - 1 ; i ++ ) { res [ i ] = center [ i ] + length * sin [ i - 1 ] * cos [ i ] ; } res [ dimensions - 1 ] = center [ dimensions - 1 ] + length * sin [ dimensions - 2 ] ; return new DenseInstance ( 1.0 , res ) ; } | Samples this cluster by returning a point from inside it . |
21,464 | public static List < double [ ] > generatekMeansPlusPlusCentroids ( int k , List < double [ ] > input , Random random ) { int n = input . size ( ) ; assert ( n > 0 ) ; int d = input . get ( 0 ) . length - 1 ; assert ( k <= n ) ; List < double [ ] > centerValue = new ArrayList < double [ ] > ( k ) ; double [ ] lastCenter = new double [ d ] ; System . arraycopy ( input . get ( random . nextInt ( n ) ) , 1 , lastCenter , 0 , d ) ; centerValue . add ( lastCenter ) ; double [ ] distance = new double [ n ] ; for ( int j = 0 ; j < n ; j ++ ) { distance [ j ] = Double . POSITIVE_INFINITY ; } for ( int i = 1 ; i < k ; i ++ ) { double sum = 0.0 ; Iterator < double [ ] > jIter = input . iterator ( ) ; for ( int j = 0 ; j < n ; j ++ ) { double [ ] point = jIter . next ( ) ; distance [ j ] = Math . min ( distance [ j ] , point [ 0 ] * Metric . distanceSquared ( lastCenter , point , 1 ) ) ; sum += distance [ j ] ; } int candidate = 0 ; if ( sum > 0 ) { double nextCenterValue = sum * random . nextDouble ( ) ; double currentValue = distance [ 0 ] ; while ( ! ( nextCenterValue < currentValue ) ) { currentValue += distance [ ++ candidate ] ; } } lastCenter = new double [ d ] ; System . arraycopy ( input . get ( candidate ) , 1 , lastCenter , 0 , d ) ; centerValue . add ( lastCenter ) ; } return centerValue ; } | Generates the initial centroids like the k - means ++ algorithm . |
21,465 | protected boolean isLeaf ( ) { for ( int i = 0 ; i < entries . length ; i ++ ) { Entry entry = entries [ i ] ; if ( entry . getChild ( ) != null ) { return false ; } } return true ; } | Checks if this node is a leaf . A node is a leaf when none of the entries in the node have children . |
21,466 | private int getNextEmptyPosition ( ) { int counter ; for ( counter = 0 ; counter < entries . length ; counter ++ ) { Entry e = entries [ counter ] ; if ( e . isEmpty ( ) ) { break ; } } if ( counter == entries . length ) { throw new RuntimeException ( "Entry added to a node which is already full." ) ; } return counter ; } | Returns the position of the next free Entry . |
21,467 | public void setText ( Preview preview ) { Point p = this . scrollPaneTable . getViewport ( ) . getViewPosition ( ) ; previewTableModel . setPreview ( preview ) ; SwingUtilities . invokeLater ( new Runnable ( ) { boolean structureChanged = previewTableModel . structureChanged ( ) ; public void run ( ) { if ( ! scrollPaneTable . isVisible ( ) ) { topWrapper . remove ( scrollPaneText ) ; scrollPaneText . setVisible ( false ) ; topWrapper . add ( scrollPaneTable , BorderLayout . CENTER ) ; scrollPaneTable . setVisible ( true ) ; topWrapper . validate ( ) ; } if ( structureChanged ) { previewTableModel . fireTableStructureChanged ( ) ; rescaleTableColumns ( ) ; } else { previewTableModel . fireTableDataChanged ( ) ; } previewTable . repaint ( ) ; } } ) ; this . scrollPaneTable . getViewport ( ) . setViewPosition ( p ) ; this . exportButton . setEnabled ( preview != null ) ; } | Updates the preview table based on the information given by preview . |
21,468 | public ParsedPreview readCollection ( PreviewCollection < Preview > pc ) { ParsedPreview pp = new ParsedPreview ( ) ; List < Preview > sps = pc . getPreviews ( ) ; if ( sps . size ( ) > 0 && sps . get ( 0 ) instanceof PreviewCollection ) { for ( Preview sp : sps ) { @ SuppressWarnings ( "unchecked" ) ParsedPreview tmp = readCollection ( ( PreviewCollection < Preview > ) sp ) ; pp . add ( tmp ) ; } } else { for ( Preview sp : sps ) { ParsedPreview tmp = read ( sp ) ; pp . add ( tmp ) ; } } return pp ; } | Parses a PreviewCollection and return the resulting ParsedPreview object . If the PreviewCollection contains PreviewCollections again it recursively adds their results . If it contains simple Previews it adds their properties to the result . |
21,469 | private ParsedPreview read ( Preview p ) { String [ ] measureNames = p . getMeasurementNames ( ) ; int numMeasures = p . getMeasurementNameCount ( ) ; int processFrequencyColumn = - 1 ; int accuracyColumn = - 1 ; int kappaColumn = - 1 ; int kappaTempColumn = - 1 ; int ramColumn = - 1 ; int timeColumn = - 1 ; int memoryColumn = - 1 ; int budgetColumn = - 1 ; for ( int i = 0 ; i < numMeasures ; i ++ ) { switch ( measureNames [ i ] ) { case "learning evaluation instances" : processFrequencyColumn = i ; break ; case "classifications correct (percent)" : case "[avg] classifications correct (percent)" : case "[std] classifications correct (percent)" : accuracyColumn = i ; break ; case "Kappa Statistic (percent)" : case "[avg] Kappa Statistic (percent)" : case "[std] Kappa Statistic (percent)" : kappaColumn = i ; break ; case "Kappa Temporal Statistic (percent)" : case "[avg] Kappa Temporal Statistic (percent)" : case "[std] Kappa Temporal Statistic (percent)" : kappaTempColumn = i ; break ; case "model cost (RAM-Hours)" : case "[std] model cost (RAM-Hours)" : ramColumn = i ; break ; case "evaluation time (cpu seconds)" : case "total train time" : case "[std] evaluation time (cpu seconds)" : timeColumn = i ; break ; case "model serialized size (bytes)" : case "[std] model serialized size (bytes)" : memoryColumn = i ; break ; case "Rel Number of Label Acquisitions" : case "[std] Rel Number of Label Acquisitions" : budgetColumn = i ; break ; default : break ; } } List < double [ ] > data = p . getData ( ) ; MeasureCollection m = new ALMeasureCollection ( ) ; for ( double [ ] entry : data ) { m . addValue ( 0 , entry [ accuracyColumn ] ) ; m . addValue ( 1 , entry [ kappaColumn ] ) ; m . addValue ( 2 , entry [ kappaTempColumn ] ) ; m . addValue ( 3 , Math . abs ( entry [ ramColumn ] ) ) ; m . addValue ( 4 , entry [ timeColumn ] ) ; m . addValue ( 5 , entry [ memoryColumn ] / ( 1024 * 1024 ) ) ; m . 
addValue ( 6 , entry [ budgetColumn ] ) ; } int processFrequency = ( int ) data . get ( 0 ) [ processFrequencyColumn ] ; ParsedPreview pp = new ParsedPreview ( ) ; pp . addMeasureCollection ( m ) ; pp . addProcessFrequency ( processFrequency ) ; return pp ; } | Parses a preview with respect to the process frequency and several measurements . |
21,470 | public static boolean isJavaVersionOK ( ) { boolean isJavaVersionOK = true ; String versionStr = System . getProperty ( "java.version" ) ; String [ ] parts ; double version ; if ( versionStr . contains ( "." ) ) { parts = versionStr . split ( "\\." ) ; } else { parts = new String [ ] { versionStr } ; } if ( parts . length == 1 ) { try { version = Double . parseDouble ( parts [ 0 ] ) ; } catch ( Exception e ) { System . err . println ( "Unparsable Java version: " + versionStr ) ; return false ; } } else { try { version = Double . parseDouble ( parts [ 0 ] ) + Double . parseDouble ( parts [ 1 ] ) / 10 ; } catch ( Exception e ) { System . err . println ( "Unparsable Java version: " + versionStr ) ; return false ; } } if ( version < 1.8 ) { isJavaVersionOK = false ; System . err . println ( ) ; System . err . println ( Globals . getWorkbenchInfoString ( ) ) ; System . err . println ( ) ; System . err . print ( "Java 8 or higher is required to run MOA. " ) ; System . err . println ( "Java version " + versionStr + " found" ) ; } return isJavaVersionOK ; } | Checks if the Java version is recent enough to run MOA . |
21,471 | protected double computeCandidateWeight ( Classifier candidate , Instances chunk , int numFolds ) { double candidateWeight = 0.0 ; Random random = new Random ( 1 ) ; Instances randData = new Instances ( chunk ) ; randData . randomize ( random ) ; if ( randData . classAttribute ( ) . isNominal ( ) ) { randData . stratify ( numFolds ) ; } for ( int n = 0 ; n < numFolds ; n ++ ) { Instances train = randData . trainCV ( numFolds , n , random ) ; Instances test = randData . testCV ( numFolds , n ) ; Classifier learner = candidate . copy ( ) ; for ( int num = 0 ; num < train . numInstances ( ) ; num ++ ) { learner . trainOnInstance ( train . instance ( num ) ) ; } candidateWeight += computeWeight ( learner , test ) ; } double resultWeight = candidateWeight / numFolds ; if ( Double . isInfinite ( resultWeight ) ) { return Double . MAX_VALUE ; } else { return resultWeight ; } } | Computes the weight of a candidate classifier . |
21,472 | protected double computeWeight ( Classifier learner , Instances chunk ) { double mse_i = 0 ; double mse_r = 0 ; double f_ci ; double voteSum ; for ( int i = 0 ; i < chunk . numInstances ( ) ; i ++ ) { try { voteSum = 0 ; for ( double element : learner . getVotesForInstance ( chunk . instance ( i ) ) ) { voteSum += element ; } if ( voteSum > 0 ) { f_ci = learner . getVotesForInstance ( chunk . instance ( i ) ) [ ( int ) chunk . instance ( i ) . classValue ( ) ] / voteSum ; mse_i += ( 1 - f_ci ) * ( 1 - f_ci ) ; } else { mse_i += 1 ; } } catch ( Exception e ) { mse_i += 1 ; } } mse_i /= this . chunkSize ; mse_r = this . computeMseR ( ) ; return java . lang . Math . max ( mse_r - mse_i , 0 ) ; } | Computes the weight of a given classifier .
21,473 | public double [ ] getVotesForInstance ( Instance inst ) { DoubleVector combinedVote = new DoubleVector ( ) ; if ( this . trainingWeightSeenByModel > 0.0 ) { for ( int i = 0 ; i < this . ensemble . length ; i ++ ) { if ( this . ensembleWeights [ i ] > 0.0 ) { DoubleVector vote = new DoubleVector ( this . ensemble [ i ] . getVotesForInstance ( inst ) ) ; if ( vote . sumOfValues ( ) > 0.0 ) { vote . normalize ( ) ; vote . scaleValues ( this . ensembleWeights [ i ] / ( 1.0 * this . ensemble . length + 1 ) ) ; combinedVote . addValues ( vote ) ; } } } } combinedVote . normalize ( ) ; return combinedVote . getArrayRef ( ) ; } | Predicts a class for an example . |
21,474 | protected int removePoorestModelBytes ( ) { int poorestIndex = Utils . minIndex ( this . ensembleWeights ) ; int byteSize = this . ensemble [ poorestIndex ] . measureByteSize ( ) ; discardModel ( poorestIndex ) ; return byteSize ; } | Removes the poorest classifier from the model thus decreasing the models size . |
21,475 | private DataSet [ ] splitDataSetUsingEM ( DataSet dataSet , int nrOfPartitions ) throws Exception { if ( dataSet . size ( ) <= 1 ) throw new Exception ( "EMsplit needs at least 2 objects!" ) ; EMProjectedClustering myEM = new EMProjectedClustering ( ) ; int nrOfIterations = 1 ; double log10 = Math . log ( dataSet . size ( ) * 1.0 ) / Math . log ( 10.0 ) ; nrOfIterations = Math . max ( 1 , ( 10 - ( ( Long ) Math . round ( log10 ) ) . intValue ( ) ) ) ; nrOfIterations = Math . min ( 10 , nrOfIterations ) ; int [ ] [ ] emMapping = myEM . getEMClusteringVariancesBestChoice ( dataSet . getFeaturesAsArray ( ) , nrOfPartitions , nrOfIterations ) ; DataSet [ ] subDataSets = new DataSet [ emMapping . length ] ; for ( int i = 0 ; i < subDataSets . length ; i ++ ) { subDataSets [ i ] = new DataSet ( dataSet . getNrOfDimensions ( ) ) ; for ( int j = 0 ; j < emMapping [ i ] . length ; j ++ ) { subDataSets [ i ] . addObject ( dataSet . getObject ( emMapping [ i ] [ j ] ) ) ; } } if ( subDataSets . length < 2 ) { System . out . println ( "mean shift split" ) ; subDataSets = splitDataSetUsingMeanShift ( dataSet ) ; } boolean changes = ! ALLOW_KERNELS_IN_INNER_NODES ; while ( changes ) { changes = false ; for ( int i = 0 ; i < subDataSets . length ; i ++ ) { if ( subDataSets [ i ] . size ( ) == 1 ) { System . out . println ( "merge singular sets" ) ; subDataSets = mergeDataSets ( subDataSets , i ) ; changes = true ; break ; } } } return subDataSets ; } | This methods splits the given data set into partitions using the EM algorithm . The resulting number of partitions is < = nrOfPartitions . If EM returns only one partition the data set is split using mean shift . The size of each resulting partition is AT LEAST 2 i . e . partitions with size 1 are merged with the closest remaining partition . Hence this method might return only one partition containing the whole data set . |
21,476 | private final void setSeed ( int seed ) { if ( mt == null ) mt = new int [ N ] ; mt [ 0 ] = seed ; for ( mti = 1 ; mti < N ; mti ++ ) { mt [ mti ] = ( MAGIC_FACTOR1 * ( mt [ mti - 1 ] ^ ( mt [ mti - 1 ] >>> 30 ) ) + mti ) ; } } | Initializes the internal state vector with the given seed ; this method should not be made public .
21,477 | public final synchronized void setSeed ( int [ ] buf ) { int length = buf . length ; if ( length == 0 ) throw new IllegalArgumentException ( "Seed buffer may not be empty" ) ; int i = 1 , j = 0 , k = ( N > length ? N : length ) ; setSeed ( MAGIC_SEED ) ; for ( ; k > 0 ; k -- ) { mt [ i ] = ( mt [ i ] ^ ( ( mt [ i - 1 ] ^ ( mt [ i - 1 ] >>> 30 ) ) * MAGIC_FACTOR2 ) ) + buf [ j ] + j ; i ++ ; j ++ ; if ( i >= N ) { mt [ 0 ] = mt [ N - 1 ] ; i = 1 ; } if ( j >= length ) j = 0 ; } for ( k = N - 1 ; k > 0 ; k -- ) { mt [ i ] = ( mt [ i ] ^ ( ( mt [ i - 1 ] ^ ( mt [ i - 1 ] >>> 30 ) ) * MAGIC_FACTOR3 ) ) - i ; i ++ ; if ( i >= N ) { mt [ 0 ] = mt [ N - 1 ] ; i = 1 ; } } mt [ 0 ] = UPPER_MASK ; } | This method resets the state of this instance using the integer array of seed data provided . This is the canonical way of resetting the pseudo random number sequence . |
21,478 | protected static synchronized void initCache ( ) { if ( m_Cache == null ) { m_Cache = new ClassCache ( ) ; if ( m_Cache . isEmpty ( ) ) { InputStream inputStream = null ; try { inputStream = m_Cache . getClass ( ) . getResourceAsStream ( CLASS_LIST ) ; m_Cache = new ClassCache ( new FixedClassListTraversal ( inputStream ) ) ; } catch ( Exception e ) { System . err . println ( "Failed to initialize class cache from fixed list (" + CLASS_LIST + ")!" ) ; e . printStackTrace ( ) ; } finally { if ( inputStream != null ) { try { inputStream . close ( ) ; } catch ( Exception e ) { } } } } } } | Initializes the class cache |
21,479 | public static List < String > getAllClassNames ( ) { List < String > result = new ArrayList < > ( ) ; Iterator < String > pkgs = m_Cache . packages ( ) ; while ( pkgs . hasNext ( ) ) { String pkg = pkgs . next ( ) ; if ( pkg . startsWith ( "moa" ) ) { Set < String > classnames = m_Cache . getClassnames ( pkg ) ; result . addAll ( classnames ) ; } } return result ; } | Returns all class names stored in the cache . |
21,480 | public static void main ( String [ ] args ) throws Exception { initCache ( ) ; List < String > allClassnames = getAllClassNames ( ) ; PrintStream out = System . out ; if ( args . length > 0 ) out = new PrintStream ( new File ( args [ 0 ] ) ) ; Collections . sort ( allClassnames ) ; for ( String clsname : allClassnames ) out . println ( clsname ) ; out . flush ( ) ; if ( args . length > 0 ) out . close ( ) ; } | Outputs all class names below moa either to stdout or to the file provided as first argument . |
21,481 | public void add ( DATA data ) { if ( root == null ) { root = new RootLeafNode ( data ) ; try { root . addData ( data , 0 ) ; } catch ( SplitNodeReplacement e ) { throw new RuntimeException ( "Should never happen!" ) ; } } else { double distance = distanceFunction . calculate ( data , root . data ) ; try { root . addData ( data , distance ) ; } catch ( SplitNodeReplacement e ) { Node newRoot = new RootNode ( data ) ; root = newRoot ; for ( int i = 0 ; i < e . newNodes . length ; i ++ ) { @ SuppressWarnings ( "unchecked" ) Node newNode = ( Node ) e . newNodes [ i ] ; distance = distanceFunction . calculate ( root . data , newNode . data ) ; root . addChild ( newNode , distance ) ; } } } } | Adds and indexes a data object . |
21,482 | public boolean remove ( DATA data ) { if ( root == null ) { return false ; } double distanceToRoot = distanceFunction . calculate ( data , root . data ) ; try { root . removeData ( data , distanceToRoot ) ; } catch ( RootNodeReplacement e ) { @ SuppressWarnings ( "unchecked" ) Node newRoot = ( Node ) e . newRoot ; root = newRoot ; } catch ( DataNotFound e ) { return false ; } catch ( NodeUnderCapacity e ) { throw new RuntimeException ( "Should have never happened" , e ) ; } return true ; } | Removes a data object from the M - Tree . |
21,483 | public Query getNearestByRange ( DATA queryData , double range ) { return getNearest ( queryData , range , Integer . MAX_VALUE ) ; } | Performs a nearest - neighbors query on the M - Tree constrained by distance . |
21,484 | public Query getNearestByLimit ( DATA queryData , int limit ) { return getNearest ( queryData , Double . POSITIVE_INFINITY , limit ) ; } | Performs a nearest - neighbors query on the M - Tree constrained by the number of neighbors . |
21,485 | public Query getNearest ( DATA queryData ) { return new Query ( queryData , Double . POSITIVE_INFINITY , Integer . MAX_VALUE ) ; } | Performs a nearest - neighbor query on the M - Tree without constraints . |
21,486 | private static double distance ( double [ ] pointA , double [ ] pointB ) { double distance = 0.0 ; for ( int i = 0 ; i < pointA . length ; i ++ ) { double d = pointA [ i ] - pointB [ i ] ; distance += d * d ; } return Math . sqrt ( distance ) ; } | Distance between two vectors . |
21,487 | protected static Clustering cleanUpKMeans ( Clustering kMeansResult , ArrayList < CFCluster > microclusters ) { int k = kMeansResult . size ( ) ; CFCluster [ ] converted = new CFCluster [ k ] ; for ( CFCluster mc : microclusters ) { double minDistance = Double . MAX_VALUE ; int closestCluster = 0 ; for ( int i = 0 ; i < k ; i ++ ) { double distance = distance ( kMeansResult . get ( i ) . getCenter ( ) , mc . getCenter ( ) ) ; if ( distance < minDistance ) { closestCluster = i ; minDistance = distance ; } } if ( converted [ closestCluster ] == null ) { converted [ closestCluster ] = ( CFCluster ) mc . copy ( ) ; } else { converted [ closestCluster ] . add ( mc ) ; } } int count = 0 ; for ( int i = 0 ; i < converted . length ; i ++ ) { if ( converted [ i ] != null ) count ++ ; } CFCluster [ ] cleaned = new CFCluster [ count ] ; count = 0 ; for ( int i = 0 ; i < converted . length ; i ++ ) { if ( converted [ i ] != null ) cleaned [ count ++ ] = converted [ i ] ; } return new Clustering ( cleaned ) ; } | Rearrange the k - means result into a set of CFClusters cleaning up the redundancies . |
21,488 | protected void clear ( ) { this . data . clear ( ) ; this . buffer . clear ( ) ; this . child = null ; this . timestamp = Entry . defaultTimestamp ; } | Clear the Entry . All points in the buffer and in the data cluster are lost the connection to the child is lost and the timestamp is set to the default value . |
21,489 | protected void makeOlder ( long currentTime , double negLambda ) { long diff = currentTime - this . timestamp ; this . buffer . makeOlder ( diff , negLambda ) ; this . data . makeOlder ( diff , negLambda ) ; this . timestamp = currentTime ; } | Ages this entry's data AND buffer according to the given time and aging constant .
21,490 | public void resetLearningImpl ( ) { this . windowSize = this . windowSizeOption . getValue ( ) ; this . numTrees = this . numTreesOption . getValue ( ) ; this . maxDepth = this . maxDepthOption . getValue ( ) ; this . sizeLimit = this . sizeLimitOption . getValue ( ) ; this . numInstances = 0 ; this . forest = new HSTreeNode [ numTrees ] ; this . referenceWindow = true ; this . anomalyThreshold = this . anomalyThresholdOption . getValue ( ) ; } | Reset the classifier s parameters and data structures . |
21,491 | public void trainOnInstanceImpl ( Instance inst ) { if ( this . numInstances == 0 ) { this . buildForest ( inst ) ; } for ( int i = 0 ; i < this . numTrees ; i ++ ) { forest [ i ] . updateMass ( inst , referenceWindow ) ; } if ( this . numInstances > 50 ) referenceWindow = false ; if ( this . numInstances % windowSize == 0 ) { for ( int i = 0 ; i < this . numTrees ; i ++ ) { forest [ i ] . updateModel ( ) ; } } this . numInstances ++ ; } | Update the forest with the argument instance |
21,492 | private void buildForest ( Instance inst ) { this . dimensions = inst . numAttributes ( ) ; double [ ] max = new double [ dimensions ] ; double [ ] min = new double [ dimensions ] ; double sq ; for ( int i = 0 ; i < this . numTrees ; i ++ ) { for ( int j = 0 ; j < this . dimensions ; j ++ ) { sq = this . classifierRandom . nextDouble ( ) ; min [ j ] = sq - ( 2.0 * Math . max ( sq , 1.0 - sq ) ) ; max [ j ] = sq + ( 2.0 * Math . max ( sq , 1.0 - sq ) ) ; } forest [ i ] = new HSTreeNode ( min , max , 1 , maxDepth ) ; } } | Build the forest of Streaming Half - Space Trees |
21,493 | public double [ ] getVotesForInstance ( Instance inst ) { double [ ] votes = { 0.5 , 0.5 } ; if ( ! referenceWindow ) { votes [ 1 ] = this . getAnomalyScore ( inst ) + 0.5 - this . anomalyThreshold ; votes [ 0 ] = 1.0 - votes [ 1 ] ; } return votes ; } | Combine the anomaly scores from each HSTree in the forest and convert into a vote score . |
21,494 | public double getAnomalyScore ( Instance inst ) { if ( this . referenceWindow ) return 0.5 ; else { double accumulatedScore = 0.0 ; int massLimit = ( int ) ( Math . ceil ( this . sizeLimit * this . windowSize ) ) ; double maxScore = this . windowSize * Math . pow ( 2.0 , this . maxDepth ) ; for ( int i = 0 ; i < this . numTrees ; i ++ ) { accumulatedScore += ( forest [ i ] . score ( inst , massLimit ) / maxScore ) ; } accumulatedScore = accumulatedScore / ( ( ( double ) this . numTrees ) ) ; return 0.5 - accumulatedScore + this . anomalyThreshold ; } } | Returns the anomaly score for the argument instance . |
21,495 | public void initialize ( Collection < Instance > trainingPoints ) { Iterator < Instance > trgPtsIterator = trainingPoints . iterator ( ) ; if ( trgPtsIterator . hasNext ( ) && this . numInstances == 0 ) { Instance inst = trgPtsIterator . next ( ) ; this . buildForest ( inst ) ; this . trainOnInstance ( inst ) ; } while ( trgPtsIterator . hasNext ( ) ) { this . trainOnInstance ( ( Instance ) trgPtsIterator . next ( ) ) ; } } | Initializes the Streaming HS - Trees classifier on the argument trainingPoints . |
21,496 | protected AttributeSplitSuggestion searchForBestSplitOption ( Node currentNode , AttributeSplitSuggestion currentBestOption , SplitCriterion criterion , int attIndex ) { if ( currentNode == null || countRightTotal == 0.0 ) { return currentBestOption ; } if ( currentNode . left != null ) { currentBestOption = searchForBestSplitOption ( currentNode . left , currentBestOption , criterion , attIndex ) ; } sumTotalLeft += currentNode . leftStatistics . getValue ( 1 ) ; sumTotalRight -= currentNode . leftStatistics . getValue ( 1 ) ; sumSqTotalLeft += currentNode . leftStatistics . getValue ( 2 ) ; sumSqTotalRight -= currentNode . leftStatistics . getValue ( 2 ) ; countLeftTotal += currentNode . leftStatistics . getValue ( 0 ) ; countRightTotal -= currentNode . leftStatistics . getValue ( 0 ) ; double [ ] [ ] postSplitDists = new double [ ] [ ] { { countLeftTotal , sumTotalLeft , sumSqTotalLeft } , { countRightTotal , sumTotalRight , sumSqTotalRight } } ; double [ ] preSplitDist = new double [ ] { ( countLeftTotal + countRightTotal ) , ( sumTotalLeft + sumTotalRight ) , ( sumSqTotalLeft + sumSqTotalRight ) } ; double merit = criterion . getMeritOfSplit ( preSplitDist , postSplitDists ) ; if ( ( currentBestOption == null ) || ( merit > currentBestOption . merit ) ) { currentBestOption = new AttributeSplitSuggestion ( new NumericAttributeBinaryTest ( attIndex , currentNode . cut_point , true ) , postSplitDists , merit ) ; } if ( currentNode . right != null ) { currentBestOption = searchForBestSplitOption ( currentNode . right , currentBestOption , criterion , attIndex ) ; } sumTotalLeft -= currentNode . leftStatistics . getValue ( 1 ) ; sumTotalRight += currentNode . leftStatistics . getValue ( 1 ) ; sumSqTotalLeft -= currentNode . leftStatistics . getValue ( 2 ) ; sumSqTotalRight += currentNode . leftStatistics . getValue ( 2 ) ; countLeftTotal -= currentNode . leftStatistics . getValue ( 0 ) ; countRightTotal += currentNode . leftStatistics . 
getValue ( 0 ) ; return currentBestOption ; } | Implementation of the FindBestSplit algorithm from E . Ikonomovska et al . |
21,497 | public void removeBadSplits ( SplitCriterion criterion , double lastCheckRatio , double lastCheckSDR , double lastCheckE ) { removeBadSplitNodes ( criterion , this . root , lastCheckRatio , lastCheckSDR , lastCheckE ) ; } | A method to remove all nodes in the E - BST in which it and all it s children represent bad split points |
21,498 | private boolean removeBadSplitNodes ( SplitCriterion criterion , Node currentNode , double lastCheckRatio , double lastCheckSDR , double lastCheckE ) { boolean isBad = false ; if ( currentNode == null ) { return true ; } if ( currentNode . left != null ) { isBad = removeBadSplitNodes ( criterion , currentNode . left , lastCheckRatio , lastCheckSDR , lastCheckE ) ; } if ( currentNode . right != null && isBad ) { isBad = removeBadSplitNodes ( criterion , currentNode . left , lastCheckRatio , lastCheckSDR , lastCheckE ) ; } if ( isBad ) { double [ ] [ ] postSplitDists = new double [ ] [ ] { { currentNode . leftStatistics . getValue ( 0 ) , currentNode . leftStatistics . getValue ( 1 ) , currentNode . leftStatistics . getValue ( 2 ) } , { currentNode . rightStatistics . getValue ( 0 ) , currentNode . rightStatistics . getValue ( 1 ) , currentNode . rightStatistics . getValue ( 2 ) } } ; double [ ] preSplitDist = new double [ ] { ( currentNode . leftStatistics . getValue ( 0 ) + currentNode . rightStatistics . getValue ( 0 ) ) , ( currentNode . leftStatistics . getValue ( 1 ) + currentNode . rightStatistics . getValue ( 1 ) ) , ( currentNode . leftStatistics . getValue ( 2 ) + currentNode . rightStatistics . getValue ( 2 ) ) } ; double merit = criterion . getMeritOfSplit ( preSplitDist , postSplitDists ) ; if ( ( merit / lastCheckSDR ) < ( lastCheckRatio - ( 2 * lastCheckE ) ) ) { currentNode = null ; return true ; } } return false ; } | Recursive method that first checks all of a node s children before deciding if it is bad and may be removed |
21,499 | public void resetLearningImpl ( ) { this . nbhdSize = this . neighbourhoodSizeOption . getValue ( ) ; this . tau = this . thresholdOption . getValue ( ) ; this . neighbourhood = new FixedLengthList < Instance > ( nbhdSize ) ; } | Resets the implementation s parameters and data structures . |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.