| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | #ifndef FLANN_AUTOTUNED_INDEX_H_ |
| | #define FLANN_AUTOTUNED_INDEX_H_ |
| |
|
| | #include "FLANN/general.h" |
| | #include "FLANN/algorithms/nn_index.h" |
| | #include "FLANN/nn/ground_truth.h" |
| | #include "FLANN/nn/index_testing.h" |
| | #include "FLANN/util/sampling.h" |
| | #include "FLANN/algorithms/kdtree_index.h" |
| | #include "FLANN/algorithms/kdtree_single_index.h" |
| | #include "FLANN/algorithms/kmeans_index.h" |
| | #include "FLANN/algorithms/composite_index.h" |
| | #include "FLANN/algorithms/linear_index.h" |
| | #include "FLANN/util/logger.h" |
| |
|
| |
|
| | namespace flann |
| | { |
| |
|
/**
 * Forward declaration of the index factory (defined elsewhere).
 * Used by AutotunedIndex to instantiate the index type that autotuning
 * selected as the winner.
 */
template<typename Distance>
inline NNIndex<Distance>*
create_index_by_type(const flann_algorithm_t index_type,
                     const Matrix<typename Distance::ElementType>& dataset, const IndexParams& params, const Distance& distance = Distance());
| |
|
| |
|
/**
 * Parameter set for the autotuned index.
 *
 * @param target_precision fraction of exact neighbours the tuned index is
 *        required to return (passed to the precision tester during tuning)
 * @param build_weight     weight of index build time in the total time cost
 * @param memory_weight    weight of memory usage in the combined cost
 * @param sample_fraction  fraction of the dataset sampled for autotuning
 */
struct AutotunedIndexParams : public IndexParams
{
    AutotunedIndexParams(float target_precision = 0.8, float build_weight = 0.01, float memory_weight = 0, float sample_fraction = 0.1)
    {
        (*this)["algorithm"] = FLANN_INDEX_AUTOTUNED;
        // precision the tuned index must reach
        (*this)["target_precision"] = target_precision;
        // build-time weight in the time cost
        (*this)["build_weight"] = build_weight;
        // memory-usage weight in the combined cost
        (*this)["memory_weight"] = memory_weight;
        // fraction of the dataset used during tuning
        (*this)["sample_fraction"] = sample_fraction;
    }
};
| |
|
| |
|
| | template <typename Distance> |
| | class AutotunedIndex : public NNIndex<Distance> |
| | { |
| | public: |
| | typedef typename Distance::ElementType ElementType; |
| | typedef typename Distance::ResultType DistanceType; |
| |
|
| | typedef NNIndex<Distance> BaseClass; |
| |
|
| | typedef AutotunedIndex<Distance> IndexType; |
| |
|
| | typedef bool needs_kdtree_distance; |
| |
|
    /**
     * Constructor.
     *
     * @param inputData dataset to be indexed (stored; indexed on buildIndex())
     * @param params    autotuning parameters (see AutotunedIndexParams)
     * @param d         distance functor
     */
    AutotunedIndex(const Matrix<ElementType>& inputData, const IndexParams& params = AutotunedIndexParams(), Distance d = Distance()) :
        BaseClass(params, d), bestIndex_(NULL), speedup_(0), dataset_(inputData)
    {
        // cache the tuning knobs, falling back to the documented defaults
        target_precision_ = get_param(params, "target_precision",0.8f);
        build_weight_ = get_param(params,"build_weight", 0.01f);
        memory_weight_ = get_param(params, "memory_weight", 0.0f);
        sample_fraction_ = get_param(params,"sample_fraction", 0.1f);
    }
| |
|
    /**
     * Constructor without data; the dataset is supplied later through
     * buildIndex(const Matrix&) or restored by loadIndex().
     *
     * @param params autotuning parameters (see AutotunedIndexParams)
     * @param d      distance functor
     */
    AutotunedIndex(const IndexParams& params = AutotunedIndexParams(), Distance d = Distance()) :
        BaseClass(params, d), bestIndex_(NULL), speedup_(0)
    {
        // cache the tuning knobs, falling back to the documented defaults
        target_precision_ = get_param(params, "target_precision",0.8f);
        build_weight_ = get_param(params,"build_weight", 0.01f);
        memory_weight_ = get_param(params, "memory_weight", 0.0f);
        sample_fraction_ = get_param(params,"sample_fraction", 0.1f);
    }
| |
|
| | AutotunedIndex(const AutotunedIndex& other) : BaseClass(other), |
| | bestParams_(other.bestParams_), |
| | bestSearchParams_(other.bestSearchParams_), |
| | speedup_(other.speedup_), |
| | dataset_(other.dataset_), |
| | target_precision_(other.target_precision_), |
| | build_weight_(other.build_weight_), |
| | memory_weight_(other.memory_weight_), |
| | sample_fraction_(other.sample_fraction_) |
| | { |
| | bestIndex_ = other.bestIndex_->clone(); |
| | } |
| |
|
    /**
     * Copy-and-swap assignment: 'other' arrives by value (already copied),
     * its contents are swapped in, and the old state is destroyed with it.
     */
    AutotunedIndex& operator=(AutotunedIndex other)
    {
        this->swap(other);
        return * this;
    }
| |
|
    /**
     * Destructor: releases the owned best index (delete of NULL is a no-op).
     */
    virtual ~AutotunedIndex()
    {
        delete bestIndex_;
    }
| |
|
    /**
     * Polymorphic copy: returns a heap-allocated deep copy of this index.
     * Caller takes ownership.
     */
    BaseClass* clone() const
    {
        return new AutotunedIndex(*this);
    }
| |
|
| | |
| | |
| | |
| | void buildIndex() |
| | { |
| | bestParams_ = estimateBuildParams(); |
| | Logger::info("----------------------------------------------------\n"); |
| | Logger::info("Autotuned parameters:\n"); |
| | if (Logger::getLevel()>=FLANN_LOG_INFO) |
| | print_params(bestParams_); |
| | Logger::info("----------------------------------------------------\n"); |
| |
|
| | flann_algorithm_t index_type = get_param<flann_algorithm_t>(bestParams_,"algorithm"); |
| | bestIndex_ = create_index_by_type(index_type, dataset_, bestParams_, distance_); |
| | bestIndex_->buildIndex(); |
| | speedup_ = estimateSearchParams(bestSearchParams_); |
| | Logger::info("----------------------------------------------------\n"); |
| | Logger::info("Search parameters:\n"); |
| | if (Logger::getLevel()>=FLANN_LOG_INFO) |
| | print_params(bestSearchParams_); |
| | Logger::info("----------------------------------------------------\n"); |
| | bestParams_["search_params"] = bestSearchParams_; |
| | bestParams_["speedup"] = speedup_; |
| | } |
| |
|
    /**
     * Stores the given dataset and runs the autotuned build on it.
     *
     * @param dataset the dataset to index (the matrix is stored by value;
     *        the underlying data buffer is not copied)
     */
    void buildIndex(const Matrix<ElementType>& dataset)
    {
        dataset_ = dataset;
        this->buildIndex();
    }
| |
|
| |
|
    /**
     * Forwards point addition to the tuned sub-index.
     * Silently does nothing if the index has not been built yet.
     *
     * @param points            points to add
     * @param rebuild_threshold passed through to the sub-index
     */
    void addPoints(const Matrix<ElementType>& points, float rebuild_threshold = 2)
    {
        if (bestIndex_) {
            bestIndex_->addPoints(points, rebuild_threshold);
        }
    }
| |
|
    /**
     * Forwards point removal to the tuned sub-index.
     * Silently does nothing if the index has not been built yet.
     *
     * @param id id of the point to remove
     */
    void removePoint(size_t id)
    {
        if (bestIndex_) {
            bestIndex_->removePoint(id);
        }
    }
| |
|
| |
|
    /**
     * (De)serializes the autotuning state: base-class state, the tuning
     * knobs, the selected algorithm type and the tuned number of checks.
     * The wrapped index itself is handled separately by saveIndex()/
     * loadIndex().
     */
    template<typename Archive>
    void serialize(Archive& ar)
    {
        ar.setObject(this);

        // base class state first, so load order matches save order
        ar & *static_cast<NNIndex<Distance>*>(this);

        ar & target_precision_;
        ar & build_weight_;
        ar & memory_weight_;
        ar & sample_fraction_;

        // persist which index type autotuning selected
        flann_algorithm_t index_type;
        if (Archive::is_saving::value) {
            index_type = get_param<flann_algorithm_t>(bestParams_,"algorithm");
        }
        ar & index_type;
        ar & bestSearchParams_.checks;

        if (Archive::is_loading::value) {
            bestParams_["algorithm"] = index_type;

            // rebuild the user-visible parameter map from the loaded values
            index_params_["algorithm"] = getType();
            index_params_["target_precision_"] = target_precision_;
            index_params_["build_weight_"] = build_weight_;
            index_params_["memory_weight_"] = memory_weight_;
            index_params_["sample_fraction_"] = sample_fraction_;
        }
    }
| |
|
    /**
     * Saves this index to a stream: first the autotuning state (via
     * serialize()), then the wrapped best index.
     *
     * NOTE(review): assumes buildIndex()/loadIndex() succeeded first —
     * bestIndex_ is dereferenced without a NULL check.
     */
    void saveIndex(FILE* stream)
    {
        {
            // scoped so the archive is destroyed (completing its output)
            // before the sub-index is written to the same stream
            serialization::SaveArchive sa(stream);
            sa & *this;
        }

        bestIndex_->saveIndex(stream);
    }
| |
|
| | void loadIndex(FILE* stream) |
| | { |
| | { |
| | serialization::LoadArchive la(stream); |
| | la & *this; |
| | } |
| |
|
| | IndexParams params; |
| | flann_algorithm_t index_type = get_param<flann_algorithm_t>(bestParams_,"algorithm"); |
| | bestIndex_ = create_index_by_type<Distance>((flann_algorithm_t)index_type, dataset_, params, distance_); |
| | bestIndex_->loadIndex(stream); |
| | } |
| |
|
| | int knnSearch(const Matrix<ElementType>& queries, |
| | Matrix<size_t>& indices, |
| | Matrix<DistanceType>& dists, |
| | size_t knn, |
| | const SearchParams& params) const |
| | { |
| | if (params.checks == FLANN_CHECKS_AUTOTUNED) { |
| | SearchParams autotuned_params = params; |
| | autotuned_params.checks = bestSearchParams_.checks; |
| | return bestIndex_->knnSearch(queries, indices, dists, knn, autotuned_params); |
| | } |
| | else { |
| | return bestIndex_->knnSearch(queries, indices, dists, knn, params); |
| | } |
| | } |
| |
|
| | int knnSearch(const Matrix<ElementType>& queries, |
| | std::vector< std::vector<size_t> >& indices, |
| | std::vector<std::vector<DistanceType> >& dists, |
| | size_t knn, |
| | const SearchParams& params) const |
| | { |
| | if (params.checks == FLANN_CHECKS_AUTOTUNED) { |
| | SearchParams autotuned_params = params; |
| | autotuned_params.checks = bestSearchParams_.checks; |
| | return bestIndex_->knnSearch(queries, indices, dists, knn, autotuned_params); |
| | } |
| | else { |
| | return bestIndex_->knnSearch(queries, indices, dists, knn, params); |
| | } |
| |
|
| | } |
| |
|
| | int radiusSearch(const Matrix<ElementType>& queries, |
| | Matrix<size_t>& indices, |
| | Matrix<DistanceType>& dists, |
| | DistanceType radius, |
| | const SearchParams& params) const |
| | { |
| | if (params.checks == FLANN_CHECKS_AUTOTUNED) { |
| | SearchParams autotuned_params = params; |
| | autotuned_params.checks = bestSearchParams_.checks; |
| | return bestIndex_->radiusSearch(queries, indices, dists, radius, autotuned_params); |
| | } |
| | else { |
| | return bestIndex_->radiusSearch(queries, indices, dists, radius, params); |
| | } |
| | } |
| |
|
| | int radiusSearch(const Matrix<ElementType>& queries, |
| | std::vector< std::vector<size_t> >& indices, |
| | std::vector<std::vector<DistanceType> >& dists, |
| | DistanceType radius, |
| | const SearchParams& params) const |
| | { |
| | if (params.checks == FLANN_CHECKS_AUTOTUNED) { |
| | SearchParams autotuned_params = params; |
| | autotuned_params.checks = bestSearchParams_.checks; |
| | return bestIndex_->radiusSearch(queries, indices, dists, radius, autotuned_params); |
| | } |
| | else { |
| | return bestIndex_->radiusSearch(queries, indices, dists, radius, params); |
| | } |
| | } |
| |
|
| |
|
| |
|
| | |
| | |
| | |
    /**
     * Not supported on the autotuned wrapper: searches must go through the
     * knnSearch()/radiusSearch() overloads above, which delegate to the
     * tuned sub-index. Calling this directly is a programming error.
     */
    void findNeighbors(ResultSet<DistanceType>& result, const ElementType* vec, const SearchParams& searchParams) const
    {
        // should never get here — use knnSearch/radiusSearch instead
        assert(false);
    }
| |
|
    /**
     * Returns the parameters selected by autotuning (including, after
     * buildIndex(), the tuned search params and estimated speedup).
     */
    IndexParams getParameters() const
    {
        return bestParams_;
    }
| |
|
    /**
     * Returns the search parameters estimated during autotuning.
     * Deprecated: the same information is stored in bestParams_["search_params"].
     */
    FLANN_DEPRECATED SearchParams getSearchParameters() const
    {
        return bestSearchParams_;
    }
| |
|
    /**
     * Returns the estimated speedup over linear search.
     * Deprecated: the same information is stored in bestParams_["speedup"].
     */
    FLANN_DEPRECATED float getSpeedup() const
    {
        return speedup_;
    }
| |
|
| |
|
| | |
| | |
| | |
    /**
     * Number of points in the index (delegates to the tuned sub-index;
     * requires the index to have been built).
     */
    size_t size() const
    {
        return bestIndex_->size();
    }
| |
|
| | |
| | |
| | |
    /**
     * Dimensionality of the indexed points (delegates to the tuned
     * sub-index; requires the index to have been built).
     */
    size_t veclen() const
    {
        return bestIndex_->veclen();
    }
| |
|
| | |
| | |
| | |
    /**
     * Memory used by the tuned sub-index, in bytes (delegates; requires the
     * index to have been built).
     */
    int usedMemory() const
    {
        return bestIndex_->usedMemory();
    }
| |
|
| | |
| | |
| | |
    /**
     * Algorithm identifier of this wrapper (always FLANN_INDEX_AUTOTUNED,
     * regardless of which sub-index was selected).
     */
    flann_algorithm_t getType() const
    {
        return FLANN_INDEX_AUTOTUNED;
    }
| |
|
| | protected: |
    /** No-op: buildIndex() is overridden above and does all the work. */
    void buildIndexImpl()
    {
    }
| |
|
    /** No-op: the owned sub-index is released by the destructor instead. */
    void freeIndex()
    {
    }
| |
|
| | private: |
| |
|
    /** Costs measured for one candidate parameter combination during tuning. */
    struct CostData
    {
        float searchTimeCost;  // search time at the target precision
        float buildTimeCost;   // time to build the candidate index
        float memoryCost;      // memory usage relative to the dataset size
        float totalCost;       // combined cost; NOTE(review): never assigned in this file
        IndexParams params;    // the parameter set these costs were measured for
    };
| |
|
| | void evaluate_kmeans(CostData& cost) |
| | { |
| | StartStopTimer t; |
| | int checks; |
| | const int nn = 1; |
| |
|
| | Logger::info("KMeansTree using params: max_iterations=%d, branching=%d\n", |
| | get_param<int>(cost.params,"iterations"), |
| | get_param<int>(cost.params,"branching")); |
| | KMeansIndex<Distance> kmeans(sampledDataset_, cost.params, distance_); |
| | |
| | t.start(); |
| | kmeans.buildIndex(); |
| | t.stop(); |
| | float buildTime = (float)t.value; |
| |
|
| | |
| | float searchTime = test_index_precision(kmeans, sampledDataset_, testDataset_, gt_matches_, target_precision_, checks, distance_, nn); |
| |
|
| | float datasetMemory = float(sampledDataset_.rows * sampledDataset_.cols * sizeof(float)); |
| | cost.memoryCost = (kmeans.usedMemory() + datasetMemory) / datasetMemory; |
| | cost.searchTimeCost = searchTime; |
| | cost.buildTimeCost = buildTime; |
| | Logger::info("KMeansTree buildTime=%g, searchTime=%g, build_weight=%g\n", buildTime, searchTime, build_weight_); |
| | } |
| |
|
| |
|
| | void evaluate_kdtree(CostData& cost) |
| | { |
| | StartStopTimer t; |
| | int checks; |
| | const int nn = 1; |
| |
|
| | Logger::info("KDTree using params: trees=%d\n", get_param<int>(cost.params,"trees")); |
| | KDTreeIndex<Distance> kdtree(sampledDataset_, cost.params, distance_); |
| |
|
| | t.start(); |
| | kdtree.buildIndex(); |
| | t.stop(); |
| | float buildTime = (float)t.value; |
| |
|
| | |
| | float searchTime = test_index_precision(kdtree, sampledDataset_, testDataset_, gt_matches_, target_precision_, checks, distance_, nn); |
| |
|
| | float datasetMemory = float(sampledDataset_.rows * sampledDataset_.cols * sizeof(float)); |
| | cost.memoryCost = (kdtree.usedMemory() + datasetMemory) / datasetMemory; |
| | cost.searchTimeCost = searchTime; |
| | cost.buildTimeCost = buildTime; |
| | Logger::info("KDTree buildTime=%g, searchTime=%g\n", buildTime, searchTime); |
| | } |
| |
|
| |
|
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| |
|
| |
|
| |
|
| | void optimizeKMeans(std::vector<CostData>& costs) |
| | { |
| | Logger::info("KMEANS, Step 1: Exploring parameter space\n"); |
| |
|
| | |
| | int maxIterations[] = { 1, 5, 10, 15 }; |
| | int branchingFactors[] = { 16, 32, 64, 128, 256 }; |
| |
|
| | int kmeansParamSpaceSize = FLANN_ARRAY_LEN(maxIterations) * FLANN_ARRAY_LEN(branchingFactors); |
| | costs.reserve(costs.size() + kmeansParamSpaceSize); |
| |
|
| | |
| | for (size_t i = 0; i < FLANN_ARRAY_LEN(maxIterations); ++i) { |
| | for (size_t j = 0; j < FLANN_ARRAY_LEN(branchingFactors); ++j) { |
| | CostData cost; |
| | cost.params["algorithm"] = FLANN_INDEX_KMEANS; |
| | cost.params["centers_init"] = FLANN_CENTERS_RANDOM; |
| | cost.params["iterations"] = maxIterations[i]; |
| | cost.params["branching"] = branchingFactors[j]; |
| |
|
| | evaluate_kmeans(cost); |
| | costs.push_back(cost); |
| | } |
| | } |
| |
|
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | } |
| |
|
| |
|
| | void optimizeKDTree(std::vector<CostData>& costs) |
| | { |
| | Logger::info("KD-TREE, Step 1: Exploring parameter space\n"); |
| |
|
| | |
| | int testTrees[] = { 1, 4, 8, 16, 32, 64, 128}; |
| |
|
| | |
| | for (size_t i = 0; i < FLANN_ARRAY_LEN(testTrees); ++i) { |
| | CostData cost; |
| | cost.params["algorithm"] = FLANN_INDEX_KDTREE; |
| | cost.params["trees"] = testTrees[i]; |
| |
|
| | evaluate_kdtree(cost); |
| | costs.push_back(cost); |
| | } |
| |
|
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | } |
| |
|
| | |
| | |
| | |
| | |
| | |
    /**
     * Chooses the best index type and build parameters for dataset_.
     *
     * Works on a random sample of the dataset: computes ground-truth
     * neighbours by linear search (also timing it as the baseline),
     * evaluates KMeans and KDTree candidates, then picks the parameter set
     * minimising (build_weight * buildTime + searchTime), normalised by the
     * best time cost, plus memory_weight * memoryCost.
     *
     * @return the parameters of the winning index (LinearIndexParams for
     *         datasets too small to be worth tuning)
     */
    IndexParams estimateBuildParams()
    {
        std::vector<CostData> costs;

        int sampleSize = int(sample_fraction_ * dataset_.rows);
        int testSampleSize = std::min(sampleSize / 10, 1000);

        Logger::info("Entering autotuning, dataset size: %d, sampleSize: %d, testSampleSize: %d, target precision: %g\n", dataset_.rows, sampleSize, testSampleSize, target_precision_);

        // For a very small dataset linear search is good enough and there is
        // not enough data to test candidates meaningfully.
        if (testSampleSize < 10) {
            Logger::info("Choosing linear, dataset too small\n");
            return LinearIndexParams();
        }

        // sample a fraction of the dataset to keep tuning fast
        sampledDataset_ = random_sample(dataset_, sampleSize);
        // ...and carve a test/query set out of that sample
        testDataset_ = random_sample(sampledDataset_, testSampleSize, true);

        // compute ground truth by linear search, repeating until at least
        // 0.2s has elapsed so the timing is meaningful
        Logger::info("Computing ground truth... \n");
        gt_matches_ = Matrix<size_t>(new size_t[testDataset_.rows], testDataset_.rows, 1);
        StartStopTimer t;
        int repeats = 0;
        t.reset();
        while (t.value<0.2) {
            repeats++;
            t.start();
            compute_ground_truth<Distance>(sampledDataset_, testDataset_, gt_matches_, 0, distance_);
            t.stop();
        }

        // linear search baseline: no build time, no extra memory
        CostData linear_cost;
        linear_cost.searchTimeCost = (float)t.value/repeats;
        linear_cost.buildTimeCost = 0;
        linear_cost.memoryCost = 0;
        linear_cost.params["algorithm"] = FLANN_INDEX_LINEAR;

        costs.push_back(linear_cost);

        // evaluate the candidate parameter spaces
        Logger::info("Autotuning parameters...\n");

        optimizeKMeans(costs);
        optimizeKDTree(costs);

        // first pass: find the smallest pure time cost, used below to
        // normalise time against memory
        float bestTimeCost = costs[0].buildTimeCost * build_weight_ + costs[0].searchTimeCost;
        for (size_t i = 0; i < costs.size(); ++i) {
            float timeCost = costs[i].buildTimeCost * build_weight_ + costs[i].searchTimeCost;
            Logger::debug("Time cost: %g\n", timeCost);
            if (timeCost < bestTimeCost) {
                bestTimeCost = timeCost;
            }
        }
        Logger::debug("Best time cost: %g\n", bestTimeCost);

        // second pass: pick the candidate with the lowest combined
        // (normalised time + weighted memory) cost
        IndexParams bestParams = costs[0].params;
        if (bestTimeCost > 0) {
            float bestCost = (costs[0].buildTimeCost * build_weight_ + costs[0].searchTimeCost) / bestTimeCost;
            for (size_t i = 0; i < costs.size(); ++i) {
                float crtCost = (costs[i].buildTimeCost * build_weight_ + costs[i].searchTimeCost) / bestTimeCost +
                                memory_weight_ * costs[i].memoryCost;
                Logger::debug("Cost: %g\n", crtCost);
                if (crtCost < bestCost) {
                    bestCost = crtCost;
                    bestParams = costs[i].params;
                }
            }
            Logger::debug("Best cost: %g\n", bestCost);
        }

        // release the temporary tuning buffers allocated above
        delete[] gt_matches_.ptr();
        delete[] testDataset_.ptr();
        delete[] sampledDataset_.ptr();

        return bestParams;
    }
| |
|
| |
|
| |
|
| | |
| | |
| | |
| | |
| | |
    /**
     * Estimates the search parameters (number of checks) needed to reach
     * the target precision on the full dataset, and the resulting speedup
     * over linear search.
     *
     * For a KMeans sub-index it additionally sweeps the cluster-border
     * factor (cb_index) in [0, 1] and keeps the fastest setting.
     *
     * @param searchParams out: receives the estimated number of checks
     * @return estimated speedup over linear search (0 if the dataset is too
     *         small to sample queries from)
     */
    float estimateSearchParams(SearchParams& searchParams)
    {
        const int nn = 1;
        const size_t SAMPLE_COUNT = 1000;

        assert(bestIndex_ != NULL); // must be called after the index is built

        float speedup = 0;

        // sample up to 10% of the dataset (capped at SAMPLE_COUNT) as queries
        int samples = (int)std::min(dataset_.rows / 10, SAMPLE_COUNT);
        if (samples > 0) {
            Matrix<ElementType> testDataset = random_sample(dataset_, samples);

            Logger::info("Computing ground truth\n");

            // time linear search (repeat until >= 0.2s for a stable measurement)
            Matrix<size_t> gt_matches(new size_t[testDataset.rows], testDataset.rows, 1);
            StartStopTimer t;
            int repeats = 0;
            t.reset();
            while (t.value<0.2) {
                repeats++;
                t.start();
                compute_ground_truth<Distance>(dataset_, testDataset, gt_matches, 1, distance_);
                t.stop();
            }
            float linear = (float)t.value/repeats;

            int checks;
            Logger::info("Estimating number of checks\n");

            float searchTime;
            float cb_index;
            if (bestIndex_->getType() == FLANN_INDEX_KMEANS) {
                Logger::info("KMeans algorithm, estimating cluster border factor\n");
                KMeansIndex<Distance>* kmeans = static_cast<KMeansIndex<Distance>*>(bestIndex_);
                float bestSearchTime = -1;
                float best_cb_index = -1;
                int best_checks = -1;
                // sweep cb_index over {0.0, 0.2, ..., 1.0} and keep the fastest
                for (cb_index = 0; cb_index < 1.1f; cb_index += 0.2f) {
                    kmeans->set_cb_index(cb_index);
                    searchTime = test_index_precision(*kmeans, dataset_, testDataset, gt_matches, target_precision_, checks, distance_, nn, 1);
                    if ((searchTime < bestSearchTime) || (bestSearchTime == -1)) {
                        bestSearchTime = searchTime;
                        best_cb_index = cb_index;
                        best_checks = checks;
                    }
                }
                searchTime = bestSearchTime;
                cb_index = best_cb_index;
                checks = best_checks;

                // leave the index configured with the winning value
                kmeans->set_cb_index(best_cb_index);
                Logger::info("Optimum cb_index: %g\n", cb_index);
                bestParams_["cb_index"] = cb_index;
            }
            else {
                searchTime = test_index_precision(*bestIndex_, dataset_, testDataset, gt_matches, target_precision_, checks, distance_, nn, 1);
            }

            Logger::info("Required number of checks: %d \n", checks);
            searchParams.checks = checks;

            speedup = linear / searchTime;

            delete[] gt_matches.ptr();
            delete[] testDataset.ptr();
        }

        return speedup;
    }
| |
|
| |
|
    /**
     * Swaps all persistent state with another index (used by the
     * copy-and-swap assignment operator).
     *
     * The temporary tuning matrices (sampledDataset_, testDataset_,
     * gt_matches_) are not swapped; they are only populated — and their
     * buffers freed — inside estimateBuildParams().
     */
    void swap(AutotunedIndex& other)
    {
        BaseClass::swap(other);
        std::swap(bestIndex_, other.bestIndex_);
        std::swap(bestParams_, other.bestParams_);
        std::swap(bestSearchParams_, other.bestSearchParams_);
        std::swap(speedup_, other.speedup_);
        std::swap(dataset_, other.dataset_);
        std::swap(target_precision_, other.target_precision_);
        std::swap(build_weight_, other.build_weight_);
        std::swap(memory_weight_, other.memory_weight_);
        std::swap(sample_fraction_, other.sample_fraction_);
    }
| |
|
private:
    /** The index selected by autotuning (owned; released in the destructor). */
    NNIndex<Distance>* bestIndex_;

    /** Parameters selected by estimateBuildParams(). */
    IndexParams bestParams_;
    /** Search parameters (checks) selected by estimateSearchParams(). */
    SearchParams bestSearchParams_;

    /** Sampled subset of the dataset used during parameter estimation. */
    Matrix<ElementType> sampledDataset_;
    /** Query set sampled for precision testing during estimation. */
    Matrix<ElementType> testDataset_;
    /** Ground-truth nearest neighbours for testDataset_. */
    Matrix<size_t> gt_matches_;

    /** Estimated speedup of the tuned index over linear search. */
    float speedup_;

    /**
     * The dataset used by this index
     */
    Matrix<ElementType> dataset_;

    /**
     * Autotuning parameters (cached from the IndexParams in the constructors)
     */
    float target_precision_;
    float build_weight_;
    float memory_weight_;
    float sample_fraction_;

    USING_BASECLASS_SYMBOLS
| | }; |
| | } |
| |
|
| | #endif |
| |
|