| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| |
|
| | #ifndef COLMAP_SRC_RETRIEVAL_VISUAL_INDEX_H_ |
| | #define COLMAP_SRC_RETRIEVAL_VISUAL_INDEX_H_ |
| |
|
#include <algorithm>
#include <cmath>
#include <cstdint>
#include <cstdio>
#include <fstream>
#include <functional>
#include <string>
#include <thread>
#include <type_traits>
#include <unordered_map>
#include <unordered_set>
#include <utility>
#include <vector>

#include <Eigen/Core>
#include <boost/heap/fibonacci_heap.hpp>

#include "FLANN/flann.hpp"
#include "feature/types.h"
#include "retrieval/inverted_file.h"
#include "retrieval/inverted_index.h"
#include "retrieval/vote_and_verify.h"
#include "util/alignment.h"
#include "util/endian.h"
#include "util/logging.h"
#include "util/math.h"
| |
|
| | namespace colmap { |
| | namespace retrieval { |
| |
|
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
// Visual index for image retrieval. Descriptors are quantized into visual
// words using a FLANN vocabulary; an inverted index with binary (Hamming)
// embeddings scores the database images, optionally followed by spatial
// verification of the top-ranked candidates.
template <typename kDescType = uint8_t, int kDescDim = 128,
          int kEmbeddingDim = 64>
class VisualIndex {
 public:
  // Sentinel for "use all available hardware threads" (see FindWordIds).
  static const int kMaxNumThreads = -1;

  typedef InvertedIndex<kDescType, kDescDim, kEmbeddingDim> InvertedIndexType;
  // Per-image feature geometries (keypoints) associated with the descriptors.
  typedef FeatureKeypoints GeomType;
  typedef typename InvertedIndexType::DescType DescType;
  typedef typename InvertedIndexType::EntryType EntryType;

  struct IndexOptions {
    // Number of nearest-neighbor visual words each descriptor is assigned to.
    int num_neighbors = 1;

    // Number of checks in the FLANN nearest-neighbor search.
    int num_checks = 256;

    // Number of threads for the visual-word search; kMaxNumThreads uses all.
    int num_threads = kMaxNumThreads;
  };

  struct QueryOptions {
    // Maximum number of images to retrieve; -1 returns all scored images.
    int max_num_images = -1;

    // Number of nearest-neighbor visual words each query descriptor is
    // assigned to.
    int num_neighbors = 5;

    // Number of checks in the FLANN nearest-neighbor search.
    int num_checks = 256;

    // If > 0, spatially verify the retrieved images and return this many
    // top-ranked images after verification; <= 0 disables verification.
    int num_images_after_verification = 0;

    // Number of threads for the visual-word search; kMaxNumThreads uses all.
    int num_threads = kMaxNumThreads;
  };

  struct BuildOptions {
    // Desired number of visual words (cluster centers) in the vocabulary.
    int num_visual_words = 256 * 256;

    // Branching factor of the hierarchical k-means clustering.
    int branching = 256;

    // Number of k-means iterations used during clustering.
    int num_iterations = 11;

    // Target precision of the autotuned FLANN search index.
    double target_precision = 0.95;

    // Number of checks in the FLANN nearest-neighbor search.
    int num_checks = 256;

    // Number of threads for clustering and word assignment.
    int num_threads = kMaxNumThreads;
  };

  VisualIndex();
  ~VisualIndex();

  // Number of visual words in the current vocabulary.
  size_t NumVisualWords() const;

  // Add the features of an image to the index. No-op if the image is already
  // indexed. Prepare() must be called again before the next query.
  void Add(const IndexOptions& options, const int image_id,
           const GeomType& geometries, const DescType& descriptors);

  // Check whether the image with the given identifier has been indexed.
  bool ImageIndexed(const int image_id) const;

  // Query for similar images without feature geometry; spatial verification
  // must be disabled in the options for this overload.
  void Query(const QueryOptions& options, const DescType& descriptors,
             std::vector<ImageScore>* image_scores) const;

  // Query for similar images, optionally re-ranking the best candidates with
  // spatial verification (see QueryOptions::num_images_after_verification).
  void Query(const QueryOptions& options, const GeomType& geometries,
             const DescType& descriptors,
             std::vector<ImageScore>* image_scores) const;

  // Finalize the inverted index. Must be called after adding images and
  // before querying (enforced by a CHECK in the query path).
  void Prepare();

  // Build the vocabulary and all index structures from a set of training
  // descriptors.
  void Build(const BuildOptions& options, const DescType& descriptors);

  // De-/serialize the complete index from/to a single binary file.
  void Read(const std::string& path);
  void Write(const std::string& path);

 private:
  // Cluster the descriptors hierarchically into visual words.
  void Quantize(const BuildOptions& options, const DescType& descriptors);

  // Query for similar images and report the visual-word assignments of the
  // query descriptors in word_ids.
  void QueryAndFindWordIds(const QueryOptions& options,
                           const DescType& descriptors,
                           std::vector<ImageScore>* image_scores,
                           Eigen::MatrixXi* word_ids) const;

  // Find the nearest-neighbor visual words for each descriptor row.
  Eigen::MatrixXi FindWordIds(const DescType& descriptors,
                              const int num_neighbors, const int num_checks,
                              const int num_threads) const;

  // Search structure over the visual words.
  flann::AutotunedIndex<flann::L2<kDescType>> visual_word_index_;

  // The visual words (cluster centers); the underlying storage is owned by
  // this class via raw new[]/delete[] (see destructor).
  flann::Matrix<kDescType> visual_words_;

  // Inverted index mapping visual words to database image entries.
  InvertedIndexType inverted_index_;

  // Identifiers of all indexed images.
  std::unordered_set<int> image_ids_;

  // Whether Prepare() has been called since the last modification.
  bool prepared_;
};
| |
|
| | |
| | |
| | |
| |
|
// Construct an empty index; Prepare() has not been called yet.
template <typename kDescType, int kDescDim, int kEmbeddingDim>
VisualIndex<kDescType, kDescDim, kEmbeddingDim>::VisualIndex()
    : prepared_(false) {}
| |
|
| | template <typename kDescType, int kDescDim, int kEmbeddingDim> |
| | VisualIndex<kDescType, kDescDim, kEmbeddingDim>::~VisualIndex() { |
| | if (visual_words_.ptr() != nullptr) { |
| | delete[] visual_words_.ptr(); |
| | } |
| | } |
| |
|
| | template <typename kDescType, int kDescDim, int kEmbeddingDim> |
| | size_t VisualIndex<kDescType, kDescDim, kEmbeddingDim>::NumVisualWords() const { |
| | return visual_words_.rows; |
| | } |
| |
|
| | template <typename kDescType, int kDescDim, int kEmbeddingDim> |
| | void VisualIndex<kDescType, kDescDim, kEmbeddingDim>::Add( |
| | const IndexOptions& options, const int image_id, const GeomType& geometries, |
| | const DescType& descriptors) { |
| | CHECK_EQ(geometries.size(), descriptors.rows()); |
| |
|
| | |
| | if (ImageIndexed(image_id)) { |
| | return; |
| | } |
| |
|
| | image_ids_.insert(image_id); |
| |
|
| | prepared_ = false; |
| |
|
| | if (descriptors.rows() == 0) { |
| | return; |
| | } |
| |
|
| | const Eigen::MatrixXi word_ids = |
| | FindWordIds(descriptors, options.num_neighbors, options.num_checks, |
| | options.num_threads); |
| |
|
| | for (typename DescType::Index i = 0; i < descriptors.rows(); ++i) { |
| | const auto& descriptor = descriptors.row(i); |
| |
|
| | typename InvertedIndexType::GeomType geometry; |
| | geometry.x = geometries[i].x; |
| | geometry.y = geometries[i].y; |
| | geometry.scale = geometries[i].ComputeScale(); |
| | geometry.orientation = geometries[i].ComputeOrientation(); |
| |
|
| | for (int n = 0; n < options.num_neighbors; ++n) { |
| | const int word_id = word_ids(i, n); |
| | if (word_id != InvertedIndexType::kInvalidWordId) { |
| | inverted_index_.AddEntry(image_id, word_id, i, descriptor, geometry); |
| | } |
| | } |
| | } |
| | } |
| |
|
| | template <typename kDescType, int kDescDim, int kEmbeddingDim> |
| | bool VisualIndex<kDescType, kDescDim, kEmbeddingDim>::ImageIndexed( |
| | const int image_id) const { |
| | return image_ids_.count(image_id) != 0; |
| | } |
| |
|
| | template <typename kDescType, int kDescDim, int kEmbeddingDim> |
| | void VisualIndex<kDescType, kDescDim, kEmbeddingDim>::Query( |
| | const QueryOptions& options, const DescType& descriptors, |
| | std::vector<ImageScore>* image_scores) const { |
| | const GeomType geometries; |
| | Query(options, geometries, descriptors, image_scores); |
| | } |
| |
|
// Query with optional spatial verification. After the inverted-index scoring,
// candidate feature correspondences are collected per retrieved image, a
// greedy one-to-one assignment is computed with two fibonacci heaps, and the
// assignment is verified geometrically to re-rank the results.
template <typename kDescType, int kDescDim, int kEmbeddingDim>
void VisualIndex<kDescType, kDescDim, kEmbeddingDim>::Query(
    const QueryOptions& options, const GeomType& geometries,
    const DescType& descriptors, std::vector<ImageScore>* image_scores) const {
  // First-stage retrieval: score images via the inverted index and record
  // the visual-word assignments of the query descriptors.
  Eigen::MatrixXi word_ids;
  QueryAndFindWordIds(options, descriptors, image_scores, &word_ids);

  if (options.num_images_after_verification <= 0) {
    return;
  }

  CHECK_EQ(descriptors.rows(), geometries.size());

  // Restrict candidate matches to the retrieved images.
  std::unordered_set<int> image_ids;
  for (const auto& image_score : *image_scores) {
    image_ids.insert(image_score.image_id);
  }

  // A list of (weight, (query entry, database entry)) correspondences.
  typedef std::vector<
      std::pair<float, std::pair<const EntryType*, const EntryType*>>>
      OrderedMatchListType;

  // Candidate correspondences, indexed once by query feature and once by
  // database feature (both keyed by image identifier first).
  std::unordered_map<int, std::unordered_map<int, OrderedMatchListType>>
      query_to_db_matches;
  std::unordered_map<int, std::unordered_map<int, OrderedMatchListType>>
      db_to_query_matches;

  // Reused scratch buffer for the entries of one visual word.
  std::vector<const EntryType*> word_matches;

  // One entry per query descriptor. Reserved up front so the push_backs
  // below never reallocate: raw pointers into this vector are stored in the
  // match lists and must stay valid.
  std::vector<EntryType> query_entries;
  query_entries.reserve(descriptors.rows());

  // Maps Hamming distances between binary descriptors to match weights.
  const HammingDistWeightFunctor<kEmbeddingDim> hamming_dist_weight_functor;

  for (typename DescType::Index i = 0; i < descriptors.rows(); ++i) {
    const auto& descriptor = descriptors.row(i);

    EntryType query_entry;
    query_entry.feature_idx = i;
    query_entry.geometry.x = geometries[i].x;
    query_entry.geometry.y = geometries[i].y;
    query_entry.geometry.scale = geometries[i].ComputeScale();
    query_entry.geometry.orientation = geometries[i].ComputeOrientation();
    query_entries.push_back(query_entry);

    // For this query feature, keep only the best-weighted correspondence
    // per database feature per image.
    std::unordered_map<
        int, std::unordered_map<int, std::pair<float, const EntryType*>>>
        image_matches;

    for (int j = 0; j < word_ids.cols(); ++j) {
      const int word_id = word_ids(i, j);

      if (word_id != InvertedIndexType::kInvalidWordId) {
        inverted_index_.ConvertToBinaryDescriptor(word_id, descriptor,
                                                  &query_entries[i].descriptor);

        const auto idf_weight = inverted_index_.GetIDFWeight(word_id);
        const auto squared_idf_weight = idf_weight * idf_weight;

        inverted_index_.FindMatches(word_id, image_ids, &word_matches);

        for (const auto& match : word_matches) {
          const size_t hamming_dist =
              (query_entries[i].descriptor ^ match->descriptor).count();

          if (hamming_dist <= hamming_dist_weight_functor.kMaxHammingDistance) {
            // Note: despite the name, a larger "dist" is treated as a
            // better match below (kept on max, sorted descending).
            const float dist =
                hamming_dist_weight_functor(hamming_dist) * squared_idf_weight;

            auto& feature_matches = image_matches[match->image_id];
            const auto feature_match = feature_matches.find(match->feature_idx);

            if (feature_match == feature_matches.end() ||
                feature_match->first < dist) {
              feature_matches[match->feature_idx] = std::make_pair(dist, match);
            }
          }
        }
      }
    }

    // Flatten the per-image best matches into the two bipartite match maps.
    for (const auto& feature_matches : image_matches) {
      const auto image_id = feature_matches.first;

      for (const auto& feature_match : feature_matches.second) {
        const auto feature_idx = feature_match.first;
        const auto dist = feature_match.second.first;
        const auto db_match = feature_match.second.second;

        const auto entry_pair = std::make_pair(&query_entries[i], db_match);

        query_to_db_matches[image_id][i].emplace_back(dist, entry_pair);
        db_to_query_matches[image_id][feature_idx].emplace_back(dist,
                                                                entry_pair);
      }
    }
  }

  // Spatially verify each retrieved image and add the verification score.
  for (auto& image_score : *image_scores) {
    auto& query_matches = query_to_db_matches[image_score.image_id];
    auto& db_matches = db_to_query_matches[image_score.image_id];

    // Nothing to verify for this image.
    if (query_matches.empty()) {
      continue;
    }

    // Greedy one-to-one assignment between query and database features.
    // Both sides are kept in max-heaps keyed by the NEGATED number of
    // remaining candidate matches, so the top of a heap is always the
    // feature with the fewest remaining candidates; those are assigned
    // first so they are not starved by densely matched features.

    typedef boost::heap::fibonacci_heap<std::pair<int, int>> FibonacciHeapType;
    FibonacciHeapType query_heap;
    FibonacciHeapType db_heap;
    std::unordered_map<int, typename FibonacciHeapType::handle_type>
        query_heap_handles;
    std::unordered_map<int, typename FibonacciHeapType::handle_type>
        db_heap_handles;

    for (auto& match_data : query_matches) {
      // Sort each candidate list by descending weight.
      std::sort(match_data.second.begin(), match_data.second.end(),
                std::greater<std::pair<
                    float, std::pair<const EntryType*, const EntryType*>>>());

      query_heap_handles[match_data.first] = query_heap.push(std::make_pair(
          -static_cast<int>(match_data.second.size()), match_data.first));
    }

    for (auto& match_data : db_matches) {
      std::sort(match_data.second.begin(), match_data.second.end(),
                std::greater<std::pair<
                    float, std::pair<const EntryType*, const EntryType*>>>());

      db_heap_handles[match_data.first] = db_heap.push(std::make_pair(
          -static_cast<int>(match_data.second.size()), match_data.first));
    }

    // The selected one-to-one correspondences for this image.
    std::vector<FeatureGeometryMatch> matches;

    auto db_top = db_heap.top();
    auto query_top = query_heap.top();

    while (!db_heap.empty() && !query_heap.empty()) {
      // Process the side whose top feature has fewer remaining candidates
      // (values are negated counts, so a larger value means fewer).
      const bool use_query =
          (query_top.first >= db_top.first) && !query_heap.empty();

      // Alias the chosen side as *1 and the opposite side as *2.
      auto& heap1 = (use_query) ? query_heap : db_heap;
      auto& heap2 = (use_query) ? db_heap : query_heap;
      auto& handles1 = (use_query) ? query_heap_handles : db_heap_handles;
      auto& handles2 = (use_query) ? db_heap_handles : query_heap_handles;
      auto& matches1 = (use_query) ? query_matches : db_matches;
      auto& matches2 = (use_query) ? db_matches : query_matches;

      const auto idx1 = heap1.top().second;
      heap1.pop();

      // Skip features already consumed by an assignment on the other side
      // (their handle has been erased).
      if (handles1.count(idx1) > 0) {
        handles1.erase(idx1);

        bool match_found = false;

        // Walk the candidates in order of decreasing weight and take the
        // first partner that is still unassigned.
        for (auto& entry2 : matches1[idx1]) {
          const auto idx2 = (use_query) ? entry2.second.second->feature_idx
                                        : entry2.second.first->feature_idx;

          if (handles2.count(idx2) > 0) {
            if (!match_found) {
              match_found = true;
              FeatureGeometryMatch match;
              match.geometry1 = entry2.second.first->geometry;
              match.geometries2.push_back(entry2.second.second->geometry);
              matches.push_back(match);

              // The partner is now assigned as well.
              handles2.erase(idx2);

              // Every feature that also referenced idx2 lost a candidate;
              // bump its (negated) count towards zero. Counts only grow,
              // so heap.increase is the correct mutating operation.
              for (auto& entry1 : matches2[idx2]) {
                const auto other_idx1 = (use_query)
                                            ? entry1.second.first->feature_idx
                                            : entry1.second.second->feature_idx;
                if (handles1.count(other_idx1) > 0) {
                  (*handles1[other_idx1]).first += 1;
                  heap1.increase(handles1[other_idx1]);
                }
              }
            } else {
              // idx1 itself is consumed, so idx2 lost idx1 as a potential
              // partner even though no match was formed with it.
              (*handles2[idx2]).first += 1;
              heap2.increase(handles2[idx2]);
            }
          }
        }
      }

      // Refresh the cached tops for the next iteration's comparison.
      if (!query_heap.empty()) {
        query_top = query_heap.top();
      }

      if (!db_heap.empty()) {
        db_top = db_heap.top();
      }
    }

    // Geometrically verify the assignment and accumulate the score.
    VoteAndVerifyOptions vote_and_verify_options;
    image_score.score += VoteAndVerify(vote_and_verify_options, matches);
  }

  // Re-rank by the updated scores and keep the requested number of images.

  const size_t num_images = std::min<size_t>(
      image_scores->size(), options.num_images_after_verification);

  auto SortFunc = [](const ImageScore& score1, const ImageScore& score2) {
    return score1.score > score2.score;
  };

  if (num_images == image_scores->size()) {
    std::sort(image_scores->begin(), image_scores->end(), SortFunc);
  } else {
    std::partial_sort(image_scores->begin(), image_scores->begin() + num_images,
                      image_scores->end(), SortFunc);
    image_scores->resize(num_images);
  }
}
| |
|
// Finalize the inverted index (e.g. its scoring statistics) and mark the
// index as ready for querying; queries CHECK this flag.
template <typename kDescType, int kDescDim, int kEmbeddingDim>
void VisualIndex<kDescType, kDescDim, kEmbeddingDim>::Prepare() {
  inverted_index_.Finalize();
  prepared_ = true;
}
| |
|
// Build all index structures from training descriptors: cluster a
// vocabulary, build the word search index, and learn the Hamming embedding.
// Note: this resets the inverted index, so previously added images are
// discarded.
template <typename kDescType, int kDescDim, int kEmbeddingDim>
void VisualIndex<kDescType, kDescDim, kEmbeddingDim>::Build(
    const BuildOptions& options, const DescType& descriptors) {
  // Cluster the descriptors into visual words (fills visual_words_).
  Quantize(options, descriptors);

  // Build the search structure over the visual words; the autotuned index
  // picks its internal parameters for the requested precision.
  flann::AutotunedIndexParams index_params;
  index_params["target_precision"] =
      static_cast<float>(options.target_precision);
  visual_word_index_ =
      flann::AutotunedIndex<flann::L2<kDescType>>(index_params);
  visual_word_index_.buildIndex(visual_words_);

  // Start from a fresh inverted index sized to the new vocabulary.
  inverted_index_ = InvertedIndexType();
  inverted_index_.Initialize(NumVisualWords());

  inverted_index_.GenerateHammingEmbeddingProjection();

  // Assign every training descriptor to its single nearest word and learn
  // the per-word Hamming embedding from the assignments.
  const int kNumNeighbors = 1;
  const Eigen::MatrixXi word_ids = FindWordIds(
      descriptors, kNumNeighbors, options.num_checks, options.num_threads);
  inverted_index_.ComputeHammingEmbedding(descriptors, word_ids);
}
| |
|
// Deserialize the index from a single binary file written by Write(). The
// file holds three consecutive sections (visual words, FLANN index,
// inverted index); file_offset tracks the boundary between them across the
// mixed ifstream/FILE* accesses that FLANN's C API forces.
template <typename kDescType, int kDescDim, int kEmbeddingDim>
void VisualIndex<kDescType, kDescDim, kEmbeddingDim>::Read(
    const std::string& path) {
  long int file_offset = 0;

  // Read the visual words: row/column counts followed by the raw values.

  {
    // Free any previously loaded vocabulary before replacing it.
    if (visual_words_.ptr() != nullptr) {
      delete[] visual_words_.ptr();
    }

    std::ifstream file(path, std::ios::binary);
    CHECK(file.is_open()) << path;
    const uint64_t rows = ReadBinaryLittleEndian<uint64_t>(&file);
    const uint64_t cols = ReadBinaryLittleEndian<uint64_t>(&file);
    kDescType* visual_words_data = new kDescType[rows * cols];
    for (size_t i = 0; i < rows * cols; ++i) {
      visual_words_data[i] = ReadBinaryLittleEndian<kDescType>(&file);
    }
    visual_words_ = flann::Matrix<kDescType>(visual_words_data, rows, cols);
    file_offset = file.tellg();
  }

  // Read the visual-word search index. FLANN's loadIndex operates on a C
  // FILE*, so the file is reopened with stdio and positioned past the
  // visual-words section.

  visual_word_index_ =
      flann::AutotunedIndex<flann::L2<kDescType>>(visual_words_);

  {
    FILE* fin = fopen(path.c_str(), "rb");
    CHECK_NOTNULL(fin);
    fseek(fin, file_offset, SEEK_SET);
    visual_word_index_.loadIndex(fin);
    file_offset = ftell(fin);
    fclose(fin);
  }

  // Read the inverted index from the remainder of the file.

  {
    std::ifstream file(path, std::ios::binary);
    CHECK(file.is_open()) << path;
    file.seekg(file_offset, std::ios::beg);
    inverted_index_.Read(&file);
  }

  // Rebuild the set of indexed image identifiers from the inverted index.
  image_ids_.clear();
  inverted_index_.GetImageIds(&image_ids_);
}
| |
|
// Serialize the index to a single binary file: visual words, then the FLANN
// search index, then the inverted index. Read() expects exactly this layout.
template <typename kDescType, int kDescDim, int kEmbeddingDim>
void VisualIndex<kDescType, kDescDim, kEmbeddingDim>::Write(
    const std::string& path) {
  // Write the visual words: row/column counts followed by the raw values.

  {
    CHECK_NOTNULL(visual_words_.ptr());
    std::ofstream file(path, std::ios::binary);
    CHECK(file.is_open()) << path;
    WriteBinaryLittleEndian<uint64_t>(&file, visual_words_.rows);
    WriteBinaryLittleEndian<uint64_t>(&file, visual_words_.cols);
    for (size_t i = 0; i < visual_words_.rows * visual_words_.cols; ++i) {
      WriteBinaryLittleEndian<kDescType>(&file, visual_words_.ptr()[i]);
    }
  }

  // Append the visual-word search index. FLANN's saveIndex requires a C
  // FILE*, hence the reopen in binary append mode.

  {
    FILE* fout = fopen(path.c_str(), "ab");
    CHECK_NOTNULL(fout);
    visual_word_index_.saveIndex(fout);
    fclose(fout);
  }

  // Append the inverted index.

  {
    std::ofstream file(path, std::ios::binary | std::ios::app);
    CHECK(file.is_open()) << path;
    inverted_index_.Write(&file);
  }
}
| |
|
| | template <typename kDescType, int kDescDim, int kEmbeddingDim> |
| | void VisualIndex<kDescType, kDescDim, kEmbeddingDim>::Quantize( |
| | const BuildOptions& options, const DescType& descriptors) { |
| | static_assert(DescType::IsRowMajor, "Descriptors must be row-major."); |
| |
|
| | CHECK_GE(options.num_visual_words, options.branching); |
| | CHECK_GE(descriptors.rows(), options.num_visual_words); |
| |
|
| | const flann::Matrix<kDescType> descriptor_matrix( |
| | const_cast<kDescType*>(descriptors.data()), descriptors.rows(), |
| | descriptors.cols()); |
| |
|
| | std::vector<typename flann::L2<kDescType>::ResultType> centers_data( |
| | options.num_visual_words * descriptors.cols()); |
| | flann::Matrix<typename flann::L2<kDescType>::ResultType> centers( |
| | centers_data.data(), options.num_visual_words, descriptors.cols()); |
| |
|
| | flann::KMeansIndexParams index_params; |
| | index_params["branching"] = options.branching; |
| | index_params["iterations"] = options.num_iterations; |
| | index_params["centers_init"] = flann::FLANN_CENTERS_KMEANSPP; |
| | const int num_centers = flann::hierarchicalClustering<flann::L2<kDescType>>( |
| | descriptor_matrix, centers, index_params); |
| |
|
| | CHECK_LE(num_centers, options.num_visual_words); |
| |
|
| | const size_t visual_word_data_size = num_centers * descriptors.cols(); |
| | kDescType* visual_words_data = new kDescType[visual_word_data_size]; |
| | for (size_t i = 0; i < visual_word_data_size; ++i) { |
| | if (std::is_integral<kDescType>::value) { |
| | visual_words_data[i] = std::round(centers_data[i]); |
| | } else { |
| | visual_words_data[i] = centers_data[i]; |
| | } |
| | } |
| |
|
| | if (visual_words_.ptr() != nullptr) { |
| | delete[] visual_words_.ptr(); |
| | } |
| |
|
| | visual_words_ = flann::Matrix<kDescType>(visual_words_data, num_centers, |
| | descriptors.cols()); |
| | } |
| |
|
| | template <typename kDescType, int kDescDim, int kEmbeddingDim> |
| | void VisualIndex<kDescType, kDescDim, kEmbeddingDim>::QueryAndFindWordIds( |
| | const QueryOptions& options, const DescType& descriptors, |
| | std::vector<ImageScore>* image_scores, Eigen::MatrixXi* word_ids) const { |
| | CHECK(prepared_); |
| |
|
| | if (descriptors.rows() == 0) { |
| | image_scores->clear(); |
| | return; |
| | } |
| |
|
| | *word_ids = FindWordIds(descriptors, options.num_neighbors, |
| | options.num_checks, options.num_threads); |
| | inverted_index_.Query(descriptors, *word_ids, image_scores); |
| |
|
| | auto SortFunc = [](const ImageScore& score1, const ImageScore& score2) { |
| | return score1.score > score2.score; |
| | }; |
| |
|
| | size_t num_images = image_scores->size(); |
| | if (options.max_num_images >= 0) { |
| | num_images = std::min<size_t>(image_scores->size(), options.max_num_images); |
| | } |
| |
|
| | if (num_images == image_scores->size()) { |
| | std::sort(image_scores->begin(), image_scores->end(), SortFunc); |
| | } else { |
| | std::partial_sort(image_scores->begin(), image_scores->begin() + num_images, |
| | image_scores->end(), SortFunc); |
| | image_scores->resize(num_images); |
| | } |
| | } |
| |
|
// Find the num_neighbors nearest visual words for every descriptor row.
// Returns a (num_descriptors x num_neighbors) matrix of word identifiers.
template <typename kDescType, int kDescDim, int kEmbeddingDim>
Eigen::MatrixXi VisualIndex<kDescType, kDescDim, kEmbeddingDim>::FindWordIds(
    const DescType& descriptors, const int num_neighbors, const int num_checks,
    const int num_threads) const {
  // FLANN expects the query matrix in row-major layout.
  static_assert(DescType::IsRowMajor, "Descriptors must be row-major");

  CHECK_GT(descriptors.rows(), 0);
  CHECK_GT(num_neighbors, 0);

  // FLANN writes size_t indices, so the search runs on a size_t matrix that
  // is cast to int at the end. Entries FLANN leaves untouched keep
  // kInvalidWordId, which round-trips through the unsigned type and the
  // final narrowing cast (presumably relying on two's-complement
  // wraparound — confirm if kInvalidWordId is negative).
  Eigen::Matrix<size_t, Eigen::Dynamic, Eigen::Dynamic, Eigen::RowMajor>
      word_ids(descriptors.rows(), num_neighbors);
  word_ids.setConstant(InvertedIndexType::kInvalidWordId);
  flann::Matrix<size_t> indices(word_ids.data(), descriptors.rows(),
                                num_neighbors);

  // Output buffer for the neighbor distances (required by knnSearch but
  // otherwise unused here).
  Eigen::Matrix<typename flann::L2<kDescType>::ResultType, Eigen::Dynamic,
                Eigen::Dynamic, Eigen::RowMajor>
      distance_matrix(descriptors.rows(), num_neighbors);
  flann::Matrix<typename flann::L2<kDescType>::ResultType> distances(
      distance_matrix.data(), descriptors.rows(), num_neighbors);

  // Zero-copy view of the descriptor data; FLANN's interface is non-const.
  const flann::Matrix<kDescType> query(
      const_cast<kDescType*>(descriptors.data()), descriptors.rows(),
      descriptors.cols());

  flann::SearchParams search_params(num_checks);
  if (num_threads < 0) {
    // kMaxNumThreads (negative) means use all available hardware threads.
    search_params.cores = std::thread::hardware_concurrency();
  } else {
    search_params.cores = num_threads;
  }
  if (search_params.cores <= 0) {
    // hardware_concurrency() may report 0 when it cannot be determined.
    search_params.cores = 1;
  }

  visual_word_index_.knnSearch(query, indices, distances, num_neighbors,
                               search_params);

  return word_ids.cast<int>();
}
| |
|
| | } |
| | } |
| |
|
| | #endif |
| |
|