"""FAISS-backed dense retriever over document chunks.

Embeds each chunk with a Hugging Face encoder, flattens the token
embeddings into one vector per chunk, and indexes them with FAISS
for nearest-neighbour search.
"""

import os.path

import faiss
import torch
from torch import nn
from transformers import AutoTokenizer, AutoModel

class EmbeddingRetriever:
    def __init__(self, embedding_model_name, index_path, chunks):
        """Load the encoder and build (or reload) the FAISS index for `chunks`."""
        self.index_path = index_path
        self.embedding_model_name = embedding_model_name
        self.tokenizer = AutoTokenizer.from_pretrained(embedding_model_name)
        self.embedding_model = AutoModel.from_pretrained(embedding_model_name)

        self.index = self.get_idx(chunks)

    def get_idx(self, chunks):
        """Return a FAISS index over `chunks`, loading it from disk if present."""
        if os.path.exists(self.index_path):
            index = self.load_faiss_index(self.index_path)
        else:
            # Prefix each chunk with its source so provenance is part of the embedding.
            encoded_docs = self.tokenizer(
                ["source: {}, content: {}".format(chunk.metadata['source'], chunk.page_content) for chunk in chunks],
                padding='max_length',
                truncation=True,  # avoid overflowing the model's positional embeddings
                return_tensors="pt",
            )
            with torch.no_grad():  # inference only; skip gradient tracking
                word_embeddings = self.embedding_model(**encoded_docs).last_hidden_state

            index = self.build_faiss_index(word_embeddings)
            self.save_faiss_index(index, self.index_path)
        return index


    def retrieve_data(self, query, top_k):
        """Embed `query` and return the indices of the `top_k` nearest chunks."""
        query_tokens = self.tokenizer(query, padding='max_length', truncation=True, return_tensors="pt")
        with torch.no_grad():
            query_embedding = self.embedding_model(**query_tokens).last_hidden_state
        # Flatten [1, seq_len, hidden] to [1, seq_len * hidden] to match the index.
        np_query_embedding = nn.Flatten()(query_embedding).detach().numpy()

        distances, indices = self.index.search(np_query_embedding, top_k)

        return indices[0]

    def build_faiss_index(self, embeddings):
        """Builds a FAISS index for efficient similarity search."""
        # Flatten [batch, seq_len, hidden] token embeddings into one long vector
        # per chunk (a simple scheme; mean pooling is a common alternative).
        embeddings = nn.Flatten()(embeddings)
        dimension = embeddings.shape[1]
        index = faiss.IndexFlatL2(dimension)  # exact search with L2 distance
        np_emb = embeddings.detach().numpy()
        print(f"Index embeddings shape: {np_emb.shape}")
        index.add(np_emb)
        return index

    def save_faiss_index(self, index, index_file_path):
        """Saves a FAISS index to a file."""
        faiss.write_index(index, index_file_path)
        print(f"FAISS index saved to {index_file_path}")

    def load_faiss_index(self, index_file_path):
        """Loads a FAISS index from a file."""
        if os.path.exists(index_file_path):
            index = faiss.read_index(index_file_path)
            print(f"FAISS index loaded from {index_file_path}")
            return index
        else:
            return None
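

# --- Usage sketch (illustrative, not part of the original module) ----------
# A minimal example of how this retriever might be driven. The model name and
# the Chunk dataclass below are assumptions: the real pipeline presumably
# passes LangChain-style chunks exposing `page_content` and
# `metadata['source']`.
if __name__ == "__main__":
    from dataclasses import dataclass, field

    @dataclass
    class Chunk:
        """Hypothetical stand-in for a LangChain-style document chunk."""
        page_content: str
        metadata: dict = field(default_factory=dict)

    docs = [
        Chunk("FAISS performs efficient similarity search over dense vectors.",
              {"source": "faiss_notes.md"}),
        Chunk("Hugging Face Transformers provides pretrained encoder models.",
              {"source": "hf_notes.md"}),
    ]

    retriever = EmbeddingRetriever(
        embedding_model_name="sentence-transformers/all-MiniLM-L6-v2",  # assumed model
        index_path="docs.index",
        chunks=docs,
    )
    # Prints the positions of the best-matching chunks in `docs`.
    print(retriever.retrieve_data("What is FAISS used for?", top_k=1))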