Update llm.py
Browse files
llm.py
CHANGED
|
@@ -5,7 +5,7 @@ from langchain.vectorstores import Chroma
|
|
| 5 |
from langchain_core.prompts import ChatPromptTemplate
|
| 6 |
from langchain.chains.combine_documents import create_stuff_documents_chain
|
| 7 |
from langchain.chains import create_retrieval_chain
|
| 8 |
-
from
|
| 9 |
import pandas as pd
|
| 10 |
import os
|
| 11 |
from langchain.schema import Document
|
|
@@ -27,9 +27,9 @@ def load_and_process_pdf():
|
|
| 27 |
return splits
|
| 28 |
|
| 29 |
def create_vectorstore(splits = load_and_process_pdf()):
|
| 30 |
-
model_name = "nomic-ai/nomic-embed-text-v1"
|
| 31 |
-
embeddings =
|
| 32 |
-
|
| 33 |
vectorstore = Chroma(
|
| 34 |
collection_name="example_collection",
|
| 35 |
embedding_function=embeddings,
|
|
|
|
| 5 |
from langchain_core.prompts import ChatPromptTemplate
|
| 6 |
from langchain.chains.combine_documents import create_stuff_documents_chain
|
| 7 |
from langchain.chains import create_retrieval_chain
|
| 8 |
+
from langchain_huggingface import HuggingFaceEmbeddings
|
| 9 |
import pandas as pd
|
| 10 |
import os
|
| 11 |
from langchain.schema import Document
|
|
|
|
| 27 |
return splits
|
| 28 |
|
| 29 |
def create_vectorstore(splits = load_and_process_pdf()):
|
| 30 |
+
# model_name = "nomic-ai/nomic-embed-text-v1"
|
| 31 |
+
embeddings = HuggingFaceEmbeddings()
|
| 32 |
+
# model_name=model_name)
|
| 33 |
vectorstore = Chroma(
|
| 34 |
collection_name="example_collection",
|
| 35 |
embedding_function=embeddings,
|