Spaces:
Sleeping
Sleeping
Commit
·
dad9632
1
Parent(s):
c586a3a
rag: updated deprecated imports and calls
Browse files — src/rag.py: +11 −10
src/rag.py
CHANGED
|
@@ -1,14 +1,14 @@
|
|
| 1 |
|
| 2 |
import dotenv
|
| 3 |
import os
|
| 4 |
-
from
|
| 5 |
from langchain.text_splitter import RecursiveCharacterTextSplitter
|
| 6 |
|
| 7 |
-
|
| 8 |
-
from langchain_community.
|
| 9 |
-
from
|
| 10 |
-
from
|
| 11 |
-
from
|
| 12 |
from langchain.chains import RetrievalQA
|
| 13 |
from langchain.prompts import ChatPromptTemplate, SystemMessagePromptTemplate, HumanMessagePromptTemplate
|
| 14 |
from tqdm import tqdm
|
|
@@ -85,9 +85,10 @@ class RAG():
|
|
| 85 |
temperature=0)
|
| 86 |
elif self.use_model == 'zephyr-7b-alpha':
|
| 87 |
print(f'As llm, using HF model: {self.use_model}')
|
| 88 |
-
llm =
|
| 89 |
-
repo_id="huggingfaceh4/
|
| 90 |
-
|
|
|
|
| 91 |
)
|
| 92 |
return llm
|
| 93 |
|
|
@@ -133,7 +134,7 @@ class RAG():
|
|
| 133 |
)
|
| 134 |
|
| 135 |
def ask_QAbot(self, question):
|
| 136 |
-
result = self.QAbot({"query": question})
|
| 137 |
sources = [doc.metadata.get('source', 'Unknown source') for doc in result["source_documents"]]
|
| 138 |
response = {
|
| 139 |
"question": question,
|
|
|
|
| 1 |
|
| 2 |
import dotenv
|
| 3 |
import os
|
| 4 |
+
from langchain_community.document_loaders import UnstructuredURLLoader, PyPDFLoader
|
| 5 |
from langchain.text_splitter import RecursiveCharacterTextSplitter
|
| 6 |
|
| 7 |
+
from langchain_huggingface import HuggingFaceEmbeddings
|
| 8 |
+
from langchain_community.vectorstores import Chroma
|
| 9 |
+
from langchain_openai import ChatOpenAI
|
| 10 |
+
# from langchain_community.llms import HuggingFaceHub
|
| 11 |
+
from langchain_huggingface import HuggingFaceEndpoint
|
| 12 |
from langchain.chains import RetrievalQA
|
| 13 |
from langchain.prompts import ChatPromptTemplate, SystemMessagePromptTemplate, HumanMessagePromptTemplate
|
| 14 |
from tqdm import tqdm
|
|
|
|
| 85 |
temperature=0)
|
| 86 |
elif self.use_model == 'zephyr-7b-alpha':
|
| 87 |
print(f'As llm, using HF model: {self.use_model}')
|
| 88 |
+
llm = HuggingFaceEndpoint(
|
| 89 |
+
repo_id=f"huggingfaceh4/{self.use_model}",
|
| 90 |
+
temperature=0.1,
|
| 91 |
+
max_new_tokens=512
|
| 92 |
)
|
| 93 |
return llm
|
| 94 |
|
|
|
|
| 134 |
)
|
| 135 |
|
| 136 |
def ask_QAbot(self, question):
|
| 137 |
+
result = self.QAbot.invoke({"query": question})
|
| 138 |
sources = [doc.metadata.get('source', 'Unknown source') for doc in result["source_documents"]]
|
| 139 |
response = {
|
| 140 |
"question": question,
|