OnurKerimoglu committed on
Commit
dad9632
·
1 Parent(s): c586a3a

rag: updated deprecated imports and calls

Browse files
Files changed (1) hide show
  1. src/rag.py +11 -10
src/rag.py CHANGED
@@ -1,14 +1,14 @@
1
 
2
  import dotenv
3
  import os
4
- from langchain.document_loaders import UnstructuredURLLoader, PyPDFLoader
5
  from langchain.text_splitter import RecursiveCharacterTextSplitter
6
 
7
- # from langchain.embeddings import OpenAIEmbeddings
8
- from langchain_community.embeddings import HuggingFaceEmbeddings
9
- from langchain.vectorstores import Chroma
10
- from langchain.chat_models import ChatOpenAI
11
- from langchain.llms import HuggingFaceHub
12
  from langchain.chains import RetrievalQA
13
  from langchain.prompts import ChatPromptTemplate, SystemMessagePromptTemplate, HumanMessagePromptTemplate
14
  from tqdm import tqdm
@@ -85,9 +85,10 @@ class RAG():
85
  temperature=0)
86
  elif self.use_model == 'zephyr-7b-alpha':
87
  print(f'As llm, using HF model: {self.use_model}')
88
- llm = HuggingFaceHub(
89
- repo_id="huggingfaceh4/zephyr-7b-alpha",
90
- model_kwargs={"temperature": 0.5, "max_length": 64,"max_new_tokens":512}
 
91
  )
92
  return llm
93
 
@@ -133,7 +134,7 @@ class RAG():
133
  )
134
 
135
  def ask_QAbot(self, question):
136
- result = self.QAbot({"query": question})
137
  sources = [doc.metadata.get('source', 'Unknown source') for doc in result["source_documents"]]
138
  response = {
139
  "question": question,
 
1
 
2
  import dotenv
3
  import os
4
+ from langchain_community.document_loaders import UnstructuredURLLoader, PyPDFLoader
5
  from langchain.text_splitter import RecursiveCharacterTextSplitter
6
 
7
+ from langchain_huggingface import HuggingFaceEmbeddings
8
+ from langchain_community.vectorstores import Chroma
9
+ from langchain_openai import ChatOpenAI
10
+ # from langchain_community.llms import HuggingFaceHub
11
+ from langchain_huggingface import HuggingFaceEndpoint
12
  from langchain.chains import RetrievalQA
13
  from langchain.prompts import ChatPromptTemplate, SystemMessagePromptTemplate, HumanMessagePromptTemplate
14
  from tqdm import tqdm
 
85
  temperature=0)
86
  elif self.use_model == 'zephyr-7b-alpha':
87
  print(f'As llm, using HF model: {self.use_model}')
88
+ llm = HuggingFaceEndpoint(
89
+ repo_id=f"huggingfaceh4/{self.use_model}",
90
+ temperature=0.1,
91
+ max_new_tokens=512
92
  )
93
  return llm
94
 
 
134
  )
135
 
136
  def ask_QAbot(self, question):
137
+ result = self.QAbot.invoke({"query": question})
138
  sources = [doc.metadata.get('source', 'Unknown source') for doc in result["source_documents"]]
139
  response = {
140
  "question": question,