Bofandra committed on
Commit
a0ee27c
·
verified ·
1 Parent(s): 4a69ed6

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +5 -4
app.py CHANGED
@@ -5,7 +5,7 @@ from langchain.vectorstores import FAISS
5
  from langchain.embeddings import HuggingFaceEmbeddings
6
  from langchain.text_splitter import RecursiveCharacterTextSplitter
7
  from langchain.document_loaders import PyPDFLoader
8
- from langchain_community.llms import HuggingFaceHub
9
  import tempfile
10
 
11
  # Initialize global variables
@@ -29,9 +29,10 @@ def process_pdf(file):
29
  retriever = vectorstore.as_retriever(search_kwargs={"k": 4})
30
 
31
  # ✅ Wrap DeepSeek model properly
32
- llm = HuggingFaceHub(
33
  repo_id="deepseek-ai/DeepSeek-R1-0528",
34
- model_kwargs={"temperature": 0.7, "max_new_tokens": 512}
 
35
  )
36
 
37
  retrieval_chain = ConversationalRetrievalChain.from_llm(
@@ -60,7 +61,7 @@ def respond(
60
  # Reformat history for LangChain
61
  chat_history = [(h["content"], h.get("response", "")) for h in history if h["role"] == "user"]
62
 
63
- result = retrieval_chain({"question": message, "chat_history": chat_history})
64
 
65
  return result["answer"]
66
 
 
5
  from langchain.embeddings import HuggingFaceEmbeddings
6
  from langchain.text_splitter import RecursiveCharacterTextSplitter
7
  from langchain.document_loaders import PyPDFLoader
8
+ from langchain_huggingface import HuggingFaceEndpoint
9
  import tempfile
10
 
11
  # Initialize global variables
 
29
  retriever = vectorstore.as_retriever(search_kwargs={"k": 4})
30
 
31
  # ✅ Wrap DeepSeek model properly
32
+ llm = HuggingFaceEndpoint(
33
  repo_id="deepseek-ai/DeepSeek-R1-0528",
34
+ temperature=0.7,
35
+ max_new_tokens=512
36
  )
37
 
38
  retrieval_chain = ConversationalRetrievalChain.from_llm(
 
61
  # Reformat history for LangChain
62
  chat_history = [(h["content"], h.get("response", "")) for h in history if h["role"] == "user"]
63
 
64
+ result = retrieval_chain.invoke({"question": message, "chat_history": chat_history})
65
 
66
  return result["answer"]
67