Vizznu19 commited on
Commit
dd729e7
·
verified ·
1 Parent(s): fd0698d

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +18 -29
app.py CHANGED
@@ -1,22 +1,20 @@
1
- # ✅ app.py
2
-
3
- import os
4
- from typing import List, Optional
5
- import gradio as gr
6
-
7
  from langchain_community.embeddings import HuggingFaceEmbeddings
8
  from langchain_community.vectorstores import FAISS
9
  from langchain.text_splitter import RecursiveCharacterTextSplitter
10
  from langchain.document_loaders import PyMuPDFLoader
11
  from langchain.chains import RetrievalQA
12
  from langchain.llms.base import LLM
 
13
  from groq import Groq
 
 
 
 
14
 
15
 
16
- # ✅ Custom GroqLLM that reads key from Hugging Face Secret
17
  class GroqLLM(LLM):
18
  model: str = "llama3-8b-8192"
19
- api_key: str = os.getenv("GROQ_API_KEY") # ✅ secure access to private Hugging Face secret
20
  temperature: float = 0.0
21
 
22
  def _call(self, prompt: str, stop: Optional[List[str]] = None) -> str:
@@ -37,7 +35,6 @@ class GroqLLM(LLM):
37
  return "groq-llm"
38
 
39
 
40
- # ✅ PDF processing logic
41
  def process_pdf(pdf_path):
42
  loader = PyMuPDFLoader(pdf_path)
43
  documents = loader.load()
@@ -49,7 +46,7 @@ def process_pdf(pdf_path):
49
  vectorstore = FAISS.from_documents(chunks, embedding)
50
 
51
  retriever = vectorstore.as_retriever()
52
- llm = GroqLLM()
53
 
54
  qa = RetrievalQA.from_chain_type(
55
  llm=llm,
@@ -60,35 +57,25 @@ def process_pdf(pdf_path):
60
  return qa
61
 
62
 
63
- # ✅ Global QA chain
64
- qa_chain = None
65
 
 
66
 
67
  def upload_file(file):
68
  global qa_chain
69
- try:
70
- qa_chain = process_pdf(file.name)
71
- return "✅ PDF processed! Ask your questions below."
72
- except Exception as e:
73
- return f"❌ Error processing PDF: {str(e)}"
74
-
75
 
76
  def ask_question(query):
77
  if qa_chain is None:
78
- return "⚠️ Please upload a PDF first."
79
- try:
80
- result = qa_chain({"query": query})
81
- return result["result"]
82
- except Exception as e:
83
- return f"❌ Error during response: {str(e)}"
84
 
85
 
86
- # ✅ Gradio App
87
  with gr.Blocks() as demo:
88
- gr.Markdown("# 📚 PDF Q&A Chatbot with Groq LLaMA3 (via Hugging Face Spaces)")
89
-
90
  with gr.Row():
91
- uploader = gr.File(label="📤 Upload PDF")
92
  status = gr.Textbox(label="Status")
93
 
94
  uploader.change(fn=upload_file, inputs=uploader, outputs=status)
@@ -98,4 +85,6 @@ with gr.Blocks() as demo:
98
 
99
  question.submit(fn=ask_question, inputs=question, outputs=answer)
100
 
101
- demo.launch()
 
 
 
 
 
 
 
 
 
1
  from langchain_community.embeddings import HuggingFaceEmbeddings
2
  from langchain_community.vectorstores import FAISS
3
  from langchain.text_splitter import RecursiveCharacterTextSplitter
4
  from langchain.document_loaders import PyMuPDFLoader
5
  from langchain.chains import RetrievalQA
6
  from langchain.llms.base import LLM
7
+
8
  from groq import Groq
9
+ from typing import List, Optional
10
+ import os
11
+ import gradio as gr
12
+
13
 
14
 
 
15
  class GroqLLM(LLM):
16
  model: str = "llama3-8b-8192"
17
+ api_key: str = "gsk_[REDACTED — leaked Groq API key; revoke and use os.getenv("GROQ_API_KEY")]"
18
  temperature: float = 0.0
19
 
20
  def _call(self, prompt: str, stop: Optional[List[str]] = None) -> str:
 
35
  return "groq-llm"
36
 
37
 
 
38
  def process_pdf(pdf_path):
39
  loader = PyMuPDFLoader(pdf_path)
40
  documents = loader.load()
 
46
  vectorstore = FAISS.from_documents(chunks, embedding)
47
 
48
  retriever = vectorstore.as_retriever()
49
+ llm = GroqLLM(api_key="gsk_[REDACTED — leaked Groq API key; revoke and use os.getenv("GROQ_API_KEY")]")
50
 
51
  qa = RetrievalQA.from_chain_type(
52
  llm=llm,
 
57
  return qa
58
 
59
 
 
 
60
 
61
+ qa_chain = None
62
 
63
  def upload_file(file):
64
  global qa_chain
65
+ qa_chain = process_pdf(file.name)
66
+ return "PDF processed! You can now ask questions."
 
 
 
 
67
 
68
  def ask_question(query):
69
  if qa_chain is None:
70
+ return "Please upload a PDF first."
71
+ result = qa_chain({"query": query})
72
+ return result["result"]
 
 
 
73
 
74
 
 
75
  with gr.Blocks() as demo:
76
+ gr.Markdown("# 🧠 PDF Q&A with GROQ + LangChain")
 
77
  with gr.Row():
78
+ uploader = gr.File(label="Upload your PDF")
79
  status = gr.Textbox(label="Status")
80
 
81
  uploader.change(fn=upload_file, inputs=uploader, outputs=status)
 
85
 
86
  question.submit(fn=ask_question, inputs=question, outputs=answer)
87
 
88
+ demo.launch(share=True)
89
+
90
+