Spaces:
Runtime error
Runtime error
| from langchain_community.embeddings import HuggingFaceEmbeddings | |
| from langchain_community.vectorstores import FAISS | |
| from langchain.text_splitter import RecursiveCharacterTextSplitter | |
| from langchain.document_loaders import PyMuPDFLoader | |
| from langchain.chains import RetrievalQA | |
| from langchain.llms.base import LLM | |
| from groq import Groq | |
| from typing import List, Optional | |
| import os | |
| import gradio as gr | |
class GroqLLM(LLM):
    """LangChain-compatible LLM wrapper around the Groq chat-completions API.

    Attributes:
        model: Groq model identifier to query.
        api_key: Groq API key. Read from the ``GROQ_API_KEY`` environment
            variable — never hard-coded in source.
        temperature: Sampling temperature (0.0 = deterministic output).
    """

    model: str = "llama3-8b-8192"
    # SECURITY: the previous default embedded a live API key in source code.
    # That key must be rotated; the key is now supplied via the environment.
    api_key: str = os.environ.get("GROQ_API_KEY", "")
    temperature: float = 0.0

    def _call(self, prompt: str, stop: Optional[List[str]] = None) -> str:
        """Send *prompt* to Groq and return the assistant's reply text.

        Args:
            prompt: Fully rendered prompt from the chain.
            stop: Stop sequences (accepted for interface compatibility;
                not forwarded to the API by this implementation).

        Returns:
            The content of the first choice in the completion response.
        """
        client = Groq(api_key=self.api_key)
        messages = [
            {"role": "system", "content": "You are a helpful assistant."},
            {"role": "user", "content": prompt},
        ]
        response = client.chat.completions.create(
            model=self.model,
            messages=messages,
            temperature=self.temperature,
        )
        return response.choices[0].message.content

    @property
    def _llm_type(self) -> str:
        """Identifier LangChain uses for serialization and callbacks.

        Must be a property: the LLM base class declares ``_llm_type`` as an
        abstract property, and LangChain reads ``self._llm_type`` expecting a
        string, not a bound method.
        """
        return "groq-llm"
def process_pdf(pdf_path):
    """Build a RetrievalQA chain over the PDF at *pdf_path*.

    Pipeline: load pages with PyMuPDF, split into 500-character chunks
    (50 overlap), embed with all-MiniLM-L6-v2, index in FAISS, and wire a
    RetrievalQA chain backed by the Groq LLM.

    Args:
        pdf_path: Filesystem path to the PDF to index.

    Returns:
        A RetrievalQA chain configured to return source documents.
    """
    loader = PyMuPDFLoader(pdf_path)
    documents = loader.load()
    splitter = RecursiveCharacterTextSplitter(chunk_size=500, chunk_overlap=50)
    chunks = splitter.split_documents(documents)
    embedding = HuggingFaceEmbeddings(model_name="all-MiniLM-L6-v2")
    vectorstore = FAISS.from_documents(chunks, embedding)
    retriever = vectorstore.as_retriever()
    # SECURITY: the API key was previously hard-coded here; it must be
    # rotated and is now taken from the environment instead.
    llm = GroqLLM(api_key=os.environ.get("GROQ_API_KEY", ""))
    qa = RetrievalQA.from_chain_type(
        llm=llm,
        retriever=retriever,
        return_source_documents=True,
    )
    return qa
# Module-level QA chain; populated by upload_file after a PDF is ingested.
qa_chain = None


def upload_file(file):
    """Gradio callback: index the uploaded PDF and report readiness.

    Args:
        file: Gradio file object; its ``.name`` is the temp path on disk.

    Returns:
        A status string for display in the UI.
    """
    global qa_chain
    qa_chain = process_pdf(file.name)
    return "PDF processed! You can now ask questions."
def ask_question(query):
    """Gradio callback: answer *query* against the indexed PDF.

    Args:
        query: The user's question.

    Returns:
        The chain's answer text, or a prompt to upload a PDF first.
    """
    if qa_chain is None:
        return "Please upload a PDF first."
    return qa_chain({"query": query})["result"]
# ---- Gradio UI -------------------------------------------------------------
with gr.Blocks() as demo:
    gr.Markdown("# 🧠 PDF Q&A with GROQ + LangChain")

    with gr.Row():
        uploader = gr.File(label="Upload your PDF")
        status = gr.Textbox(label="Status")

    question = gr.Textbox(label="Ask a question")
    answer = gr.Textbox(label="Answer")

    # Wire events after all components exist; registration order does not
    # affect layout, only component-creation order does.
    uploader.change(fn=upload_file, inputs=uploader, outputs=status)
    question.submit(fn=ask_question, inputs=question, outputs=answer)

demo.launch(share=True)