# HuggingFace Space: PDF Q&A chatbot (Gradio + LangChain + Groq)
import os

import gradio as gr
from dotenv import load_dotenv
from langchain.chains import create_retrieval_chain
from langchain.chains.combine_documents import create_stuff_documents_chain
from langchain_community.document_loaders import PyPDFLoader
from langchain_community.vectorstores import FAISS
from langchain_core.prompts import ChatPromptTemplate
from langchain_groq import ChatGroq
from langchain_huggingface import HuggingFaceEmbeddings
from langchain_text_splitters import RecursiveCharacterTextSplitter

# Load environment variables (expects GROQ_API_KEY in .env or the environment).
load_dotenv()
groq_api_key = os.getenv("GROQ_API_KEY")

# Initialize the embedding model and the Groq-hosted LLM once at module load.
embeddings = HuggingFaceEmbeddings(model_name="all-MiniLM-L6-v2")
llm = ChatGroq(model_name="Llama3-8b-8192", groq_api_key=groq_api_key)
# Prompt template: a single coherent system instruction plus the retrieved
# context, followed by the user's question. (The original repeated the
# assistant instruction twice and mixed "detailed" with "concise" — merged
# here into one consistent directive.)
prompt = ChatPromptTemplate.from_messages([
    (
        "system",
        "You are a Q/A chatbot assistant for question-answering tasks. "
        "Answer the question using the context below; be concise but complete."
        "\n\n"
        "{context}",
    ),
    ("human", "{input}"),
])

# Retrieval chain shared between the upload and question handlers.
# Stays None until a PDF has been processed by handle_upload().
rag_chain = None
# Upload and process PDF
def handle_upload(pdf_file):
    """Build the RAG chain from an uploaded PDF and store it module-wide.

    Loads the PDF, splits it into overlapping chunks, embeds the chunks into
    a FAISS index, and wires retriever + LLM into the module-level
    ``rag_chain``. Returns a human-readable status string for the UI.
    """
    global rag_chain
    # Guard: the button can be clicked before any file is selected.
    if pdf_file is None:
        return "⚠️ Please choose a PDF file first."
    try:
        # Gradio may pass a tempfile-like object (with .name) or, in newer
        # versions, a plain filepath string — accept both.
        pdf_path = getattr(pdf_file, "name", pdf_file)
        docs = PyPDFLoader(pdf_path).load()
        splitter = RecursiveCharacterTextSplitter(chunk_size=1000, chunk_overlap=30)
        split_docs = splitter.split_documents(docs)
        vectorstore = FAISS.from_documents(split_docs, embeddings)
        retriever = vectorstore.as_retriever()
        qa_chain = create_stuff_documents_chain(llm, prompt)
        rag_chain = create_retrieval_chain(retriever, qa_chain)
        # NOTE(review): status emoji reconstructed from mojibake in the source.
        return "✅ PDF uploaded. Ask your question below."
    except Exception as e:
        return f"❌ Error: {e}"
# Answer user question
def ask_question(question):
    """Answer *question* against the uploaded PDF via the RAG chain.

    Returns the chain's answer string, a warning when no PDF has been
    processed yet, or an error message if the chain invocation fails.
    """
    if not rag_chain:
        # NOTE(review): emoji reconstructed from mojibake in the source.
        return "⚠️ Please upload a PDF first."
    try:
        result = rag_chain.invoke({"input": question})
        return result["answer"]
    except Exception as e:
        return f"❌ Error generating answer: {e}"
# Gradio UI — emoji in labels reconstructed from mojibake in the source.
# NOTE(review): original indentation was lost; pdf_file and upload_btn are
# placed inside the Row since they immediately follow it — confirm layout.
with gr.Blocks() as demo:
    gr.Markdown("## 🤖 Simple PDF Q&A ChatBot")
    with gr.Row():
        pdf_file = gr.File(label="📄 Upload PDF", file_types=[".pdf"])
        upload_btn = gr.Button("Upload")
    upload_status = gr.Textbox(label="Status", interactive=False)
    question = gr.Textbox(label="Ask a Question", placeholder="e.g., What is this paper about?")
    answer = gr.Textbox(label="Answer", lines=10)
    # Wire events: button builds the RAG chain; Enter in the question box answers.
    upload_btn.click(handle_upload, inputs=[pdf_file], outputs=[upload_status])
    question.submit(ask_question, inputs=question, outputs=answer)

# Run the app (guarded so importing this module does not start a server;
# Spaces executes the file as __main__, so deployment behavior is unchanged).
if __name__ == "__main__":
    demo.launch()