Spaces: Runtime error
Update app.py
app.py CHANGED
@@ -7,48 +7,57 @@ from langchain_community.embeddings import HuggingFaceEmbeddings
 from langchain_community.vectorstores import FAISS
 import google.generativeai as genai
 
-#
+# Path to save vector index
 INDEX_DIR = "rag_multi_pdf_index"
 
-#
+# Step 1: Create knowledge base from PDFs
 def create_knowledge_base(pdf_files: List[gr.File]) -> str:
+    if not pdf_files:
+        return "❌ No PDFs uploaded."
+
     all_chunks = []
     splitter = RecursiveCharacterTextSplitter(chunk_size=500, chunk_overlap=100)
 
     for file in pdf_files:
         loader = PyPDFLoader(file.name)
-
-
-
+        try:
+            docs = loader.load()
+            chunks = splitter.split_documents(docs)
+            all_chunks.extend(chunks)
+        except Exception as e:
+            return f"❌ Error reading {file.name}: {str(e)}"
+
+    if not all_chunks:
+        return "❌ No content extracted from PDFs."
 
     embeddings = HuggingFaceEmbeddings(model_name="all-MiniLM-L6-v2")
     vectorstore = FAISS.from_documents(all_chunks, embeddings)
     vectorstore.save_local(INDEX_DIR)
 
-    return f"✅ Knowledge base created from {len(pdf_files)} PDFs."
+    return f"✅ Knowledge base created with {len(all_chunks)} chunks from {len(pdf_files)} PDFs."
 
-#
+# Step 2: Load vectorstore
 def load_vectorstore() -> FAISS:
     embeddings = HuggingFaceEmbeddings(model_name="all-MiniLM-L6-v2")
     return FAISS.load_local(INDEX_DIR, embeddings)
 
-# Ask
+# Step 3: Ask question via Gemini using retrieved context
 def chat_with_rag(api_key: str, question: str) -> str:
     if not api_key or not api_key.startswith("AI"):
-        return "❌ Invalid Gemini API Key."
+        return "❌ Invalid Gemini API Key. It should start with 'AI'."
 
     try:
         genai.configure(api_key=api_key)
         model = genai.GenerativeModel("gemini-pro")
     except Exception as e:
-        return f"❌
+        return f"❌ Gemini configuration error: {str(e)}"
 
     try:
         vs = load_vectorstore()
         top_docs = vs.similarity_search(question, k=3)
         context = "\n\n".join([doc.page_content for doc in top_docs])
     except Exception as e:
-        return f"❌ Error retrieving context: {str(e)}"
+        return f"❌ Error loading vectorstore or retrieving context: {str(e)}"
 
     prompt = f"""Use the following context to answer the question:\n\n{context}\n\nQuestion: {question}"""
 
@@ -56,26 +65,26 @@ def chat_with_rag(api_key: str, question: str) -> str:
         response = model.generate_content(prompt)
         return response.text
     except Exception as e:
-        return f"❌ Gemini
+        return f"❌ Gemini error: {str(e)}"
+
+# Step 4: Gradio UI
+with gr.Blocks(title="RAG Q&A with Gemini") as demo:
+    gr.Markdown("## Upload multiple PDFs → Build Knowledge Base → Ask Questions with Gemini")
 
-
-with gr.Blocks(title="RAG Chat with Gemini (Multi-PDF)") as demo:
-    gr.Markdown("## Upload Multiple PDFs & Chat Using Gemini")
+    api_key = gr.Textbox(label="Gemini API Key", placeholder="Enter your Gemini API Key", type="password")
 
-
-
-
-    pdfs = gr.File(label="Upload PDFs", file_types=[".pdf"], file_count="multiple")
-    create_btn = gr.Button("Create Knowledge Base")
-    kb_status = gr.Textbox(label="Knowledge Base Status", interactive=False)
+    pdfs = gr.File(label="Upload PDFs", file_types=[".pdf"], file_count="multiple")
+    create_btn = gr.Button("Create Knowledge Base")
+    kb_status = gr.Textbox(label="Knowledge Base Status", interactive=False)
 
     create_btn.click(fn=create_knowledge_base, inputs=[pdfs], outputs=[kb_status])
 
-    question = gr.Textbox(label="
-    answer = gr.Textbox(label="Gemini Answer", lines=10)
+    question = gr.Textbox(label="Ask a Question")
+    answer = gr.Textbox(label="Gemini Answer", lines=10, interactive=False)
     ask_btn = gr.Button("Ask")
 
     ask_btn.click(fn=chat_with_rag, inputs=[api_key, question], outputs=[answer])
 
+# Step 5: Launch app
 if __name__ == "__main__":
     demo.launch()
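Since the Space reports a runtime error, two version-related issues commonly behind this kind of failure are worth noting alongside the diff. The following is a minimal, hedged sketch (not part of this commit) of how the vectorstore loading and model setup might look against newer library releases: recent langchain-community versions require an explicit opt-in flag when deserializing a saved FAISS index, and the older "gemini-pro" model name may no longer be served by the Gemini API, so the replacement model name below is an assumption, not something the commit specifies.

import os
import google.generativeai as genai
from langchain_community.embeddings import HuggingFaceEmbeddings
from langchain_community.vectorstores import FAISS

INDEX_DIR = "rag_multi_pdf_index"
# Assumed fallback model name; the commit itself uses "gemini-pro".
GEMINI_MODEL = os.getenv("GEMINI_MODEL", "gemini-1.5-flash")

def load_vectorstore() -> FAISS:
    embeddings = HuggingFaceEmbeddings(model_name="all-MiniLM-L6-v2")
    # Newer langchain-community refuses to deserialize a pickled index unless
    # the caller opts in explicitly; without this, the query step fails.
    return FAISS.load_local(
        INDEX_DIR,
        embeddings,
        allow_dangerous_deserialization=True,
    )

def get_model(api_key: str) -> genai.GenerativeModel:
    genai.configure(api_key=api_key)
    return genai.GenerativeModel(GEMINI_MODEL)

Separately, every package the file imports (gradio, langchain, langchain-community, pypdf, faiss-cpu, sentence-transformers, google-generativeai) would typically need to be listed in the Space's requirements.txt; a missing entry there is another common cause of the runtime-error badge.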