# FreeRAG app.py — deployed to HuggingFace Spaces via GitHub Actions (commit c9622da)
"""Gradio web interface for FreeRAG - designed for HuggingFace Spaces."""
import os
import tempfile
from pathlib import Path
from typing import Optional

import gradio as gr

from src.config import Config
from src.rag.pipeline import RAGPipeline
# Global pipeline instance, created lazily by get_pipeline() on first use.
pipeline: Optional[RAGPipeline] = None
def get_pipeline() -> RAGPipeline:
    """Return the shared RAG pipeline, constructing it lazily on first call."""
    global pipeline
    if pipeline is not None:
        return pipeline
    # First access: build the pipeline from the default configuration.
    pipeline = RAGPipeline(Config.default())
    return pipeline
def process_files(files):
    """Ingest uploaded files into the vector store.

    Args:
        files: Sequence of uploads from the Gradio ``gr.File`` component;
            each item is either a tempfile-like object with a ``.name``
            attribute or a plain path string. May be None/empty.

    Returns:
        Tuple of (status message, refreshed stats text) for the UI.
    """
    if not files:
        return "Please upload at least one file.", get_stats_text()

    pipe = get_pipeline()
    total_chunks = 0
    processed_files = []
    errors = []
    for file in files:
        # Gradio may hand us a tempfile wrapper (with .name) or a raw path.
        file_path = file.name if hasattr(file, 'name') else file
        filename = Path(file_path).name
        try:
            total_chunks += pipe.ingest_file(file_path)
            processed_files.append(filename)
        except Exception as e:
            # Record the failure with its filename and keep going:
            # one bad file should not abort the rest of the batch.
            errors.append(f"{filename}: {e}")

    parts = []
    if processed_files:
        parts.append(
            f"βœ… Successfully processed {len(processed_files)} file(s)!\n"
            f"πŸ“„ Files: {', '.join(processed_files)}\n"
            f"πŸ“Š Added {total_chunks} chunks to the knowledge base."
        )
    if errors:
        parts.append("❌ Error processing file(s):\n" + "\n".join(errors))
    return "\n\n".join(parts), get_stats_text()
def answer_question(question, top_k, chat_history):
    """Answer a question against the knowledge base using RAG.

    Args:
        question: The user's question text.
        top_k: Number of source chunks to retrieve (slider value; may
            arrive as a float, hence the int() coercion below).
        chat_history: List of (question, answer) tuples from the chatbot,
            or None when the chat is empty.

    Returns:
        Tuple of (updated chat history, "" to clear the input box).
    """
    # Gradio can pass None for an empty chatbot state; normalize to a list
    # so .append below is always safe.
    chat_history = chat_history or []
    question = question.strip()
    if not question:
        return chat_history, ""

    pipe = get_pipeline()
    if pipe.vector_store.get_count() == 0:
        response = "⚠️ No documents have been uploaded yet. Please upload some documents first."
    else:
        try:
            result = pipe.query(question, top_k=int(top_k))
            response = result["answer"]
            # Append a footer listing the retrieved source filenames.
            if result["sources"]:
                sources = [s["filename"] for s in result["sources"]]
                response += f"\n\n---\nπŸ“š *Sources: {', '.join(sources)}*"
        except Exception as e:
            response = f"❌ Error: {e}"
    chat_history.append((question, response))
    return chat_history, ""
def get_stats_text() -> str:
    """Format current knowledge-base statistics for display in the UI."""
    stats = get_pipeline().get_stats()
    lines = [
        f"πŸ“Š Documents: {stats['documents_count']} chunks",
        "πŸ€– Model: Phi-3.5-mini",
        f"πŸ“ Embeddings: {stats['embedding_model']}",
    ]
    return "\n".join(lines)
def clear_knowledge_base():
    """Delete every document from the vector store and report back to the UI."""
    get_pipeline().vector_store.clear()
    # Return both the confirmation message and the refreshed stats panel.
    return "πŸ—‘οΈ Knowledge base cleared.", get_stats_text()
# Custom CSS for modern dark theme
custom_css = """
.gradio-container {
max-width: 1200px !important;
}
.chat-message {
padding: 12px;
border-radius: 8px;
margin: 8px 0;
}
footer {
display: none !important;
}
"""
# Build Gradio interface: two-column layout (uploads/stats on the left,
# chat on the right) with event handlers wired up at the Blocks level.
with gr.Blocks(
    title="FreeRAG - Local RAG System",
    theme=gr.themes.Soft(
        primary_hue="blue",
        secondary_hue="slate"
    ),
    css=custom_css
) as demo:
    gr.Markdown("""
    # πŸš€ FreeRAG
    ### Local RAG System powered by Phi-3.5-mini
    Upload your documents and ask questions! Everything runs locally with no data leaving your machine.
    """)
    with gr.Row():
        # Left column - Document Upload
        with gr.Column(scale=1):
            gr.Markdown("### πŸ“ Upload Documents")
            file_upload = gr.File(
                label="Upload files (PDF, DOCX, TXT, MD)",
                file_count="multiple",
                file_types=[".pdf", ".docx", ".txt", ".md"]
            )
            upload_btn = gr.Button("πŸ“€ Process Documents", variant="primary")
            upload_status = gr.Textbox(label="Status", lines=3, interactive=False)
            gr.Markdown("### πŸ“Š Knowledge Base Stats")
            # value is a callable so Gradio re-invokes it on each refresh tick.
            stats_display = gr.Textbox(
                label="",
                value=get_stats_text,
                lines=3,
                interactive=False,
                every=5  # Refresh every 5 seconds
            )
            clear_btn = gr.Button("πŸ—‘οΈ Clear Knowledge Base", variant="secondary")
        # Right column - Chat Interface
        with gr.Column(scale=2):
            gr.Markdown("### πŸ’¬ Ask Questions")
            chatbot = gr.Chatbot(
                label="Conversation",
                height=400,
                show_copy_button=True
            )
            with gr.Row():
                question_input = gr.Textbox(
                    label="Your Question",
                    placeholder="Ask anything about your documents...",
                    scale=4,
                    show_label=False
                )
                top_k_slider = gr.Slider(
                    minimum=1,
                    maximum=10,
                    value=3,
                    step=1,
                    label="Sources",
                    scale=1
                )
            with gr.Row():
                submit_btn = gr.Button("πŸ” Ask", variant="primary", scale=2)
                clear_chat_btn = gr.Button("🧹 Clear Chat", scale=1)

    # Event handlers
    upload_btn.click(
        fn=process_files,
        inputs=[file_upload],
        outputs=[upload_status, stats_display]
    )
    # Both the Ask button and pressing Enter in the textbox submit a question.
    submit_btn.click(
        fn=answer_question,
        inputs=[question_input, top_k_slider, chatbot],
        outputs=[chatbot, question_input]
    )
    question_input.submit(
        fn=answer_question,
        inputs=[question_input, top_k_slider, chatbot],
        outputs=[chatbot, question_input]
    )
    clear_btn.click(
        fn=clear_knowledge_base,
        outputs=[upload_status, stats_display]
    )
    # Reset the chatbot to an empty history.
    clear_chat_btn.click(
        fn=lambda: [],
        outputs=[chatbot]
    )

    gr.Markdown("""
    ---
    <center>
    <p style="color: gray;">
    Built with πŸ’™ using Phi-3.5-mini, ChromaDB, and Gradio |
    <a href="https://github.com/yourusername/FreeRAG">GitHub</a>
    </p>
    </center>
    """)
# Script entry point: launch the Gradio server (HuggingFace Spaces calls this).
if __name__ == "__main__":
    demo.launch()