# Multimodal AI Chat — Gradio app routing queries to agno agents
# (Hugging Face "Spaces / Sleeping" status banner removed from scraped copy.)
# Install required packages
# !pip install agno gradio reportlab groq pillow tavily-python lancedb python-dotenv -q
| # Import libraries | |
| import os | |
| from agno.agent import Agent | |
| from agno.models.groq import Groq | |
| from agno.tools.tavily import TavilyTools | |
| from agno.knowledge.pdf_url import PDFUrlKnowledgeBase | |
| from agno.vectordb.lancedb import LanceDb, SearchType | |
| from agno.embedder.google import GeminiEmbedder | |
| from reportlab.lib.pagesizes import letter | |
| from reportlab.pdfgen import canvas | |
| import datetime | |
| from PIL import Image | |
| import gradio as gr | |
| from dotenv import load_dotenv | |
| import base64 | |
# Load environment variables (optional in Colab; set directly if preferred)
load_dotenv()

# Read API keys from the environment; each is None if the variable is unset.
groq_api_key = os.getenv("GROQ_API_KEY")
tavily_api_key = os.getenv("TAVILY_API_KEY")
gemini_api_key = os.getenv("GEMINI_API_KEY")

# Security: never echo the raw key to stdout/notebook output — the original
# print(groq_api_key) leaked the secret. Only confirm whether it was found.
print("GROQ_API_KEY loaded:", bool(groq_api_key))
# Setup knowledge base with Colab-compatible path
def setup_knowledge_base(file_paths=None):
    """Create a PDF-URL knowledge base backed by a local LanceDB table.

    Args:
        file_paths: optional list of PDF URLs/paths to index. When empty or
            None, an empty knowledge base is returned and nothing is loaded.

    Returns:
        A PDFUrlKnowledgeBase, already loaded if any paths were given.
    """
    urls = list(file_paths) if file_paths else []
    vector_store = LanceDb(
        uri="lancedb_data",  # Colab-friendly path
        table_name="docs",
        search_type=SearchType.hybrid,
        embedder=GeminiEmbedder(api_key=gemini_api_key),
    )
    kb = PDFUrlKnowledgeBase(urls=urls, vector_db=vector_store)
    # Only embed/index when there is actually something to load.
    if urls:
        kb.load()
    return kb
# Define PDF generation tool
def generate_pdf(text):
    """Render *text* into output.pdf, wrapping long text onto multiple
    lines and pages.

    The original implementation drew only the first 100 characters on a
    single line, silently discarding the rest of the text.

    Args:
        text: the string to render.

    Returns:
        The filename of the generated PDF ("output.pdf").
    """
    import textwrap  # local import: only needed when a PDF is generated

    filename = "output.pdf"
    c = canvas.Canvas(filename, pagesize=letter)
    _, page_height = letter
    left_margin, top_margin, bottom_margin = 72, 72, 72
    line_height = 14
    y = page_height - top_margin

    # Wrap to ~90 chars per line; ensure at least one (possibly empty) line.
    for line in textwrap.wrap(text, width=90) or [""]:
        if y < bottom_margin:
            c.showPage()  # start a new page when the current one is full
            y = page_height - top_margin
        c.drawString(left_margin, y, line)
        y -= line_height
    c.save()
    return filename
| # Image analysis tool | |
| # def analyze_images(image_paths): | |
| # if not isinstance(image_paths, list): | |
| # image_paths = [image_paths] | |
| # descriptions = [] | |
| # for path in image_paths: | |
| # img = Image.open(path) | |
| # descriptions.append(f"{os.path.basename(path)}: size={img.size}px") | |
| # return "\n".join(descriptions) | |
def analyze_images(image_paths):
    """Return one description line per image (filename and pixel size).

    Args:
        image_paths: a single path or a list of image file paths.

    Returns:
        Newline-joined description strings, one per image.
    """
    if not isinstance(image_paths, list):
        image_paths = [image_paths]
    descriptions = []
    for path in image_paths:
        # NOTE: the original also read the whole file and base64-encoded it
        # into an unused variable — dead work, removed. Use a context
        # manager so PIL releases the file handle promptly.
        with Image.open(path) as img:
            descriptions.append(
                f"Image {os.path.basename(path)} analyzed (size via PIL: {img.size}px)"
            )
    return "\n".join(descriptions)
# Define Agents
# All text agents share the same Groq-hosted model; the image agent uses a
# vision-capable model id. (Model ids are provider-specific — verify they
# are still available on Groq.)

# Searches the web via the Tavily tool.
web_agent = Agent(
    model=Groq(id="gemma2-9b-it"),
    description="Web search expert",
    instructions=["Use Tavily to search the web."],
    tools=[TavilyTools()],
    markdown=True
)

# Date/time questions. NOTE(review): the Coordinator answers "Date-Time"
# queries locally with datetime.now(), so this agent is effectively unused.
date_agent = Agent(
    model=Groq(id="gemma2-9b-it"),
    description="Date-time expert",
    markdown=True
)

# Retrieval-augmented agent; its knowledge base starts empty and is rebuilt
# by the Coordinator whenever PDFs are uploaded.
rag_agent = Agent(
    model=Groq(id="gemma2-9b-it"),
    description="Knowledge retrieval expert",
    instructions=["Search the knowledge base."],
    knowledge=setup_knowledge_base(),
    markdown=True
)

# Can call generate_pdf() to write output.pdf.
pdf_agent = Agent(
    model=Groq(id="gemma2-9b-it"),
    description="PDF creator",
    tools=[generate_pdf],
    markdown=True
)

# Vision model plus the analyze_images() tool for uploaded images.
image_agent = Agent(
    model=Groq(id="llama-3.2-90b-vision-preview"),
    description="Image analyzer",
    tools=[analyze_images],
    markdown=True
)
# Coordinator class
class Coordinator:
    """Routes each user query to the matching specialist agent and keeps a
    running chat history.
    """

    def __init__(self):
        # Tool label (as shown in the UI dropdown) -> agent instance.
        self.team = {
            "Web Browsing": web_agent,
            "Date-Time": date_agent,
            "RAG": rag_agent,
            "PDF Generation": pdf_agent,
            "Image Analysis": image_agent
        }
        self.chat_history = []

    def process_query(self, query, tool, files=None):
        """Answer *query* with the agent selected by *tool*.

        Uploaded *files* take precedence: PDFs are (re)indexed into the RAG
        knowledge base and images are described; in that case the textual
        query itself is not sent to any agent (existing behavior — TODO
        confirm this is intended).

        Returns the response string; also appends the user/assistant turn
        to self.chat_history.
        """
        self.chat_history.append({"role": "user", "content": query})
        response_parts = []

        # Partition uploads once instead of scanning the list twice.
        uploads = files or []
        pdf_files = [f.name for f in uploads if f.name.lower().endswith(".pdf")]
        img_files = [f.name for f in uploads
                     if f.name.lower().endswith((".png", ".jpg", ".jpeg"))]

        if pdf_files:
            # Rebuild the knowledge base so the newly uploaded PDFs are indexed.
            rag_agent.knowledge = setup_knowledge_base(pdf_files)
            response_parts.append(f"✅ Loaded {len(pdf_files)} PDF(s).")
        if img_files:
            img_response = image_agent.run(img_files).content
            print("img_response", img_response)
            response_parts.append(img_response)

        if response_parts:
            response = "\n".join(response_parts)
        elif tool == "Date-Time":
            # Answered locally; no agent lookup or model call needed
            # (the original resolved an agent first and then discarded it).
            response = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
        else:
            selected_agent = self.team.get(tool, rag_agent)
            run_result = selected_agent.run(query)
            # Agent.run returns a RunResponse; fall back to str() defensively.
            response = run_result.content if hasattr(run_result, "content") else str(run_result)

        self.chat_history.append({"role": "assistant", "content": response})
        return response
coordinator = Coordinator()

# Gradio Interface
def chat_interface(query, tool, files, history):
    """Gradio callback: answer *query* and append the turn to *history*.

    Returns the history twice because the same value feeds both Chatbot
    outputs wired up below.
    """
    history = history or []
    answer = coordinator.process_query(query, tool, files)
    history.append((query, answer))
    return history, history
# Launch Gradio in Colab with public link
# Build the UI: a chatbot pane, a query box with a tool selector, a
# multi-file uploader (PDFs/images), and a submit button wired to
# chat_interface.
with gr.Blocks(title="Multimodal AI Chat") as demo:
    gr.Markdown("# 🤖 Multimodal AI Chat")
    chatbot = gr.Chatbot()
    with gr.Row():
        query_input = gr.Textbox(label="Query")
        tool_dropdown = gr.Dropdown(
            choices=["Web Browsing", "Date-Time", "RAG", "PDF Generation", "Image Analysis"],
            value="RAG",
            label="Select Tool"
        )
    # Accepts multiple uploads; Coordinator.process_query splits them into
    # PDFs vs. images by extension.
    file_upload = gr.File(file_count="multiple", file_types=[".pdf", ".png", ".jpg", ".jpeg"])
    submit_btn = gr.Button("Submit")
    # chatbot appears twice in outputs: chat_interface returns (history, history).
    submit_btn.click(
        chat_interface,
        [query_input, tool_dropdown, file_upload, chatbot],
        [chatbot, chatbot]
    )
if __name__ == "__main__":
    demo.launch()