# DeepSearch Research Agent -- Gradio app entry point (Hugging Face Space).
| import os | |
| import gradio as gr | |
| import google.generativeai as genai | |
| from tavily import TavilyClient | |
| from sentence_transformers import SentenceTransformer, CrossEncoder | |
| import markdown | |
| from weasyprint import HTML, CSS as WeasyCSS | |
| from datetime import datetime | |
| import tempfile | |
| import re | |
| from research_agent.config import AgentConfig | |
| from research_agent.agent import get_clarifying_questions, research_and_plan, write_report_stream | |
# Read required API keys from the environment and fail fast at import
# time if either is missing, rather than erroring mid-conversation.
google_key = os.getenv("GOOGLE_API_KEY")
tavily_key = os.getenv("TAVILY_API_KEY")
if not google_key or not tavily_key:
    raise ValueError("API keys not found.")
# Enhanced CSS for a professional research interface.
# Injected via gr.Blocks(css=CSS); a dark theme driven by the :root variables.
CSS = """
@import url('https://fonts.googleapis.com/css2?family=Inter:wght@300;400;500;600;700&display=swap');
:root {
--primary-color: #2563eb;
--primary-hover: #1d4ed8;
--bg-primary: #0f172a;
--bg-secondary: #1e293b;
--bg-tertiary: #334155;
--text-primary: #f1f5f9;
--text-secondary: #cbd5e1;
--text-muted: #94a3b8;
--border-color: #334155;
--success-color: #10b981;
--warning-color: #f59e0b;
--error-color: #ef4444;
}
body, .gradio-container {
font-family: 'Inter', -apple-system, BlinkMacSystemFont, sans-serif !important;
background-color: var(--bg-primary) !important;
color: var(--text-primary) !important;
}
.gradio-container {
max-width: 1200px !important;
margin: 0 auto !important;
padding: 2rem !important;
}
/* Header Styling */
.header-container {
text-align: center;
margin-bottom: 3rem;
padding: 2rem;
background: linear-gradient(135deg, var(--bg-secondary) 0%, var(--bg-tertiary) 100%);
border-radius: 16px;
border: 1px solid var(--border-color);
}
h1 {
font-size: 3rem;
font-weight: 700;
background: linear-gradient(135deg, #60a5fa 0%, #a78bfa 100%);
-webkit-background-clip: text;
-webkit-text-fill-color: transparent;
margin-bottom: 0.5rem;
}
.subtitle {
color: var(--text-secondary);
font-size: 1.25rem;
font-weight: 400;
}
/* Status Bar */
.status-bar {
background: var(--bg-secondary);
border: 1px solid var(--border-color);
border-radius: 12px;
padding: 1rem 1.5rem;
margin-bottom: 2rem;
display: flex;
align-items: center;
justify-content: space-between;
}
.status-indicator {
display: flex;
align-items: center;
gap: 0.5rem;
}
.status-dot {
width: 8px;
height: 8px;
border-radius: 50%;
background: var(--success-color);
animation: pulse 2s infinite;
}
@keyframes pulse {
0% { opacity: 1; }
50% { opacity: 0.5; }
100% { opacity: 1; }
}
/* Chat Interface */
#chatbot {
background: var(--bg-secondary) !important;
border: 1px solid var(--border-color) !important;
border-radius: 16px !important;
overflow: hidden !important;
}
#chatbot .message {
border: none !important;
padding: 1.5rem !important;
}
#chatbot .user {
background: var(--bg-tertiary) !important;
border-left: 4px solid var(--primary-color) !important;
}
#chatbot .bot {
background: var(--bg-secondary) !important;
}
/* Progress Indicators */
.progress-container {
background: var(--bg-tertiary);
border-radius: 8px;
padding: 1rem;
margin: 1rem 0;
}
.progress-bar {
height: 4px;
background: var(--border-color);
border-radius: 2px;
overflow: hidden;
margin-top: 0.5rem;
}
.progress-fill {
height: 100%;
background: linear-gradient(90deg, var(--primary-color) 0%, #60a5fa 100%);
transition: width 0.3s ease;
animation: shimmer 2s infinite;
}
@keyframes shimmer {
0% { opacity: 0.8; }
50% { opacity: 1; }
100% { opacity: 0.8; }
}
/* Input Area */
.input-container {
background: var(--bg-secondary);
border: 1px solid var(--border-color);
border-radius: 12px;
padding: 1.5rem;
margin-top: 2rem;
}
#chat-input textarea {
background: var(--bg-tertiary) !important;
color: var(--text-primary) !important;
border: 1px solid var(--border-color) !important;
border-radius: 8px !important;
padding: 1rem !important;
font-size: 1rem !important;
transition: all 0.2s ease !important;
}
#chat-input textarea:focus {
border-color: var(--primary-color) !important;
box-shadow: 0 0 0 3px rgba(37, 99, 235, 0.1) !important;
}
/* Buttons */
.gr-button {
background: var(--primary-color) !important;
color: white !important;
border: none !important;
border-radius: 8px !important;
padding: 0.75rem 1.5rem !important;
font-weight: 600 !important;
transition: all 0.2s ease !important;
cursor: pointer !important;
}
.gr-button:hover {
background: var(--primary-hover) !important;
transform: translateY(-1px);
box-shadow: 0 4px 12px rgba(37, 99, 235, 0.3) !important;
}
.gr-button.secondary {
background: var(--bg-tertiary) !important;
color: var(--text-primary) !important;
}
.gr-button.secondary:hover {
background: #475569 !important;
}
/* Report Display */
.report-section {
background: var(--bg-secondary);
border: 1px solid var(--border-color);
border-radius: 12px;
padding: 2rem;
margin: 1rem 0;
}
.report-section h2 {
color: var(--text-primary);
font-size: 1.75rem;
font-weight: 600;
margin-bottom: 1rem;
padding-bottom: 0.75rem;
border-bottom: 2px solid var(--border-color);
}
.report-section h3 {
color: var(--text-secondary);
font-size: 1.25rem;
font-weight: 500;
margin: 1.5rem 0 0.75rem 0;
}
.source-list {
background: var(--bg-tertiary);
border-radius: 8px;
padding: 1rem;
margin-top: 1rem;
}
.source-item {
display: flex;
align-items: center;
gap: 0.5rem;
padding: 0.5rem 0;
color: var(--text-secondary);
text-decoration: none;
transition: color 0.2s ease;
}
.source-item:hover {
color: var(--primary-color);
}
/* Loading States */
.thinking-indicator {
display: flex;
align-items: center;
gap: 0.75rem;
color: var(--text-secondary);
font-style: italic;
}
.thinking-dots {
display: flex;
gap: 0.25rem;
}
.thinking-dots span {
width: 6px;
height: 6px;
background: var(--text-muted);
border-radius: 50%;
animation: bounce 1.4s infinite ease-in-out both;
}
.thinking-dots span:nth-child(1) { animation-delay: -0.32s; }
.thinking-dots span:nth-child(2) { animation-delay: -0.16s; }
@keyframes bounce {
0%, 80%, 100% { transform: scale(0); }
40% { transform: scale(1); }
}
/* Export Options */
.export-container {
background: var(--bg-secondary);
border: 1px solid var(--border-color);
border-radius: 12px;
padding: 1.5rem;
margin-top: 2rem;
}
.export-buttons {
display: flex;
gap: 1rem;
margin-top: 1rem;
}
/* Responsive Design */
@media (max-width: 768px) {
.gradio-container {
padding: 1rem !important;
}
h1 {
font-size: 2rem;
}
.export-buttons {
flex-direction: column;
}
}
"""
# Initialize models
config = AgentConfig()
# Module-level singletons, populated by initialize_models() at startup.
writer_model, planner_model, embedding_model, reranker, tavily_client = None, None, None, None, None
# Crude re-entrancy lock consulted by chat_step_wrapper() to drop duplicate submits.
IS_PROCESSING = False
def initialize_models():
    """Initializes all the models and clients using keys from environment variables.

    Populates the module-level writer_model, planner_model, embedding_model,
    reranker and tavily_client globals, and resets the IS_PROCESSING lock.
    Raises gr.Error (after logging) if any client fails to initialize.
    """
    global writer_model, planner_model, embedding_model, reranker, tavily_client, IS_PROCESSING
    try:
        genai.configure(api_key=google_key)
        tavily_client = TavilyClient(api_key=tavily_key)
        writer_model = genai.GenerativeModel(config.WRITER_MODEL)
        # NOTE(review): the planner reuses WRITER_MODEL -- if AgentConfig defines a
        # separate planner model name this looks unintentional; confirm against config.
        planner_model = genai.GenerativeModel(config.WRITER_MODEL)
        # CPU inference keeps the app runnable on a machine without a GPU.
        embedding_model = SentenceTransformer('all-MiniLM-L6-v2', device='cpu')
        reranker = CrossEncoder('cross-encoder/ms-marco-MiniLM-L-6-v2', device='cpu')
    except Exception as e:
        print(f"FATAL: Failed to initialize models. Error: {str(e)}")
        raise gr.Error(f"Failed to initialize models. Please check the logs. Error: {str(e)}")
    IS_PROCESSING = False
    print("Models and clients initialized successfully.")
# Initialize models on startup (import time), so the UI only comes up
# once every client is ready.
initialize_models()
| # Helper functions for better UI | |
def format_progress_message(message):
    """Decorate a progress string with a status icon chosen by keyword.

    Keywords are tested in priority order; a message matching none of
    them is returned unchanged.
    """
    # Ordered (keywords, template) rules -- first match wins, mirroring
    # the priority of the original if/elif chain.
    rules = (
        (("Step",), "π **{0}**"),
        (("Searching",), "π {0}"),
        (("Found",), "β {0}"),
        (("Processing",), "βοΈ {0}"),
        (("Writing", "Synthesizing"), "βοΈ {0}"),
        (("Fact-checking",), "π {0}"),
    )
    for keywords, template in rules:
        if any(word in message for word in keywords):
            return template.format(message)
    return message
def export_to_pdf(report_content, filename="research_report.pdf"):
    """Render a markdown report to a PDF file on disk.

    Args:
        report_content: Markdown text of the full report.
        filename: Currently unused; kept for backward compatibility /
            a future caller-chosen download name.

    Returns:
        Path to the generated PDF file, or None if rendering failed.
    """
    try:
        # Convert markdown to HTML ('extra' enables tables/fenced code,
        # 'codehilite' wraps code blocks for the print stylesheet below).
        html_content = markdown.markdown(report_content, extensions=['extra', 'codehilite'])
        # Print stylesheet applied to the PDF output.
        pdf_css = """
@page { size: A4; margin: 2cm; }
body { font-family: Arial, sans-serif; line-height: 1.6; color: #333; }
h1 { color: #2563eb; border-bottom: 2px solid #2563eb; padding-bottom: 10px; }
h2 { color: #1e40af; margin-top: 30px; }
h3 { color: #3730a3; }
pre { background: #f3f4f6; padding: 10px; border-radius: 4px; }
code { background: #e5e7eb; padding: 2px 4px; border-radius: 2px; }
blockquote { border-left: 4px solid #2563eb; padding-left: 16px; color: #6b7280; }
"""
        # Reserve a target path, then close our handle BEFORE WeasyPrint
        # writes to the same path -- writing while the handle is still open
        # fails on Windows and is fragile elsewhere.
        tmp_file = tempfile.NamedTemporaryFile(suffix='.pdf', delete=False)
        tmp_file.close()
        HTML(string=f"<html><body>{html_content}</body></html>").write_pdf(
            tmp_file.name,
            stylesheets=[WeasyCSS(string=pdf_css)]
        )
        # delete=False: the file must outlive this call so Gradio can serve it.
        return tmp_file.name
    except Exception as e:
        # Best-effort export: a PDF failure must not crash the chat flow.
        print(f"Error creating PDF: {e}")
        return None
def chat_step_wrapper(user_input, history, current_agent_state, topic_state, progress_state):
    """Generator wrapper around chat_step that serializes requests.

    Uses the module-level IS_PROCESSING flag as a crude re-entrancy lock so a
    double submit cannot start two research sessions at once, and converts any
    exception from chat_step into a chat error message plus a UI reset tuple.
    Yields the same 7-tuples chat_step yields.
    """
    global IS_PROCESSING
    if IS_PROCESSING:
        # Drop duplicate submissions while a session is in flight; yielding
        # nothing leaves the UI untouched. (The previous `if False: yield`
        # was dead code -- the yields below already make this a generator.)
        print("Ignoring duplicate request while processing.")
        return
    IS_PROCESSING = True
    # Guard against a None history so the except branch can append safely.
    history = history or []
    try:
        for update in chat_step(user_input, history, current_agent_state, topic_state, progress_state):
            yield update
    except Exception as e:
        error_message = f"β **Error**: {str(e)}"
        history.append((None, error_message))
        # Reset the conversation to its initial state after a failure.
        yield history, "INITIAL", "", {}, gr.update(interactive=True, placeholder="Let's try again. What would you like to research?"), None, gr.update(visible=False)
    finally:
        # Always release the lock, even on error or generator close.
        IS_PROCESSING = False
        print("Processing finished. Lock released.")
def chat_step(user_input, history, current_agent_state, topic_state, progress_state):
    """Drive one turn of the research conversation as a generator.

    Each yield is a 7-tuple matching the Gradio outputs list:
    (chatbot history, agent state, topic state, progress state,
     chat-input update, PDF file path or None, export-group visibility).

    State machine: "INITIAL" (generate clarifying questions) ->
    "CLARIFYING" (plan, research, stream the report) -> back to "INITIAL".
    """
    history = history or []
    # Append the user's turn with a placeholder bot reply to fill in later.
    history.append((user_input, None))
    if current_agent_state == "INITIAL":
        # Lock the input box while the planner analyses the topic.
        yield history, "CLARIFYING", user_input, progress_state, gr.update(interactive=False, placeholder="Analyzing your topic..."), None, gr.update(visible=False)
        # Show thinking animation
        thinking_msg = """<div class="thinking-indicator">
<span>Analyzing your research topic</span>
<div class="thinking-dots">
<span></span><span></span><span></span>
</div>
</div>"""
        history[-1] = (user_input, thinking_msg)
        yield history, "CLARIFYING", user_input, progress_state, gr.update(interactive=False), None, gr.update(visible=False)
        questions = get_clarifying_questions(planner_model, user_input)
        formatted_questions = f"""
### π― Let's refine your research
To create the most comprehensive report on **{user_input}**, I'd like to understand your specific interests:
{questions}
Please provide your answers below to help me tailor the research to your needs.
"""
        history[-1] = (user_input, formatted_questions)
        # Re-enable the input so the user can answer; topic is kept in state.
        yield history, "CLARIFYING", user_input, progress_state, gr.update(interactive=True, placeholder="Type your answers here..."), None, gr.update(visible=False)
    elif current_agent_state == "CLARIFYING":
        # Show initial processing message
        history[-1] = (user_input, "π **Perfect! I have all the information I need.**\n\nStarting deep research process...")
        yield history, "GENERATING", topic_state, {"current_step": 1, "total_steps": 5}, gr.update(interactive=False, placeholder="Generating report..."), None, gr.update(visible=False)
        try:
            # Research and planning phase
            plan = research_and_plan(config, planner_model, tavily_client, topic_state, user_input)
            # Show research plan - FIXED: Safe access to section titles
            sections_preview = "\n".join([f" {i+1}. {s.get('title', f'Section {i+1}')}" for i, s in enumerate(plan['sections'])])
            planning_update = f"""
### π Research Plan Created
**Topic**: {plan['detailed_topic']}
**Report Structure**:
{sections_preview}
Now conducting deep research and writing each section...
"""
            history[-1] = (user_input, planning_update)
            yield history, "GENERATING", topic_state, {"current_step": 2, "total_steps": 5}, gr.update(interactive=False), None, gr.update(visible=False)
            # Stream report generation
            report_generator = write_report_stream(config, writer_model, tavily_client, embedding_model, reranker, plan)
            full_report = ""
            for update in report_generator:
                # Format the update for better display
                if update.startswith("#"):
                    # A leading markdown heading marks a full-report snapshot.
                    full_report = update
                    # Add progress indicators to the report display
                    display_report = full_report
                else:
                    # Show progress updates
                    progress_msg = format_progress_message(update)
                    display_report = f"{planning_update}\n\n---\n\n**Current Progress**: {progress_msg}\n\n---\n\n### π Report Preview:\n\n{full_report}"
                # Re-render the bot message on every streamed update.
                history[-1] = (user_input, display_report)
                yield history, "GENERATING", topic_state, progress_state, gr.update(interactive=False), None, gr.update(visible=False)
            # Final report display
            completion_message = f"""
### β Research Complete!
Your comprehensive research report is ready. You can:
- π₯ Download as PDF using the button below
- π Copy the text directly from the report
- π Start a new research topic
---
{full_report}
"""
            history[-1] = (user_input, completion_message)
            # Enable PDF download
            pdf_path = export_to_pdf(full_report)
            # Reset to INITIAL so the user can start a new topic immediately.
            yield history, "INITIAL", "", {}, gr.update(interactive=True, placeholder="What would you like to research next?"), pdf_path, gr.update(visible=True)
        except Exception as e:
            error_msg = f"β **Error during research**: {str(e)}\n\nPlease try again with a different topic or check your API keys."
            history.append((None, error_msg))
            yield history, "INITIAL", "", {}, gr.update(interactive=True, placeholder="Let's try again. What would you like to research?"), None, gr.update(visible=False)
# Build the Gradio interface
with gr.Blocks(css=CSS, theme=gr.themes.Base()) as app:
    # Header banner (static HTML, styled by the CSS constant above)
    gr.HTML("""
<div class="header-container">
<h1>DeepSearch Research Agent</h1>
<p class="subtitle">AI-powered comprehensive research and analysis</p>
</div>
""")
    # Status bar
    gr.HTML("""
<div class="status-bar">
<div class="status-indicator">
<span class="status-dot"></span>
<span>System Online</span>
</div>
<div>
<span style="color: var(--text-muted);">Powered by Gemini & Tavily</span>
</div>
</div>
""")
    # State management (per-session)
    agent_state = gr.State("INITIAL")  # conversation phase consumed by chat_step
    initial_topic_state = gr.State("")  # the user's original topic, kept across turns
    progress_state = gr.State({})  # step counters surfaced during generation
    # Chat interface
    chatbot = gr.Chatbot(
        elem_id="chatbot",
        bubble_full_width=False,
        height=600,
        visible=True,
        value=[(None, "π **Welcome to DeepSearch!**\n\nI'm your AI research assistant. I can help you create comprehensive, well-researched reports on any topic.\n\n**How it works:**\n1. Tell me what you'd like to research\n2. I'll ask a few clarifying questions\n3. I'll conduct deep research and write a detailed report\n4. You'll get a downloadable PDF with all sources\n\n**What would you like to research today?**")],
        avatar_images=(None, "π¬")
    )
    # Input area
    with gr.Group(elem_classes="input-container"):
        with gr.Row():
            chat_input = gr.Textbox(
                placeholder="Enter your research topic (e.g., 'Impact of AI on healthcare', 'Climate change solutions', 'History of quantum computing')",
                interactive=True,
                visible=True,
                show_label=False,
                scale=8,
                elem_id="chat-input"
            )
            submit_button = gr.Button("π Start Research", scale=2, variant="primary")
    # Export section (hidden until a report finishes; chat_step toggles visibility)
    with gr.Group(elem_classes="export-container", visible=False) as export_group:
        gr.Markdown("### π₯ Export Options")
        with gr.Row(elem_classes="export-buttons"):
            pdf_download = gr.File(label="Download PDF Report", visible=False)
    # Event handlers: run the streaming wrapper, then clear the input box.
    submit_event = submit_button.click(
        fn=chat_step_wrapper,
        inputs=[chat_input, chatbot, agent_state, initial_topic_state, progress_state],
        outputs=[chatbot, agent_state, initial_topic_state, progress_state, chat_input, pdf_download, export_group],
    ).then(
        fn=lambda: gr.update(value=""),
        inputs=None,
        outputs=[chat_input],
        queue=False
    )
    # Pressing Enter in the textbox triggers the same pipeline as the button.
    chat_input.submit(
        fn=chat_step_wrapper,
        inputs=[chat_input, chatbot, agent_state, initial_topic_state, progress_state],
        outputs=[chatbot, agent_state, initial_topic_state, progress_state, chat_input, pdf_download, export_group],
    ).then(
        fn=lambda: gr.update(value=""),
        inputs=None,
        outputs=[chat_input],
        queue=False
    )
# Launch the app
if __name__ == "__main__":
    # Queuing is required for the generator-based (streaming) event handlers.
    app.queue()
    app.launch(debug=True, share=False)