Spaces:
Sleeping
Sleeping
"""
Modified app.py - Fixed Gradio interface for RobotPai Agent
Uses Hugging Face models and fixes authentication issues
"""
import os
import sys
import gradio as gr
import pandas as pd
import logging
from typing import List, Tuple, Optional
import warnings

# Silence noisy library warnings and raise the log threshold so only
# errors show up in the Space logs.
warnings.filterwarnings("ignore")
logging.basicConfig(level=logging.ERROR)

# Disable LangSmith tracing globally to avoid authentication errors
# (LangChain would otherwise try to phone home with missing credentials).
os.environ["LANGCHAIN_TRACING_V2"] = "false"
# Define the fallback implementation unconditionally: initialize_agent()
# instantiates FallbackAgent on every failure path (e.g. create_agent()
# returning None) even when the import below succeeds, so defining it only
# inside the except branch caused a NameError on that path.
class FallbackAgent:
    """Degraded stand-in used when the real RobotPaiAgent is unavailable."""

    def process_query(self, query: str) -> str:
        # Echo the question back with setup hints instead of answering it.
        return f"π§ Agent setup incomplete. You asked: {query}\n\nPlease check:\n1. Environment variables are set\n2. Database is configured\n3. Dependencies are installed"

    def add_documents(self, texts: List[str], metadatas: List = None) -> bool:
        # No vector store in fallback mode; report failure to the caller.
        return False

    def load_csv_for_analysis(self, file_path: str) -> bool:
        # No analysis backend in fallback mode; report failure to the caller.
        return False


try:
    from agent import RobotPaiAgent, create_agent
    print("β Agent module imported successfully")
except ImportError as e:
    print(f"β Failed to import agent: {e}")
    print("Creating fallback agent...")

    def create_agent():
        """Fallback factory: return None so callers switch to FallbackAgent."""
        return None
# Module-level agent singleton, populated by initialize_agent() at app load.
global_agent = None


def initialize_agent():
    """Build the global agent, dropping to FallbackAgent on any failure.

    Returns:
        A status string suitable for display in the UI.
    """
    global global_agent
    try:
        print("π€ Initializing RobotPai Agent...")
        global_agent = create_agent()
        if not global_agent:
            # Factory produced nothing usable; run degraded.
            global_agent = FallbackAgent()
            return "β οΈ Agent initialized in fallback mode. Some features may not work."
        # A real agent came back -- warm it up with any locally cached CSV data.
        global_agent.load_csv_for_analysis("supabase_docs.csv")
        print("β Agent initialized successfully")
        return "β RobotPai Agent is ready! You can now ask questions about documents or CSV data."
    except Exception as exc:
        print(f"β Agent initialization failed: {exc}")
        global_agent = FallbackAgent()
        return f"β Agent initialization failed: {str(exc)}\n\nUsing fallback mode."
def chat_with_agent(message: str, history: List[Tuple[str, str]]) -> Tuple[str, List[Tuple[str, str]]]:
    """Run one chat turn through the global agent.

    Args:
        message: Raw user input from the textbox.
        history: Gradio chat history as (user, bot) tuples; extended in place.

    Returns:
        ("", updated history) -- the empty string clears the input textbox.
    """
    if not message.strip():
        # Ignore blank or whitespace-only submissions.
        return "", history
    try:
        if global_agent is None:
            reply = "π§ Agent not initialized. Please wait for setup to complete."
        else:
            reply = global_agent.process_query(message)
    except Exception as exc:
        reply = f"β Error processing your message: {str(exc)}"
    history.append((message, reply))
    return "", history
def upload_and_process_file(file) -> str:
    """Process an uploaded CSV or TXT file and index it through the agent.

    Args:
        file: Either a filesystem path string (what gr.File(type="filepath")
            delivers) or a file-like object exposing a `.name` path attribute
            (what older Gradio versions deliver).

    Returns:
        A human-readable status message describing the outcome.
    """
    if file is None:
        return "β No file uploaded"
    try:
        # BUG FIX: the interface declares gr.File(type="filepath"), which
        # passes a plain str -- the old `file.name` raised AttributeError on
        # every upload. Accept both shapes.
        file_path = file if isinstance(file, str) else file.name
        file_name = file_path
        # Read the module global defensively so a missing/unset agent simply
        # skips indexing instead of raising.
        agent = globals().get("global_agent")
        if file_name.endswith('.csv'):
            # Load the CSV up front so we can report its shape even when
            # indexing is unavailable.
            df = pd.read_csv(file_path)
            if agent and hasattr(agent, 'load_csv_for_analysis'):
                success = agent.load_csv_for_analysis(file_path)
                if success:
                    return f"β CSV file processed successfully!\nπ {len(df)} rows, {len(df.columns)} columns\nπ You can now ask questions about this data."
                else:
                    return f"β οΈ CSV file loaded but not added to vector store.\nπ {len(df)} rows, {len(df.columns)} columns"
            else:
                return f"π CSV file loaded: {len(df)} rows, {len(df.columns)} columns\nβ οΈ Vector store not available for indexing."
        elif file_name.endswith('.txt'):
            with open(file_path, 'r', encoding='utf-8') as f:
                content = f.read()
            if agent and hasattr(agent, 'add_documents'):
                success = agent.add_documents([content], [{"source": file_name}])
                if success:
                    return f"β Text file processed and added to knowledge base!\nπ {len(content)} characters processed."
                else:
                    return f"β οΈ Text file loaded but couldn't add to knowledge base.\nπ {len(content)} characters"
            else:
                return f"π Text file loaded: {len(content)} characters\nβ οΈ Vector store not available for indexing."
        else:
            return f"β Unsupported file type: {file_name}\nSupported types: .csv, .txt"
    except Exception as e:
        return f"β Error processing file: {str(e)}"
def get_system_status() -> str:
    """Summarize agent, environment, and data availability for the status panel.

    Returns:
        A newline-joined checklist, one component state per line.
    """
    status_parts = []
    # Read the module global defensively so this is safe to call even before
    # initialize_agent() has created global_agent.
    agent = globals().get("global_agent")
    if agent:
        if hasattr(agent, 'vectorstore') and agent.vectorstore:
            status_parts.append("β Vector Store: Connected")
        else:
            status_parts.append("β οΈ Vector Store: Not available")
        if hasattr(agent, 'llm') and agent.llm:
            status_parts.append("β Language Model: Loaded")
        else:
            status_parts.append("β οΈ Language Model: Not available")
        if hasattr(agent, 'supabase_client') and agent.supabase_client:
            status_parts.append("β Supabase: Connected")
        else:
            status_parts.append("β οΈ Supabase: Not connected")
    else:
        status_parts.append("β Agent: Not initialized")
    # Check required environment variables.
    required_vars = ["SUPABASE_URL", "SUPABASE_SERVICE_ROLE_KEY"]
    for var in required_vars:
        if os.getenv(var):
            status_parts.append(f"β {var}: Set")
        else:
            status_parts.append(f"β {var}: Missing")
    # Check for locally cached CSV data.
    if os.path.exists("supabase_docs.csv"):
        try:
            df = pd.read_csv("supabase_docs.csv")
            status_parts.append(f"β CSV Data: {len(df)} rows available")
        except Exception:
            # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
            # are no longer swallowed; any parse/IO failure is still reported.
            status_parts.append("β οΈ CSV Data: File exists but couldn't read")
    else:
        status_parts.append("β οΈ CSV Data: No supabase_docs.csv found")
    return "\n".join(status_parts)
def clear_chat() -> List:
    """Reset the chatbot component to an empty history."""
    return list()
def get_example_queries() -> List[str]:
    """Return the sample prompts surfaced as one-click buttons in the UI."""
    examples = [
        "What is RobotPai?",
        "Search for information about Supabase",
        "Analyze the CSV data - how many rows are there?",
        "What columns are in the CSV file?",
        "Show me the first few rows of data",
        "Help me understand vector databases",
    ]
    return examples
# Create Gradio interface
def create_interface():
    """Create the Gradio interface.

    Builds a Blocks app with a two-column layout -- chat (chatbot, message
    box, example buttons) and sidebar (file upload, system status) -- then
    wires all event handlers. Returns the un-launched gr.Blocks instance.
    """
    with gr.Blocks(
        title="π€ RobotPai - AI Assistant",
        theme=gr.themes.Soft(),
        css="""
        .gradio-container {
            max-width: 1200px;
            margin: auto;
        }
        .status-box {
            background-color: #f0f0f0;
            padding: 10px;
            border-radius: 5px;
            font-family: monospace;
            font-size: 12px;
        }
        """
    ) as demo:
        gr.Markdown("""
        # π€ RobotPai - AI Assistant
        An intelligent assistant that can search documents, analyze CSV data, and answer questions using Hugging Face models.
        **Features:**
        - π Document search and Q&A
        - π CSV data analysis
        - π Vector-based similarity search
        - π€ Powered by Hugging Face models
        """)
        with gr.Row():
            with gr.Column(scale=2):
                # Chat interface (left column, twice the sidebar width).
                chatbot = gr.Chatbot(
                    label="Chat with RobotPai",
                    height=400,
                    show_label=True,
                    container=True,
                    bubble_full_width=False
                )
                with gr.Row():
                    msg = gr.Textbox(
                        placeholder="Ask me anything about documents or data...",
                        label="Your Message",
                        scale=4
                    )
                    send_btn = gr.Button("Send", variant="primary", scale=1)
                with gr.Row():
                    clear_btn = gr.Button("Clear Chat", variant="secondary")
                # Example queries, split across two rows of buttons.
                gr.Markdown("### π‘ Example Queries:")
                with gr.Row():
                    # `x=example` binds each button's text at definition time
                    # (avoids the late-binding closure pitfall); clicking fills
                    # the textbox and clears the chat history.
                    for i, example in enumerate(get_example_queries()[:3]):
                        gr.Button(example, size="sm").click(
                            lambda x=example: (x, []), outputs=[msg, chatbot]
                        )
                with gr.Row():
                    for i, example in enumerate(get_example_queries()[3:]):
                        gr.Button(example, size="sm").click(
                            lambda x=example: (x, []), outputs=[msg, chatbot]
                        )
            with gr.Column(scale=1):
                # File upload sidebar. NOTE: type="filepath" means the change
                # handler receives a plain path string, not a file object.
                gr.Markdown("### π File Upload")
                file_upload = gr.File(
                    label="Upload CSV or TXT file",
                    file_types=[".csv", ".txt"],
                    type="filepath"
                )
                upload_status = gr.Textbox(
                    label="Upload Status",
                    interactive=False,
                    max_lines=5
                )
                # System status readout (monospace via the .status-box CSS).
                gr.Markdown("### π§ System Status")
                status_display = gr.Textbox(
                    label="Current Status",
                    interactive=False,
                    max_lines=10,
                    elem_classes=["status-box"]
                )
                refresh_status_btn = gr.Button("Refresh Status", variant="secondary")
        # Run agent initialization and a status probe when the page loads;
        # their return strings land in the sidebar textboxes.
        demo.load(initialize_agent, outputs=[upload_status])
        demo.load(get_system_status, outputs=[status_display])
        # Event handlers: both the Send button and Enter-in-textbox submit.
        send_btn.click(
            chat_with_agent,
            inputs=[msg, chatbot],
            outputs=[msg, chatbot]
        )
        msg.submit(
            chat_with_agent,
            inputs=[msg, chatbot],
            outputs=[msg, chatbot]
        )
        clear_btn.click(clear_chat, outputs=[chatbot])
        file_upload.change(
            upload_and_process_file,
            inputs=[file_upload],
            outputs=[upload_status]
        )
        refresh_status_btn.click(
            get_system_status,
            outputs=[status_display]
        )
    return demo
# Launch the app
if __name__ == "__main__":
    # Warn early about missing configuration; the app still starts so the
    # status panel can surface the problem in the UI.
    print("π Checking environment...")
    required_vars = ["SUPABASE_URL", "SUPABASE_SERVICE_ROLE_KEY"]
    missing_vars = [name for name in required_vars if not os.getenv(name)]
    if missing_vars:
        print(f"β οΈ Missing environment variables: {missing_vars}")
        print("Please set these in your Hugging Face Space settings or .env file")
    # Build and serve the full interface; drop to a bare echo UI if that fails.
    try:
        demo = create_interface()
        demo.launch(
            server_name="0.0.0.0",
            server_port=7860,
            share=False,  # Set to True if you want a public link
            show_error=True,
            quiet=False
        )
    except Exception as e:
        print(f"β Failed to launch app: {e}")
        print("Try running with simpler configuration...")

        # Fallback: a minimal echo interface so the Space is not blank.
        def simple_chat(message):
            return f"Echo: {message} (Fallback mode - please check setup)"

        simple_demo = gr.Interface(
            fn=simple_chat,
            inputs=gr.Textbox(placeholder="Enter your message..."),
            outputs=gr.Textbox(),
            title="π€ RobotPai (Fallback Mode)",
            description="The full interface failed to load. Please check your environment setup."
        )
        simple_demo.launch(server_name="0.0.0.0", server_port=7860)