# Penny_V2 / app.py
# Source: Hugging Face Space upload by pythonprincess
# ("Upload 2 files", commit c1821b1 verified, 8.74 kB).
# NOTE(review): Hub page chrome converted to comments so the file parses.
"""
πŸ€– PENNY Gradio Interface
Hugging Face Space Entry Point
This file connects PENNY's FastAPI backend to a Gradio chat interface,
allowing users to interact with Penny through a web UI on Hugging Face Spaces.
"""
import gradio as gr
import logging
import sys
from typing import List, Tuple
from datetime import datetime
# Setup logging
# Stream to stdout so log lines surface in the Hugging Face Space console.
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
    handlers=[logging.StreamHandler(sys.stdout)]
)
logger = logging.getLogger(__name__)

# Import Penny's orchestrator
# Fail fast (re-raise) if the backend package is missing: the UI is useless
# without it, and raising makes the Space build/startup error visible.
try:
    from app.orchestrator import run_orchestrator, get_service_availability
    from app.location_utils import SupportedCities
    logger.info("βœ… Successfully imported PENNY modules")
except ImportError as e:
    logger.error(f"❌ Failed to import PENNY modules: {e}")
    raise
# ============================================================
# GRADIO CHAT INTERFACE
# ============================================================
async def chat_with_penny(
    message: str,
    city: str,
    history: List[Tuple[str, str]]
) -> Tuple[List[Tuple[str, str]], str]:
    """
    Process a user message through PENNY's orchestrator and return the reply.

    Args:
        message: User's input text; blank/whitespace-only input is ignored.
        city: Selected city (used as tenant_id); the sentinel
            "Not sure / Other" attaches no location context.
        history: Chat history as (user_msg, bot_msg) tuples.

    Returns:
        Tuple of (updated_history, "") — the empty string clears the
        input textbox in the Gradio UI.
    """
    # Guard clause: ignore empty submissions without touching the history.
    if not message.strip():
        return history, ""

    # Work on a copy instead of mutating the caller's list (Gradio consumes
    # the returned value, so this is behavior-equivalent for the UI and
    # avoids shared-mutable-argument surprises).
    updated = list(history)

    try:
        # Build context from the selected city; only attach a tenant when a
        # concrete city was chosen.
        context = {}
        if city and city != "Not sure / Other":
            context["tenant_id"] = city

        # Lazy %-style args: formatting is skipped when INFO is disabled.
        logger.info("Processing message: '%s...' for city: %s", message[:50], city)

        # Call PENNY's orchestrator
        result = await run_orchestrator(message, context)

        # Fall back to a friendly message if the orchestrator gave no reply.
        reply = result.get("reply", "I'm having trouble right now. Please try again! πŸ’›")
        updated.append((message, reply))

        logger.info("Response generated successfully (intent: %s)", result.get("intent"))
        return updated, ""

    except Exception as e:
        # UI boundary: log the full traceback but keep the chat usable with a
        # friendly fallback instead of surfacing a stack trace to the user.
        logger.exception("Error processing message: %s", e)
        error_reply = (
            "I'm having trouble processing your request right now. "
            "Please try again in a moment! πŸ’›"
        )
        updated.append((message, error_reply))
        return updated, ""
def get_city_choices() -> List[str]:
    """Return dropdown choices: a catch-all sentinel followed by all
    supported city names in alphabetical order; falls back to a small
    hard-coded list if the city registry cannot be loaded."""
    sentinel = "Not sure / Other"
    try:
        names = sorted(c.full_name for c in SupportedCities.get_all_cities())
        return [sentinel] + names
    except Exception as e:
        logger.error(f"Error loading cities: {e}")
        return [sentinel, "Atlanta, GA", "Seattle, WA"]
def get_service_status() -> str:
    """Build a Markdown summary of which PENNY services are up,
    with a one-line fallback string if availability cannot be checked."""
    try:
        lines = ["**PENNY Service Status:**\n"]
        for name, is_up in get_service_availability().items():
            icon = "βœ…" if is_up else "⚠️"
            label = "Available" if is_up else "Limited"
            lines.append(f"{icon} {name.replace('_', ' ').title()}: {label}")
        return "\n".join(lines)
    except Exception as e:
        logger.error(f"Error getting service status: {e}")
        return "**Status:** Unable to check service availability"
# ============================================================
# GRADIO UI DEFINITION
# ============================================================

# Custom CSS for a polished look.
# #chatbot gets a fixed, scrollable pane; the `footer` rule hides
# Gradio's default page footer.
custom_css = """
#chatbot {
    height: 500px;
    overflow-y: auto;
}
.gradio-container {
    font-family: 'Inter', sans-serif;
}
footer {
    display: none !important;
}
"""

# Build the Gradio interface declaratively; component creation order inside
# the `with` context determines the on-page layout.
with gr.Blocks(
    theme=gr.themes.Soft(primary_hue="amber", secondary_hue="blue"),
    css=custom_css,
    title="PENNY - Civic Assistant"
) as demo:
    # Header
    gr.Markdown(
        """
# πŸ€– PENNY - People's Engagement Network Navigator for You
**Your multilingual civic assistant connecting residents to local services.**
Ask me about:
- 🌀️ Weather conditions
- πŸ“… Community events
- πŸ›οΈ Local resources (shelters, libraries, food banks)
- 🌍 Translation (27 languages)
- πŸ“„ Document help
"""
    )

    with gr.Row():
        # Left column (wider): the chat experience.
        with gr.Column(scale=2):
            # City selector
            city_dropdown = gr.Dropdown(
                choices=get_city_choices(),
                value="Not sure / Other",
                label="πŸ“ Select Your City",
                info="Choose your city for location-specific information"
            )
            # Chat interface
            chatbot = gr.Chatbot(
                label="Chat with PENNY",
                elem_id="chatbot",
                avatar_images=(None, "πŸ€–"),
                show_label=False
            )
            # Input row: textbox plus explicit Send button.
            with gr.Row():
                msg_input = gr.Textbox(
                    placeholder="Type your message here... (e.g., 'What's the weather today?')",
                    show_label=False,
                    scale=4,
                    container=False
                )
                submit_btn = gr.Button("Send", variant="primary", scale=1)
            # Example queries (clicking one fills the textbox).
            gr.Examples(
                examples=[
                    ["What's the weather today?"],
                    ["Any events this weekend?"],
                    ["I need help finding a library"],
                    ["Show me local resources"],
                    ["Translate 'hello' to Spanish"]
                ],
                inputs=msg_input,
                label="πŸ’‘ Try asking:"
            )

        # Right column (narrower): status + static info.
        with gr.Column(scale=1):
            # Service status panel — snapshot taken once at app start.
            # NOTE(review): `status_display` handle is currently unused;
            # kept for potential future refresh wiring.
            status_display = gr.Markdown(
                value=get_service_status(),
                label="System Status"
            )
            gr.Markdown(
                """
### 🌟 Features
- **27 Languages** supported
- **Real-time weather** via Azure Maps
- **Community events** database
- **Local resource** finder
- **Document processing** help
---
### πŸ“ Supported Cities
- Atlanta, GA
- Birmingham, AL
- Chesterfield, VA
- El Paso, TX
- Providence, RI
- Seattle, WA
---
πŸ’› *PENNY is here to help connect you with civic resources!*
"""
            )

    # Event handlers — both the Send button and pressing Enter in the textbox
    # route through chat_with_penny; the second output clears the textbox.
    submit_btn.click(
        fn=chat_with_penny,
        inputs=[msg_input, city_dropdown, chatbot],
        outputs=[chatbot, msg_input]
    )
    msg_input.submit(
        fn=chat_with_penny,
        inputs=[msg_input, city_dropdown, chatbot],
        outputs=[chatbot, msg_input]
    )

    # Footer
    gr.Markdown(
        """
---
**Built with:** FastAPI β€’ Gradio β€’ Azure ML β€’ Transformers
"""
    )
# ============================================================
# LAUNCH
# ============================================================
if __name__ == "__main__":
    # Startup banner — makes launches easy to spot in Space logs.
    banner = "=" * 60
    logger.info(banner)
    logger.info("πŸš€ Launching PENNY Gradio Interface")
    logger.info(banner)

    # Display service availability at startup.
    logger.info("\nπŸ“Š Service Availability Check:")
    for name, is_up in get_service_availability().items():
        mark = "βœ…" if is_up else "❌"
        logger.info(f" {mark} {name}: {'Available' if is_up else 'Not loaded'}")

    logger.info("\n" + banner)
    logger.info("πŸ€– PENNY is ready to help residents!")
    logger.info(banner + "\n")

    # Launch the Gradio app.
    demo.launch(
        server_name="0.0.0.0",  # bind all interfaces (required in a container)
        server_port=7860,       # port Hugging Face Spaces expects
        share=False
    )