pythonprincess committed on
Commit
14f6dd3
·
verified ·
1 Parent(s): e0f3474

Delete gradio_app.py

Browse files
Files changed (1) hide show
  1. gradio_app.py +0 -520
gradio_app.py DELETED
@@ -1,520 +0,0 @@
"""
🤖 PENNY V2.2 Gradio Interface
Hugging Face Space Entry Point

This file connects PENNY's backend to a Gradio chat interface,
allowing users to interact with PENNY through a web UI on Hugging Face Spaces.
"""

import gradio as gr
import logging
import sys
import asyncio
import os
from dotenv import load_dotenv
from typing import List, Tuple, Dict, Any
from datetime import datetime

# Load environment variables from .env file so local runs behave like Spaces.
load_dotenv()

# Verify the keys loaded (optional debug; uses print() because logging is not
# configured yet at this point).
if os.getenv("AZURE_MAPS_KEY"):
    print("✅ AZURE_MAPS_KEY loaded successfully")
else:
    print("⚠️ AZURE_MAPS_KEY not found!")

# Check for HF_TOKEN or READTOKEN (Hugging Face Spaces uses READTOKEN)
hf_token = os.getenv("HF_TOKEN") or os.getenv("READTOKEN")
if hf_token:
    print("✅ HF_TOKEN/READTOKEN loaded successfully")
else:
    print("⚠️ HF_TOKEN/READTOKEN not found! Set it in Hugging Face Spaces secrets or .env file")

# NOTE: duplicate re-imports of typing and datetime that appeared here were
# redundant (both are already imported above) and have been removed.

# Setup logging: INFO level, timestamped, streamed to stdout so the Spaces
# console captures it.
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
    handlers=[logging.StreamHandler(sys.stdout)]
)
logger = logging.getLogger(__name__)
45
- # ============================================================
46
- # IMPORT PENNY MODULES - FIXED FOR ACTUAL FILE STRUCTURE
47
- # ============================================================
48
-
49
- try:
50
- # Core orchestration and routing
51
- from app.orchestrator import run_orchestrator
52
- # REMOVED: from app.router import route_query # Function doesn't exist
53
-
54
- # Utilities
55
- from app.location_utils import geocode_address, get_user_location
56
- from app.logging_utils import setup_logger
57
-
58
- # Event and weather handling
59
- from app.event_weather import get_event_recommendations_with_weather # FIXED: was get_weather_info
60
-
61
- # Tool agent for officials and resources
62
- from app.tool_agent import handle_tool_request # FIXED: removed non-existent functions
63
-
64
- # REMOVED: initialize_models doesn't exist in model_loader
65
- # from app.model_loader import initialize_models
66
-
67
- # Intent classification
68
- from app.intents import classify_intent, IntentType
69
-
70
- logger.info("✅ Successfully imported PENNY modules from app/")
71
-
72
- except ImportError as import_error:
73
- logger.error(f"❌ Failed to import PENNY modules: {import_error}")
74
- logger.error(f" Make sure all files exist in app/ folder")
75
- logger.error(f" Current error: {str(import_error)}")
76
-
77
- # Create fallback functions so the interface can still load
78
- async def run_orchestrator(message: str, context: Dict[str, Any]) -> Dict[str, Any]:
79
- return {
80
- "reply": "⚠️ PENNY is initializing. Please try again in a moment.",
81
- "intent": "error",
82
- "confidence": 0.0
83
- }
84
-
85
- def get_service_availability() -> Dict[str, bool]:
86
- return {
87
- "orchestrator": False,
88
- "weather_service": False,
89
- "event_database": False,
90
- "resource_finder": False
91
- }
92
-
93
# ============================================================
# SERVICE AVAILABILITY CHECK
# ============================================================

def get_service_availability() -> Dict[str, bool]:
    """
    Check which PENNY services are available.

    Each probe is best-effort: a failed import or lookup marks the
    service unavailable instead of raising, so the UI can start in a
    degraded mode. The former bare ``except:`` clauses were narrowed —
    they also swallowed SystemExit/KeyboardInterrupt.

    Returns:
        Dict of service_name -> availability status.
    """
    services = {}

    # Check if orchestrator is callable. Exception (not just ImportError)
    # because run_orchestrator may be undefined (NameError) if the
    # module-level import block failed entirely.
    try:
        services["orchestrator"] = callable(run_orchestrator)
    except Exception:
        services["orchestrator"] = False

    # Weather service and event database both live in app.event_weather,
    # so the same import probe backs both flags.
    try:
        from app.event_weather import get_event_recommendations_with_weather  # noqa: F401
        services["weather_service"] = True
    except ImportError:
        services["weather_service"] = False

    try:
        from app.event_weather import get_event_recommendations_with_weather  # noqa: F401
        services["event_database"] = True
    except ImportError:
        services["event_database"] = False

    # Check if tool agent (officials / resource lookups) loaded.
    try:
        from app.tool_agent import handle_tool_request  # noqa: F401
        services["resource_finder"] = True
    except ImportError:
        services["resource_finder"] = False

    return services
132
-
133
-
134
# ============================================================
# SUPPORTED CITIES CONFIGURATION
# ============================================================

# Cities with location-specific data; surfaced in the UI city dropdown.
SUPPORTED_CITIES = [
    "Atlanta, GA",
    "Birmingham, AL",
    "Chesterfield, VA",
    "El Paso, TX",
    "Norfolk, VA",
    "Providence, RI",
    "Seattle, WA",
]


def get_city_choices() -> List[str]:
    """Return dropdown choices: a catch-all entry followed by the supported cities, alphabetized."""
    try:
        ordered_cities = sorted(SUPPORTED_CITIES)
        return ["Not sure / Other"] + ordered_cities
    except Exception as e:
        # Defensive fallback: keep the UI usable with a minimal choice set.
        logger.error(f"Error loading cities: {e}")
        return ["Not sure / Other", "Norfolk, VA"]
155
-
156
-
157
# ============================================================
# CHAT HANDLER
# ============================================================

async def chat_with_penny(
    message: str,
    city: str,
    history: List[Tuple[str, str]]
) -> Tuple[List[Tuple[str, str]], str]:
    """
    Process user message through PENNY's orchestrator and return response.

    The orchestrator result may be a plain dict, an object with to_dict(),
    or an arbitrary object with reply/intent/confidence attributes — all
    three shapes are handled. `history` is mutated in place and also
    returned (Gradio chatbot state convention).

    Args:
        message: User's input text
        city: Selected city/location
        history: Chat history (list of (user_msg, bot_msg) tuples)

    Returns:
        Tuple of (updated_history, empty_string_to_clear_input)
    """
    # Ignore whitespace-only submissions without touching the history.
    if not message.strip():
        return history, ""

    try:
        # Build context from selected city
        context = {
            "timestamp": datetime.now().isoformat(),
            "conversation_history": history[-5:] if history else []  # Last 5 exchanges
        }

        # Add location if specified ("Not sure / Other" means no location).
        if city and city != "Not sure / Other":
            context["location"] = city
            # tenant_id derives from the city name, e.g. "El Paso, TX" -> "el_paso".
            context["tenant_id"] = city.split(",")[0].lower().replace(" ", "_")

        logger.info(f"📨 Processing: '{message[:60]}...' | City: {city}")

        # Call PENNY's orchestrator
        result = await run_orchestrator(message, context)

        # Handle both dict and OrchestrationResult objects
        if hasattr(result, 'to_dict'):
            result = result.to_dict()
        elif not isinstance(result, dict):
            # Fallback: try to access attributes directly
            reply = getattr(result, 'reply', "I'm having trouble right now. Please try again! 💛")
            intent = getattr(result, 'intent', 'unknown')
            confidence = getattr(result, 'confidence', 0.0)
            history.append((message, reply))
            logger.info(f"✅ Response generated | Intent: {intent} | Confidence: {confidence:.2f}")
            return history, ""

        # Extract response from dictionary
        reply = result.get("reply", "I'm having trouble right now. Please try again! 💛")
        intent = result.get("intent", "unknown")
        confidence = result.get("confidence")

        # Handle None confidence values (key present but null) so the
        # :.2f format below cannot raise.
        if confidence is None:
            confidence = 0.0

        # Add to history
        history.append((message, reply))

        logger.info(f"✅ Response generated | Intent: {intent} | Confidence: {confidence:.2f}")

        return history, ""

    except Exception as e:
        # Top-level boundary: surface a friendly message (with a truncated
        # error detail) instead of crashing the Gradio handler.
        logger.error(f"❌ Error processing message: {e}", exc_info=True)

        error_reply = (
            "I'm having trouble processing your request right now. "
            "Please try again in a moment! 💛\n\n"
            f"_Error: {str(e)[:100]}_"
        )
        history.append((message, error_reply))
        return history, ""
235
-
236
-
237
def chat_with_penny_sync(message: str, city: str, history: List[Tuple[str, str]]) -> Tuple[List[Tuple[str, str]], str]:
    """
    Synchronous wrapper for chat_with_penny to work with Gradio.

    Gradio calls handlers from a worker thread with no running event
    loop, so asyncio.run() can own a fresh loop for the duration of the
    call. It also guarantees the loop is closed even when the coroutine
    raises — the previous new_event_loop()/close() pattern skipped
    close() on error and leaked the loop.

    Args:
        message: User's input text.
        city: Selected city/location.
        history: Chat history, mutated in place on error.

    Returns:
        Tuple of (updated_history, empty_string_to_clear_input).
    """
    try:
        return asyncio.run(chat_with_penny(message, city, history))
    except Exception as e:
        # Last-resort boundary: append the error as the bot reply so the
        # chat UI shows something instead of a stack trace.
        logger.error(f"Error in sync wrapper: {e}")
        error_msg = f"Error: {str(e)}"
        history.append((message, error_msg))
        return history, ""
254
-
255
-
256
# ============================================================
# SERVICE STATUS DISPLAY
# ============================================================

def get_service_status() -> str:
    """Render current service availability as a Markdown status panel."""
    try:
        # Human-friendly labels for the keys get_service_availability() returns.
        labels = {
            "orchestrator": "🧠 Core Orchestrator",
            "weather_service": "🌤️ Weather Service",
            "event_database": "📅 Event Database",
            "resource_finder": "🏛️ Resource Finder"
        }

        lines = ["**🔧 PENNY Service Status:**\n"]
        for key, up in get_service_availability().items():
            icon = "✅" if up else "⚠️"
            status = "Online" if up else "Limited"
            # Unknown keys fall back to a title-cased version of the key itself.
            name = labels.get(key, key.replace('_', ' ').title())
            lines.append(f"{icon} **{name}**: {status}")

        return "\n".join(lines)
    except Exception as e:
        logger.error(f"Error getting service status: {e}")
        return "**⚠️ Status:** Unable to check service availability"
283
-
284
-
285
# ============================================================
# GRADIO UI DEFINITION
# ============================================================

# Custom CSS for enhanced styling: fixed-height scrollable chat pane,
# gradient status panel, hidden Gradio footer, and tinted user/bot bubbles.
custom_css = """
#chatbot {
    height: 500px;
    overflow-y: auto;
    border-radius: 8px;
}
.gradio-container {
    font-family: 'Inter', -apple-system, BlinkMacSystemFont, sans-serif;
}
#status-panel {
    background: linear-gradient(135deg, #f5f7fa 0%, #c3cfe2 100%);
    padding: 15px;
    border-radius: 8px;
    margin: 10px 0;
}
footer {
    display: none !important;
}
.message-user {
    background-color: #e3f2fd !important;
}
.message-bot {
    background-color: #fff3e0 !important;
}
"""
315
-
316
# Build the Gradio interface. Everything below runs at import time and
# produces the module-level `demo` Blocks object that launch() uses.
with gr.Blocks(
    theme=gr.themes.Soft(primary_hue="amber", secondary_hue="blue"),
    css=custom_css,
    title="PENNY V2.2 - Civic Assistant"
) as demo:

    # Header
    gr.Markdown(
        """
        # 🤖 PENNY V2.2 - People's Engagement Network Navigator for You

        **Your multilingual civic assistant connecting residents to local government services and community resources.**

        ### 💬 Ask me about:
        - 🌤️ **Weather conditions** and forecasts
        - 📅 **Community events** and activities
        - 🏛️ **Local resources** (shelters, libraries, food banks, healthcare)
        - 👥 **Elected officials** and government contacts
        - 🌍 **Translation** services (27+ languages)
        - 📄 **Document assistance** and form help
        """
    )

    # Two-column layout: chat (left, wider) and status/info sidebar (right).
    with gr.Row():
        with gr.Column(scale=2):
            # City selector
            city_dropdown = gr.Dropdown(
                choices=get_city_choices(),
                value="Norfolk, VA",
                label="📍 Select Your City",
                info="Choose your city for location-specific information",
                interactive=True
            )

            # Chat interface. NOTE(review): tuple-style history (no
            # type="messages") — deprecated in newer Gradio; confirm the
            # pinned gradio version still supports it.
            chatbot = gr.Chatbot(
                label="💬 Chat with PENNY",
                elem_id="chatbot",
                avatar_images=(None, "🤖"),
                show_label=True,
                height=500,
                bubble_full_width=False
            )

            # Input row: textbox plus send button.
            with gr.Row():
                msg_input = gr.Textbox(
                    placeholder="Type your message here... (e.g., 'What's the weather today?')",
                    show_label=False,
                    scale=4,
                    container=False,
                    lines=1
                )
                submit_btn = gr.Button("Send 📤", variant="primary", scale=1)

            # Clear button
            clear_btn = gr.Button("🗑️ Clear Chat", variant="secondary", size="sm")

            # Example queries (clicking one fills msg_input).
            gr.Examples(
                examples=[
                    ["What's the weather in Norfolk today?"],
                    ["Any community events this weekend?"],
                    ["I need help finding a food bank"],
                    ["Who is my city council representative?"],
                    ["Show me local libraries"],
                    ["Translate 'hello' to Spanish"],
                    ["Help me understand this document"]
                ],
                inputs=msg_input,
                label="💡 Try asking:"
            )

        with gr.Column(scale=1):
            # Service status panel, rendered once at build time; the refresh
            # button below re-runs get_service_status() on demand.
            status_display = gr.Markdown(
                value=get_service_status(),
                label="System Status",
                elem_id="status-panel"
            )

            # Refresh status button
            refresh_btn = gr.Button("🔄 Refresh Status", size="sm", variant="secondary")

            gr.Markdown(
                """
                ### 🌟 Key Features

                - ✅ **27+ Languages** supported
                - ✅ **Real-time weather** via Azure Maps
                - ✅ **Community events** database
                - ✅ **Local resource** finder
                - ✅ **Government contact** lookup
                - ✅ **Document processing** help
                - ✅ **Multilingual** support

                ---

                ### 📍 Supported Cities

                - Atlanta, GA
                - Birmingham, AL
                - Chesterfield, VA
                - El Paso, TX
                - Norfolk, VA
                - Providence, RI
                - Seattle, WA

                ---

                ### 🆘 Need Help?

                PENNY can assist with:
                - Finding emergency services
                - Locating government offices
                - Understanding civic processes
                - Accessing community programs

                ---

                💛 *PENNY is here to help connect you with civic resources!*
                """
            )

    # Event handlers: button click and Enter-in-textbox both submit.
    submit_btn.click(
        fn=chat_with_penny_sync,
        inputs=[msg_input, city_dropdown, chatbot],
        outputs=[chatbot, msg_input]
    )

    msg_input.submit(
        fn=chat_with_penny_sync,
        inputs=[msg_input, city_dropdown, chatbot],
        outputs=[chatbot, msg_input]
    )

    # Clear resets both the chat history and the input box.
    clear_btn.click(
        fn=lambda: ([], ""),
        inputs=None,
        outputs=[chatbot, msg_input]
    )

    # Re-render the status panel markdown.
    refresh_btn.click(
        fn=get_service_status,
        inputs=None,
        outputs=status_display
    )

    # Footer
    gr.Markdown(
        """
        ---
        **Built with:** Python • FastAPI • Gradio • Azure ML • Hugging Face Transformers

        **Version:** 2.2 | **Last Updated:** November 2025

        _PENNY is an open-source civic engagement platform designed to improve access to government services._
        """
    )
477
-
478
-
479
# ============================================================
# INITIALIZATION AND LAUNCH
# ============================================================

def initialize_penny():
    """Log a startup banner and report which PENNY services loaded."""
    banner = "=" * 70
    logger.info(banner)
    logger.info("🚀 Initializing PENNY V2.2 Gradio Interface")
    logger.info(banner)

    # Per-service availability report.
    logger.info("\n📊 Service Availability Check:")
    services = get_service_availability()

    for name, ok in services.items():
        state = "✅ Available" if ok else "❌ Not loaded"
        logger.info(f"   {name.ljust(20)}: {state}")

    # Summary line: warn (not fail) when anything is missing, since the
    # app is designed to run in degraded mode.
    if all(services.values()):
        logger.info("\n✅ All services loaded successfully!")
    else:
        logger.warning("\n⚠️ Some services are not available. PENNY will run with limited functionality.")

    logger.info("\n" + banner)
    logger.info("🤖 PENNY is ready to help residents!")
    logger.info(banner + "\n")
508
-
509
-
510
if __name__ == "__main__":
    # Initialize services (logging/availability report only; no return value).
    initialize_penny()

    # Launch the Gradio app. 0.0.0.0:7860 is the standard Hugging Face
    # Spaces binding; share=False because Spaces provides the public URL.
    demo.launch(
        server_name="0.0.0.0",
        server_port=7860,
        share=False,
        show_error=True
    )