pythonprincess committed on
Commit
b9ea88f
·
verified ·
1 Parent(s): 84ab3c6

Update gradio_app.py

Browse files
Files changed (1) hide show
  1. gradio_app.py +438 -507
gradio_app.py CHANGED
@@ -1,507 +1,438 @@
1
- """
2
- 🤖 PENNY V2.2 Gradio Interface
3
- Hugging Face Space Entry Point
4
-
5
- This file connects PENNY's backend to a Gradio chat interface,
6
- allowing users to interact with PENNY through a web UI on Hugging Face Spaces.
7
- """
8
-
9
- import gradio as gr
10
- import logging
11
- import sys
12
- import asyncio
13
- import os
14
- from dotenv import load_dotenv
15
- from typing import List, Tuple, Dict, Any
16
- from datetime import datetime
17
-
18
- # Load environment variables from .env file
19
- load_dotenv() # Add this line
20
-
21
- # Verify the key loaded (optional debug)
22
- if os.getenv("AZURE_MAPS_KEY"):
23
- print("✅ AZURE_MAPS_KEY loaded successfully")
24
- else:
25
- print("⚠️ AZURE_MAPS_KEY not found!")
26
-
27
- from typing import List, Tuple, Dict, Any
28
- from datetime import datetime
29
-
30
- # Setup logging
31
- logging.basicConfig(
32
- level=logging.INFO,
33
- format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
34
- handlers=[logging.StreamHandler(sys.stdout)]
35
- )
36
- logger = logging.getLogger(__name__)
37
-
38
- # ============================================================
39
- # IMPORT PENNY MODULES - FIXED FOR ACTUAL FILE STRUCTURE
40
- # ============================================================
41
-
42
- try:
43
- # Core orchestration and routing
44
- from app.orchestrator import run_orchestrator
45
- from app.router import route_query
46
-
47
- # Utilities
48
- from app.location_utils import geocode_address, get_user_location
49
- from app.logging_utils import setup_logger
50
-
51
- # Event and weather handling
52
- from app.event_weather import (
53
- get_weather_info,
54
- search_events,
55
- format_weather_response,
56
- format_event_response
57
- )
58
-
59
- # Tool agent for officials and resources
60
- from app.tool_agent import (
61
- search_officials,
62
- search_resources,
63
- format_official_response,
64
- format_resource_response
65
- )
66
-
67
- # Model loader (if needed for initialization)
68
- from app.model_loader import initialize_models
69
-
70
- # Intent classification
71
- from app.intents import classify_intent, IntentType
72
-
73
- logger.info("✅ Successfully imported PENNY modules from app/")
74
-
75
- except ImportError as import_error:
76
- logger.error(f"❌ Failed to import PENNY modules: {import_error}")
77
- logger.error(f" Make sure all files exist in app/ folder")
78
- logger.error(f" Current error: {str(import_error)}")
79
-
80
- # Create fallback functions so the interface can still load
81
- async def run_orchestrator(message: str, context: Dict[str, Any]) -> Dict[str, Any]:
82
- return {
83
- "reply": "⚠️ PENNY is initializing. Please try again in a moment.",
84
- "intent": "error",
85
- "confidence": 0.0
86
- }
87
-
88
- def get_service_availability() -> Dict[str, bool]:
89
- return {
90
- "orchestrator": False,
91
- "weather_service": False,
92
- "event_database": False,
93
- "resource_finder": False
94
- }
95
-
96
- # ============================================================
97
- # SERVICE AVAILABILITY CHECK
98
- # ============================================================
99
-
100
- def get_service_availability() -> Dict[str, bool]:
101
- """
102
- Check which PENNY services are available.
103
- Returns dict of service_name -> availability status.
104
- """
105
- services = {}
106
-
107
- try:
108
- # Check if orchestrator is callable
109
- services["orchestrator"] = callable(run_orchestrator)
110
- except:
111
- services["orchestrator"] = False
112
-
113
- try:
114
- # Check if event/weather module loaded
115
- from app.event_weather import get_weather_info
116
- services["weather_service"] = True
117
- except:
118
- services["weather_service"] = False
119
-
120
- try:
121
- # Check if event database accessible
122
- from app.event_weather import search_events
123
- services["event_database"] = True
124
- except:
125
- services["event_database"] = False
126
-
127
- try:
128
- # Check if tool agent loaded
129
- from app.tool_agent import search_resources
130
- services["resource_finder"] = True
131
- except:
132
- services["resource_finder"] = False
133
-
134
- return services
135
-
136
-
137
- # ============================================================
138
- # SUPPORTED CITIES CONFIGURATION
139
- # ============================================================
140
-
141
- SUPPORTED_CITIES = [
142
- "Atlanta, GA",
143
- "Birmingham, AL",
144
- "Chesterfield, VA",
145
- "El Paso, TX",
146
- "Norfolk, VA",
147
- "Providence, RI",
148
- "Seattle, WA"
149
- ]
150
-
151
- def get_city_choices() -> List[str]:
152
- """Get list of supported cities for dropdown."""
153
- try:
154
- return ["Not sure / Other"] + sorted(SUPPORTED_CITIES)
155
- except Exception as e:
156
- logger.error(f"Error loading cities: {e}")
157
- return ["Not sure / Other", "Norfolk, VA"]
158
-
159
-
160
- # ============================================================
161
- # CHAT HANDLER
162
- # ============================================================
163
-
164
- async def chat_with_penny(
165
- message: str,
166
- city: str,
167
- history: List[Tuple[str, str]]
168
- ) -> Tuple[List[Tuple[str, str]], str]:
169
- """
170
- Process user message through PENNY's orchestrator and return response.
171
-
172
- Args:
173
- message: User's input text
174
- city: Selected city/location
175
- history: Chat history (list of (user_msg, bot_msg) tuples)
176
-
177
- Returns:
178
- Tuple of (updated_history, empty_string_to_clear_input)
179
- """
180
- if not message.strip():
181
- return history, ""
182
-
183
- try:
184
- # Build context from selected city
185
- context = {
186
- "timestamp": datetime.now().isoformat(),
187
- "conversation_history": history[-5:] if history else [] # Last 5 exchanges
188
- }
189
-
190
- # Add location if specified
191
- if city and city != "Not sure / Other":
192
- context["location"] = city
193
- context["tenant_id"] = city.split(",")[0].lower().replace(" ", "_")
194
-
195
- logger.info(f"📨 Processing: '{message[:60]}...' | City: {city}")
196
-
197
- # Call PENNY's orchestrator
198
- result = await run_orchestrator(message, context)
199
-
200
- # Extract response
201
- reply = result.get("reply", "I'm having trouble right now. Please try again! 💛")
202
- intent = result.get("intent", "unknown")
203
- confidence = result.get("confidence", 0.0)
204
-
205
- # Add to history
206
- history.append((message, reply))
207
-
208
- logger.info(f"✅ Response generated | Intent: {intent} | Confidence: {confidence:.2f}")
209
-
210
- return history, ""
211
-
212
- except Exception as e:
213
- logger.error(f"❌ Error processing message: {e}", exc_info=True)
214
-
215
- error_reply = (
216
- "I'm having trouble processing your request right now. "
217
- "Please try again in a moment! 💛\n\n"
218
- f"_Error: {str(e)[:100]}_"
219
- )
220
- history.append((message, error_reply))
221
- return history, ""
222
-
223
-
224
- def chat_with_penny_sync(message: str, city: str, history: List[Tuple[str, str]]) -> Tuple[List[Tuple[str, str]], str]:
225
- """
226
- Synchronous wrapper for chat_with_penny to work with Gradio.
227
- Gradio expects sync functions, so we create an event loop here.
228
- """
229
- try:
230
- # Create new event loop for this call
231
- loop = asyncio.new_event_loop()
232
- asyncio.set_event_loop(loop)
233
- result = loop.run_until_complete(chat_with_penny(message, city, history))
234
- loop.close()
235
- return result
236
- except Exception as e:
237
- logger.error(f"Error in sync wrapper: {e}")
238
- error_msg = f"Error: {str(e)}"
239
- history.append((message, error_msg))
240
- return history, ""
241
-
242
-
243
- # ============================================================
244
- # SERVICE STATUS DISPLAY
245
- # ============================================================
246
-
247
- def get_service_status() -> str:
248
- """Display current service availability status."""
249
- try:
250
- services = get_service_availability()
251
- status_lines = ["**🔧 PENNY Service Status:**\n"]
252
-
253
- service_names = {
254
- "orchestrator": "🧠 Core Orchestrator",
255
- "weather_service": "🌤️ Weather Service",
256
- "event_database": "📅 Event Database",
257
- "resource_finder": "🏛️ Resource Finder"
258
- }
259
-
260
- for service_key, available in services.items():
261
- icon = "✅" if available else "⚠️"
262
- status = "Online" if available else "Limited"
263
- name = service_names.get(service_key, service_key.replace('_', ' ').title())
264
- status_lines.append(f"{icon} **{name}**: {status}")
265
-
266
- return "\n".join(status_lines)
267
- except Exception as e:
268
- logger.error(f"Error getting service status: {e}")
269
- return "**⚠️ Status:** Unable to check service availability"
270
-
271
-
272
- # ============================================================
273
- # GRADIO UI DEFINITION
274
- # ============================================================
275
-
276
- # Custom CSS for enhanced styling
277
- custom_css = """
278
- #chatbot {
279
- height: 500px;
280
- overflow-y: auto;
281
- border-radius: 8px;
282
- }
283
- .gradio-container {
284
- font-family: 'Inter', -apple-system, BlinkMacSystemFont, sans-serif;
285
- }
286
- #status-panel {
287
- background: linear-gradient(135deg, #f5f7fa 0%, #c3cfe2 100%);
288
- padding: 15px;
289
- border-radius: 8px;
290
- margin: 10px 0;
291
- }
292
- footer {
293
- display: none !important;
294
- }
295
- .message-user {
296
- background-color: #e3f2fd !important;
297
- }
298
- .message-bot {
299
- background-color: #fff3e0 !important;
300
- }
301
- """
302
-
303
- # Build the Gradio interface
304
- with gr.Blocks(
305
- theme=gr.themes.Soft(primary_hue="amber", secondary_hue="blue"),
306
- css=custom_css,
307
- title="PENNY V2.2 - Civic Assistant"
308
- ) as demo:
309
-
310
- # Header
311
- gr.Markdown(
312
- """
313
- # 🤖 PENNY V2.2 - People's Engagement Network Navigator for You
314
-
315
- **Your multilingual civic assistant connecting residents to local government services and community resources.**
316
-
317
- ### 💬 Ask me about:
318
- - 🌤️ **Weather conditions** and forecasts
319
- - 📅 **Community events** and activities
320
- - 🏛️ **Local resources** (shelters, libraries, food banks, healthcare)
321
- - 👥 **Elected officials** and government contacts
322
- - 🌍 **Translation** services (27+ languages)
323
- - 📄 **Document assistance** and form help
324
- """
325
- )
326
-
327
- with gr.Row():
328
- with gr.Column(scale=2):
329
- # City selector
330
- city_dropdown = gr.Dropdown(
331
- choices=get_city_choices(),
332
- value="Norfolk, VA",
333
- label="📍 Select Your City",
334
- info="Choose your city for location-specific information",
335
- interactive=True
336
- )
337
-
338
- # Chat interface
339
- chatbot = gr.Chatbot(
340
- label="💬 Chat with PENNY",
341
- elem_id="chatbot",
342
- avatar_images=(None, "🤖"),
343
- show_label=True,
344
- height=500,
345
- bubble_full_width=False
346
- )
347
-
348
- # Input row
349
- with gr.Row():
350
- msg_input = gr.Textbox(
351
- placeholder="Type your message here... (e.g., 'What's the weather today?')",
352
- show_label=False,
353
- scale=4,
354
- container=False,
355
- lines=1
356
- )
357
- submit_btn = gr.Button("Send 📤", variant="primary", scale=1)
358
-
359
- # Clear button
360
- clear_btn = gr.Button("🗑️ Clear Chat", variant="secondary", size="sm")
361
-
362
- # Example queries
363
- gr.Examples(
364
- examples=[
365
- ["What's the weather in Norfolk today?"],
366
- ["Any community events this weekend?"],
367
- ["I need help finding a food bank"],
368
- ["Who is my city council representative?"],
369
- ["Show me local libraries"],
370
- ["Translate 'hello' to Spanish"],
371
- ["Help me understand this document"]
372
- ],
373
- inputs=msg_input,
374
- label="💡 Try asking:"
375
- )
376
-
377
- with gr.Column(scale=1):
378
- # Service status panel
379
- status_display = gr.Markdown(
380
- value=get_service_status(),
381
- label="System Status",
382
- elem_id="status-panel"
383
- )
384
-
385
- # Refresh status button
386
- refresh_btn = gr.Button("🔄 Refresh Status", size="sm", variant="secondary")
387
-
388
- gr.Markdown(
389
- """
390
- ### 🌟 Key Features
391
-
392
- - ✅ **27+ Languages** supported
393
- - ✅ **Real-time weather** via Azure Maps
394
- - ✅ **Community events** database
395
- - ✅ **Local resource** finder
396
- - **Government contact** lookup
397
- - ✅ **Document processing** help
398
- - ✅ **Multilingual** support
399
-
400
- ---
401
-
402
- ### 📍 Supported Cities
403
-
404
- - Atlanta, GA
405
- - Birmingham, AL
406
- - Chesterfield, VA
407
- - El Paso, TX
408
- - Norfolk, VA
409
- - Providence, RI
410
- - Seattle, WA
411
-
412
- ---
413
-
414
- ### 🆘 Need Help?
415
-
416
- PENNY can assist with:
417
- - Finding emergency services
418
- - Locating government offices
419
- - Understanding civic processes
420
- - Accessing community programs
421
-
422
- ---
423
-
424
- 💛 *PENNY is here to help connect you with civic resources!*
425
- """
426
- )
427
-
428
- # Event handlers
429
- submit_btn.click(
430
- fn=chat_with_penny_sync,
431
- inputs=[msg_input, city_dropdown, chatbot],
432
- outputs=[chatbot, msg_input]
433
- )
434
-
435
- msg_input.submit(
436
- fn=chat_with_penny_sync,
437
- inputs=[msg_input, city_dropdown, chatbot],
438
- outputs=[chatbot, msg_input]
439
- )
440
-
441
- clear_btn.click(
442
- fn=lambda: ([], ""),
443
- inputs=None,
444
- outputs=[chatbot, msg_input]
445
- )
446
-
447
- refresh_btn.click(
448
- fn=get_service_status,
449
- inputs=None,
450
- outputs=status_display
451
- )
452
-
453
- # Footer
454
- gr.Markdown(
455
- """
456
- ---
457
- **Built with:** Python • FastAPI • Gradio • Azure ML • Hugging Face Transformers
458
-
459
- **Version:** 2.2 | **Last Updated:** November 2025
460
-
461
- _PENNY is an open-source civic engagement platform designed to improve access to government services._
462
- """
463
- )
464
-
465
-
466
- # ============================================================
467
- # INITIALIZATION AND LAUNCH
468
- # ============================================================
469
-
470
- def initialize_penny():
471
- """Initialize PENNY services at startup."""
472
- logger.info("=" * 70)
473
- logger.info("🚀 Initializing PENNY V2.2 Gradio Interface")
474
- logger.info("=" * 70)
475
-
476
- # Display service availability at startup
477
- logger.info("\n📊 Service Availability Check:")
478
- services = get_service_availability()
479
-
480
- all_available = True
481
- for service, available in services.items():
482
- status = "✅ Available" if available else "❌ Not loaded"
483
- logger.info(f" {service.ljust(20)}: {status}")
484
- if not available:
485
- all_available = False
486
-
487
- if all_available:
488
- logger.info("\n✅ All services loaded successfully!")
489
- else:
490
- logger.warning("\n⚠️ Some services are not available. PENNY will run with limited functionality.")
491
-
492
- logger.info("\n" + "=" * 70)
493
- logger.info("🤖 PENNY is ready to help residents!")
494
- logger.info("=" * 70 + "\n")
495
-
496
-
497
- if __name__ == "__main__":
498
- # Initialize services
499
- initialize_penny()
500
-
501
- # Launch the Gradio app
502
- demo.launch(
503
- server_name="0.0.0.0",
504
- server_port=7860,
505
- share=False,
506
- show_error=True
507
- )
 
1
+ """
2
+ 🤖 PENNY V2.2 Gradio Interface
3
+ Hugging Face Space Entry Point
4
+
5
+ This file connects PENNY's backend to a Gradio chat interface,
6
+ allowing users to interact with PENNY through a web UI on Hugging Face Spaces.
7
+ """
8
+
9
+ import gradio as gr
10
+ import logging
11
+ import sys
12
+ import asyncio
13
+ import os
14
+ from dotenv import load_dotenv
15
+ from typing import List, Tuple, Dict, Any
16
+ from datetime import datetime
17
+
18
+ # Load environment variables from .env file
19
+ load_dotenv() # Add this line
20
+
21
+ # Verify the key loaded (optional debug)
22
+ if os.getenv("AZURE_MAPS_KEY"):
23
+ print("✅ AZURE_MAPS_KEY loaded successfully")
24
+ else:
25
+ print("⚠️ AZURE_MAPS_KEY not found!")
26
+
27
+ from typing import List, Tuple, Dict, Any
28
+ from datetime import datetime
29
+
30
+ # Setup logging
31
+ logging.basicConfig(
32
+ level=logging.INFO,
33
+ format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
34
+ handlers=[logging.StreamHandler(sys.stdout)]
35
+ )
36
+ logger = logging.getLogger(__name__)
37
+
38
# ============================================================
# IMPORT PENNY MODULES - FIXED FOR ACTUAL FILE STRUCTURE
# ============================================================

try:
    # Core orchestration and routing
    from app.orchestrator import run_orchestrator

    # ❗ FIXED LINE (route_query ➜ route_request)
    # NOTE(review): route_request is imported but not referenced anywhere in
    # this file — presumably kept for side effects or future use; verify.
    from app.router import route_request

    # Utilities
    from app.location_utils import geocode_address, get_user_location
    from app.logging_utils import setup_logger

    # Event and weather handling
    from app.event_weather import (
        get_weather_info,
        search_events,
        format_weather_response,
        format_event_response
    )

    # Tool agent for officials and resources
    from app.tool_agent import (
        search_officials,
        search_resources,
        format_official_response,
        format_resource_response
    )

    # Model loader (if needed for initialization)
    from app.model_loader import initialize_models

    # Intent classification
    from app.intents import classify_intent, IntentType

    logger.info("✅ Successfully imported PENNY modules from app/")

except ImportError as import_error:
    # Any missing app/ module lands here; the UI still loads with stubs below.
    logger.error(f" Failed to import PENNY modules: {import_error}")
    logger.error(f" Make sure all files exist in app/ folder")
    logger.error(f" Current error: {str(import_error)}")

    # Create fallback functions so the interface can still load
    async def run_orchestrator(message: str, context: Dict[str, Any]) -> Dict[str, Any]:
        # Minimal stand-in: always reports that PENNY is still starting up.
        return {
            "reply": "⚠️ PENNY is initializing. Please try again in a moment.",
            "intent": "error",
            "confidence": 0.0
        }

    def get_service_availability() -> Dict[str, bool]:
        # NOTE(review): dead code — this fallback is unconditionally shadowed
        # by the module-level get_service_availability defined further below.
        return {
            "orchestrator": False,
            "weather_service": False,
            "event_database": False,
            "resource_finder": False
        }
97
+
98
# ============================================================
# SERVICE AVAILABILITY CHECK
# ============================================================

def get_service_availability() -> Dict[str, bool]:
    """
    Check which PENNY services are available.

    Each probe is independent, so a failure in one check never hides the
    status of the others.

    Returns:
        Dict mapping service name ("orchestrator", "weather_service",
        "event_database", "resource_finder") to availability status.
    """
    services: Dict[str, bool] = {}

    # Orchestrator is available when the (real or fallback) callable exists.
    # Catch Exception — not a bare `except:`, which would also swallow
    # KeyboardInterrupt/SystemExit — so an unexpected NameError just reads
    # as "unavailable".
    try:
        services["orchestrator"] = callable(run_orchestrator)
    except Exception:
        services["orchestrator"] = False

    # Weather service: the event_weather module must import cleanly.
    try:
        from app.event_weather import get_weather_info  # noqa: F401
        services["weather_service"] = True
    except Exception:
        services["weather_service"] = False

    # Event database: event search available in the same module.
    try:
        from app.event_weather import search_events  # noqa: F401
        services["event_database"] = True
    except Exception:
        services["event_database"] = False

    # Resource finder: tool agent module loaded.
    try:
        from app.tool_agent import search_resources  # noqa: F401
        services["resource_finder"] = True
    except Exception:
        services["resource_finder"] = False

    return services
133
+
134
+
135
# ============================================================
# SUPPORTED CITIES CONFIGURATION
# ============================================================

# Cities with location-specific PENNY data; keep in sync with the sidebar list.
SUPPORTED_CITIES = [
    "Atlanta, GA",
    "Birmingham, AL",
    "Chesterfield, VA",
    "El Paso, TX",
    "Norfolk, VA",
    "Providence, RI",
    "Seattle, WA"
]

def get_city_choices() -> List[str]:
    """Return dropdown choices: a generic option followed by the supported cities A→Z."""
    try:
        choices = ["Not sure / Other"]
        choices.extend(sorted(SUPPORTED_CITIES))
        return choices
    except Exception as exc:
        # Fall back to a minimal, always-valid choice list.
        logger.error(f"Error loading cities: {exc}")
        return ["Not sure / Other", "Norfolk, VA"]
155
+
156
+
157
# ============================================================
# CHAT HANDLER
# ============================================================

async def chat_with_penny(
    message: str,
    city: str,
    history: List[Tuple[str, str]]
) -> Tuple[List[Tuple[str, str]], str]:
    """
    Run one chat turn through PENNY's orchestrator.

    Args:
        message: The user's input text.
        city: Selected city from the dropdown ("Not sure / Other" means none).
        history: Chat history as (user_msg, bot_msg) tuples; mutated in place.

    Returns:
        Tuple of (updated history, "" to clear the input textbox).
    """
    # Ignore empty / whitespace-only submissions.
    if not message.strip():
        return history, ""

    try:
        # Orchestrator context: timestamp plus a short window of recent turns.
        context: Dict[str, Any] = {
            "timestamp": datetime.now().isoformat(),
            "conversation_history": history[-5:] if history else [],
        }

        # Attach a location only when the user picked a concrete city.
        if city and city != "Not sure / Other":
            tenant = city.split(",")[0].lower().replace(" ", "_")
            context.update(location=city, tenant_id=tenant)

        logger.info(f"📨 Processing: '{message[:60]}...' | City: {city}")

        result = await run_orchestrator(message, context)

        reply = result.get("reply", "I'm having trouble right now. Please try again! 💛")
        intent = result.get("intent", "unknown")
        confidence = result.get("confidence", 0.0)

        history.append((message, reply))
        logger.info(f"✅ Response generated | Intent: {intent} | Confidence: {confidence:.2f}")
        return history, ""

    except Exception as e:
        logger.error(f" Error processing message: {e}", exc_info=True)
        error_reply = (
            "I'm having trouble processing your request right now. "
            "Please try again in a moment! 💛\n\n"
            f"_Error: {str(e)[:100]}_"
        )
        history.append((message, error_reply))
        return history, ""
204
+
205
+
206
def chat_with_penny_sync(message: str, city: str, history: List[Tuple[str, str]]) -> Tuple[List[Tuple[str, str]], str]:
    """
    Synchronous wrapper so Gradio (which calls sync handlers) can drive the
    async chat_with_penny coroutine.

    Args:
        message: User's input text.
        city: Selected city from the dropdown.
        history: Chat history as (user_msg, bot_msg) tuples.

    Returns:
        Tuple of (updated history, "" to clear the input box).
    """
    try:
        # asyncio.run creates a fresh event loop, runs the coroutine, and
        # always closes the loop — unlike the previous manual
        # new_event_loop()/run_until_complete()/close() sequence, which
        # leaked the loop whenever the coroutine raised (close was not in a
        # finally) and never restored the thread's previous event loop.
        return asyncio.run(chat_with_penny(message, city, history))
    except Exception as e:
        logger.error(f"Error in sync wrapper: {e}")
        history.append((message, f"Error: {str(e)}"))
        return history, ""
219
+
220
+
221
# ============================================================
# SERVICE STATUS DISPLAY
# ============================================================

def get_service_status() -> str:
    """Render current service availability as a Markdown status panel."""
    try:
        # Human-friendly display names for each service key.
        labels = {
            "orchestrator": "🧠 Core Orchestrator",
            "weather_service": "🌤️ Weather Service",
            "event_database": "📅 Event Database",
            "resource_finder": "🏛️ Resource Finder"
        }

        lines = ["**🔧 PENNY Service Status:**\n"]
        for key, up in get_service_availability().items():
            # Unknown keys fall back to a title-cased version of the key.
            display = labels.get(key, key.replace('_', ' ').title())
            lines.append(
                f"{'✅' if up else '⚠️'} **{display}**: {'Online' if up else 'Limited'}"
            )
        return "\n".join(lines)
    except Exception as e:
        logger.error(f"Error getting service status: {e}")
        return "**⚠️ Status:** Unable to check service availability"
247
+
248
+
249
# ============================================================
# GRADIO UI DEFINITION
# ============================================================

# Custom CSS injected into the Blocks app: fixed-height scrollable chat pane,
# gradient status panel, hidden Gradio footer, and tinted chat bubbles.
custom_css = """
#chatbot {
    height: 500px;
    overflow-y: auto;
    border-radius: 8px;
}
.gradio-container {
    font-family: 'Inter', -apple-system, BlinkMacSystemFont, sans-serif;
}
#status-panel {
    background: linear-gradient(135deg, #f5f7fa 0%, #c3cfe2 100%);
    padding: 15px;
    border-radius: 8px;
    margin: 10px 0;
}
footer {
    display: none !important;
}
.message-user {
    background-color: #e3f2fd !important;
}
.message-bot {
    background-color: #fff3e0 !important;
}
"""
278
+
279
# Build the Gradio interface: two-column layout (chat on the left,
# status/info on the right), with event wiring at the bottom of the block.
with gr.Blocks(
    theme=gr.themes.Soft(primary_hue="amber", secondary_hue="blue"),
    css=custom_css,
    title="PENNY V2.2 - Civic Assistant"
) as demo:

    # Page header: title and capability overview.
    gr.Markdown(
        """
        # 🤖 PENNY V2.2 - People's Engagement Network Navigator for You

        **Your multilingual civic assistant connecting residents to local government services and community resources.**

        ### 💬 Ask me about:
        - 🌤️ Weather conditions
        - 📅 Community events
        - 🏛️ Local resources
        - 👥 Elected officials
        - 🌍 Translation
        - 📄 Document assistance
        """
    )

    with gr.Row():
        with gr.Column(scale=2):

            # City selector feeding location context into every chat turn.
            city_dropdown = gr.Dropdown(
                choices=get_city_choices(),
                value="Norfolk, VA",
                label="📍 Select Your City",
                info="Choose your city for location-specific information",
                interactive=True
            )

            # Main chat pane.
            # NOTE(review): avatar_images generally expects image paths/URLs;
            # passing the "🤖" emoji string may not render on all Gradio
            # versions — confirm against the pinned release. bubble_full_width
            # is deprecated in newer Gradio.
            chatbot = gr.Chatbot(
                label="💬 Chat with PENNY",
                elem_id="chatbot",
                avatar_images=(None, "🤖"),
                show_label=True,
                height=500,
                bubble_full_width=False
            )

            # Message input plus send button on one row.
            with gr.Row():
                msg_input = gr.Textbox(
                    placeholder="Type your message here...",
                    show_label=False,
                    scale=4,
                    container=False,
                    lines=1
                )
                submit_btn = gr.Button("Send 📤", variant="primary", scale=1)

            clear_btn = gr.Button("🗑️ Clear Chat", variant="secondary", size="sm")

            # Clickable example prompts that populate the textbox.
            gr.Examples(
                examples=[
                    ["What's the weather in Norfolk today?"],
                    ["Any community events this weekend?"],
                    ["I need help finding a food bank"],
                    ["Who is my city council representative?"],
                    ["Translate 'hello' to Spanish"],
                ],
                inputs=msg_input,
                label="💡 Try asking:"
            )

        with gr.Column(scale=1):
            # Live service status panel; refreshed on demand via refresh_btn.
            status_display = gr.Markdown(
                value=get_service_status(),
                label="System Status",
                elem_id="status-panel"
            )

            refresh_btn = gr.Button("🔄 Refresh Status", size="sm", variant="secondary")

            gr.Markdown(
                """
                ### 🌟 Key Features

                - 27+ Languages
                - Real time weather
                - Community events
                - Resource finder
                - Elected officials lookup
                - Document processing
                """
            )

    # ---- Event wiring ----
    # Both the Send button and pressing Enter in the textbox submit a turn;
    # the handler returns (history, "") so the textbox is cleared.
    submit_btn.click(
        fn=chat_with_penny_sync,
        inputs=[msg_input, city_dropdown, chatbot],
        outputs=[chatbot, msg_input]
    )

    msg_input.submit(
        fn=chat_with_penny_sync,
        inputs=[msg_input, city_dropdown, chatbot],
        outputs=[chatbot, msg_input]
    )

    # Reset both the chat pane and the textbox.
    clear_btn.click(
        fn=lambda: ([], ""),
        inputs=None,
        outputs=[chatbot, msg_input]
    )

    # Re-run the availability probes and redraw the status panel.
    refresh_btn.click(
        fn=get_service_status,
        inputs=None,
        outputs=status_display
    )

    # Footer.
    gr.Markdown(
        """
        ---
        **Built with:** Python • FastAPI • Gradio • Azure ML • Hugging Face Transformers

        **Version:** 2.2 | **Last Updated:** November 2025
        """
    )
399
+
400
+
401
# ============================================================
# INITIALIZATION AND LAUNCH
# ============================================================

def initialize_penny():
    """Log a startup banner plus a per-service availability report."""
    banner = "=" * 70
    logger.info(banner)
    logger.info("🚀 Initializing PENNY V2.2 Gradio Interface")
    logger.info(banner)

    logger.info("\n📊 Service Availability Check:")
    services = get_service_availability()

    # Report each service and remember whether anything failed to load.
    all_available = True
    for name, up in services.items():
        status = "✅ Available" if up else "❌ Not loaded"
        logger.info(f"   {name.ljust(20)}: {status}")
        all_available = all_available and up

    if all_available:
        logger.info("\n✅ All services loaded successfully!")
    else:
        logger.warning("\n⚠️ Some services are not available. PENNY will run with limited functionality.")

    logger.info("\n" + banner)
    logger.info("🤖 PENNY is ready to help residents!")
    logger.info(banner + "\n")
428
+
429
+
430
+ if __name__ == "__main__":
431
+ initialize_penny()
432
+
433
+ demo.launch(
434
+ server_name="0.0.0.0",
435
+ server_port=7860,
436
+ share=False,
437
+ show_error=True
438
+ )