pythonprincess committed on
Commit
c82ee1b
·
verified ·
1 Parent(s): fdf54b4

Delete gradio_app.py

Browse files
Files changed (1) hide show
  1. gradio_app.py +0 -438
gradio_app.py DELETED
@@ -1,438 +0,0 @@
1
"""
🤖 PENNY V2.2 Gradio Interface
Hugging Face Space Entry Point

This file connects PENNY's backend to a Gradio chat interface,
allowing users to interact with PENNY through a web UI on Hugging Face Spaces.
"""

import gradio as gr
import logging
import sys
import asyncio
import os
from dotenv import load_dotenv
from typing import List, Tuple, Dict, Any
from datetime import datetime

# Load environment variables from .env file so local runs pick up secrets;
# on Hugging Face Spaces the variables come from the Space settings instead.
load_dotenv()

# Startup diagnostic: report only the *presence* of the Azure Maps key,
# never its value.
if os.getenv("AZURE_MAPS_KEY"):
    print("✅ AZURE_MAPS_KEY loaded successfully")
else:
    print("⚠️ AZURE_MAPS_KEY not found!")

# NOTE: the duplicated `from typing import ...` / `from datetime import ...`
# lines that used to follow here were redundant re-imports and were removed.

# Setup logging: stream to stdout so Space container logs capture everything.
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
    handlers=[logging.StreamHandler(sys.stdout)]
)
logger = logging.getLogger(__name__)
37
-
38
# ============================================================
# IMPORT PENNY MODULES - FIXED FOR ACTUAL FILE STRUCTURE
# ============================================================
# Everything PENNY needs lives in the local `app/` package.  If any import
# fails, minimal fallbacks are installed below so the Gradio UI still loads
# in a degraded "initializing" mode instead of crashing the Space.

try:
    # Core orchestration and routing
    from app.orchestrator import run_orchestrator

    # ❗ FIXED LINE (route_query ➜ route_request)
    from app.router import route_request

    # Utilities
    from app.location_utils import geocode_address, get_user_location
    from app.logging_utils import setup_logger

    # Event and weather handling
    from app.event_weather import (
        get_weather_info,
        search_events,
        format_weather_response,
        format_event_response
    )

    # Tool agent for officials and resources
    from app.tool_agent import (
        search_officials,
        search_resources,
        format_official_response,
        format_resource_response
    )

    # Model loader (if needed for initialization)
    from app.model_loader import initialize_models

    # Intent classification
    from app.intents import classify_intent, IntentType

    logger.info("✅ Successfully imported PENNY modules from app/")

except ImportError as import_error:
    logger.error(f"❌ Failed to import PENNY modules: {import_error}")
    logger.error(f" Make sure all files exist in app/ folder")
    logger.error(f" Current error: {str(import_error)}")

    # Create fallback functions so the interface can still load
    async def run_orchestrator(message: str, context: Dict[str, Any]) -> Dict[str, Any]:
        # Degraded-mode stand-in: always answers that PENNY is initializing.
        return {
            "reply": "⚠️ PENNY is initializing. Please try again in a moment.",
            "intent": "error",
            "confidence": 0.0
        }

    def get_service_availability() -> Dict[str, bool]:
        # NOTE(review): this fallback is unconditionally shadowed by the
        # module-level def of the same name further down the file, so it
        # never takes effect — consider deleting one of the two.
        return {
            "orchestrator": False,
            "weather_service": False,
            "event_database": False,
            "resource_finder": False
        }
97
-
98
# ============================================================
# SERVICE AVAILABILITY CHECK
# ============================================================

def get_service_availability() -> Dict[str, bool]:
    """
    Check which PENNY services are available.

    Each service is probed independently so that one failure does not
    hide the state of the others.  Probes never raise: a failed import
    (or a missing name) simply marks that service as unavailable.

    Returns:
        Dict of service_name -> availability status.
    """
    services: Dict[str, bool] = {}

    # Orchestrator: available when run_orchestrator was imported and is
    # callable.  `except Exception` (not a bare except) so SystemExit and
    # KeyboardInterrupt are no longer swallowed.
    try:
        services["orchestrator"] = callable(run_orchestrator)
    except Exception:
        services["orchestrator"] = False

    # Weather service: probe by (re-)importing the backing function.
    try:
        from app.event_weather import get_weather_info  # noqa: F401
        services["weather_service"] = True
    except Exception:
        services["weather_service"] = False

    # Event database: same probe pattern.
    try:
        from app.event_weather import search_events  # noqa: F401
        services["event_database"] = True
    except Exception:
        services["event_database"] = False

    # Resource finder: same probe pattern.
    try:
        from app.tool_agent import search_resources  # noqa: F401
        services["resource_finder"] = True
    except Exception:
        services["resource_finder"] = False

    return services
133
-
134
-
135
# ============================================================
# SUPPORTED CITIES CONFIGURATION
# ============================================================

# Cities PENNY has tenant-specific data for, in display form "City, ST".
SUPPORTED_CITIES = [
    "Atlanta, GA",
    "Birmingham, AL",
    "Chesterfield, VA",
    "El Paso, TX",
    "Norfolk, VA",
    "Providence, RI",
    "Seattle, WA"
]

def get_city_choices() -> List[str]:
    """Return the city dropdown choices: a generic "Not sure / Other"
    entry followed by the supported cities in alphabetical order.

    Sorting a module-level list of string literals cannot raise, so the
    previous broad try/except fallback was dead code and was removed.
    """
    return ["Not sure / Other"] + sorted(SUPPORTED_CITIES)
155
-
156
-
157
# ============================================================
# CHAT HANDLER
# ============================================================

async def chat_with_penny(
    message: str,
    city: str,
    history: List[Tuple[str, str]]
) -> Tuple[List[Tuple[str, str]], str]:
    """Process one user turn through PENNY's orchestrator.

    Appends the (user message, PENNY reply) pair to *history* and returns
    the updated history together with "" so the input textbox is cleared.
    Errors never propagate to the UI: they are logged and surfaced to the
    user as a friendly chat reply instead.
    """
    # Blank input: nothing to do, just clear the textbox.
    if not message.strip():
        return history, ""

    try:
        # Per-request context: when the turn happened plus a short window
        # of prior turns (last 5) for conversational continuity.
        request_context: Dict[str, Any] = {
            "timestamp": datetime.now().isoformat(),
            "conversation_history": history[-5:] if history else []
        }

        # Attach location metadata only for a concrete city selection;
        # the tenant id is the city name lowercased with underscores
        # (e.g. "El Paso, TX" -> "el_paso").
        if city and city != "Not sure / Other":
            request_context["location"] = city
            request_context["tenant_id"] = city.split(",")[0].lower().replace(" ", "_")

        logger.info(f"📨 Processing: '{message[:60]}...' | City: {city}")

        outcome = await run_orchestrator(message, request_context)

        penny_reply = outcome.get("reply", "I'm having trouble right now. Please try again! 💛")
        history.append((message, penny_reply))

        logger.info(
            f"✅ Response generated | Intent: {outcome.get('intent', 'unknown')}"
            f" | Confidence: {outcome.get('confidence', 0.0):.2f}"
        )
        return history, ""

    except Exception as exc:
        logger.error(f"❌ Error processing message: {exc}", exc_info=True)

        failure_reply = (
            "I'm having trouble processing your request right now. "
            "Please try again in a moment! 💛\n\n"
            f"_Error: {str(exc)[:100]}_"
        )
        history.append((message, failure_reply))
        return history, ""
204
-
205
-
206
def chat_with_penny_sync(message: str, city: str, history: List[Tuple[str, str]]) -> Tuple[List[Tuple[str, str]], str]:
    """Synchronous bridge so Gradio's sync callbacks can run the async handler.

    Spins up a private event loop, runs chat_with_penny to completion, and
    guarantees the loop is closed afterwards.  (The previous version only
    closed the loop on success, leaking it whenever the coroutine raised.)

    Returns:
        (updated history, "") — on failure the error text is appended to
        the history as PENNY's reply instead of raising.
    """
    try:
        loop = asyncio.new_event_loop()
        asyncio.set_event_loop(loop)
        try:
            return loop.run_until_complete(chat_with_penny(message, city, history))
        finally:
            # Always release the loop's resources, even when the coroutine raises.
            loop.close()
    except Exception as e:
        logger.error(f"Error in sync wrapper: {e}")
        error_msg = f"Error: {str(e)}"
        history.append((message, error_msg))
        return history, ""
219
-
220
-
221
# ============================================================
# SERVICE STATUS DISPLAY
# ============================================================

def get_service_status() -> str:
    """Render a Markdown panel summarizing which PENNY services are up."""
    display_names = {
        "orchestrator": "🧠 Core Orchestrator",
        "weather_service": "🌤️ Weather Service",
        "event_database": "📅 Event Database",
        "resource_finder": "🏛️ Resource Finder"
    }

    def describe(key: str, up: bool) -> str:
        # One Markdown bullet per service: icon, friendly name, state.
        label = display_names.get(key, key.replace('_', ' ').title())
        marker = "✅" if up else "⚠️"
        state = "Online" if up else "Limited"
        return f"{marker} **{label}**: {state}"

    try:
        rows = [describe(key, up) for key, up in get_service_availability().items()]
        return "\n".join(["**🔧 PENNY Service Status:**\n"] + rows)
    except Exception as e:
        logger.error(f"Error getting service status: {e}")
        return "**⚠️ Status:** Unable to check service availability"
247
-
248
-
249
# ============================================================
# GRADIO UI DEFINITION
# ============================================================

# Custom CSS injected into the Gradio page: fixed-height scrollable chat
# area, app-wide font, gradient status panel, hidden Gradio footer, and
# per-role message bubble colors.
custom_css = """
#chatbot {
    height: 500px;
    overflow-y: auto;
    border-radius: 8px;
}
.gradio-container {
    font-family: 'Inter', -apple-system, BlinkMacSystemFont, sans-serif;
}
#status-panel {
    background: linear-gradient(135deg, #f5f7fa 0%, #c3cfe2 100%);
    padding: 15px;
    border-radius: 8px;
    margin: 10px 0;
}
footer {
    display: none !important;
}
.message-user {
    background-color: #e3f2fd !important;
}
.message-bot {
    background-color: #fff3e0 !important;
}
"""
278
-
279
# Top-level Gradio Blocks layout: header, two-column body (chat on the
# left, status panel on the right), and event wiring for all controls.
with gr.Blocks(
    theme=gr.themes.Soft(primary_hue="amber", secondary_hue="blue"),
    css=custom_css,
    title="PENNY V2.2 - Civic Assistant"
) as demo:

    # Page header and capability overview.
    gr.Markdown(
        """
        # 🤖 PENNY V2.2 - People's Engagement Network Navigator for You

        **Your multilingual civic assistant connecting residents to local government services and community resources.**

        ### 💬 Ask me about:
        - 🌤️ Weather conditions
        - 📅 Community events
        - 🏛️ Local resources
        - 👥 Elected officials
        - 🌍 Translation
        - 📄 Document assistance
        """
    )

    with gr.Row():
        # Left column (wider): city picker, chat window, input controls.
        with gr.Column(scale=2):

            city_dropdown = gr.Dropdown(
                choices=get_city_choices(),
                value="Norfolk, VA",
                label="📍 Select Your City",
                info="Choose your city for location-specific information",
                interactive=True
            )

            chatbot = gr.Chatbot(
                label="💬 Chat with PENNY",
                elem_id="chatbot",
                avatar_images=(None, "🤖"),
                show_label=True,
                height=500,
                bubble_full_width=False
            )

            with gr.Row():
                msg_input = gr.Textbox(
                    placeholder="Type your message here...",
                    show_label=False,
                    scale=4,
                    container=False,
                    lines=1
                )
                submit_btn = gr.Button("Send 📤", variant="primary", scale=1)

            clear_btn = gr.Button("🗑️ Clear Chat", variant="secondary", size="sm")

            # Clickable example prompts that populate the input box.
            gr.Examples(
                examples=[
                    ["What's the weather in Norfolk today?"],
                    ["Any community events this weekend?"],
                    ["I need help finding a food bank"],
                    ["Who is my city council representative?"],
                    ["Translate 'hello' to Spanish"],
                ],
                inputs=msg_input,
                label="💡 Try asking:"
            )

        # Right column (narrower): live service status and feature list.
        with gr.Column(scale=1):
            status_display = gr.Markdown(
                value=get_service_status(),
                label="System Status",
                elem_id="status-panel"
            )

            refresh_btn = gr.Button("🔄 Refresh Status", size="sm", variant="secondary")

            gr.Markdown(
                """
                ### 🌟 Key Features

                - 27+ Languages
                - Real time weather
                - Community events
                - Resource finder
                - Elected officials lookup
                - Document processing
                """
            )

    # Send button and Enter key both route through the same sync handler;
    # it returns (new history, "") which clears the textbox.
    submit_btn.click(
        fn=chat_with_penny_sync,
        inputs=[msg_input, city_dropdown, chatbot],
        outputs=[chatbot, msg_input]
    )

    msg_input.submit(
        fn=chat_with_penny_sync,
        inputs=[msg_input, city_dropdown, chatbot],
        outputs=[chatbot, msg_input]
    )

    # Clear resets both the chat history and the input box.
    clear_btn.click(
        fn=lambda: ([], ""),
        inputs=None,
        outputs=[chatbot, msg_input]
    )

    # Re-run the availability check and refresh the status panel.
    refresh_btn.click(
        fn=get_service_status,
        inputs=None,
        outputs=status_display
    )

    # Footer.
    gr.Markdown(
        """
        ---
        **Built with:** Python • FastAPI • Gradio • Azure ML • Hugging Face Transformers

        **Version:** 2.2 | **Last Updated:** November 2025
        """
    )
399
-
400
-
401
# ============================================================
# INITIALIZATION AND LAUNCH
# ============================================================

def initialize_penny():
    """Log a startup banner and a per-service availability report."""
    banner = "=" * 70

    logger.info(banner)
    logger.info("🚀 Initializing PENNY V2.2 Gradio Interface")
    logger.info(banner)

    logger.info("\n📊 Service Availability Check:")
    services = get_service_availability()

    # One report line per service, padded for alignment.
    for service, available in services.items():
        status = "✅ Available" if available else "❌ Not loaded"
        logger.info(f" {service.ljust(20)}: {status}")

    if all(services.values()):
        logger.info("\n✅ All services loaded successfully!")
    else:
        logger.warning("\n⚠️ Some services are not available. PENNY will run with limited functionality.")

    logger.info("\n" + banner)
    logger.info("🤖 PENNY is ready to help residents!")
    logger.info(banner + "\n")
428
-
429
-
430
if __name__ == "__main__":
    # Log the startup banner / service report before serving.
    initialize_penny()

    # 0.0.0.0:7860 is the address Hugging Face Spaces expects; share=False
    # because Spaces already exposes a public URL, and show_error surfaces
    # handler exceptions in the UI for easier debugging.
    demo.launch(
        server_name="0.0.0.0",
        server_port=7860,
        share=False,
        show_error=True
    )