File size: 15,827 Bytes
16a929f
 
 
 
 
 
 
 
 
 
 
2ab59fc
16a929f
f9b2d64
 
 
 
16a929f
2ab59fc
 
 
 
 
 
 
 
 
f9b2d64
16a929f
 
f73fcec
 
16a929f
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
f73fcec
0ede81a
4ff72c2
 
f73fcec
 
 
af32d69
 
68d83ed
 
 
 
4ff72c2
 
 
 
 
68d83ed
 
 
 
 
4ff72c2
 
68d83ed
 
 
 
 
4ff72c2
 
68d83ed
 
 
 
f9b2d64
 
 
4ff72c2
 
 
 
 
 
 
 
 
f9b2d64
f73fcec
 
 
f9b2d64
f73fcec
 
 
4ff72c2
f73fcec
 
 
 
4ff72c2
 
 
f73fcec
 
4ff72c2
f73fcec
4ff72c2
f73fcec
4ff72c2
f73fcec
4ff72c2
 
f9b2d64
 
4ff72c2
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
f9b2d64
f73fcec
f9b2d64
4ff72c2
 
 
 
f9b2d64
 
 
4ff72c2
 
 
0ede81a
 
 
 
4ff72c2
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
0ede81a
f73fcec
 
11c7301
 
f73fcec
 
 
 
 
16a929f
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
5263e3c
 
 
 
bde69ab
 
08072ad
bde69ab
 
16a929f
 
 
 
 
dc66623
16a929f
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
bde69ab
6b0be86
16a929f
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
"""
AI Personas for Urban Planning - Web UI

A beautiful web interface for interacting with urban planning stakeholder personas.

Usage:
    streamlit run web_app.py
"""

import streamlit as st
import sys
import os
from pathlib import Path
from dotenv import load_dotenv

# Load environment variables (e.g. ANTHROPIC_API_KEY) from a .env file, if present.
load_dotenv()

# Add current directory and src to path for imports so both `src.pkg`
# and bare `pkg` import styles resolve regardless of working directory.
current_dir = Path(__file__).parent.resolve()
sys.path.insert(0, str(current_dir))
sys.path.insert(0, str(current_dir / "src"))

# Debug: Print path information (will appear in deployment logs)
print(f"Current directory: {current_dir}")
print(f"Python path: {sys.path[:3]}")
print(f"Contents: {list(current_dir.iterdir())[:10]}")
print(f"ANTHROPIC_API_KEY present: {bool(os.getenv('ANTHROPIC_API_KEY'))}")

# Project imports — must come after the sys.path manipulation above.
from src.pipeline.query_engine import QueryEngine
from src.llm.anthropic_client import AnthropicClient
from src.llm.local_model_client import LocalModelClient


# Page configuration (browser tab title/icon, wide layout, sidebar collapsed)
st.set_page_config(
    page_title="AI Personas - Urban Planning",
    page_icon="πŸ™οΈ",
    layout="wide",
    initial_sidebar_state="collapsed"
)

# Custom CSS for better styling: persona cards, chat bubbles, and
# full-width buttons. Classes below are referenced by the HTML emitted
# in the conversation-history rendering further down.
st.markdown("""
<style>
    .persona-card {
        padding: 1rem;
        border-radius: 10px;
        margin-bottom: 0.5rem;
        cursor: pointer;
        transition: all 0.3s ease;
        border: 2px solid transparent;
    }
    .persona-card:hover {
        transform: translateY(-2px);
        box-shadow: 0 4px 8px rgba(0,0,0,0.1);
    }
    .persona-card.selected {
        border: 2px solid #1f77b4;
        background-color: #e7f3ff;
    }
    .persona-avatar {
        font-size: 3rem;
        text-align: center;
        margin-bottom: 0.5rem;
    }
    .persona-name {
        font-weight: bold;
        font-size: 1.1rem;
        text-align: center;
        margin-bottom: 0.3rem;
    }
    .persona-role {
        font-size: 0.9rem;
        color: #666;
        text-align: center;
    }
    .chat-message {
        padding: 1rem;
        border-radius: 10px;
        margin-bottom: 1rem;
    }
    .user-message {
        background-color: #e3f2fd;
        border-left: 4px solid #2196f3;
    }
    .assistant-message {
        background-color: #f5f5f5;
        border-left: 4px solid #4caf50;
    }
    .stButton>button {
        width: 100%;
        border-radius: 5px;
        height: 3rem;
        font-weight: bold;
    }
</style>
""", unsafe_allow_html=True)


# Initialize session state
if "selected_persona" not in st.session_state:
    st.session_state.selected_persona = None

if "conversation_history" not in st.session_state:
    st.session_state.conversation_history = []

if "current_question" not in st.session_state:
    st.session_state.current_question = ""

# Sidebar - Model Selection.
# llm_client stays None if initialization fails; the main area checks for
# that and halts with troubleshooting guidance instead of crashing.
llm_client = None  # Initialize to None to avoid NameError
initialization_error = None  # Store any error that occurs for the main-area fallback

with st.sidebar:
    st.title("πŸ€– LLM Model")

    # Diagnostics (collapsed by default) — environment facts useful when
    # debugging deployment issues, especially on Hugging Face Spaces.
    with st.expander("πŸ” System Diagnostics", expanded=False):
        api_key = os.getenv("ANTHROPIC_API_KEY")
        st.write(f"**API Key Status:** {'βœ“ Found' if api_key else 'βœ— Missing'}")
        if api_key:
            # Only a short prefix is shown so the key never leaks in full.
            st.write(f"**API Key Preview:** `{api_key[:15]}...`")
            st.write(f"**API Key Length:** {len(api_key)} chars")

        # Heuristic HF Spaces detection: Spaces mount the app at this path.
        is_hf_space = os.path.exists('/home/user/app')
        st.write(f"**Environment:** {'πŸ€— HF Spaces' if is_hf_space else 'πŸ’» Local'}")

        # Show package versions (local imports so a missing optional
        # dependency degrades to a diagnostic line instead of a crash).
        try:
            import anthropic
            st.write(f"**Anthropic SDK:** v{anthropic.__version__}")
        except Exception as e:
            st.write(f"**Anthropic SDK:** ❌ Error: {e}")

        try:
            import torch
            st.write(f"**PyTorch:** v{torch.__version__}")
            # FIX: report the best available accelerator. The previous check
            # only looked at Apple MPS and therefore reported 'cpu' on CUDA
            # machines (e.g. HF Spaces GPU hardware).
            if torch.cuda.is_available():
                device = torch.device('cuda')
            elif torch.backends.mps.is_available():
                device = torch.device('mps')
            else:
                device = torch.device('cpu')
            st.write(f"**Device:** {device}")
        except Exception as e:
            st.write(f"**PyTorch:** Not installed ({e})")

        st.write(f"**Python:** {sys.version.split()[0]}")
        st.write(f"**Streamlit:** v{st.__version__}")

    # Check API key availability up front so the user sees setup steps
    # before the initialization attempt below fails.
    api_key_available = bool(os.getenv("ANTHROPIC_API_KEY"))
    if not api_key_available:
        st.error("🚨 **No Anthropic API key detected!**")
        st.info("**To use Anthropic Claude:**")
        if os.path.exists('/home/user/app'):
            st.info("1. Go to Space Settings")
            st.info("2. Add secret: `ANTHROPIC_API_KEY`")
            st.info("3. Restart the Space")
        else:
            st.info("Set `ANTHROPIC_API_KEY` in your `.env` file")
        st.info("**Alternative:** Use 'Local Be.FM' model (no API key needed)")

    model_choice = st.radio(
        "Select Model:",
        ["Anthropic Claude", "Local Be.FM"],
        index=0 if api_key_available else 1,  # Default to Local Be.FM if no API key
        help="""
        **Anthropic Claude**: Use Claude API (requires API key)

        **Local Be.FM**: Run Stanford's Be.FM model locally (GPU-accelerated)
        """
    )

    # Initialize the LLM client based on selection; failures are surfaced
    # inline here and recorded for the main area.
    st.markdown("---")
    st.markdown("### Initialization Status")

    try:
        if model_choice == "Anthropic Claude":
            st.info("πŸ”„ Initializing Anthropic Claude...")
            llm_client = AnthropicClient()
            st.success("βœ… Anthropic Claude initialized successfully!")
        else:
            st.info("πŸ”„ Initializing Local Be.FM...")
            llm_client = LocalModelClient()
            st.success("βœ… Local Be.FM initialized successfully!")
            st.caption("πŸ’‘ First run will download the model (~16GB)")
    except ValueError as e:
        # ValueError is raised for configuration problems (e.g. missing API key).
        error_msg = str(e)
        initialization_error = error_msg
        st.error("❌ **Configuration Error**")  # FIX: was an f-string with no placeholders
        st.error(error_msg)

        if "API key" in error_msg:
            st.markdown("**πŸ“‹ Fix this by:**")
            if os.path.exists('/home/user/app'):
                st.markdown("1. Go to your HF Space Settings")
                st.markdown("2. Variables and secrets β†’ Add a secret")
                st.markdown("3. Name: `ANTHROPIC_API_KEY`")
                st.markdown("4. Value: Your API key (starts with `sk-ant-`)")
                st.markdown("5. Save and restart Space")
            else:
                st.markdown("1. Create/update `.env` file")
                st.markdown("2. Add: `ANTHROPIC_API_KEY=sk-ant-...`")
                st.markdown("3. Restart the app")
        llm_client = None
    except Exception as e:
        # Any other failure (network, model load, etc.).
        error_msg = f"{type(e).__name__}: {e}"
        initialization_error = error_msg
        st.error("❌ **Initialization Failed**")  # FIX: was an f-string with no placeholders
        st.error(error_msg)

        # Show the full traceback for debugging, collapsed by default.
        import traceback
        error_trace = traceback.format_exc()
        with st.expander("πŸ” Full Error Details (for debugging)"):
            st.code(error_trace)
        llm_client = None

# Only proceed if LLM client was successfully initialized; otherwise show
# troubleshooting guidance in the main area and halt this script run.
if llm_client is None:
    st.error("## ❌ Failed to initialize LLM client")

    if initialization_error:
        st.error(f"**Error:** {initialization_error}")

    st.warning("### πŸ‘ˆ Please check the sidebar for detailed error information and setup instructions")

    # Show helpful tips in main area
    st.markdown("---")
    st.markdown("### πŸ”§ Quick Troubleshooting")

    api_key = os.getenv("ANTHROPIC_API_KEY")
    if not api_key:
        st.markdown("**Issue:** No API key found")
        # Same HF-Spaces path heuristic used in the sidebar diagnostics.
        if os.path.exists('/home/user/app'):
            st.markdown("""
            **Solution for HF Spaces:**
            1. Go to your Space Settings (βš™οΈ)
            2. Click on "Variables and secrets"
            3. Add a new secret with name `ANTHROPIC_API_KEY`
            4. Paste your Anthropic API key (get one from https://console.anthropic.com/)
            5. Save and restart your Space
            """)
        else:
            st.markdown("""
            **Solution for Local:**
            1. Create a `.env` file in the project root
            2. Add: `ANTHROPIC_API_KEY=your-key-here`
            3. Get your API key from https://console.anthropic.com/
            4. Restart the app
            """)

        st.info("**Alternative:** Select 'Local Be.FM' model in the sidebar (no API key needed)")
    else:
        st.markdown(f"**API Key Detected:** Yes ({len(api_key)} chars)")
        st.markdown("**Issue:** Initialization failed despite having an API key")
        st.markdown("Check the sidebar for the specific error details.")

    # Halt: nothing below renders until initialization succeeds.
    st.stop()

# Initialize QueryEngine with the selected LLM client, caching it in
# session state across reruns.
#
# FIX: the previous guard compared clients by identity
# (`st.session_state.get("llm_client") is not llm_client`), but a fresh
# client instance is constructed on every script rerun, so the check always
# failed and the engine (plus its test_system() self-check) was rebuilt on
# every interaction. Comparing the client *type* rebuilds only when the
# user actually switches models.
# Use .get() to safely check llm_client without raising KeyError.
if ("engine" not in st.session_state
        or type(st.session_state.get("llm_client")) is not type(llm_client)):
    with st.spinner("πŸ”§ Initializing AI Personas system..."):
        st.session_state.engine = QueryEngine(llm_client=llm_client)
        st.session_state.llm_client = llm_client
        # Smoke-test immediately so setup failures surface here rather
        # than on the first user question.
        st.session_state.engine.test_system()


# Persona definitions with avatars and colors.
# Stored as flat rows and expanded into the id -> metadata mapping the UI
# reads; dict insertion order controls the card display order.
_PERSONA_FIELDS = ("name", "role", "avatar", "color", "tagline")
_PERSONA_ROWS = [
    ("sarah_chen", "Sarah Chen", "Urban Planner", "🌱", "#4CAF50",
     "Progressive, sustainability-focused"),
    ("marcus_thompson", "Marcus Thompson", "Business Owner", "πŸͺ", "#FF9800",
     "Pragmatic, economy-focused"),
    ("elena_rodriguez", "Dr. Elena Rodriguez", "Transportation Engineer", "πŸš‡", "#2196F3",
     "Data-driven, safety-first"),
    ("james_obrien", "James O'Brien", "Long-time Resident", "🏑", "#795548",
     "Traditional, community-focused"),
    ("priya_patel", "Priya Patel", "Housing Advocate", "✊", "#E91E63",
     "Activist, equity-focused"),
    ("david_kim", "David Kim", "Real Estate Developer", "🏒", "#607D8B",
     "Market-driven, growth-oriented"),
]
PERSONAS = {
    row[0]: dict(zip(_PERSONA_FIELDS, row[1:]))
    for row in _PERSONA_ROWS
}


def select_persona(persona_id):
    """Make *persona_id* the active persona and wipe any prior transcript."""
    st.session_state.conversation_history = []
    st.session_state.selected_persona = persona_id


def send_question():
    """Send-button callback: post the typed question to the active persona.

    Appends the user's message and the persona's reply to the conversation
    history, then clears the input box. No-ops on blank input; shows an
    error when no persona has been chosen yet.
    """
    text = st.session_state.current_question
    if not text.strip():
        return

    persona_id = st.session_state.selected_persona
    if not persona_id:
        st.error("Please select a persona first!")
        return

    # Record the outgoing message first so it renders even while the
    # engine is still producing a reply.
    st.session_state.conversation_history.append(
        {"role": "user", "content": text}
    )

    # Query the engine with a spinner naming the active persona.
    with st.spinner(f"πŸ’­ {PERSONAS[persona_id]['name']} is thinking..."):
        reply = st.session_state.engine.query(
            persona_id=persona_id,
            question=text,
            context_id="downtown_district"
        )

    st.session_state.conversation_history.append(
        {"role": "assistant", "content": reply.response}
    )

    # Reset the input widget (legal here: callbacks run before the rerun
    # re-creates the widget).
    st.session_state.current_question = ""


# Main layout: page heading plus a two-column split — chat transcript
# (wider) on the left, persona picker on the right.
st.title("πŸ™οΈ AI Personas for Urban Planning")
st.markdown("### Explore diverse stakeholder perspectives on urban planning issues")

# Create two-column layout
left_col, right_col = st.columns([2, 1])

# LEFT COLUMN: Chat Interface
with left_col:
    st.markdown("### πŸ’¬ Conversation")

    # Show selected persona (or a prompt to pick one).
    if st.session_state.selected_persona:
        persona = PERSONAS[st.session_state.selected_persona]
        st.info(f"**Currently talking with:** {persona['avatar']} {persona['name']} ({persona['role']})")
    else:
        st.warning("πŸ‘‰ **Select a persona from the right panel to start!**")

    # Initialize current_question if it doesn't exist (defensive; it is
    # normally seeded at the top of the script).
    if "current_question" not in st.session_state:
        st.session_state.current_question = ""

    # Check for example question from session state BEFORE creating widget —
    # the widget-keyed value can only be written before the widget is
    # instantiated on this run.
    if "example_question" in st.session_state:
        st.session_state.current_question = st.session_state.example_question
        del st.session_state.example_question

    # Input area: text box and Send button side by side.
    col1, col2 = st.columns([5, 1])
    with col1:
        question = st.text_input(
            "Your question:",
            key="current_question",
            placeholder="e.g., What do you think about the bike lane proposal?",
            label_visibility="collapsed"
        )

    with col2:
        st.button("Send", on_click=send_question, type="primary", use_container_width=True)

    # Quick suggestions — only shown before the first exchange.
    if not st.session_state.conversation_history:
        st.markdown("**πŸ’‘ Try asking:**")
        suggestions = [
            "What's the most important issue facing downtown?",
            "Should we allow food trucks in the plaza?",
            "How can we make the city more sustainable?",
            "What do you think about the affordable housing crisis?"
        ]
        cols = st.columns(2)
        for i, suggestion in enumerate(suggestions):
            with cols[i % 2]:
                if st.button(suggestion, key=f"suggestion_{i}", use_container_width=True):
                    # Stash the suggestion; it is copied into the input box
                    # at the top of the next run, before the widget exists.
                    st.session_state.example_question = suggestion
                    # Streamlit auto-reruns on button click - no manual st.rerun() needed

    # Conversation history rendered as styled HTML chat bubbles (classes
    # defined in the CSS block near the top of the file).
    if st.session_state.conversation_history:
        st.markdown("---")
        st.markdown("### πŸ“œ Conversation History")

        for msg in st.session_state.conversation_history:
            if msg["role"] == "user":
                st.markdown(f"""
                <div class="chat-message user-message">
                    <strong>πŸ™‹ You:</strong><br>
                    {msg["content"]}
                </div>
                """, unsafe_allow_html=True)
            else:
                persona = PERSONAS[st.session_state.selected_persona]
                st.markdown(f"""
                <div class="chat-message assistant-message">
                    <strong>{persona['avatar']} {persona['name']}:</strong><br>
                    {msg["content"]}
                </div>
                """, unsafe_allow_html=True)

        # Clear conversation button
        if st.button("πŸ—‘οΈ Clear Conversation", use_container_width=True):
            st.session_state.conversation_history = []
            st.rerun()


# RIGHT COLUMN: persona picker — one full-width button per persona; the
# active one is highlighted via the "primary" button type.
with right_col:
    st.markdown("### πŸ‘₯ Select a Persona")
    st.markdown("Click to start conversation")

    for pid, info in PERSONAS.items():
        active = st.session_state.selected_persona == pid
        label = f"{info['avatar']}\n\n**{info['name']}**\n\n{info['role']}\n\n_{info['tagline']}_"
        clicked = st.button(
            label,
            key=f"persona_{pid}",
            use_container_width=True,
            type="primary" if active else "secondary"
        )
        if clicked:
            select_persona(pid)
            st.rerun()


# Footer — static HTML credit line rendered below both columns.
st.markdown("---")
st.markdown("""
<div style='text-align: center; color: #666; padding: 1rem;'>
    <small>
        AI Personas for Urban Planning β€’ Phase 1 β€’
        Powered by Claude 3 Haiku β€’
        <a href='https://github.com' target='_blank'>View Code</a>
    </small>
</div>
""", unsafe_allow_html=True)