# NOTE(review): removed Hugging Face Spaces page chrome ("Spaces", "Sleeping")
# that was captured along with this file; it is not part of the application.
| """ | |
| AI Personas for Urban Planning - Web UI | |
| A beautiful web interface for interacting with urban planning stakeholder personas. | |
| Usage: | |
| streamlit run web_app.py | |
| """ | |
import html
import os
import sys
from pathlib import Path

import streamlit as st
from dotenv import load_dotenv
| # Load environment variables from .env file | |
| load_dotenv() | |
| # Add current directory and src to path for imports | |
| current_dir = Path(__file__).parent.resolve() | |
| sys.path.insert(0, str(current_dir)) | |
| sys.path.insert(0, str(current_dir / "src")) | |
| # Debug: Print path information (will appear in logs) | |
| print(f"Current directory: {current_dir}") | |
| print(f"Python path: {sys.path[:3]}") | |
| print(f"Contents: {list(current_dir.iterdir())[:10]}") | |
| print(f"ANTHROPIC_API_KEY present: {bool(os.getenv('ANTHROPIC_API_KEY'))}") | |
| from src.pipeline.query_engine import QueryEngine | |
| from src.llm.anthropic_client import AnthropicClient | |
| from src.llm.local_model_client import LocalModelClient | |
| # Page configuration | |
| st.set_page_config( | |
| page_title="AI Personas - Urban Planning", | |
| page_icon="ποΈ", | |
| layout="wide", | |
| initial_sidebar_state="collapsed" | |
| ) | |
| # Custom CSS for better styling | |
| st.markdown(""" | |
| <style> | |
| .persona-card { | |
| padding: 1rem; | |
| border-radius: 10px; | |
| margin-bottom: 0.5rem; | |
| cursor: pointer; | |
| transition: all 0.3s ease; | |
| border: 2px solid transparent; | |
| } | |
| .persona-card:hover { | |
| transform: translateY(-2px); | |
| box-shadow: 0 4px 8px rgba(0,0,0,0.1); | |
| } | |
| .persona-card.selected { | |
| border: 2px solid #1f77b4; | |
| background-color: #e7f3ff; | |
| } | |
| .persona-avatar { | |
| font-size: 3rem; | |
| text-align: center; | |
| margin-bottom: 0.5rem; | |
| } | |
| .persona-name { | |
| font-weight: bold; | |
| font-size: 1.1rem; | |
| text-align: center; | |
| margin-bottom: 0.3rem; | |
| } | |
| .persona-role { | |
| font-size: 0.9rem; | |
| color: #666; | |
| text-align: center; | |
| } | |
| .chat-message { | |
| padding: 1rem; | |
| border-radius: 10px; | |
| margin-bottom: 1rem; | |
| } | |
| .user-message { | |
| background-color: #e3f2fd; | |
| border-left: 4px solid #2196f3; | |
| } | |
| .assistant-message { | |
| background-color: #f5f5f5; | |
| border-left: 4px solid #4caf50; | |
| } | |
| .stButton>button { | |
| width: 100%; | |
| border-radius: 5px; | |
| height: 3rem; | |
| font-weight: bold; | |
| } | |
| </style> | |
| """, unsafe_allow_html=True) | |
| # Initialize session state | |
| if "selected_persona" not in st.session_state: | |
| st.session_state.selected_persona = None | |
| if "conversation_history" not in st.session_state: | |
| st.session_state.conversation_history = [] | |
| if "current_question" not in st.session_state: | |
| st.session_state.current_question = "" | |
| # Sidebar - Model Selection | |
| llm_client = None # Initialize to None to avoid NameError | |
| initialization_error = None # Store any error that occurs | |
| with st.sidebar: | |
| st.title("π€ LLM Model") | |
| # Diagnostics (collapsed by default) | |
| with st.expander("π System Diagnostics", expanded=False): | |
| api_key = os.getenv("ANTHROPIC_API_KEY") | |
| st.write(f"**API Key Status:** {'β Found' if api_key else 'β Missing'}") | |
| if api_key: | |
| st.write(f"**API Key Preview:** `{api_key[:15]}...`") | |
| st.write(f"**API Key Length:** {len(api_key)} chars") | |
| # Check if we're on HF Spaces | |
| is_hf_space = os.path.exists('/home/user/app') | |
| st.write(f"**Environment:** {'π€ HF Spaces' if is_hf_space else 'π» Local'}") | |
| # Show package versions | |
| try: | |
| import anthropic | |
| st.write(f"**Anthropic SDK:** v{anthropic.__version__}") | |
| except Exception as e: | |
| st.write(f"**Anthropic SDK:** β Error: {e}") | |
| try: | |
| import torch | |
| st.write(f"**PyTorch:** v{torch.__version__}") | |
| st.write(f"**Device:** {torch.device('mps' if torch.backends.mps.is_available() else 'cpu')}") | |
| except Exception as e: | |
| st.write(f"**PyTorch:** Not installed ({e})") | |
| st.write(f"**Python:** {sys.version.split()[0]}") | |
| st.write(f"**Streamlit:** v{st.__version__}") | |
| # Check API key availability | |
| api_key_available = bool(os.getenv("ANTHROPIC_API_KEY")) | |
| if not api_key_available: | |
| st.error("π¨ **No Anthropic API key detected!**") | |
| st.info("**To use Anthropic Claude:**") | |
| if os.path.exists('/home/user/app'): | |
| st.info("1. Go to Space Settings") | |
| st.info("2. Add secret: `ANTHROPIC_API_KEY`") | |
| st.info("3. Restart the Space") | |
| else: | |
| st.info("Set `ANTHROPIC_API_KEY` in your `.env` file") | |
| st.info("**Alternative:** Use 'Local Be.FM' model (no API key needed)") | |
| model_choice = st.radio( | |
| "Select Model:", | |
| ["Anthropic Claude", "Local Be.FM"], | |
| index=0 if api_key_available else 1, # Default to Local Be.FM if no API key | |
| help=""" | |
| **Anthropic Claude**: Use Claude API (requires API key) | |
| **Local Be.FM**: Run Stanford's Be.FM model locally (GPU-accelerated) | |
| """ | |
| ) | |
| # Initialize LLM client based on selection | |
| st.markdown("---") | |
| st.markdown("### Initialization Status") | |
| try: | |
| if model_choice == "Anthropic Claude": | |
| st.info("π Initializing Anthropic Claude...") | |
| llm_client = AnthropicClient() | |
| st.success("β Anthropic Claude initialized successfully!") | |
| else: | |
| st.info("π Initializing Local Be.FM...") | |
| llm_client = LocalModelClient() | |
| st.success("β Local Be.FM initialized successfully!") | |
| st.caption("π‘ First run will download the model (~16GB)") | |
| except ValueError as e: | |
| # Handle missing API key specifically | |
| error_msg = str(e) | |
| initialization_error = error_msg | |
| st.error(f"β **Configuration Error**") | |
| st.error(error_msg) | |
| if "API key" in error_msg: | |
| st.markdown("**π Fix this by:**") | |
| if os.path.exists('/home/user/app'): | |
| st.markdown("1. Go to your HF Space Settings") | |
| st.markdown("2. Variables and secrets β Add a secret") | |
| st.markdown("3. Name: `ANTHROPIC_API_KEY`") | |
| st.markdown("4. Value: Your API key (starts with `sk-ant-`)") | |
| st.markdown("5. Save and restart Space") | |
| else: | |
| st.markdown("1. Create/update `.env` file") | |
| st.markdown("2. Add: `ANTHROPIC_API_KEY=sk-ant-...`") | |
| st.markdown("3. Restart the app") | |
| llm_client = None | |
| except Exception as e: | |
| # Handle other errors | |
| error_msg = f"{type(e).__name__}: {e}" | |
| initialization_error = error_msg | |
| st.error(f"β **Initialization Failed**") | |
| st.error(error_msg) | |
| # Show detailed error for debugging | |
| import traceback | |
| error_trace = traceback.format_exc() | |
| with st.expander("π Full Error Details (for debugging)"): | |
| st.code(error_trace) | |
| llm_client = None | |
| # Only proceed if LLM client was successfully initialized | |
| if llm_client is None: | |
| st.error("## β Failed to initialize LLM client") | |
| if initialization_error: | |
| st.error(f"**Error:** {initialization_error}") | |
| st.warning("### π Please check the sidebar for detailed error information and setup instructions") | |
| # Show helpful tips in main area | |
| st.markdown("---") | |
| st.markdown("### π§ Quick Troubleshooting") | |
| api_key = os.getenv("ANTHROPIC_API_KEY") | |
| if not api_key: | |
| st.markdown("**Issue:** No API key found") | |
| if os.path.exists('/home/user/app'): | |
| st.markdown(""" | |
| **Solution for HF Spaces:** | |
| 1. Go to your Space Settings (βοΈ) | |
| 2. Click on "Variables and secrets" | |
| 3. Add a new secret with name `ANTHROPIC_API_KEY` | |
| 4. Paste your Anthropic API key (get one from https://console.anthropic.com/) | |
| 5. Save and restart your Space | |
| """) | |
| else: | |
| st.markdown(""" | |
| **Solution for Local:** | |
| 1. Create a `.env` file in the project root | |
| 2. Add: `ANTHROPIC_API_KEY=your-key-here` | |
| 3. Get your API key from https://console.anthropic.com/ | |
| 4. Restart the app | |
| """) | |
| st.info("**Alternative:** Select 'Local Be.FM' model in the sidebar (no API key needed)") | |
| else: | |
| st.markdown(f"**API Key Detected:** Yes ({len(api_key)} chars)") | |
| st.markdown("**Issue:** Initialization failed despite having an API key") | |
| st.markdown("Check the sidebar for the specific error details.") | |
| st.stop() | |
| # Initialize QueryEngine with selected LLM client | |
| # Use .get() to safely check llm_client without raising KeyError | |
| if "engine" not in st.session_state or st.session_state.get("llm_client") is not llm_client: | |
| with st.spinner("π§ Initializing AI Personas system..."): | |
| st.session_state.engine = QueryEngine(llm_client=llm_client) | |
| st.session_state.llm_client = llm_client | |
| st.session_state.engine.test_system() | |
| # Persona definitions with avatars and colors | |
| PERSONAS = { | |
| "sarah_chen": { | |
| "name": "Sarah Chen", | |
| "role": "Urban Planner", | |
| "avatar": "π±", | |
| "color": "#4CAF50", | |
| "tagline": "Progressive, sustainability-focused" | |
| }, | |
| "marcus_thompson": { | |
| "name": "Marcus Thompson", | |
| "role": "Business Owner", | |
| "avatar": "πͺ", | |
| "color": "#FF9800", | |
| "tagline": "Pragmatic, economy-focused" | |
| }, | |
| "elena_rodriguez": { | |
| "name": "Dr. Elena Rodriguez", | |
| "role": "Transportation Engineer", | |
| "avatar": "π", | |
| "color": "#2196F3", | |
| "tagline": "Data-driven, safety-first" | |
| }, | |
| "james_obrien": { | |
| "name": "James O'Brien", | |
| "role": "Long-time Resident", | |
| "avatar": "π‘", | |
| "color": "#795548", | |
| "tagline": "Traditional, community-focused" | |
| }, | |
| "priya_patel": { | |
| "name": "Priya Patel", | |
| "role": "Housing Advocate", | |
| "avatar": "β", | |
| "color": "#E91E63", | |
| "tagline": "Activist, equity-focused" | |
| }, | |
| "david_kim": { | |
| "name": "David Kim", | |
| "role": "Real Estate Developer", | |
| "avatar": "π’", | |
| "color": "#607D8B", | |
| "tagline": "Market-driven, growth-oriented" | |
| } | |
| } | |
| def select_persona(persona_id): | |
| """Select a persona and clear conversation history""" | |
| st.session_state.selected_persona = persona_id | |
| st.session_state.conversation_history = [] | |
| def send_question(): | |
| """Send question to selected persona""" | |
| if not st.session_state.current_question.strip(): | |
| return | |
| if not st.session_state.selected_persona: | |
| st.error("Please select a persona first!") | |
| return | |
| question = st.session_state.current_question | |
| # Add question to history | |
| st.session_state.conversation_history.append({ | |
| "role": "user", | |
| "content": question | |
| }) | |
| # Get response | |
| with st.spinner(f"π {PERSONAS[st.session_state.selected_persona]['name']} is thinking..."): | |
| response = st.session_state.engine.query( | |
| persona_id=st.session_state.selected_persona, | |
| question=question, | |
| context_id="downtown_district" | |
| ) | |
| # Add response to history | |
| st.session_state.conversation_history.append({ | |
| "role": "assistant", | |
| "content": response.response | |
| }) | |
| # Clear input | |
| st.session_state.current_question = "" | |
| # Main layout | |
| st.title("ποΈ AI Personas for Urban Planning") | |
| st.markdown("### Explore diverse stakeholder perspectives on urban planning issues") | |
| # Create two-column layout | |
| left_col, right_col = st.columns([2, 1]) | |
| # LEFT COLUMN: Chat Interface | |
| with left_col: | |
| st.markdown("### π¬ Conversation") | |
| # Show selected persona | |
| if st.session_state.selected_persona: | |
| persona = PERSONAS[st.session_state.selected_persona] | |
| st.info(f"**Currently talking with:** {persona['avatar']} {persona['name']} ({persona['role']})") | |
| else: | |
| st.warning("π **Select a persona from the right panel to start!**") | |
| # Initialize current_question if it doesn't exist | |
| if "current_question" not in st.session_state: | |
| st.session_state.current_question = "" | |
| # Check for example question from session state BEFORE creating widget | |
| if "example_question" in st.session_state: | |
| st.session_state.current_question = st.session_state.example_question | |
| del st.session_state.example_question | |
| # Input area | |
| col1, col2 = st.columns([5, 1]) | |
| with col1: | |
| question = st.text_input( | |
| "Your question:", | |
| key="current_question", | |
| placeholder="e.g., What do you think about the bike lane proposal?", | |
| label_visibility="collapsed" | |
| ) | |
| with col2: | |
| st.button("Send", on_click=send_question, type="primary", use_container_width=True) | |
| # Quick suggestions | |
| if not st.session_state.conversation_history: | |
| st.markdown("**π‘ Try asking:**") | |
| suggestions = [ | |
| "What's the most important issue facing downtown?", | |
| "Should we allow food trucks in the plaza?", | |
| "How can we make the city more sustainable?", | |
| "What do you think about the affordable housing crisis?" | |
| ] | |
| cols = st.columns(2) | |
| for i, suggestion in enumerate(suggestions): | |
| with cols[i % 2]: | |
| if st.button(suggestion, key=f"suggestion_{i}", use_container_width=True): | |
| st.session_state.example_question = suggestion | |
| # Streamlit auto-reruns on button click - no manual st.rerun() needed | |
| # Conversation history | |
| if st.session_state.conversation_history: | |
| st.markdown("---") | |
| st.markdown("### π Conversation History") | |
| for msg in st.session_state.conversation_history: | |
| if msg["role"] == "user": | |
| st.markdown(f""" | |
| <div class="chat-message user-message"> | |
| <strong>π You:</strong><br> | |
| {msg["content"]} | |
| </div> | |
| """, unsafe_allow_html=True) | |
| else: | |
| persona = PERSONAS[st.session_state.selected_persona] | |
| st.markdown(f""" | |
| <div class="chat-message assistant-message"> | |
| <strong>{persona['avatar']} {persona['name']}:</strong><br> | |
| {msg["content"]} | |
| </div> | |
| """, unsafe_allow_html=True) | |
| # Clear conversation button | |
| if st.button("ποΈ Clear Conversation", use_container_width=True): | |
| st.session_state.conversation_history = [] | |
| st.rerun() | |
| # RIGHT COLUMN: Persona Selection | |
| with right_col: | |
| st.markdown("### π₯ Select a Persona") | |
| st.markdown("Click to start conversation") | |
| for persona_id, persona in PERSONAS.items(): | |
| is_selected = st.session_state.selected_persona == persona_id | |
| # Create persona card | |
| if st.button( | |
| f"{persona['avatar']}\n\n**{persona['name']}**\n\n{persona['role']}\n\n_{persona['tagline']}_", | |
| key=f"persona_{persona_id}", | |
| use_container_width=True, | |
| type="primary" if is_selected else "secondary" | |
| ): | |
| select_persona(persona_id) | |
| st.rerun() | |
| # Footer | |
| st.markdown("---") | |
| st.markdown(""" | |
| <div style='text-align: center; color: #666; padding: 1rem;'> | |
| <small> | |
| AI Personas for Urban Planning β’ Phase 1 β’ | |
| Powered by Claude 3 Haiku β’ | |
| <a href='https://github.com' target='_blank'>View Code</a> | |
| </small> | |
| </div> | |
| """, unsafe_allow_html=True) | |