| import streamlit as st |
| import requests |
| import json |
| import os |
|
|
| |
| |
# Backend SSE endpoint of the FastAPI LangGraph server; override via the
# API_URL environment variable when deploying outside localhost.
API_URL = os.getenv("API_URL", "http://127.0.0.1:8000/chat/stream")

# NOTE: st.set_page_config must be the FIRST Streamlit call in the script.
# NOTE(review): the icon/title glyphs ("π", "π€") look mojibake-mangled
# (likely lost emoji from an encoding round-trip) — confirm original text.
st.set_page_config(page_title="FinAgent Portfolio", page_icon="π", layout="wide")

st.title("π π€ FinAgent: Autonomous Financial AI")
|
|
with st.sidebar:
    st.markdown("### π¨βπ» About this Agent")
    st.markdown(
        "This application uses **LangGraph** to construct a deterministic multi-agent state machine. "
        "The **Planner Agent** parses the query, while the **Supervisor** appropriately routes tasks "
        "to specialized **Quant, Fundamental, Sentiment, and Earnings Agents**.\n\n"
        "Finally, the **Summarizer** compiles a comprehensive Investment Memo."
    )

    st.divider()

    # Discover which tickers have ingested vector stores on disk. Directory
    # names follow the "<TICKER>_10k" / "<TICKER>_earnings" convention.
    entries = os.listdir("./chroma_db") if os.path.exists("./chroma_db") else []
    available_tickers = [e.replace("_10k", "") for e in entries if e.endswith("_10k")]
    earnings_tickers = [e.replace("_earnings", "") for e in entries if e.endswith("_earnings")]

    def _render_ticker_grid(header, blurb, tickers):
        # Render a sorted 4-column grid of ticker codes under a section header.
        st.markdown(header)
        st.markdown(blurb)
        columns = st.columns(4)
        for idx, ticker in enumerate(sorted(tickers)):
            columns[idx % 4].code(ticker)

    if available_tickers:
        _render_ticker_grid(
            "### π Supported 10-K Data",
            "Deep RAG (Fundamental SEC filings) currently verified & compiled for:",
            available_tickers,
        )

    if earnings_tickers:
        _render_ticker_grid(
            "### ποΈ Earnings Call Data",
            "Ingested earnings-call transcripts available for:",
            earnings_tickers,
        )

    st.divider()

    st.markdown("### β‘ Recruiter Quick-Test")
    st.markdown("Try one of these example queries to see the multi-agent graph in action:")

    # (button label, canned query) pairs — clicking queues the query for the
    # chat handler below via session state.
    _examples = (
        ("π Apple Financial Overview",
         "What is the price, sentiment, and recent 10-K risks for Apple (AAPL)?"),
        ("ποΈ Tesla Breaking Sentiment",
         "What is the latest news sentiment for TSLA?"),
        ("π» MSFT vs GOOGL",
         "Compare the current stock performance of Microsoft and Google."),
        ("ποΈ Earnings Call Analysis",
         "Analyze the latest earnings call for Apple (AAPL) β compare management tone in prepared remarks vs Q&A and show keyword trends."),
    )
    for label, query in _examples:
        if st.button(label):
            st.session_state.example_query = query

    st.divider()
    st.caption("Powered by Llama-3.1-8B via Groq")
|
|
| |
# Conversation history must survive Streamlit's per-interaction reruns,
# so it lives in session state rather than a module-level variable.
st.session_state.setdefault("messages", [])
|
|
| |
# Replay the stored conversation on every rerun. Assistant messages that
# carry recorded agent steps get a collapsed status box above the memo.
for msg in st.session_state.messages:
    role = msg["role"]
    with st.chat_message(role):
        steps = msg.get("steps") or []
        if role == "assistant" and steps:
            total_time = msg.get("total_latency", 0)
            label = "β Investment Memo Generated!"
            if total_time:
                label += f" (Total Latency: {total_time}s)"
            with st.status(label, expanded=False):
                for step in steps:
                    latency = step.get("step_latency", 0)
                    prefix = f"({latency}s) " if latency else ""
                    st.write(f"**[{step['node']}]** {prefix}{step['content']}")
        st.markdown(msg["content"])
|
|
| |
chat_val = st.chat_input("Ask about any stock ticker (e.g. AAPL, TSLA, NVDA)...")

# A queued sidebar example takes precedence over typed input for this rerun;
# it is consumed (blanked) so the same example does not fire twice.
prompt = chat_val
queued_example = st.session_state.get("example_query")
if queued_example:
    prompt = queued_example
    st.session_state.example_query = ""
|
|
if prompt:
    # Echo and persist the user's message.
    st.session_state.messages.append({"role": "user", "content": prompt})
    with st.chat_message("user"):
        st.markdown(prompt)

    with st.chat_message("assistant"):
        status_box = st.status("π§  Consulting Specialized AI Agents...", expanded=True)
        final_memo_placeholder = st.empty()

        try:
            # Stream Server-Sent Events from the FastAPI backend.
            # FIX: added a timeout so a dead/unreachable backend cannot hang
            # the UI forever (10s to connect, 300s max silence between chunks).
            with requests.post(API_URL, json={"query": prompt}, stream=True, timeout=(10, 300)) as response:
                response.raise_for_status()

                final_memo = ""
                final_total_latency = 0
                session_steps = []

                for line in response.iter_lines():
                    if not line:
                        continue
                    decoded_line = line.decode("utf-8")
                    if not decoded_line.startswith("data: "):
                        continue
                    data_str = decoded_line[len("data: "):]
                    try:
                        data = json.loads(data_str)
                    except json.JSONDecodeError:
                        continue  # skip malformed / partial SSE payloads

                    node = data.get("node")
                    content = data.get("content")
                    step_latency = data.get("step_latency", 0)
                    total_latency = data.get("total_latency", 0)

                    if node == "Summarizer":
                        # Terminal event: render the memo and close the status box.
                        final_memo = content
                        # FIX: remember the Summarizer's authoritative total
                        # latency instead of falling back to the previous
                        # step's value when persisting the message below.
                        final_total_latency = total_latency
                        final_memo_placeholder.markdown(final_memo)
                        status_box.update(
                            label=f"β Investment Memo Generated! (Total Latency: {total_latency}s)",
                            state="complete",
                            expanded=False,
                        )
                    else:
                        # Intermediate agent step: log it into the status box.
                        lat_str = f"({step_latency}s) " if step_latency else ""
                        status_box.write(f"**[{node}]** {lat_str}{content}")
                        session_steps.append({
                            "node": node,
                            "content": content,
                            "step_latency": step_latency,
                            "total_latency": total_latency,
                        })

            if final_memo:
                # Fallback for backends that omit total_latency on the
                # Summarizer event: use the last recorded step's value.
                if not final_total_latency and session_steps:
                    final_total_latency = session_steps[-1].get("total_latency", 0)
                st.session_state.messages.append({
                    "role": "assistant",
                    "content": final_memo,
                    "steps": session_steps,
                    "total_latency": final_total_latency,
                })
            else:
                # FIX: the stream ended without a Summarizer event — surface
                # it instead of leaving the status spinner running forever.
                status_box.update(label="Stream ended without a final memo", state="error", expanded=False)

        except requests.exceptions.RequestException as e:
            status_box.update(label="β Connection Error", state="error", expanded=False)
            st.error(f"Failed to connect to the backend FastAPI server: {e}")
|
|