legolasyiu committed on
Commit
4cfbdb4
·
verified ·
1 Parent(s): a169da5

Update src/streamlit_app.py

Browse files
Files changed (1) hide show
  1. src/streamlit_app.py +122 -38
src/streamlit_app.py CHANGED
@@ -1,40 +1,124 @@
1
- import altair as alt
2
- import numpy as np
3
- import pandas as pd
4
  import streamlit as st
 
 
 
 
 
 
 
5
 
6
- """
7
- # Welcome to Streamlit!
8
-
9
- Edit `/streamlit_app.py` to customize this app to your heart's desire :heart:.
10
- If you have any questions, checkout our [documentation](https://docs.streamlit.io) and [community
11
- forums](https://discuss.streamlit.io).
12
-
13
- In the meantime, below is an example of what you can do with just a few lines of code:
14
- """
15
-
16
- num_points = st.slider("Number of points in spiral", 1, 10000, 1100)
17
- num_turns = st.slider("Number of turns in spiral", 1, 300, 31)
18
-
19
- indices = np.linspace(0, 1, num_points)
20
- theta = 2 * np.pi * num_turns * indices
21
- radius = indices
22
-
23
- x = radius * np.cos(theta)
24
- y = radius * np.sin(theta)
25
-
26
- df = pd.DataFrame({
27
- "x": x,
28
- "y": y,
29
- "idx": indices,
30
- "rand": np.random.randn(num_points),
31
- })
32
-
33
- st.altair_chart(alt.Chart(df, height=700, width=700)
34
- .mark_point(filled=True)
35
- .encode(
36
- x=alt.X("x", axis=None),
37
- y=alt.Y("y", axis=None),
38
- color=alt.Color("idx", legend=None, scale=alt.Scale()),
39
- size=alt.Size("rand", legend=None, scale=alt.Scale(range=[1, 150])),
40
- ))
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ import torch
 
3
  import streamlit as st
4
+ from typing import Literal
5
+ from tavily import TavilyClient
6
+ from deepagents import create_deep_agent
7
+ from langchain.callbacks.streamlit import StreamlitCallbackHandler
8
+ from langgraph.store.memory import InMemoryStore
9
+ from langchain.chat_models import init_chat_model
10
+ from langchain_huggingface import ChatHuggingFace, HuggingFacePipeline
11
 
12
+
13
# --- Sidebar: API Keys ---
# Per-session credentials collected via password inputs; each one is only
# consumed by load_model() for the provider the user selects below.
st.sidebar.header("🔑 API Keys")

tavily_api_key = st.sidebar.text_input("Tavily API Key", type="password", placeholder="sk-tavily-XXXX")
openai_api_key = st.sidebar.text_input("OpenAI API Key", type="password", placeholder="sk-openai-XXXX")
claude_api_key = st.sidebar.text_input("Anthropic API Key", type="password", placeholder="sk-ant-XXXX")
gemini_api_key = st.sidebar.text_input("Google Gemini API Key", type="password", placeholder="AIzaSyXXXX")

# --- Sidebar: Model Selection ---
st.sidebar.header("🧠 Choose Model Provider")
provider = st.sidebar.selectbox(
    "Select provider:",
    ["OpenAI", "Claude", "Gemini", "Hugging Face"],
    index=1,  # default selection is "Claude"
)

# --- Editable system prompt ---
# The agent's system prompt is user-editable; the default text documents the
# internet_search tool defined further down in this file.
st.sidebar.header("🧩 Research Instructions")
research_instructions = st.sidebar.text_area(
    "System prompt for the agent:",
    value=(
        "You are an expert code researcher. Your job is to conduct thorough research, analyze complex information, "
        "and write a concise, well-structured report.\n\n"
        "You have access to an internet search tool as your primary method of gathering external information.\n"
        "## `internet_search`\n"
        "Use this function to query the web. Specify a topic, max results, and whether to include raw content."
    ),
    height=200,
)
42
+
43
# --- Store & Tavily Client ---
# The agent cannot search without Tavily, so end this Streamlit script run
# early (st.stop() aborts the current rerun) until a key is entered.
if not tavily_api_key:
    st.warning("Please enter your Tavily API key in the left menu to start the agent.")
    st.stop()

# NOTE(review): both objects are re-created on every Streamlit rerun, so the
# InMemoryStore passed to the agent below only lives for a single rerun —
# confirm whether st.session_state / st.cache_resource was intended for
# genuine long-term memory.
store = InMemoryStore()
tavily_client = TavilyClient(api_key=tavily_api_key)
50
+
51
# --- Web search tool ---
def internet_search(
    query: str,
    max_results: int = 5,
    topic: Literal["general", "news", "finance"] = "general",
    include_raw_content: bool = False,
):
    """Search the web through the module-level Tavily client.

    Args:
        query: Free-text search query.
        max_results: Maximum number of results Tavily should return.
        topic: Tavily search vertical ("general", "news", or "finance").
        include_raw_content: Whether to include each page's raw content.

    Returns:
        The raw response object from ``tavily_client.search``.
    """
    search_kwargs = {
        "max_results": max_results,
        "include_raw_content": include_raw_content,
        "topic": topic,
    }
    return tavily_client.search(query, **search_kwargs)
64
+
65
# --- Model Loader ---
def load_model(provider: str):
    """Build the chat model for the selected provider.

    For the hosted providers, the matching API key from the sidebar is
    required; it is exported to the environment variable the SDK expects
    before the model is created. Missing keys (or an unknown provider)
    surface a Streamlit error and abort the current script run.

    Args:
        provider: One of "OpenAI", "Claude", "Gemini", "Hugging Face".

    Returns:
        A LangChain chat model instance for the chosen backend.
    """

    def _require(key_value: str, message: str) -> str:
        # Halt this Streamlit rerun when the needed key was not entered.
        if not key_value:
            st.error(message)
            st.stop()
        return key_value

    if provider == "OpenAI":
        os.environ["OPENAI_API_KEY"] = _require(openai_api_key, "Please enter your OpenAI API key.")
        return init_chat_model(model="openai:gpt-5")

    if provider == "Claude":
        os.environ["ANTHROPIC_API_KEY"] = _require(claude_api_key, "Please enter your Anthropic API key.")
        return init_chat_model(model="claude-sonnet-4-5-20250929")

    if provider == "Gemini":
        os.environ["GOOGLE_API_KEY"] = _require(gemini_api_key, "Please enter your Google Gemini API key.")
        return init_chat_model(model="gemini-2.5-pro")

    if provider == "Hugging Face":
        # Use GPU 0 when CUDA is available, otherwise fall back to CPU (-1).
        gpu_or_cpu = 0 if torch.cuda.is_available() else -1
        pipeline_llm = HuggingFacePipeline.from_model_id(
            model_id="EpistemeAI/Episteme-gptoss-20b-RL",
            task="text-generation",
            device=gpu_or_cpu,
            pipeline_kwargs={"max_new_tokens": 8000},
        )
        return ChatHuggingFace(llm=pipeline_llm)

    st.error("Unknown provider selected.")
    st.stop()
101
+
102
# --- Initialize Model ---
model = load_model(provider)

# --- Create the Deep Agent ---
# NOTE(review): the model and agent are rebuilt on every Streamlit rerun;
# together with a per-rerun InMemoryStore, "use_longterm_memory=True" will
# not actually persist memory between user messages — confirm intent.
agent = create_deep_agent(
    store=store,
    use_longterm_memory=True,
    model=model,
    tools=[internet_search],
    system_prompt=research_instructions,
)
113
+
114
# --- Streamlit Chat UI ---
st.title("🧠 Deep Research Agent")
st.caption(f"Running on: **{provider}** backend")

# Single-turn chat: echo the user's question, then render the agent's
# intermediate steps into the assistant bubble via the callback handler.
# NOTE(review): prior turns are not kept in st.session_state, so the
# conversation does not survive reruns — confirm whether history is wanted.
if prompt := st.chat_input("Ask your research question..."):
    st.chat_message("user").write(prompt)
    with st.chat_message("assistant"):
        st_callback = StreamlitCallbackHandler(st.container())
        # NOTE(review): LangGraph-style agents often expect {"messages": [...]}
        # rather than {"input": ...} — verify the schema create_deep_agent uses.
        response = agent.invoke({"input": prompt}, {"callbacks": [st_callback]})
        # Fall back to the stringified response when no "output" key exists.
        output = response.get("output", str(response))
        st.write(output)