# OpenDeepAgent-Preview / src/streamlit_app.py
# Author: legolasyiu — "Update src/streamlit_app.py" (commit 645e4e4, verified)
import os
import streamlit as st
from typing import Literal
from tavily import TavilyClient
from deepagents import create_deep_agent
from langgraph.store.memory import InMemoryStore
from langchain.chat_models import init_chat_model
from langchain_huggingface import ChatHuggingFace, HuggingFacePipeline
from langchain_community.callbacks.streamlit import (
StreamlitCallbackHandler,
)
# Page title rendered at the top of the app.
st.title("🦜🔗 Open DeepAgent App")
# --- Sidebar: API Keys ---
# Password-masked credential inputs; values live only in the Streamlit
# session and are exported to env vars later by load_model().
st.sidebar.header("🔑 API Keys")
tavily_api_key = st.sidebar.text_input("Tavily API Key", type="password", placeholder="sk-tavily-XXXX")
openai_api_key = st.sidebar.text_input("OpenAI API Key", type="password", placeholder="sk-openai-XXXX")
claude_api_key = st.sidebar.text_input("Anthropic API Key", type="password", placeholder="sk-ant-XXXX")
gemini_api_key = st.sidebar.text_input("Google Gemini API Key", type="password", placeholder="AIzaSyXXXX")
# --- Sidebar: Model Selection ---
st.sidebar.header("🧠 Choose Model Provider")
provider = st.sidebar.selectbox(
    "Select provider:",
    ["OpenAI", "Claude", "Gemini", "Hugging Face"],
    index=1,  # default selection is "Claude"
)
# --- Editable system prompt ---
# The agent's system prompt is user-editable so researchers can steer
# tone/scope without redeploying the app.
st.sidebar.header("🧩 Research Instructions")
research_instructions = st.sidebar.text_area(
    "System prompt for the agent:",
    value=(
        "You are an expert code researcher. Your job is to conduct thorough research, analyze complex information, "
        "and write a concise, well-structured report.\n\n"
        "You have access to an internet search tool as your primary method of gathering external information.\n"
        "## `internet_search`\n"
        "Use this function to query the web. Specify a topic, max results, and whether to include raw content."
    ),
    height=200,
)
# --- Store & Tavily Client ---
# Search is mandatory for the agent, so halt the script (st.stop) until a
# Tavily key is provided. Everything below this point can assume the key.
if not tavily_api_key:
    st.warning("Please enter your Tavily API key in the left menu to start the agent.")
    st.stop()
# In-memory long-term store for the agent (not persisted across restarts)
# and the shared Tavily client used by internet_search() below.
store = InMemoryStore()
tavily_client = TavilyClient(api_key=tavily_api_key)
# --- Web search tool ---
def internet_search(
    query: str,
    max_results: int = 5,
    topic: Literal["general", "news", "finance"] = "general",
    include_raw_content: bool = False,
):
    """Run a web search through the shared Tavily client.

    Args:
        query (str): Free-text search query.
        max_results (int): Cap on the number of results returned.
        topic (Literal["general", "news", "finance"]): Search vertical to use.
        include_raw_content (bool): When True, each result also carries the
            raw page content.

    Returns:
        dict: Structured search results exactly as returned by the Tavily API.
    """
    # Collect the optional knobs once, then forward them as keyword args.
    search_options = {
        "max_results": max_results,
        "include_raw_content": include_raw_content,
        "topic": topic,
    }
    return tavily_client.search(query, **search_options)
# --- Model Loader ---
def load_model(provider: str):
    """Instantiate a chat model for the provider picked in the sidebar.

    For the hosted providers, validates that the matching API key was
    entered (halting the Streamlit script via ``st.stop()`` if not),
    exports it as the environment variable the SDK expects, and returns
    an initialized chat model.

    Args:
        provider: One of "OpenAI", "Claude", "Gemini", or "Hugging Face"
            (the exact options offered by the sidebar selectbox).

    Returns:
        A LangChain chat model instance for the chosen provider.
    """
    if provider == "OpenAI":
        if not openai_api_key:
            st.error("Please enter your OpenAI API key.")
            st.stop()
        os.environ["OPENAI_API_KEY"] = openai_api_key
        return init_chat_model(model="openai:gpt-5")
    elif provider == "Claude":
        if not claude_api_key:
            st.error("Please enter your Anthropic API key.")
            st.stop()
        os.environ["ANTHROPIC_API_KEY"] = claude_api_key
        return init_chat_model(model="claude-sonnet-4-5-20250929")
    elif provider == "Gemini":
        if not gemini_api_key:
            st.error("Please enter your Google Gemini API key.")
            st.stop()
        os.environ["GOOGLE_API_KEY"] = gemini_api_key
        return init_chat_model(model="gemini-2.5-pro")
    elif provider == "Hugging Face":
        # Bug fix: the sidebar offers "Hugging Face" but this function had
        # no branch for it, so selecting it fell through to the
        # "Unknown provider" error. Use the classes already imported from
        # langchain_huggingface to run a local text-generation pipeline
        # (no API key required; the model is downloaded on first use).
        llm = HuggingFacePipeline.from_model_id(
            model_id="HuggingFaceH4/zephyr-7b-beta",
            task="text-generation",
            pipeline_kwargs={"max_new_tokens": 512},
        )
        return ChatHuggingFace(llm=llm)
    else:
        # Defensive guard: only reachable if the selectbox options and the
        # branches above ever drift apart.
        st.error("Unknown provider selected.")
        st.stop()
# --- Initialize Model ---
# Build the chat model for the provider picked in the sidebar; load_model
# halts the script itself if the matching API key is missing.
model = load_model(provider)
# --- Create the Deep Agent ---
# Deep agent wired with long-term memory (backed by the InMemoryStore
# created above), the Tavily search tool, and the user-editable system
# prompt from the sidebar text area.
agent = create_deep_agent(
    store=store,
    use_longterm_memory=True,
    model=model,
    tools=[internet_search],
    system_prompt=research_instructions,
)
# Seed the conversation with a greeting the first time the script runs;
# st.session_state survives Streamlit's rerun-on-interaction model.
_GREETING = "Hello! I’m your Deep Research Agent. What would you like to explore today?"
if "messages" not in st.session_state:
    st.session_state["messages"] = [{"role": "assistant", "content": _GREETING}]
# Replay the stored conversation so the transcript persists across reruns.
for entry in st.session_state.messages:
    with st.chat_message(entry["role"]):
        st.markdown(entry["content"])
# --- Chat Input ---
user_question = st.chat_input("Ask your research question...")
if user_question:
    # Record and echo the user's turn.
    st.session_state.messages.append({"role": "user", "content": user_question})
    st.chat_message("user").write(user_question)
    # Run the agent inside the assistant bubble; the callback handler
    # streams intermediate tool/LLM steps into a container as they happen.
    with st.chat_message("assistant"):
        handler = StreamlitCallbackHandler(st.container())
        result = agent.invoke(
            {"messages": st.session_state.messages},
            {"callbacks": [handler]},
        )
        # The last message in the returned state is the agent's final answer.
        answer = result["messages"][-1].content
        st.markdown(answer)
        st.session_state.messages.append({"role": "assistant", "content": answer})