# Open DeepAgent — Streamlit deep-research chat app.
# (Removed scraped page residue: Spaces status lines, file-size note,
# git blame hashes, and a rendered line-number gutter.)
import os
import streamlit as st
from typing import Literal
from tavily import TavilyClient
from deepagents import create_deep_agent
from langgraph.store.memory import InMemoryStore
from langchain.chat_models import init_chat_model
from langchain_huggingface import ChatHuggingFace, HuggingFacePipeline
from langchain_community.callbacks.streamlit import (
StreamlitCallbackHandler,
)
# --- Page header ---
# NOTE(review): the title string looks mojibake'd (likely a UTF-8 emoji read
# with the wrong encoding) — confirm the intended characters before changing.
st.title("๐ฆ๐ Open DeepAgent App")
# --- Sidebar: API Keys ---
# One password-style input per supported provider; only the key for the
# selected provider (plus Tavily, always) is actually required at runtime.
st.sidebar.header("๐ API Keys")
tavily_api_key = st.sidebar.text_input("Tavily API Key", type="password", placeholder="sk-tavily-XXXX")
openai_api_key = st.sidebar.text_input("OpenAI API Key", type="password", placeholder="sk-openai-XXXX")
claude_api_key = st.sidebar.text_input("Anthropic API Key", type="password", placeholder="sk-ant-XXXX")
gemini_api_key = st.sidebar.text_input("Google Gemini API Key", type="password", placeholder="AIzaSyXXXX")
# --- Sidebar: Model Selection ---
st.sidebar.header("๐ง Choose Model Provider")
provider = st.sidebar.selectbox(
"Select provider:",
["OpenAI", "Claude", "Gemini", "Hugging Face"],
index=1,  # default selection is "Claude"
)
# --- Editable system prompt ---
# The user can tweak the agent's system prompt live; the text below is only
# the default value shown on first load.
st.sidebar.header("๐งฉ Research Instructions")
research_instructions = st.sidebar.text_area(
"System prompt for the agent:",
value=(
"You are an expert code researcher. Your job is to conduct thorough research, analyze complex information, "
"and write a concise, well-structured report.\n\n"
"You have access to an internet search tool as your primary method of gathering external information.\n"
"## `internet_search`\n"
"Use this function to query the web. Specify a topic, max results, and whether to include raw content."
),
height=200,
)
# --- Store & Tavily Client ---
# The Tavily key is mandatory: without it the search tool cannot work, so the
# script halts here on every rerun until a key is entered.
if not tavily_api_key:
    st.warning("Please enter your Tavily API key in the left menu to start the agent.")
    st.stop()
store = InMemoryStore()  # in-process store for the agent's long-term memory
tavily_client = TavilyClient(api_key=tavily_api_key)
# --- Web search tool ---
def internet_search(
    query: str,
    max_results: int = 5,
    topic: Literal["general", "news", "finance"] = "general",
    include_raw_content: bool = False,
):
    """
    Run a web search through the Tavily API and return its response.

    Args:
        query (str): The search query.
        max_results (int): Maximum number of results to return.
        topic (Literal["general", "news", "finance"]): Type of search to perform.
        include_raw_content (bool): Whether to include raw page content in results.

    Returns:
        dict: The structured search results from the Tavily API.
    """
    # Forward everything except the query as keyword arguments.
    search_options = {
        "max_results": max_results,
        "include_raw_content": include_raw_content,
        "topic": topic,
    }
    return tavily_client.search(query, **search_options)
# --- Model Loader ---
def load_model(provider: str):
    """Instantiate the chat model for the provider picked in the sidebar.

    Reads the matching API key from the sidebar globals, exports it via the
    environment variable the underlying SDK reads, and returns a LangChain
    chat model. Halts the Streamlit script with an error when the required
    key is missing or the provider name is unknown.

    Args:
        provider (str): One of "OpenAI", "Claude", "Gemini", "Hugging Face"
            (the options offered by the sidebar selectbox).

    Returns:
        A LangChain chat model instance for the chosen provider.
    """
    if provider == "OpenAI":
        if not openai_api_key:
            st.error("Please enter your OpenAI API key.")
            st.stop()
        os.environ["OPENAI_API_KEY"] = openai_api_key
        return init_chat_model(model="openai:gpt-5")
    elif provider == "Claude":
        if not claude_api_key:
            st.error("Please enter your Anthropic API key.")
            st.stop()
        os.environ["ANTHROPIC_API_KEY"] = claude_api_key
        return init_chat_model(model="claude-sonnet-4-5-20250929")
    elif provider == "Gemini":
        if not gemini_api_key:
            st.error("Please enter your Google Gemini API key.")
            st.stop()
        os.environ["GOOGLE_API_KEY"] = gemini_api_key
        return init_chat_model(model="gemini-2.5-pro")
    elif provider == "Hugging Face":
        # Bug fix: "Hugging Face" is offered in the sidebar selectbox and
        # ChatHuggingFace/HuggingFacePipeline are imported at the top of the
        # file, but this function had no branch for it — selecting it always
        # hit the "Unknown provider" error. Run a local pipeline (downloads
        # the model on first use; no API key required).
        llm = HuggingFacePipeline.from_model_id(
            model_id="HuggingFaceH4/zephyr-7b-beta",
            task="text-generation",
            pipeline_kwargs={"max_new_tokens": 512},
        )
        return ChatHuggingFace(llm=llm)
    else:
        st.error("Unknown provider selected.")
        st.stop()
# --- Initialize Model ---
# Build the chat model for the provider chosen in the sidebar (halts the
# script inside load_model if the matching API key is missing).
model = load_model(provider)
# --- Create the Deep Agent ---
# Wire the model, the web-search tool, and the editable system prompt into a
# deep agent backed by the in-memory store for long-term memory. This runs on
# every Streamlit rerun, so the agent is rebuilt per interaction.
agent = create_deep_agent(
    store=store,
    use_longterm_memory=True,
    model=model,
    tools=[internet_search],
    system_prompt=research_instructions,
)
# Initialize chat history
# Seed the conversation with a greeting the first time this session runs;
# session_state persists the list across Streamlit reruns.
if "messages" not in st.session_state:
    st.session_state["messages"] = [
        {"role": "assistant", "content": "Hello! Iโm your Deep Research Agent. What would you like to explore today?"}
    ]
# Display chat history
# Re-render every stored message on each rerun (Streamlit redraws from scratch).
for msg in st.session_state.messages:
    with st.chat_message(msg["role"]):
        st.markdown(msg["content"])
# --- Chat Input ---
# Walrus: prompt is non-empty only when the user submitted a new message.
if prompt := st.chat_input("Ask your research question..."):
    # Save user message
    st.session_state.messages.append({"role": "user", "content": prompt})
    st.chat_message("user").write(prompt)
    # Process assistant response
    with st.chat_message("assistant"):
        # Stream the agent's intermediate tool/thought steps into this container.
        st_callback = StreamlitCallbackHandler(st.container())
        # The full message history (role/content dicts) is passed as the
        # agent's conversational context.
        response = agent.invoke(
            {"messages": st.session_state.messages},
            {"callbacks": [st_callback]},
        )
        # Extract and display assistant reply
        # The final message in the agent's returned state is the reply;
        # presumably a LangChain message object exposing `.content`.
        reply = response["messages"][-1].content
        st.markdown(reply)
        st.session_state.messages.append({"role": "assistant", "content": reply})