Spaces:
Sleeping
Sleeping
Update src/streamlit_app.py
Browse files — src/streamlit_app.py (+2, -2)
src/streamlit_app.py
CHANGED
|
@@ -3,7 +3,7 @@ import streamlit as st
|
|
| 3 |
from typing import Literal
|
| 4 |
from tavily import TavilyClient
|
| 5 |
from deepagents import create_deep_agent
|
| 6 |
-
from langchain.callbacks.streamlit import StreamlitCallbackHandler
|
| 7 |
from langgraph.store.memory import InMemoryStore
|
| 8 |
from langchain.chat_models import init_chat_model
|
| 9 |
from langchain_huggingface import ChatHuggingFace, HuggingFacePipeline
|
|
@@ -107,7 +107,7 @@ st.caption(f"Running on: **{provider}** backend")
|
|
| 107 |
if prompt := st.chat_input("Ask your research question..."):
|
| 108 |
st.chat_message("user").write(prompt)
|
| 109 |
with st.chat_message("assistant"):
|
| 110 |
-
st_callback = StreamlitCallbackHandler(st.container())
|
| 111 |
response = agent.invoke({"input": prompt}, {"callbacks": [st_callback]})
|
| 112 |
output = response.get("output", str(response))
|
| 113 |
st.write(output)
|
|
|
|
| 3 |
from typing import Literal
|
| 4 |
from tavily import TavilyClient
|
| 5 |
from deepagents import create_deep_agent
|
| 6 |
+
#from langchain.callbacks.streamlit import StreamlitCallbackHandler
|
| 7 |
from langgraph.store.memory import InMemoryStore
|
| 8 |
from langchain.chat_models import init_chat_model
|
| 9 |
from langchain_huggingface import ChatHuggingFace, HuggingFacePipeline
|
|
|
|
| 107 |
if prompt := st.chat_input("Ask your research question..."):
|
| 108 |
st.chat_message("user").write(prompt)
|
| 109 |
with st.chat_message("assistant"):
|
| 110 |
+
#st_callback = StreamlitCallbackHandler(st.container())
|
| 111 |
response = agent.invoke({"input": prompt}, {"callbacks": [st_callback]})
|
| 112 |
output = response.get("output", str(response))
|
| 113 |
st.write(output)
|