legolasyiu committed on
Commit
75df4df
·
verified ·
1 Parent(s): 1bb9cb6

Update src/streamlit_app.py

Browse files
Files changed (1) hide show
  1. src/streamlit_app.py +2 -2
src/streamlit_app.py CHANGED
@@ -3,7 +3,7 @@ import streamlit as st
3
  from typing import Literal
4
  from tavily import TavilyClient
5
  from deepagents import create_deep_agent
6
- #from langchain.callbacks.streamlit import StreamlitCallbackHandler
7
  from langgraph.store.memory import InMemoryStore
8
  from langchain.chat_models import init_chat_model
9
  from langchain_huggingface import ChatHuggingFace, HuggingFacePipeline
@@ -119,7 +119,7 @@ st.caption(f"Running on: **{provider}** backend")
119
# Chat loop: read a research question, echo it as the user turn, run the
# deep agent, and render the answer in the assistant turn.
if prompt := st.chat_input("Ask your research question..."):
    st.chat_message("user").write(prompt)
    with st.chat_message("assistant"):
        # BUG FIX: st_callback was commented out here but still referenced in
        # agent.invoke() below, which raises NameError on every submission.
        # Instantiate the callback so intermediate agent steps stream into
        # this assistant container.
        st_callback = StreamlitCallbackHandler(st.container())
        response = agent.invoke({"input": prompt}, {"callbacks": [st_callback]})
        # Fall back to the raw response repr if no "output" key is present.
        output = response.get("output", str(response))
        st.write(output)
 
3
  from typing import Literal
4
  from tavily import TavilyClient
5
  from deepagents import create_deep_agent
6
+ from langchain.callbacks.streamlit import StreamlitCallbackHandler
7
  from langgraph.store.memory import InMemoryStore
8
  from langchain.chat_models import init_chat_model
9
  from langchain_huggingface import ChatHuggingFace, HuggingFacePipeline
 
119
# Handle one chat submission: show the question, invoke the agent with a
# Streamlit callback, and display whatever the agent returns.
if prompt := st.chat_input("Ask your research question..."):
    st.chat_message("user").write(prompt)
    with st.chat_message("assistant"):
        # The callback renders intermediate agent steps into this container.
        st_callback = StreamlitCallbackHandler(st.container())
        result = agent.invoke({"input": prompt}, {"callbacks": [st_callback]})
        # Prefer the agent's "output" field; otherwise show the raw result.
        st.write(result.get("output", str(result)))