# Streamlit UI for the Sustainability Report AI Assistant (deployed as a Hugging Face Space).
import streamlit as st
from dotenv import load_dotenv
from langchain_core.messages import HumanMessage, AIMessage, SystemMessage
from application.agents.scraper_agent import app
from main import graph  # NOTE(review): only referenced by a removed commented-out call; kept to avoid breaking side-effect imports — confirm before deleting
from application.utils.logger import get_logger

# Load environment variables (API keys, endpoints) FIRST, so that the logger,
# the agent, and every Streamlit call below can rely on them being present.
# (Previously this ran after set_page_config, i.e. after code that may read env.)
load_dotenv()

logger = get_logger()

# st.set_page_config must be the first Streamlit command executed in the script.
st.set_page_config(page_title="Sustainability AI Assistant", layout="wide")
st.title("♻️ Sustainability Report AI Assistant")
st.caption(
    "Ask about sustainability reports by company or industry! "
    "(e.g., 'Get sustainability report for Apple', 'Download sustainability report for Microsoft 2023', "
    "'Find sustainability reports for top 3 airline companies', 'Download this pdf <link>')"
)
def initialize_chat_history() -> None:
    """Create an empty ``messages`` list in session state on first run."""
    if "messages" in st.session_state:
        return
    st.session_state.messages = []
    logger.info("Initialized empty chat history in session state.")
def display_chat_history() -> None:
    """Render the stored conversation in order.

    Only HumanMessage and AIMessage entries are rendered; SystemMessage
    entries (agent instructions) are intentionally not shown to the user.
    """
    for message in st.session_state.messages:
        if isinstance(message, HumanMessage):
            with st.chat_message("user"):
                st.markdown(message.content)
        elif isinstance(message, AIMessage):
            with st.chat_message("assistant"):
                st.markdown(message.content)
def invoke_agent():
    """Run the LangGraph agent over the current chat history.

    Returns:
        The final graph state (a dict containing an updated ``messages``
        list) on success, or ``None`` if the invocation raised.
    """
    try:
        graph_input = {"messages": st.session_state.messages}
        logger.info("Invoking LangGraph agent...")
        # recursion_limit caps the number of internal graph steps so a
        # misbehaving agent cannot loop forever.
        final_output_state = app.invoke(graph_input, {"recursion_limit": 15})
        logger.info("Agent invocation completed successfully.")
        return final_output_state
    except Exception as e:
        # Deliberate top-level boundary: log the full traceback, surface a
        # friendly error in the UI, and let the app keep running.
        logger.error("Agent invocation failed.", exc_info=True)
        st.error(f"An error occurred while processing your request: {e}")
        return None
def display_last_ai_response() -> None:
    """Render the newest AIMessage from the session, warning when none exists."""
    latest_reply = None
    for candidate in reversed(st.session_state.messages):
        if isinstance(candidate, AIMessage):
            latest_reply = candidate
            break

    if latest_reply is None:
        st.warning("Agent completed without a final AI message.")
        logger.warning("No AI message found in the final output.")
        return

    with st.chat_message("assistant"):
        st.markdown(latest_reply.content)
    logger.info("Displayed latest AI response.")
initialize_chat_history()

# Render the existing conversation on EVERY run. Previously this only
# happened inside the input branch, so any rerun without fresh input
# (e.g. right after "Clear Chat History" triggers st.rerun) showed an
# empty page even though messages were still in session state.
display_chat_history()

if user_query := st.chat_input("Your question about sustainability reports..."):
    logger.info(f"User input received: {user_query}")
    # Persist and echo the new user turn before invoking the agent.
    st.session_state.messages.append(HumanMessage(content=user_query))
    with st.chat_message("user"):
        st.markdown(user_query)
    with st.spinner("Processing your request... Please wait."):
        final_output_state = invoke_agent()
    if final_output_state:
        # Adopt the agent's full message list (it may append tool/system turns).
        st.session_state.messages = final_output_state['messages']
        display_last_ai_response()

with st.sidebar:
    st.markdown("---")
    if st.button("Clear Chat History"):
        st.session_state.messages = []
        logger.info("Chat history cleared by user.")
        st.rerun()