# Source: sahandkh1419 — "Upload 21 files" (commit d108f4c, verified)
import streamlit as st
from src.langgraphagenticai.ui.streamlitui.loadui import LoadStreamlitUI
from src.langgraphagenticai.LLMS.groqllm import GroqLLM
from src.langgraphagenticai.graph.graph_builder import GraphBuilder
from src.langgraphagenticai.ui.streamlitui.display_result import DisplayResultStreamlit
def load_langgraph_agenticai_app():
    """
    Load and run the LangGraph AgenticAI application using Streamlit.

    Workflow:
        1. Initializes and loads the Streamlit UI components.
        2. Reads user inputs such as the selected use case and model options.
        3. Sets up the appropriate LLM (Groq-based).
        4. Builds a LangGraph workflow based on the selected use case.
        5. Executes the workflow and displays results interactively.

    All failures are surfaced as Streamlit error messages rather than
    raised exceptions, so the UI stays responsive.
    """
    # Step 1: Load the Streamlit-based user interface.
    ui = LoadStreamlitUI()
    user_input = ui.load_streamlit_ui()

    # If the UI failed to return valid inputs, display an error and exit.
    if not user_input:
        st.error("Error: Failed to load user input from the UI.")
        return

    # Step 2: Capture the user message based on interaction type.
    # If the user clicked the "fetch AI news" button, use the selected
    # 'timeframe' value (daily/weekly/monthly) as the message; otherwise
    # show a chat input box for general chatbot interaction.
    # Use .get() so a missing session key does not raise AttributeError
    # on the first render, before the button state has been initialized.
    if st.session_state.get("IsFetchButtonClicked", False):
        user_message = st.session_state.timeframe
    else:
        user_message = st.chat_input("Enter your message:")

    # Guard clause: continue only if a message was actually provided.
    if not user_message:
        return

    try:
        # Step 3: Initialize and configure the selected LLM model.
        # NOTE: 'user_contols_input' (sic) is the keyword GroqLLM expects;
        # renaming it here would break the call.
        obj_llm_config = GroqLLM(user_contols_input=user_input)
        model = obj_llm_config.get_llm_model()

        # If model setup failed, show an error and stop execution.
        if not model:
            st.error("Error: LLM model could not be initialized.")
            return

        # Step 4: Get the selected use case
        # (e.g. Basic Chatbot / Chatbot With Web / AI News).
        usecase = user_input.get("selected_usecase")
        if not usecase:
            st.error("Error: No use case selected.")
            return

        # Step 5: Build the LangGraph workflow for the chosen use case.
        graph_builder = GraphBuilder(model)
        try:
            # Compile the graph (shape depends on the use case type).
            graph = graph_builder.setup_graph(usecase)
            # Step 6: Display the result on the Streamlit UI.
            DisplayResultStreamlit(usecase, graph, user_message).display_result_on_ui()
        except Exception as e:
            # Errors raised while compiling or running the graph.
            st.error(f"Error: Graph setup failed - {e}")
            return
    except Exception as e:
        # Any other unexpected failure (e.g. model or UI setup).
        st.error(f"Error: Unexpected issue occurred - {e}")
        return