# NOTE: removed non-Python residue (file-size line, commit hashes, and the
# line-number gutter) left over from a web file-viewer scrape of this module.
import streamlit as st
from langgraphagenticai.ui.streamlitui.loadui4 import LoadStreamlitUI
from langgraphagenticai.LLMS.groqllm import GroqLLM
from langgraphagenticai.graph.graph_builder import GraphBuilder
from langgraphagenticai.ui.streamlitui.display_result import DisplayResultStreamlit
def load_langgraph_agenticai_app():
    """
    Launch and manage the LangGraph AgenticAI application with a Streamlit UI.

    Loads the UI and collects user configuration, resolves the user's message
    (fetch-button timeframe takes precedence over free-form chat input),
    configures the selected Groq LLM, builds the agentic graph for the chosen
    use case, and renders the result. All failures are surfaced to the user
    via ``st.error`` instead of propagating.

    Returns:
        None. Output is rendered directly into the Streamlit page.
    """
    # Load the UI and the user's configuration choices.
    ui = LoadStreamlitUI()
    user_input = ui.load_streamlit_ui()
    if not user_input:
        st.error("Error: Failed to load user input from the UI.")
        return

    # A fetch-button click supplies the message (the selected timeframe);
    # otherwise fall back to the chat box. .get() avoids an AttributeError
    # when the UI has not set the flag in session state yet.
    if st.session_state.get("IsFetchButtonClicked", False):
        user_message = st.session_state.timeframe
    else:
        user_message = st.chat_input("Enter your message:")

    # Nothing to do until the user actually submits a message.
    if not user_message:
        return

    try:
        # Configure the selected LLM.
        # NOTE: 'user_contols_input' spelling matches GroqLLM's parameter name.
        obj_llm_config = GroqLLM(user_contols_input=user_input)
        model = obj_llm_config.get_llm_model()
        if not model:
            st.error("Error: LLM model could not be initialized")
            return

        # Determine which agentic use case the graph should implement.
        usecase = user_input.get("selected_usecase")
        if not usecase:
            st.error("Error: No use case selected.")
            return

        # Build the graph for the use case and render the result interactively.
        graph_builder = GraphBuilder(model)
        try:
            graph = graph_builder.setup_graph(usecase)
            DisplayResultStreamlit(usecase, graph, user_message).display_result_on_ui()
        except Exception as e:
            st.error(f"Error: Graph set up failed- {e}")
            return
    except Exception as e:
        # Distinct message: this catches LLM/configuration failures, not
        # graph-construction failures (handled above).
        st.error(f"Error: Application setup failed- {e}")
        return