genaitiwari commited on
Commit
e1283cc
·
1 Parent(s): 2b2818d

seperated for display results

Browse files
src/langgraphagenticai/main.py CHANGED
@@ -1,61 +1,69 @@
 
1
  from src.langgraphagenticai.LLMS.groqllm import GroqLLM
2
  from src.langgraphagenticai.graph.graph_builder import GraphBuilder
3
  from src.langgraphagenticai.ui.streamlitui.loadui import LoadStreamlitUI
4
 
5
  import streamlit as st
6
- from langchain_core.messages import HumanMessage,AIMessage,ToolMessage
7
 
8
 
9
  # MAIN Function START
10
  def load_langgraph_agenticai_app():
11
-
12
- # load ui
13
- ui = LoadStreamlitUI()
14
- user_input = ui.load_streamlit_ui()
15
 
16
- # Text input for user message
17
- user_message = st.chat_input("Enter your message:")
18
- if user_message:
19
- # Configure LLM
20
- obj_llm_config = GroqLLM(user_controls_input=user_input)
21
- model = obj_llm_config.get_llm_model()
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
22
 
23
- # Initialize and set up the graph based on use case
24
- usecase = user_input['selected_usecase']
25
- graph_builder = GraphBuilder(model)
26
- graph = graph_builder.setup_graph(usecase)
27
 
28
- # Display output in UI
29
- if usecase =="Basic Chatbot":
30
- for event in graph.stream({'messages':("user",user_message)}):
31
- print(event.values())
32
- for value in event.values():
33
- print(value['messages'])
34
- with st.chat_message("user"):
35
- st.write(user_message)
36
- with st.chat_message("assistant"):
37
- st.write(value["messages"].content)
38
- elif usecase =="Chatbot with Tool":
39
- # Prepare state and invoke the graph
40
- initial_state = {"messages": [user_message]}
41
- res = graph.invoke(initial_state)
42
- for message in res['messages']:
43
- if type(message) == HumanMessage:
44
- with st.chat_message("user"):
45
- st.write(message.content)
46
- elif type(message)==ToolMessage:
47
- with st.chat_message("ai"):
48
- st.write("Tool Call Start")
49
- st.write(message.content)
50
- st.write("Tool Call End")
51
- elif type(message)==AIMessage and message.content:
52
- with st.chat_message("assistant"):
53
- st.write(message.content)
54
 
55
- # display graph
56
- if graph:
57
- st.write('state graph - workflow')
58
- st.image(graph.get_graph(xray=True).draw_mermaid_png())
59
 
60
 
61
 
 
1
+ from src.langgraphagenticai.ui.streamlitui.display_result import DisplayResultStreamlit
2
  from src.langgraphagenticai.LLMS.groqllm import GroqLLM
3
  from src.langgraphagenticai.graph.graph_builder import GraphBuilder
4
  from src.langgraphagenticai.ui.streamlitui.loadui import LoadStreamlitUI
5
 
6
  import streamlit as st
 
7
 
8
 
9
  # MAIN Function START
10
  def load_langgraph_agenticai_app():
11
+ """
12
+ Loads and runs the LangGraph AgenticAI application with Streamlit UI.
 
 
13
 
14
+ This function initializes the UI, handles user input, configures the LLM model,
15
+ sets up the graph based on the selected use case, and displays the output while
16
+ implementing exception handling for robustness.
17
+ """
18
+ try:
19
+ # Load UI
20
+ ui = LoadStreamlitUI()
21
+ user_input = ui.load_streamlit_ui()
22
+
23
+ if not user_input:
24
+ st.error("Error: Failed to load user input from the UI.")
25
+ return
26
+
27
+ # Text input for user message
28
+ user_message = st.chat_input("Enter your message:")
29
+ if user_message:
30
+ try:
31
+ # Configure LLM
32
+ obj_llm_config = GroqLLM(user_controls_input=user_input)
33
+ model = obj_llm_config.get_llm_model()
34
+
35
+ if not model:
36
+ st.error("Error: LLM model could not be initialized.")
37
+ return
38
+
39
+ # Initialize and set up the graph based on use case
40
+ usecase = user_input.get('selected_usecase')
41
+ if not usecase:
42
+ st.error("Error: No use case selected.")
43
+ return
44
+
45
+ graph_builder = GraphBuilder(model)
46
+
47
+ try:
48
+ graph = graph_builder.setup_graph(usecase)
49
+ except Exception as e:
50
+ st.error(f"Error: Graph setup failed - {e}")
51
+ return
52
+
53
+ # Display output in UI
54
+ try:
55
+ DisplayResultStreamlit(usecase,graph,user_message).display_result_on_ui()
56
+ except Exception as e:
57
+ st.error(f"Error: Failed to display results on UI - {e}")
58
+
59
+ except Exception as e:
60
+ st.error(f"Error: LLM configuration failed - {e}")
61
+
62
+ except Exception as e:
63
+ st.error(f"Unexpected error occurred: {e}")
64
 
 
 
 
 
65
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
66
 
 
 
 
 
67
 
68
 
69
 
src/langgraphagenticai/ui/streamlitui/display_result.py ADDED
@@ -0,0 +1,44 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import streamlit as st
2
+ from langchain_core.messages import HumanMessage,AIMessage,ToolMessage
3
+
4
+ class DisplayResultStreamlit:
5
+ def __init__(self,usecase,graph,user_message):
6
+ self.usecase= usecase
7
+ self.graph = graph
8
+ self.user_message = user_message
9
+
10
+
11
+ def display_result_on_ui(self):
12
+ usecase= self.usecase
13
+ graph = self.graph
14
+ user_message = self.user_message
15
+ if usecase =="Basic Chatbot":
16
+ for event in graph.stream({'messages':("user",user_message)}):
17
+ print(event.values())
18
+ for value in event.values():
19
+ print(value['messages'])
20
+ with st.chat_message("user"):
21
+ st.write(user_message)
22
+ with st.chat_message("assistant"):
23
+ st.write(value["messages"].content)
24
+ elif usecase =="Chatbot with Tool":
25
+ # Prepare state and invoke the graph
26
+ initial_state = {"messages": [user_message]}
27
+ res = graph.invoke(initial_state)
28
+ for message in res['messages']:
29
+ if type(message) == HumanMessage:
30
+ with st.chat_message("user"):
31
+ st.write(message.content)
32
+ elif type(message)==ToolMessage:
33
+ with st.chat_message("ai"):
34
+ st.write("Tool Call Start")
35
+ st.write(message.content)
36
+ st.write("Tool Call End")
37
+ elif type(message)==AIMessage and message.content:
38
+ with st.chat_message("assistant"):
39
+ st.write(message.content)
40
+
41
+ # display graph
42
+ if graph:
43
+ st.write('state graph - workflow')
44
+ st.image(graph.get_graph(xray=True).draw_mermaid_png())