Gorantla Krishna committed on
Commit
cf4e043
·
1 Parent(s): e151f47
src/langgraphagenticai/LLMS/groqllm.py CHANGED
@@ -9,7 +9,7 @@ class GroqLLM:
9
  def get_llm_model(self):
10
  try:
11
  groq_api_key = self.user_controls_input["GROQ_API_KEY"]
12
- selected_groq_model = self.user_controls_input["selected_groq_models"]
13
  if groq_api_key=='' and os.environ["GROQ_API_KEY"]=='':
14
  st.error("Please Enter the Groq API key")
15
  llm = ChatGroq(api_key=groq_api_key,model=selected_groq_model)
 
9
  def get_llm_model(self):
10
  try:
11
  groq_api_key = self.user_controls_input["GROQ_API_KEY"]
12
+ selected_groq_model = self.user_controls_input["selected_groq_model"]
13
  if groq_api_key=='' and os.environ["GROQ_API_KEY"]=='':
14
  st.error("Please Enter the Groq API key")
15
  llm = ChatGroq(api_key=groq_api_key,model=selected_groq_model)
src/langgraphagenticai/main.py CHANGED
@@ -3,6 +3,7 @@ import json
3
  from src.langgraphagenticai.ui.streamlitui.loadui import LoadStreamlitUI
4
  from src.langgraphagenticai.LLMS.groqllm import GroqLLM
5
  from src.langgraphagenticai.graph.graph_builder import GraphBuilder
 
6
 
7
  def load_langgraph_agenticai_app():
8
  """
@@ -26,30 +27,36 @@ def load_langgraph_agenticai_app():
26
 
27
  # Initializing the LLM
28
  if user_message:
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
29
  try:
30
- obj_llm_config = GroqLLM(user_controls_input=user_input)
31
- model = obj_llm_config.get_llm_model()
32
-
33
- if not model:
34
- st.error("Error: LLM model could not be initialized.")
35
- return
36
-
37
- usecase = user_input.get("selected_usecase")
38
- if not usecase:
39
- st.error("Error: Usecase not selected.")
40
- return
41
-
42
-
43
- # Graph Builder
44
- graph_builder = GraphBuilder(model)
45
- try:
46
- graph = graph_builder.setup_graph(usecase=usecase)
47
- except Exception as e:
48
- raise ValueError(f"Error: Graph set up Failed - {e}")
49
- return
50
-
51
  except Exception as e:
52
- raise ValueError(f"Error occured with exception : {e}")
 
 
 
 
 
 
 
 
 
 
53
 
54
 
55
 
 
3
  from src.langgraphagenticai.ui.streamlitui.loadui import LoadStreamlitUI
4
  from src.langgraphagenticai.LLMS.groqllm import GroqLLM
5
  from src.langgraphagenticai.graph.graph_builder import GraphBuilder
6
+ from src.langgraphagenticai.ui.streamlitui.display_result import DisplayResultStreamlit
7
 
8
  def load_langgraph_agenticai_app():
9
  """
 
27
 
28
  # Initializing the LLM
29
  if user_message:
30
+
31
+ obj_llm_config = GroqLLM(user_controls_input=user_input)
32
+ model = obj_llm_config.get_llm_model()
33
+
34
+ if not model:
35
+ st.error("Error: LLM model could not be initialized.")
36
+ return
37
+
38
+ usecase = user_input.get("selected_usecase")
39
+ if not usecase:
40
+ st.error("Error: Usecase not selected.")
41
+ return
42
+
43
+
44
+ # Graph Builder
45
+ graph_builder = GraphBuilder(model)
46
  try:
47
+ graph = graph_builder.setup_graph(usecase=usecase)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
48
  except Exception as e:
49
+ raise ValueError(f"Error: Graph set up Failed - {e}")
50
+ return
51
+
52
+ # Display Result
53
+ display_obj = DisplayResultStreamlit(
54
+ usecase=usecase,
55
+ graph=graph,
56
+ user_message=user_message
57
+ )
58
+ display_obj.display_result_on_ui()
59
+
60
 
61
 
62
 
src/langgraphagenticai/nodes/basic_chatbot_node.py CHANGED
@@ -6,6 +6,7 @@ class BasicChatbotNode:
6
  """
7
  def __init__(self,model):
8
  self.llm = model
 
9
 
10
  def process(self,state):
11
  """
 
6
  """
7
  def __init__(self,model):
8
  self.llm = model
9
+
10
 
11
  def process(self,state):
12
  """
src/langgraphagenticai/state/state.py CHANGED
@@ -1,10 +1,10 @@
1
- from typing import Annotated,Literal,Optional
2
  from typing_extensions import TypedDict
3
  from langgraph.graph.message import add_messages
4
- from langchain_core.messages import AIMessage,HumanMessage
5
 
6
  class State(TypedDict):
7
  """
8
  Represents the structure of the state used in the graph
9
  """
10
- messages = Annotated[list,add_messages]
 
1
+ from typing import Annotated,Literal,Optional,List
2
  from typing_extensions import TypedDict
3
  from langgraph.graph.message import add_messages
4
+ from langchain_core.messages import AIMessage,HumanMessage,BaseMessage
5
 
6
  class State(TypedDict):
7
  """
8
  Represents the structure of the state used in the graph
9
  """
10
+ messages: Annotated[list,add_messages]
src/langgraphagenticai/ui/streamlitui/display_result.py CHANGED
@@ -14,11 +14,11 @@ class DisplayResultStreamlit:
14
  graph = self.graph
15
  user_message = self.user_message
16
  if usecase =="Basic Chatbot":
17
- for event in graph.stream({'messages':("user",user_message)}):
18
  print(event.values())
19
  for value in event.values():
20
  print(value['messages'])
21
  with st.chat_message("user"):
22
  st.write(user_message)
23
  with st.chat_message("assistant"):
24
- st.write(value["messages"].content)
 
14
  graph = self.graph
15
  user_message = self.user_message
16
  if usecase =="Basic Chatbot":
17
+ for event in graph.stream({"messages":("user",user_message)}):
18
  print(event.values())
19
  for value in event.values():
20
  print(value['messages'])
21
  with st.chat_message("user"):
22
  st.write(user_message)
23
  with st.chat_message("assistant"):
24
+ st.write(value["messages"][-1].content)
src/langgraphagenticai/ui/uiconfigfile.ini CHANGED
@@ -1,5 +1,5 @@
1
  [DEFAULT]
2
  PAGE_TITLE = LangGraph: Build stateful Agentic AI Graph
3
  LLM_OPTIONS = Groq
4
- USECASE_OPTIONS = Basic chatbot
5
  GROQ_MODEL_OPTIONS = mixtral-8x7b-32768, llama3-8b-8192, llama3-70b-8192, gemma-7b-it
 
1
  [DEFAULT]
2
  PAGE_TITLE = LangGraph: Build stateful Agentic AI Graph
3
  LLM_OPTIONS = Groq
4
+ USECASE_OPTIONS = Basic Chatbot
5
  GROQ_MODEL_OPTIONS = mixtral-8x7b-32768, llama3-8b-8192, llama3-70b-8192, gemma-7b-it