Gorantla Krishna committed on
Commit
42b1b7d
·
1 Parent(s): f589cf3
app.py CHANGED
@@ -0,0 +1,5 @@
 
 
 
 
 
 
1
from src.langgraphagenticai.main import load_langgraph_agenticai_app

# Entry point: launch the Streamlit-based LangGraph agentic AI application.
if __name__ == "__main__":
    load_langgraph_agenticai_app()
src/langgraphagenticai/LLMS/groqllm.py CHANGED
@@ -0,0 +1,21 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import streamlit as st
import os
from langchain_groq import ChatGroq


class GroqLLM:
    """Factory for a Groq chat model configured from the Streamlit UI controls."""

    def __init__(self, user_controls_input):
        # Dict of UI selections (API key, model name) gathered by LoadStreamlitUI.
        self.user_controls_input = user_controls_input

    def get_llm_model(self):
        """Build and return a ChatGroq model from the UI controls.

        Returns:
            ChatGroq: the configured chat model.

        Raises:
            ValueError: if the API key is missing or model creation fails.
        """
        try:
            groq_api_key = self.user_controls_input["GROQ_API_KEY"]
            # BUG FIX: the UI stores the selection under "selected_groq_model"
            # (singular); the original read "selected_groq_models" and always
            # raised KeyError.
            selected_groq_model = self.user_controls_input["selected_groq_model"]
            # BUG FIX: fall back to the environment via .get() — indexing
            # os.environ raised KeyError whenever the variable was unset.
            if not groq_api_key:
                groq_api_key = os.environ.get("GROQ_API_KEY", "")
            if not groq_api_key:
                # Original only displayed the error and then built the model
                # with an empty key; stop instead.
                st.error("Please Enter the Groq API key")
                raise ValueError("GROQ API key is missing")
            llm = ChatGroq(api_key=groq_api_key, model=selected_groq_model)
        except ValueError:
            # Preserve the specific missing-key error rather than re-wrapping.
            raise
        except Exception as e:
            raise ValueError(f"Error occured with Exception: {e}") from e

        return llm
20
+
21
+
src/langgraphagenticai/graph/graph_builder.py ADDED
@@ -0,0 +1,15 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
from langgraph.graph import StateGraph, START, END, MessagesState
from langgraph.prebuilt import tools_condition, ToolNode
from langchain_core.prompts import ChatPromptTemplate
import datetime
from src.langgraphagenticai.state.state import State


class GraphBuilder:
    """Builds LangGraph state graphs for the supported use cases."""

    def __init__(self, model):
        # LLM instance used by the graph's nodes.
        self.llm = model
        # Fresh builder; nodes and edges are added per use case.
        self.graph_builder = StateGraph(State)

    def basic_chatbot_build_graph(self):
        """Wire the minimal single-node chatbot graph and return it compiled.

        BUG FIX: the original method body was empty, which is a Python
        syntax error; this completes it with the standard
        START -> chatbot -> END topology.
        """

        def chatbot(state):
            # Hand the running message list to the LLM and append its reply.
            return {"messages": [self.llm.invoke(state["messages"])]}

        self.graph_builder.add_node("chatbot", chatbot)
        self.graph_builder.add_edge(START, "chatbot")
        self.graph_builder.add_edge("chatbot", END)
        return self.graph_builder.compile()
src/langgraphagenticai/main.py CHANGED
@@ -0,0 +1,46 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import streamlit as st
import json
from src.langgraphagenticai.ui.streamlitui.loadui import LoadStreamlitUI
from src.langgraphagenticai.LLMS.groqllm import GroqLLM


def load_langgraph_agenticai_app():
    """
    Loads and runs the LangGraph AgenticAI application with Streamlit UI.

    This function initializes the UI, handles user input, configures the LLM
    model, sets up the graph based on the selected use case, and displays the
    output while implementing exception handling for robustness.
    """
    ui = LoadStreamlitUI()
    user_input = ui.load_streamlit_ui()

    if not user_input:
        st.error("Error:Failed to load user input from UI")
        # BUG FIX: the original fell through and kept using the invalid
        # input; stop here instead.
        return

    # Text input for user message.
    # NOTE(review): IsFetchButtonClicked is initialised by load_streamlit_ui
    # on every rerun, so this attribute access is safe here.
    if st.session_state.IsFetchButtonClicked:
        user_message = st.session_state.timeframe
    else:
        user_message = st.chat_input("Enter your message:")

    # Initializing the LLM
    if user_message:
        try:
            obj_llm_config = GroqLLM(user_controls_input=user_input)
            model = obj_llm_config.get_llm_model()

            if not model:
                st.error("Error: LLM model could not be initialized.")
                return

            usecase = user_input.get("selected_usecase")
            if not usecase:
                st.error("Error: Usecase not selected.")
                return

        except Exception as e:
            raise ValueError(f"Error occured with exception : {e}")

        # Graph Builder — wiring of the graph/display is not implemented in
        # this commit yet.
src/langgraphagenticai/nodes/basic_chatbot_node.py ADDED
@@ -0,0 +1,17 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from src.langgraphagenticai.state.state import State
2
+
3
class BasicChatbotNode:
    """Graph node implementing the basic chatbot behaviour."""

    def __init__(self, model):
        # Chat model used to answer each conversational turn.
        self.llm = model

    def process(self, state):
        """Invoke the LLM on the conversation state and wrap its reply.

        Returns a partial state update: the new message in a one-element list.
        """
        reply = self.llm.invoke(state["messages"])
        return {"messages": [reply]}
src/langgraphagenticai/state/state.py ADDED
@@ -0,0 +1,10 @@
 
 
 
 
 
 
 
 
 
 
 
1
+ from typing import Annotated,Literal,Optional
2
+ from typing_extensions import TypedDict
3
+ from langgraph.graph.message import add_messages
4
+ from langchain_core.messages import AIMessage,HumanMessage
5
+
6
class State(TypedDict):
    """
    Represents the structure of the state used in the graph.
    """
    # BUG FIX: the original used an assignment ("messages = ..."), which does
    # not declare a TypedDict field — fields require a ":" annotation.
    # add_messages tells LangGraph to append new messages to the list rather
    # than replace it.
    messages: Annotated[list, add_messages]
src/langgraphagenticai/ui/streamlitui/display_result.py CHANGED
@@ -0,0 +1,24 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import streamlit as st
from langchain_core.messages import HumanMessage, AIMessage
import json


class DisplayResultStreamlit:
    """Renders graph output for the selected use case on the Streamlit UI."""

    def __init__(self, usecase, graph, user_message):
        self.usecase = usecase          # selected use-case label from the UI
        self.graph = graph              # compiled LangGraph graph
        self.user_message = user_message

    def display_result_on_ui(self):
        """Stream the graph with the user message and render each reply."""
        usecase = self.usecase
        graph = self.graph
        user_message = self.user_message
        # BUG FIX: compare case-insensitively — the config declares the
        # option as "Basic chatbot" while this checked "Basic Chatbot",
        # so the branch could never run.
        if usecase.lower() == "basic chatbot":
            # BUG FIX: the messages value must be a list of messages; the
            # original passed a bare ("user", msg) tuple. Debug prints removed.
            for event in graph.stream({'messages': [("user", user_message)]}):
                for value in event.values():
                    messages = value['messages']
                    # With add_messages the state value is a list — show the
                    # newest entry; a single message object is used as-is.
                    reply = messages[-1] if isinstance(messages, list) else messages
                    with st.chat_message("user"):
                        st.write(user_message)
                    with st.chat_message("assistant"):
                        st.write(reply.content)
src/langgraphagenticai/ui/streamlitui/loadui.py CHANGED
@@ -9,6 +9,55 @@ class LoadStreamlitUI:
9
  self.config = config()
10
  self.user_controls = {}
11
 
 
 
 
 
 
 
 
 
 
 
 
 
12
  def load_streamlit_ui(self):
13
- st.set_page_config()
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
14
 
 
9
  self.config = config()
10
  self.user_controls = {}
11
 
12
+ def initialize_session(self):
13
+ return {
14
+ "current_step": "requirements",
15
+ "requirements": "",
16
+ "user_stories": "",
17
+ "po_feedback": "",
18
+ "generated_code": "",
19
+ "review_feedback": "",
20
+ "decision": None
21
+ }
22
+
23
+
24
  def load_streamlit_ui(self):
25
+ st.set_page_config(page_title= "🤖 " + self.config.get_page_title(), layout="wide")
26
+ st.header("🤖 " + self.config.get_page_title())
27
+ st.session_state.timeframe = ''
28
+ st.session_state.IsFetchButtonClicked = False
29
+ st.session_state.IsSDLC = False
30
+
31
+
32
+
33
+ with st.sidebar:
34
+ # Get options from config
35
+ llm_options = self.config.get_llm_options()
36
+ usecase_options = self.config.get_usecase_options()
37
+
38
+ # LLM selection
39
+ self.user_controls["selected_llm"] = st.selectbox("Select LLM", llm_options)
40
+
41
+ if self.user_controls["selected_llm"] == 'Groq':
42
+ # Model selection
43
+ model_options = self.config.get_groq_model_options()
44
+ self.user_controls["selected_groq_model"] = st.selectbox("Select Model", model_options)
45
+ # API key input
46
+ self.user_controls["GROQ_API_KEY"] = st.session_state["GROQ_API_KEY"] = st.text_input("API Key",
47
+ type="password")
48
+ # Validate API key
49
+ if not self.user_controls["GROQ_API_KEY"]:
50
+ st.warning("⚠️ Please enter your GROQ API key to proceed. Don't have? refer : https://console.groq.com/keys ")
51
+
52
+
53
+ # Use case selection
54
+ self.user_controls["selected_usecase"] = st.selectbox("Select Usecases", usecase_options)
55
+
56
+ if "state" not in st.session_state:
57
+ st.session_state.state = self.initialize_session()
58
+
59
+
60
+
61
+
62
+ return self.user_controls
63
 
src/langgraphagenticai/ui/uiconfigfile.ini CHANGED
@@ -2,4 +2,4 @@
2
  PAGE_TITLE = LangGraph: Build stateful Agentic AI Graph
3
  LLM_OPTIONS = Groq
4
  USECASE_OPTIONS = Basic chatbot
5
- GROQ_MODEL_OPTIONS = mixtral-8x7b-32768,llama3-8b-8192,llama3-70b-8192,gemma-7b-i
 
2
  PAGE_TITLE = LangGraph: Build stateful Agentic AI Graph
3
  LLM_OPTIONS = Groq
4
  USECASE_OPTIONS = Basic chatbot
5
+ GROQ_MODEL_OPTIONS = mixtral-8x7b-32768, llama3-8b-8192, llama3-70b-8192, gemma-7b-it
src/langgraphagenticai/ui/uiconfigfile.py CHANGED
@@ -5,7 +5,7 @@ class config:
5
  self.config = ConfigParser()
6
  self.config.read(config_file)
7
 
8
- def get_llm_option(self):
9
  return self.config["DEFAULT"].get("LLM_OPTIONS").split(", ")
10
 
11
  def get_usecase_options(self):
 
5
  self.config = ConfigParser()
6
  self.config.read(config_file)
7
 
8
+ def get_llm_options(self):
9
  return self.config["DEFAULT"].get("LLM_OPTIONS").split(", ")
10
 
11
  def get_usecase_options(self):