Shivi14321 commited on
Commit
a407200
·
1 Parent(s): 80e63e4

Basic chatbot

Browse files
app.py CHANGED
@@ -0,0 +1,5 @@
 
 
 
 
 
 
1
+ from src.langgraph_agenticAI.main import load_langgraph_agenticai_app
2
+
3
+
4
+ if __name__=="__main__":
5
+ load_langgraph_agenticai_app()
src/__pycache__/__init__.cpython-311.pyc ADDED
Binary file (179 Bytes). View file
 
src/langgraph_agenticAI/Graphs/graph_builder.py ADDED
@@ -0,0 +1,73 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from langgraph.graph import StateGraph, START,END, MessagesState
2
+ from langgraph.prebuilt import tools_condition,ToolNode
3
+ from langchain_core.prompts import ChatPromptTemplate
4
+ from src.langgraph_agenticAI.States.state import State
5
+ from src.langgraph_agenticAI.Nodes.basic_chatbot_node import BasicChatbotNode
6
+ from src.langgraph_agenticAI.Nodes.chatbot_with_Tool_node import ChatbotWithToolNode
7
+ from src.langgraph_agenticAI.Tools.search_tool import get_tools,create_tool_node
8
+
9
+
10
class GraphBuilder:
    """Builds and compiles LangGraph graphs for the supported chatbot use cases."""

    def __init__(self, model):
        # model: a LangChain chat-model instance used by every node in the graph.
        self.llm = model
        self.graph_builder = StateGraph(State)

    def basic_chatbot_build_graph(self):
        """
        Builds a basic chatbot graph using LangGraph.

        This method initializes a chatbot node using the `BasicChatbotNode`
        class and integrates it into the graph. The chatbot node is set as
        both the entry and exit point of the graph.
        """
        self.basic_chatbot_node = BasicChatbotNode(self.llm)
        self.graph_builder.add_node("chatbot", self.basic_chatbot_node.process)
        self.graph_builder.add_edge(START, "chatbot")
        self.graph_builder.add_edge("chatbot", END)

    def chatbot_with_tools_build_graph(self):
        """
        Builds an advanced chatbot graph with tool integration.

        This method creates a chatbot graph that includes both a chatbot node
        and a tool node. It defines tools, initializes the chatbot with tool
        capabilities, and sets up conditional and direct edges between nodes.
        The chatbot node is set as the entry point.
        """
        # Define the tools and the node that executes them.
        tools = get_tools()
        tool_node = create_tool_node(tools)

        # Chatbot node built with the tools bound to the LLM.
        obj_chatbot_with_node = ChatbotWithToolNode(self.llm)
        chatbot_node = obj_chatbot_with_node.create_chatbot(tools)

        # Add nodes.
        self.graph_builder.add_node("chatbot", chatbot_node)
        self.graph_builder.add_node("tools", tool_node)

        # Conditional edge: chatbot -> tools when the LLM requested a tool
        # call, otherwise the prebuilt tools_condition routes to END.
        self.graph_builder.add_edge(START, "chatbot")
        self.graph_builder.add_conditional_edges("chatbot", tools_condition)
        self.graph_builder.add_edge("tools", "chatbot")

    def setup_graph(self, usecase: str):
        """
        Sets up and compiles the graph for the selected use case.

        Raises:
            ValueError: if `usecase` is not a supported option. Previously an
                unknown use case fell through and compiled an *empty* graph,
                which failed later with a confusing error.
        """
        if usecase == "Basic Chatbot":
            self.basic_chatbot_build_graph()
        elif usecase == "Chatbot with Tool":
            self.chatbot_with_tools_build_graph()
        else:
            raise ValueError(f"Unknown use case: {usecase!r}")
        return self.graph_builder.compile()
src/langgraph_agenticAI/LLMs/GroqLLM.py ADDED
@@ -0,0 +1,20 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ import streamlit as st
3
+ from langchain_groq import ChatGroq
4
+
5
class GroqLLM:
    """Factory for a ChatGroq model configured from the Streamlit UI controls."""

    def __init__(self, user_controls_input):
        # user_controls_input: dict produced by LoadStreamLitUI.load_streamlit_ui();
        # expected keys: 'GROQ_API_KEY' and 'selected_groq_model'.
        self.user_controls_input = user_controls_input

    def get_llm_model(self):
        """
        Build and return the ChatGroq model.

        Shows a Streamlit error when no API key is available, and wraps any
        failure in a ValueError so callers see a uniform exception type.

        Raises:
            ValueError: on any failure while reading controls or building the model.
        """
        try:
            groq_api_key = self.user_controls_input['GROQ_API_KEY']
            selected_groq_model = self.user_controls_input['selected_groq_model']
            # Bug fix: os.environ["GROQ_API_KEY"] raised KeyError whenever the
            # environment variable was unset; .get() makes the check safe.
            if groq_api_key == '' and os.environ.get("GROQ_API_KEY", '') == '':
                st.error("Please Enter the Groq API KEY")

            llm = ChatGroq(api_key=groq_api_key, model=selected_groq_model)

        except Exception as e:
            raise ValueError(f"Error Occurred with Exception : {e}")
        return llm
src/langgraph_agenticAI/LLMs/__pycache__/GroqLLM.cpython-311.pyc ADDED
Binary file (203 Bytes). View file
 
src/langgraph_agenticAI/LLMs/__pycache__/__init__.cpython-311.pyc ADDED
Binary file (204 Bytes). View file
 
src/langgraph_agenticAI/LLMs/__pycache__/groq_llm.cpython-311.pyc ADDED
Binary file (204 Bytes). View file
 
src/langgraph_agenticAI/LLMs/groq_llm.py DELETED
File without changes
src/langgraph_agenticAI/Nodes/basic_chatbot_node.py ADDED
@@ -0,0 +1,14 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from src.langgraph_agenticAI.States.state import State
2
+
3
class BasicChatbotNode:
    """
    Basic chatbot logic implementation.
    """

    def __init__(self, model):
        # model: the chat model used to generate replies.
        self.llm = model

    def process(self, state: State) -> dict:
        """
        Processes the input state and generates a chatbot response.

        Returns a partial state update mapping "messages" to the model output;
        LangGraph merges it into the conversation via the State's add_messages.
        """
        reply = self.llm.invoke(state['messages'])
        return {"messages": reply}
src/langgraph_agenticAI/States/state.py ADDED
@@ -0,0 +1,11 @@
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from typing import Annotated, Literal, Optional
2
+ from typing_extensions import TypedDict
3
+ from langgraph.graph.message import add_messages
4
+ from typing import TypedDict, Annotated, List
5
+ from langchain_core.messages import HumanMessage, AIMessage
6
+
7
class State(TypedDict):
    """
    Represents the structure of the state used in the graph.
    """
    # Conversation history; the add_messages reducer appends new messages
    # to the list instead of overwriting it on each node update.
    messages: Annotated[list, add_messages]
src/langgraph_agenticAI/UI/__pycache__/__init__.cpython-311.pyc ADDED
Binary file (202 Bytes). View file
 
src/langgraph_agenticAI/UI/__pycache__/uiconfigfile.cpython-311.pyc ADDED
Binary file (2.06 kB). View file
 
src/langgraph_agenticAI/UI/streamlitui/__pycache__/display_result.cpython-311.pyc ADDED
Binary file (220 Bytes). View file
 
src/langgraph_agenticAI/UI/streamlitui/__pycache__/load_ui.cpython-311.pyc ADDED
Binary file (4.03 kB). View file
 
src/langgraph_agenticAI/UI/streamlitui/load_ui.py CHANGED
@@ -2,11 +2,80 @@ import streamlit as st
2
  import os
3
  from datetime import date
4
 
5
- from langchain_core import AIMessage, HumanMessage #because msg we are going to mention in UI is either AImsg or Humanmsg
6
  from src.langgraph_agenticAI.UI.uiconfigfile import Config #Config is class
 
7
  class LoadStreamLitUI:
8
  def __init__(self):
9
  self.config=Config() #for this cofig we created ini file i.e, uiconfig.ini (text file to store configs in form of key-value pairs) like yaml config.
10
- self.user_controls={}
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
11
 
12
 
 
2
  import os
3
  from datetime import date
4
 
5
+ #from langchain_core import AIMessage, HumanMessage #because msg we are going to mention in UI is either AImsg or Humanmsg
6
  from src.langgraph_agenticAI.UI.uiconfigfile import Config #Config is class
7
+
8
class LoadStreamLitUI:
    """Renders the Streamlit page header and sidebar controls, and collects
    the user's selections into a plain dict for the rest of the app."""

    def __init__(self):
        # Config wraps uiconfigfile.ini (key-value config file, like a YAML config).
        self.config = Config()
        self.user_controls = {}  # dictionary of the user's UI selections

    def initialize_session(self):
        """Return the initial per-session workflow state dict."""
        return {
            "current_step": "requirements",
            "requirements": "",
            "user_stories": "",
            "po_feedback": "",
            "generated_code": "",
            "review_feedback": "",
            "decision": None
        }
    # NOTE(review): a dead, stringified render_requirements() draft was removed
    # here; it reused the widget key "req-input" for two widgets, which would
    # crash Streamlit if ever re-enabled.

    def load_streamlit_ui(self):
        """
        Draw the page header and sidebar widgets and return the collected controls.

        Side effects: writes timeframe / IsFetchButtonClicked / IsSDLC /
        GROQ_API_KEY / TAVILY_API_KEY (and the TAVILY env var) into
        st.session_state and os.environ as the user fills in the sidebar.
        """
        st.set_page_config(page_title="🤖 " + self.config.get_page_title(), layout="wide")
        st.header("🤖 " + self.config.get_page_title())
        st.session_state.timeframe = ''
        st.session_state.IsFetchButtonClicked = False
        st.session_state.IsSDLC = False

        with st.sidebar:  # sidebar holds all configuration widgets
            # Get options from config
            llm_options = self.config.get_llm_options()
            usecase_options = self.config.get_usecase_options()

            # LLM selection
            self.user_controls["selected_llm"] = st.selectbox("Select LLM", llm_options)

            if self.user_controls["selected_llm"] == 'Groq':
                # Model selection
                model_options = self.config.get_groq_model_options()
                self.user_controls["selected_groq_model"] = st.selectbox("Select Model", model_options)
                # API key input
                self.user_controls["GROQ_API_KEY"] = st.session_state["GROQ_API_KEY"] = st.text_input("API Key",
                                                                                                      type="password")
                # Validate API key
                if not self.user_controls["GROQ_API_KEY"]:
                    st.warning("⚠️ Please enter your GROQ API key to proceed. Don't have? refer : https://console.groq.com/keys ")

            # Use case selection
            self.user_controls["selected_usecase"] = st.selectbox("Select Usecases", usecase_options)

            if self.user_controls["selected_usecase"] == "Chatbot with Tool":
                # API key input; also exported to the environment for the Tavily tool.
                os.environ["TAVILY_API_KEY"] = self.user_controls["TAVILY_API_KEY"] = st.session_state["TAVILY_API_KEY"] = st.text_input("TAVILY API KEY",
                                                                                                                                         type="password")
                # Validate API key
                if not self.user_controls["TAVILY_API_KEY"]:
                    st.warning("⚠️ Please enter your TAVILY_API_KEY key to proceed. Don't have? refer : https://app.tavily.com/home")

        # Seed the workflow state once per session.
        if "state" not in st.session_state:
            st.session_state.state = self.initialize_session()

        return self.user_controls
80
 
81
 
src/langgraph_agenticAI/UI/uiconfigfile.ini CHANGED
@@ -1,5 +1,5 @@
1
  [DEFAULT]
2
- PAGE_TITLE= Langgraph: Build Stateful Agentic AI graph
3
  LLM_OPTIONS= Groq
4
- USECASE_OPTIONS= Basic Chatbot, Chatbot With Tool, Travel Partner, SDLC Workflow, AI News
5
  GROQ_MODEL_OPTIONS= mixtral-8x7b-32768, llama3-8b-8192, llama-70b-8192, gemma-7b-i
 
1
  [DEFAULT]
2
+ PAGE_TITLE = LangGraph: Build Stateful Agentic AI graph
3
  LLM_OPTIONS= Groq
4
+ USECASE_OPTIONS= Basic Chatbot
5
 GROQ_MODEL_OPTIONS= mixtral-8x7b-32768, llama3-8b-8192, llama3-70b-8192, gemma-7b-it
src/langgraph_agenticAI/UI/uiconfigfile.py CHANGED
@@ -1,18 +1,18 @@
1
  from configparser import ConfigParser #class to parse cofig text file
2
 
3
  class Config: #inside any class, we start with constructor
4
- def __init__(self,config_file="C:\Users\blues\OneDrive\Documents\E2E_Langraph_Project\src\langgraph_agenticAI\UI\uiconfigfile.ini"):
5
  self.config=ConfigParser() #initialize the constructor, config is public variable , use this configParser object will read the config file and store in config variable
6
  self.config.read(config_file)
7
 
8
- def get_llm_option(self): #to only read llm field from config file
9
- return self.config["DEFAULT"].get("LLM_OPTIONS").split(", ") #DEFAULT is root note for all the below node hving access of everything written below
10
 
11
- def get_usecase_options(self):
12
- return self.config["DEFAULT"].get("USECASE_OPTIONS").split(", ")
13
 
14
- def get_groq_model_options(self):
15
- return self.config["DEFAULT"].get("GROQ_MODEL_OPTIONS").split(", ")
16
 
17
- def get_page_title(self):
18
- return self.config["DEFAULT"].get("PAGE_TITLE")
 
1
  from configparser import ConfigParser #class to parse cofig text file
2
 
3
class Config:
    """Thin wrapper around the UI's ini settings file, parsed once at construction."""

    def __init__(self, config_file="./src/langgraph_agenticAI/UI/uiconfigfile.ini"):
        # Parse the ini file eagerly; the parser object stays available on
        # self.config for the getters below.
        parser = ConfigParser()
        parser.read(config_file)
        self.config = parser

    def get_llm_options(self):
        """Return the list of LLM provider names from the [DEFAULT] section."""
        # Values under [DEFAULT] are inherited by every other section.
        return self.config["DEFAULT"].get("LLM_OPTIONS").split(", ")

    def get_usecase_options(self):
        """Return the list of selectable use cases."""
        return self.config["DEFAULT"].get("USECASE_OPTIONS").split(", ")

    def get_groq_model_options(self):
        """Return the list of Groq model identifiers."""
        return self.config["DEFAULT"].get("GROQ_MODEL_OPTIONS").split(", ")

    def get_page_title(self):
        """Return the Streamlit page title string."""
        return self.config["DEFAULT"].get("PAGE_TITLE")
src/langgraph_agenticAI/__pycache__/__init__.cpython-311.pyc ADDED
Binary file (199 Bytes). View file
 
src/langgraph_agenticAI/__pycache__/main.cpython-311.pyc ADDED
Binary file (2.64 kB). View file
 
src/langgraph_agenticAI/main.py CHANGED
@@ -0,0 +1,62 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import streamlit as st
2
+ import json
3
+ from src.langgraph_agenticAI.UI.streamlitui.load_ui import LoadStreamLitUI
4
+ from src.langgraph_agenticAI.LLMs.GroqLLM import GroqLLM
5
+ from src.langgraph_agenticAI.Graphs.graph_builder import GraphBuilder
6
+ #from src.langgraph_agenticAI.UI.streamlitui.display_result import DisplayResultStreamlit
7
+
8
+ # MAIN Function START
9
def load_langgraph_agenticai_app():
    """
    Loads and runs the LangGraph AgenticAI application with Streamlit UI.

    This function initializes the UI, handles user input, configures the LLM
    model, sets up the graph based on the selected use case, and displays the
    output while implementing exception handling for robustness.
    """
    # Bug fix: DisplayResultStreamlit is used below but its module-level import
    # is commented out, so the call raised NameError at runtime. Import it
    # locally here. NOTE(review): assumes the class is defined in
    # display_result.py — confirm, as its compiled .pyc looks near-empty.
    from src.langgraph_agenticAI.UI.streamlitui.display_result import DisplayResultStreamlit

    # Load UI
    ui = LoadStreamLitUI()
    user_input = ui.load_streamlit_ui()

    if not user_input:
        st.error("Error: Failed to load user input from the UI.")
        return

    # Text input for user message
    if st.session_state.IsFetchButtonClicked:
        user_message = st.session_state.timeframe
    else:
        user_message = st.chat_input("Enter your message:")

    if user_message:  # execution continues only once a message is entered
        try:
            # Configure LLM
            obj_llm_config = GroqLLM(user_controls_input=user_input)
            model = obj_llm_config.get_llm_model()

            if not model:
                st.error("Error: LLM model could not be initialized.")
                return

            # Initialize and set up the graph based on use case
            usecase = user_input.get('selected_usecase')
            if not usecase:
                st.error("Error: No use case selected.")
                return

            # Graph builder: compile the graph and render the result.
            graph_builder = GraphBuilder(model)
            try:
                graph = graph_builder.setup_graph(usecase)
                DisplayResultStreamlit(usecase, graph, user_message).display_result_on_ui()
            except Exception as e:
                st.error(f"Error: Graph setup failed - {e}")
                return

        except Exception as e:
            raise ValueError(f"Error Occurred with Exception : {e}")
60
+
61
+
62
+