kamaleswar Mohanta committed on
Commit
cd63bb3
·
1 Parent(s): e641960

load ui, main, app

Browse files
app.py CHANGED
@@ -0,0 +1,5 @@
 
 
 
 
 
 
1
+ from src.langgraphagenticai.main import load_langgraph_agenticai_app
2
+
3
+
4
+ if __name__=="__main__":
5
+ load_langgraph_agenticai_app()
src/__pycache__/__init__.cpython-312.pyc ADDED
Binary file (185 Bytes). View file
 
src/langgraphagenticai/LLMS/__pycache__/__init__.cpython-312.pyc ADDED
Binary file (209 Bytes). View file
 
src/langgraphagenticai/LLMS/__pycache__/groqllm.cpython-312.pyc ADDED
Binary file (208 Bytes). View file
 
src/langgraphagenticai/LLMS/groqllm.py CHANGED
@@ -0,0 +1 @@
 
 
# Placeholder module for the Groq LLM wrapper (GroqLLM); implementation pending.
# NOTE(review): main.py already contains a commented-out import of GroqLLM from
# this module — presumably a class with a get_llm_model() method is planned.
pass
src/langgraphagenticai/__pycache__/__init__.cpython-312.pyc ADDED
Binary file (204 Bytes). View file
 
src/langgraphagenticai/__pycache__/main.cpython-312.pyc ADDED
Binary file (1.29 kB). View file
 
src/langgraphagenticai/main.py CHANGED
@@ -0,0 +1,59 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import streamlit as st
2
+ import json
3
+ from src.langgraphagenticai.ui.streamlitui.loadui import LoadStreamlitUI
4
+ # from src.langgraphagenticai.LLMS.groqllm import GroqLLM
5
+ # from src.langgraphagenticai.graph.graph_builder import GraphBuilder
6
+ # from src.langgraphagenticai.ui.streamlitui.display_result import DisplayResultStreamlit
7
+
8
+ # MAIN Function START
9
+ def load_langgraph_agenticai_app():
10
+ """
11
+ Loads and runs the LangGraph AgenticAI application with Streamlit UI.
12
+ This function initializes the UI, handles user input, configures the LLM model,
13
+ sets up the graph based on the selected use case, and displays the output while
14
+ implementing exception handling for robustness.
15
+ """
16
+
17
+ # Load UI
18
+ ui = LoadStreamlitUI()
19
+ user_input = ui.load_streamlit_ui()
20
+
21
+ if not user_input:
22
+ st.error("Error: Failed to load user input from the UI.")
23
+ return
24
+
25
+ # Text input for user message
26
+ if st.session_state.IsFetchButtonClicked:
27
+ user_message = st.session_state.timeframe
28
+ else :
29
+ user_message = st.chat_input("Enter your message:")
30
+
31
+ # if user_message:
32
+ # try:
33
+ # # Configure LLM
34
+ # obj_llm_config = GroqLLM(user_controls_input=user_input)
35
+ # model = obj_llm_config.get_llm_model()
36
+
37
+ # if not model:
38
+ # st.error("Error: LLM model could not be initialized.")
39
+ # return
40
+
41
+ # # Initialize and set up the graph based on use case
42
+ # usecase = user_input.get('selected_usecase')
43
+ # if not usecase:
44
+ # st.error("Error: No use case selected.")
45
+ # return
46
+
47
+
48
+ # ### Graph Builder
49
+ # graph_builder=GraphBuilder(model)
50
+ # try:
51
+ # graph = graph_builder.setup_graph(usecase)
52
+ # DisplayResultStreamlit(usecase,graph,user_message).display_result_on_ui()
53
+ # except Exception as e:
54
+ # st.error(f"Error: Graph setup failed - {e}")
55
+ # return
56
+
57
+
58
+ # except Exception as e:
59
+ # raise ValueError(f"Error Occurred with Exception : {e}")
src/langgraphagenticai/ui/__pycache__/__init__.cpython-312.pyc ADDED
Binary file (207 Bytes). View file
 
src/langgraphagenticai/ui/__pycache__/uiconfigfile.cpython-312.pyc ADDED
Binary file (2.18 kB). View file
 
src/langgraphagenticai/ui/streamlitui/__pycache__/loadui.cpython-312.pyc ADDED
Binary file (4.02 kB). View file
 
src/langgraphagenticai/ui/streamlitui/loadui.py CHANGED
@@ -8,5 +8,65 @@ from src.langgraphagenticai.ui.uiconfigfile import Config
8
 
9
  class LoadStreamlitUI:
10
  def __init__(self):
11
- self.config=Config() # config
12
- self.user_controls={}
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
8
 
9
class LoadStreamlitUI:
    """Builds the Streamlit sidebar/controls and collects user selections."""

    def __init__(self):
        self.config = Config()  # app configuration (titles, option lists)
        self.user_controls = {}  # collected UI selections, returned to caller

    def initialize_session(self):
        """Return a fresh SDLC workflow state dict for st.session_state."""
        return dict(
            current_step="requirements",
            requirements="",
            user_stories="",
            po_feedback="",
            generated_code="",
            review_feedback="",
            decision=None,
        )

    def load_streamlit_ui(self):
        """Render the page and sidebar widgets; return the user's selections."""
        page_title = "🤖 " + self.config.get_page_title()
        st.set_page_config(page_title=page_title, layout="wide")
        st.header(page_title)
        # Reset per-run flags; IsFetchButtonClicked gates the message source
        # used by main.load_langgraph_agenticai_app.
        st.session_state.timeframe = ''
        st.session_state.IsFetchButtonClicked = False
        st.session_state.IsSDLC = False

        with st.sidebar:
            # Option lists come from the config file.
            llm_options = self.config.get_llm_options()
            usecase_options = self.config.get_usecase_options()

            # LLM selection
            self.user_controls["selected_llm"] = st.selectbox("Select LLM", llm_options)

            if self.user_controls["selected_llm"] == 'Groq':
                # Model selection for the Groq provider.
                model_options = self.config.get_groq_model_options()
                self.user_controls["selected_groq_model"] = st.selectbox("Select Model", model_options)
                # API key input — mirrored into session state for reuse.
                groq_key = st.text_input("API Key",
                                         type="password")
                st.session_state["GROQ_API_KEY"] = groq_key
                self.user_controls["GROQ_API_KEY"] = groq_key
                if not groq_key:
                    st.warning("⚠️ Please enter your GROQ API key to proceed. Don't have? refer : https://console.groq.com/keys ")

            # Use case selection
            self.user_controls["selected_usecase"] = st.selectbox("Select Usecases", usecase_options)

            if self.user_controls["selected_usecase"] == "Chatbot with Tool":
                # Tavily key — mirrored into session state and the process env.
                tavily_key = st.text_input("TAVILY API KEY",
                                           type="password")
                st.session_state["TAVILY_API_KEY"] = tavily_key
                self.user_controls["TAVILY_API_KEY"] = tavily_key
                os.environ["TAVILY_API_KEY"] = tavily_key
                if not tavily_key:
                    st.warning("⚠️ Please enter your TAVILY_API_KEY key to proceed. Don't have? refer : https://app.tavily.com/home")

            # Seed the SDLC workflow state exactly once per session.
            if "state" not in st.session_state:
                st.session_state.state = self.initialize_session()

        return self.user_controls
src/langgraphagenticai/ui/uiconfigfile.py CHANGED
@@ -11,7 +11,7 @@ class Config:
11
  self.config = ConfigParser()
12
  self.config.read(config_file)
13
 
14
- def get_llm_option(self):
15
  return self.config["DEFAULT"].get("LLM_OPTIONS").split(", ")
16
 
17
  def get_usecase_options(self):
 
11
  self.config = ConfigParser()
12
  self.config.read(config_file)
13
 
14
+ def get_llm_options(self):
15
  return self.config["DEFAULT"].get("LLM_OPTIONS").split(", ")
16
 
17
  def get_usecase_options(self):