sahandkh1419 commited on
Commit
d108f4c
·
verified ·
1 Parent(s): 4c56aee

Upload 21 files

Browse files
app.py ADDED
@@ -0,0 +1,4 @@
 
 
 
 
 
1
from src.langgraphagenticai.main import load_langgraph_agenticai_app

# Streamlit entry point: launch the LangGraph Agentic AI app when this
# module is executed directly (e.g. `streamlit run app.py`).
if __name__=="__main__":
    load_langgraph_agenticai_app()
src/__init__.py ADDED
File without changes
src/langgraphagenticai/LLMS/__init__.py ADDED
File without changes
src/langgraphagenticai/LLMS/groqllm.py ADDED
@@ -0,0 +1,20 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ import streamlit as st
3
+ from langchain_groq import ChatGroq
4
+
5
class GroqLLM:
    """Factory that builds a Groq-hosted chat model from Streamlit UI inputs."""

    def __init__(self, user_contols_input):
        """
        Args:
            user_contols_input (dict): UI selections; expected to contain
                "GROQ_API_KEY" and "selected_groq_model".

        Note: the parameter name keeps the original (misspelled) public
        keyword so existing callers using ``user_contols_input=...`` still work.
        """
        self.user_controls_input = user_contols_input

    def get_llm_model(self):
        """
        Build and return a ChatGroq model from the stored UI inputs.

        Returns:
            ChatGroq: the configured chat model.

        Raises:
            ValueError: if the inputs are missing or model creation fails.
        """
        try:
            groq_api_key = self.user_controls_input["GROQ_API_KEY"]
            selected_groq_model = self.user_controls_input["selected_groq_model"]
            # Use os.environ.get so a missing environment variable does not
            # raise KeyError (the original indexed os.environ directly).
            if groq_api_key == '' and os.environ.get("GROQ_API_KEY", "") == '':
                st.error("Please Enter the Groq API KEY")

            llm = ChatGroq(api_key=groq_api_key, model=selected_groq_model)

        except Exception as e:
            # Chain the original exception for easier debugging.
            raise ValueError(f"Error Occurred With Exception : {e}") from e
        return llm
src/langgraphagenticai/__init__.py ADDED
File without changes
src/langgraphagenticai/graph/__init__.py ADDED
File without changes
src/langgraphagenticai/graph/graph_builder.py ADDED
@@ -0,0 +1,135 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from langgraph.graph import StateGraph
2
+ from src.langgraphagenticai.state.state import State
3
+ from langgraph.graph import START, END
4
+ from src.langgraphagenticai.nodes.basic_chatbot_node import BasicChatbotNode
5
+ from src.langgraphagenticai.tools.search_tool import get_tools, create_tool_node
6
+ from langgraph.prebuilt import tools_condition, ToolNode
7
+ from src.langgraphagenticai.nodes.chatbot_with_Tool_node import ChatbotWithToolNode
8
+ from src.langgraphagenticai.nodes.ai_news_node import AINewsNode
9
+
10
+
11
class GraphBuilder:
    """
    Builds and compiles LangGraph workflows for the supported use cases.
    """

    def __init__(self, model):
        """
        Args:
            model: The language model (LLM) to be used in nodes.
        """
        # Store the provided language model.
        self.llm = model
        # StateGraph is LangGraph's core workflow structure (nodes + edges).
        self.graph_builder = StateGraph(State)

    # ----------------------------------------------------------------------
    def basic_chatbot_build_graph(self):
        """
        Build the single-node basic chatbot workflow.

        Flow: START -> chatbot -> END.
        """
        self.basic_chatbot_node = BasicChatbotNode(self.llm)

        self.graph_builder.add_node("chatbot", self.basic_chatbot_node.process)
        self.graph_builder.add_edge(START, "chatbot")
        self.graph_builder.add_edge("chatbot", END)

    # ----------------------------------------------------------------------
    def chatbot_with_tools_build_graph(self):
        """
        Build a chatbot workflow that can call external tools.

        Flow: START -> chatbot -> (tools, conditionally) -> chatbot.
        """
        # Load the external tools (e.g. web search) and wrap them in a node.
        tools = get_tools()
        tool_node = create_tool_node(tools)

        llm = self.llm

        # Chatbot node with the tools bound to the LLM.
        obj_chatbot_with_node = ChatbotWithToolNode(llm)
        chatbot_node = obj_chatbot_with_node.create_chatbot(tools)

        self.graph_builder.add_node("chatbot", chatbot_node)
        self.graph_builder.add_node("tools", tool_node)

        self.graph_builder.add_edge(START, "chatbot")
        # tools_condition routes to the "tools" node only when the chatbot's
        # reply contains tool calls; otherwise the run ends.
        self.graph_builder.add_conditional_edges("chatbot", tools_condition)
        # After tool execution, control returns to the chatbot.
        self.graph_builder.add_edge("tools", "chatbot")

    # ----------------------------------------------------------------------
    def ai_news_builder_graph(self):
        """
        Build the AI News workflow.

        Flow: fetch_news -> summarize_news -> save_result -> END.
        """
        # One node object handles fetching, summarizing, and saving.
        ai_news_node = AINewsNode(self.llm)

        self.graph_builder.add_node("fetch_news", ai_news_node.fetch_news)
        self.graph_builder.add_node("summarize_news", ai_news_node.summarize_news)
        self.graph_builder.add_node("save_result", ai_news_node.save_result)

        self.graph_builder.set_entry_point("fetch_news")
        self.graph_builder.add_edge("fetch_news", "summarize_news")
        self.graph_builder.add_edge("summarize_news", "save_result")
        self.graph_builder.add_edge("save_result", END)

    # ----------------------------------------------------------------------
    def setup_graph(self, usecase: str):
        """
        Build and compile the workflow for the selected use case.

        Args:
            usecase (str): "Basic Chatbot" (or "Chatbot", the label used by
                uiconfigfile.ini), "Chatbot With Web", or "AI News".

        Returns:
            Compiled graph object ready for execution.

        Raises:
            ValueError: for an unrecognized use case (previously this
                silently compiled an empty graph).
        """
        # Accept both "Basic Chatbot" and the "Chatbot" label listed in the
        # UI config, so the default dropdown selection builds a real graph.
        if usecase in ("Basic Chatbot", "Chatbot"):
            self.basic_chatbot_build_graph()
        elif usecase == "Chatbot With Web":
            self.chatbot_with_tools_build_graph()
        elif usecase == "AI News":
            self.ai_news_builder_graph()
        else:
            raise ValueError(f"Unknown use case: {usecase!r}")

        # Compile the graph so it can be executed by LangGraph.
        return self.graph_builder.compile()
src/langgraphagenticai/main.py ADDED
@@ -0,0 +1,78 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import streamlit as st
2
+ from src.langgraphagenticai.ui.streamlitui.loadui import LoadStreamlitUI
3
+ from src.langgraphagenticai.LLMS.groqllm import GroqLLM
4
+ from src.langgraphagenticai.graph.graph_builder import GraphBuilder
5
+ from src.langgraphagenticai.ui.streamlitui.display_result import DisplayResultStreamlit
6
+
7
+
8
def load_langgraph_agenticai_app():
    """
    Loads and runs the LangGraph AgenticAI application using Streamlit.

    This function:
    1. Initializes and loads the Streamlit UI components.
    2. Reads user inputs such as selected use case and model options.
    3. Sets up the appropriate LLM (Groq-based).
    4. Builds a LangGraph workflow based on the selected use case.
    5. Executes the workflow and displays results interactively.

    All failures surface as Streamlit error messages rather than raising.
    """
    # Step 1: Load the Streamlit-based user interface.
    ui = LoadStreamlitUI()
    user_input = ui.load_streamlit_ui()

    # If the UI failed to return valid inputs, display an error and exit.
    if not user_input:
        st.error("Error: Failed to load user input from the UI.")
        return

    # Step 2: Capture the user message.
    # If the AI-news fetch button was clicked, use the selected timeframe
    # (Daily/Weekly/Monthly); otherwise show a chat input box.
    # Use .get() with a default so a session key that was never initialized
    # (e.g. an earlier UI error path) does not raise AttributeError.
    if st.session_state.get("IsFetchButtonClicked", False):
        user_message = st.session_state.timeframe
    else:
        user_message = st.chat_input("Enter your message:")

    # Continue only if a message was provided.
    if user_message:
        try:
            # Step 3: Initialize the selected Groq LLM.
            # (Keyword name matches GroqLLM's signature.)
            obj_llm_config = GroqLLM(user_contols_input=user_input)
            model = obj_llm_config.get_llm_model()

            if not model:
                st.error("Error: LLM model could not be initialized.")
                return

            # Step 4: Resolve the selected use case.
            usecase = user_input.get("selected_usecase")
            if not usecase:
                st.error("Error: No use case selected.")
                return

            # Step 5: Build and compile the LangGraph for the chosen use case.
            graph_builder = GraphBuilder(model)
            try:
                graph = graph_builder.setup_graph(usecase)

                print(user_message)  # debug: log the user message

                # Step 6: Display the result on the Streamlit UI.
                DisplayResultStreamlit(usecase, graph, user_message).display_result_on_ui()

            except Exception as e:
                # Handle graph-building specific errors.
                st.error(f"Error: Graph setup failed - {e}")
                return

        except Exception as e:
            # Handle any general exception (e.g. model or UI failure).
            st.error(f"Error: Unexpected issue occurred - {e}")
            return
src/langgraphagenticai/nodes/__init__.py ADDED
File without changes
src/langgraphagenticai/nodes/ai_news_node.py ADDED
@@ -0,0 +1,131 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from tavily import TavilyClient
2
+ from langchain_core.prompts import ChatPromptTemplate
3
+
4
+
5
class AINewsNode:
    """
    Graph node bundle that fetches, summarizes, and saves AI-related news
    using Tavily (web search API) and an LLM for summarization.
    """

    def __init__(self, llm):
        """
        Args:
            llm: The language model used for generating summaries.
        """
        # Tavily client; reads TAVILY_API_KEY from the environment.
        self.tavily = TavilyClient()
        # LLM used later for summarization.
        self.llm = llm
        # Cross-node scratch state (frequency, news_data, summary, filename).
        self.state = {}

    # ----------------------------------------------------------------------
    def fetch_news(self, state: dict) -> dict:
        """
        Fetch AI-related news articles using the Tavily API.

        Args:
            state (dict): Graph state; state['messages'][0].content must be
                the frequency ("daily", "weekly", "monthly", or "year").

        Returns:
            dict: Updated state with 'news_data' (list of article dicts).
        """
        # Extract frequency from the first message (e.g. "daily").
        frequency = state['messages'][0].content.lower()
        self.state['frequency'] = frequency

        # Tavily time-range codes and look-back windows per frequency.
        # NOTE: an unexpected frequency raises KeyError below.
        time_range_map = {'daily': 'd', 'weekly': 'w', 'monthly': 'm', 'year': 'y'}
        days_map = {'daily': 1, 'weekly': 7, 'monthly': 30, 'year': 366}

        # Search Tavily for the latest AI news.
        response = self.tavily.search(
            query="Top Artificial Intelligence (AI) technology news globally",
            topic="news",
            time_range=time_range_map[frequency],  # how far back to look
            include_answer="advanced",             # request detailed information
            max_results=20,                        # limit number of news items
            days=days_map[frequency],              # number of days to consider
        )

        # Store the fetched results on both the graph state and self.state.
        state['news_data'] = response.get('results', [])
        self.state['news_data'] = state['news_data']

        return state

    # ----------------------------------------------------------------------
    def summarize_news(self, state: dict) -> dict:
        """
        Summarize the fetched AI news articles using the provided LLM.

        Args:
            state (dict): Graph state containing 'news_data'.

        Returns:
            dict: the node's internal state, updated with 'summary'
                (markdown text). NOTE: this intentionally returns
                self.state (which also carries 'frequency'), matching the
                downstream save_result node's expectations.
        """
        news_items = self.state['news_data']

        # Markdown structure the LLM must follow for the summary.
        prompt_template = ChatPromptTemplate.from_messages([
            ("system", """Summarize AI news articles into markdown format. For each item include:
            - Date in **YYYY-MM-DD** format (IST timezone)
            - A concise summary of the news
            - Sorted by latest date first
            - Include the source URL as a hyperlink
            Use this format:
            ### [Date]
            - [Summary](URL)"""),
            ("user", "Articles:\n{articles}")
        ])

        # Flatten each article into "Content/URL/Date" text for the prompt.
        articles_str = "\n\n".join([
            f"Content: {item.get('content', '')}\nURL: {item.get('url', '')}\nDate: {item.get('published_date', '')}"
            for item in news_items
        ])

        # Ask the LLM for the markdown summary.
        response = self.llm.invoke(prompt_template.format(articles=articles_str))

        state['summary'] = response.content
        self.state['summary'] = state['summary']

        return self.state

    # ----------------------------------------------------------------------
    def save_result(self, state):
        """
        Save the summarized AI news to a markdown (.md) file.

        Args:
            state (dict): Graph state (values are read from self.state).

        Returns:
            dict: self.state updated with 'filename' of the written file.
        """
        import os  # local import: only needed for directory creation here

        frequency = self.state['frequency']
        summary = self.state['summary']

        # Output path, e.g. ./AINews/daily_summary.md
        filename = f"./AINews/{frequency}_summary.md"

        # Create the output directory if missing — previously open() raised
        # FileNotFoundError on a fresh checkout without ./AINews.
        os.makedirs(os.path.dirname(filename), exist_ok=True)

        with open(filename, 'w') as f:
            f.write(f"# {frequency.capitalize()} AI News Summary\n\n")
            f.write(summary)

        self.state['filename'] = filename

        return self.state
src/langgraphagenticai/nodes/basic_chatbot_node.py ADDED
@@ -0,0 +1,15 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from src.langgraphagenticai.state.state import State
2
+
3
class BasicChatbotNode:
    """
    Node implementing the basic (tool-free) chatbot logic.
    """

    def __init__(self, model):
        # LLM used to answer the conversation history.
        self.llm = model

    def process(self, state: State) -> dict:
        """
        Run the LLM over the conversation history and return its reply
        as a "messages" state update.
        """
        reply = self.llm.invoke(state['messages'])
        return {"messages": reply}
15
+
src/langgraphagenticai/nodes/chatbot_with_Tool_node.py ADDED
@@ -0,0 +1,36 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from src.langgraphagenticai.state.state import State
2
+
3
class ChatbotWithToolNode:
    """
    Chatbot node logic with external-tool support.
    """

    def __init__(self, model):
        self.llm = model

    def process(self, state: State) -> dict:
        """
        Produce an LLM reply plus a simulated tool response for the
        latest message in the state.
        """
        messages = state["messages"]
        user_input = messages[-1] if messages else ""
        llm_response = self.llm.invoke([{"role": "user", "content": user_input}])

        # Placeholder standing in for real tool output.
        tools_response = f"Tool integration for: '{user_input}'"

        return {"messages": [llm_response, tools_response]}

    def create_chatbot(self, tools):
        """
        Bind *tools* to the LLM and return a LangGraph-compatible
        chatbot node function.
        """
        bound_llm = self.llm.bind_tools(tools)

        def chatbot_node(state: State):
            """Invoke the tool-aware LLM on the current message history."""
            return {"messages": [bound_llm.invoke(state["messages"])]}

        return chatbot_node
36
+
src/langgraphagenticai/state/__init__.py ADDED
File without changes
src/langgraphagenticai/state/state.py ADDED
@@ -0,0 +1,10 @@
 
 
 
 
 
 
 
 
 
 
 
1
+ from typing_extensions import TypedDict,List
2
+ from langgraph.graph.message import add_messages
3
+ from typing import Annotated
4
+
5
+
6
class State(TypedDict):
    """
    Shared state schema for all LangGraph workflows in this project.
    """
    # Conversation history. The add_messages reducer makes LangGraph
    # append messages returned by nodes instead of replacing the list.
    messages: Annotated[List,add_messages]
src/langgraphagenticai/tools/__init__.py ADDED
File without changes
src/langgraphagenticai/tools/search_tool.py ADDED
@@ -0,0 +1,16 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from langchain_community.tools.tavily_search import TavilySearchResults
2
+ from langgraph.prebuilt import ToolNode
3
+
4
def get_tools():
    """
    Build the tool list exposed to the chatbot
    (currently just Tavily web search, capped at 2 results).
    """
    return [TavilySearchResults(max_results=2)]
10
+
11
def create_tool_node(tools):
    """
    Wrap *tools* in a LangGraph ToolNode for use inside the graph.
    """
    node = ToolNode(tools=tools)
    return node
16
+
src/langgraphagenticai/ui/__init__.py ADDED
File without changes
src/langgraphagenticai/ui/streamlitui/display_result.py ADDED
@@ -0,0 +1,103 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import streamlit as st
2
+ from langchain_core.messages import HumanMessage, AIMessage, ToolMessage
3
+ import json
4
+
5
+
6
+ # This class is responsible for displaying the output/results
7
+ # of the Agentic AI workflows in the Streamlit user interface (UI).
8
class DisplayResultStreamlit:
    """
    Renders the output of the Agentic AI workflows in the Streamlit UI.
    """

    def __init__(self, usecase, graph, user_message):
        # Selected use case (Basic Chatbot, Chatbot With Web, AI News).
        self.usecase = usecase
        # Compiled LangGraph workflow to execute.
        self.graph = graph
        # User's input (chat text, or the news timeframe for AI News).
        self.user_message = user_message

    def display_result_on_ui(self):
        """
        Execute the graph for the configured use case and render the
        results in the Streamlit chat interface.
        """
        usecase = self.usecase
        graph = self.graph
        user_message = self.user_message

        print(user_message)  # debugging: echo user message to the console

        # 1) Basic chatbot: stream graph events, render user/assistant turns.
        if usecase == "Basic Chatbot":
            for event in graph.stream({'messages': ("user", user_message)}):
                print(event.values())  # debug: current event values

                for value in event.values():
                    print(value['messages'])  # debug: the message object

                    with st.chat_message("user"):
                        st.write(user_message)

                    # NOTE(review): assumes value["messages"] is a single
                    # message object with a .content attribute (the node
                    # returns one reply), not a list — confirm against
                    # BasicChatbotNode.process.
                    with st.chat_message("assistant"):
                        st.write(value["messages"].content)

        # 2) Chatbot with web tools: single invoke, render every message.
        elif usecase == "Chatbot With Web":
            initial_state = {"messages": [user_message]}

            res = graph.invoke(initial_state)

            for message in res['messages']:
                # Human turns.
                if type(message) == HumanMessage:
                    with st.chat_message("user"):
                        st.write(message.content)

                # Tool outputs (e.g. web-search results), framed by markers.
                elif type(message) == ToolMessage:
                    with st.chat_message("ai"):
                        st.write("Tool Call Start")
                        st.write(message.content)
                        st.write("Tool Call End")

                # Assistant turns with non-empty content.
                elif type(message) == AIMessage and message.content:
                    with st.chat_message("assistant"):
                        st.write(message.content)

        # 3) AI News: user_message holds the frequency (Daily/Weekly/Monthly).
        elif usecase == "AI News":
            frequency = self.user_message

            # Run the news workflow while showing a spinner.
            with st.spinner("Fetching and summarizing news... ⏳"):
                result = graph.invoke({"messages": frequency})

            try:
                # Markdown file written by AINewsNode.save_result.
                AI_NEWS_PATH = f"./AINews/{frequency.lower()}_summary.md"

                with open(AI_NEWS_PATH, "r") as file:
                    markdown_content = file.read()

                # Render the markdown summary in the UI.
                st.markdown(markdown_content, unsafe_allow_html=True)

            # Summary file was never generated (or path mismatch).
            except FileNotFoundError:
                st.error(f"News Not Generated or File not found: {AI_NEWS_PATH}")

            # Any other failure while reading/rendering.
            except Exception as e:
                st.error(f"An error occurred: {str(e)}")
src/langgraphagenticai/ui/streamlitui/loadui.py ADDED
@@ -0,0 +1,88 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import streamlit as st
2
+ import os
3
+ from src.langgraphagenticai.ui.uiconfigfile import Config
4
+
5
+ # This class is responsible for creating and managing the Streamlit user interface (UI)
6
+ # for the LangGraph Agentic AI application.
7
class LoadStreamlitUI:
    """
    Creates and manages the Streamlit UI for the LangGraph Agentic AI app.
    """

    def __init__(self):
        # Configuration read from uiconfigfile.ini.
        self.config = Config()
        # User-selected control values (LLM, model, API keys, use case, ...).
        self.user_controls = {}

    def load_streamlit_ui(self):
        """
        Render all UI components and return the collected user selections.

        Returns:
            dict: the populated user_controls mapping.
        """
        # Page title and layout configuration.
        st.set_page_config(
            page_title="🤖 " + self.config.get_page_title(),
            layout="wide"
        )

        st.header("🤖 " + self.config.get_page_title())

        # Reset per-run session flags; the fetch button below may set them.
        st.session_state.timeframe = ''
        st.session_state.IsFetchButtonClicked = False

        # Everything inside this context renders in the left sidebar.
        with st.sidebar:
            llm_options = self.config.get_llm_options()
            usecase_options = self.config.get_usecase_options()

            # LLM provider selection (currently only Groq in the config).
            self.user_controls["selected_llm"] = st.selectbox("Select LLM", llm_options)

            if self.user_controls["selected_llm"] == 'Groq':
                # Groq model list from the config file.
                model_options = self.config.get_groq_model_options()

                self.user_controls["selected_groq_model"] = st.selectbox("Select Model", model_options)

                # API key (masked input), mirrored into session state.
                self.user_controls["GROQ_API_KEY"] = st.session_state["GROQ_API_KEY"] = st.text_input(
                    "API Key", type="password"
                )

                if not self.user_controls["GROQ_API_KEY"]:
                    st.warning("⚠️ Please enter your GROQ API key to proceed. Don't have one? Visit: https://console.groq.com/keys")

            self.user_controls["selected_usecase"] = st.selectbox("Select Usecases", usecase_options)

            # Web-search-based use cases additionally need a Tavily key,
            # which is also exported to the environment for the Tavily client.
            if self.user_controls["selected_usecase"] in ["Chatbot With Web", "AI News"]:
                os.environ["TAVILY_API_KEY"] = self.user_controls["TAVILY_API_KEY"] = st.session_state["TAVILY_API_KEY"] = st.text_input(
                    "TAVILY API KEY", type="password"
                )

                if not self.user_controls["TAVILY_API_KEY"]:
                    st.warning("⚠️ Please enter your TAVILY_API_KEY to proceed. Don't have one? Visit: https://app.tavily.com/home")

            # AI-News-specific controls.
            if self.user_controls['selected_usecase'] == "AI News":
                st.subheader("📰 AI News Explorer ")

                # NOTE(review): this nested 'with st.sidebar' is redundant —
                # we are already inside the sidebar context above.
                with st.sidebar:
                    time_frame = st.selectbox(
                        "📅 Select Time Frame",
                        ["Daily", "Weekly", "Monthly"],
                        index=0
                    )

                # Button that triggers the news fetch on the next run.
                if st.button("🔍 Fetch Latest AI News", use_container_width=True):
                    st.session_state.IsFetchButtonClicked = True
                    st.session_state.timeframe = time_frame

        # Return all collected user input values for further processing.
        return self.user_controls
src/langgraphagenticai/ui/uiconfigfile.ini ADDED
@@ -0,0 +1,17 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ [DEFAULT]
2
+ # This section defines default configuration settings
3
+ # that the Streamlit UI or app will use.
4
+
5
+ # Title text that appears at the top of the Streamlit web page.
6
+ PAGE_TITLE = LangGraph: Agentic AI
7
+
8
+ # List of available Large Language Model (LLM) providers shown in the UI dropdown.
9
+ LLM_OPTIONS = Groq
10
+
11
+ # List of available use cases that users can select from in the app.
12
+ # For example: a simple chatbot, a chatbot that can browse the web, or an AI news assistant.
13
+ USECASE_OPTIONS = Basic Chatbot, Chatbot With Web, AI News
14
+
15
+ # List of model names available when the selected LLM provider is "Groq".
16
+ # These are Groq-hosted versions of Meta’s Llama models with different sizes and capabilities.
17
+ GROQ_MODEL_OPTIONS = llama-3.1-8b-instant, llama-3.3-70b-versatile
src/langgraphagenticai/ui/uiconfigfile.py ADDED
@@ -0,0 +1,20 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from configparser import ConfigParser
2
+
3
+
4
class Config:
    """Typed accessor over the Streamlit UI configuration (.ini) file."""

    def __init__(self, config_file="./src/langgraphagenticai/ui/uiconfigfile.ini"):
        """
        Args:
            config_file (str): Path to the ini file holding UI options.
        """
        self.config = ConfigParser()
        # ConfigParser.read() silently ignores a missing file; the getters
        # below therefore use fallbacks instead of crashing on None
        # (the original called .split() on a possibly-None value).
        self.config.read(config_file)

    def _get_list(self, key):
        """Return comma-separated option *key* as a list of trimmed strings."""
        raw = self.config["DEFAULT"].get(key, "")
        # Strip each entry so "A,B" and "A, B" both parse correctly
        # (the original split on ", " exactly).
        return [item.strip() for item in raw.split(",") if item.strip()]

    def get_llm_options(self):
        """List of LLM provider names for the UI dropdown."""
        return self._get_list("LLM_OPTIONS")

    def get_usecase_options(self):
        """List of selectable use cases."""
        return self._get_list("USECASE_OPTIONS")

    def get_groq_model_options(self):
        """List of Groq model names."""
        return self._get_list("GROQ_MODEL_OPTIONS")

    def get_page_title(self):
        """Page title string, or empty string if unset."""
        return self.config["DEFAULT"].get("PAGE_TITLE", "")
20
+