Sachin-Hansaka committed on
Commit
6fe4093
·
verified ·
1 Parent(s): 346fc75
This view is limited to 50 files because it contains too many changes.   See raw diff
Files changed (50) hide show
  1. src/__init__.py +0 -0
  2. src/__pycache__/__init__.cpython-311.pyc +0 -0
  3. src/__pycache__/__init__.cpython-313.pyc +0 -0
  4. src/langgraphagenticai/LLMS/__init__.py +0 -0
  5. src/langgraphagenticai/LLMS/__pycache__/__init__.cpython-311.pyc +0 -0
  6. src/langgraphagenticai/LLMS/__pycache__/__init__.cpython-313.pyc +0 -0
  7. src/langgraphagenticai/LLMS/__pycache__/groqllm.cpython-311.pyc +0 -0
  8. src/langgraphagenticai/LLMS/__pycache__/groqllm.cpython-313.pyc +0 -0
  9. src/langgraphagenticai/LLMS/groqllm.py +20 -0
  10. src/langgraphagenticai/__init__.py +0 -0
  11. src/langgraphagenticai/__pycache__/__init__.cpython-311.pyc +0 -0
  12. src/langgraphagenticai/__pycache__/__init__.cpython-313.pyc +0 -0
  13. src/langgraphagenticai/__pycache__/main.cpython-311.pyc +0 -0
  14. src/langgraphagenticai/__pycache__/main.cpython-313.pyc +0 -0
  15. src/langgraphagenticai/graph/__init__.py +0 -0
  16. src/langgraphagenticai/graph/__pycache__/__init__.cpython-313.pyc +0 -0
  17. src/langgraphagenticai/graph/__pycache__/graph_builder.cpython-313.pyc +0 -0
  18. src/langgraphagenticai/graph/graph_builder.py +122 -0
  19. src/langgraphagenticai/main.py +60 -0
  20. src/langgraphagenticai/nodes/__init__.py +0 -0
  21. src/langgraphagenticai/nodes/__pycache__/__init__.cpython-313.pyc +0 -0
  22. src/langgraphagenticai/nodes/__pycache__/ai_news_node.cpython-313.pyc +0 -0
  23. src/langgraphagenticai/nodes/__pycache__/basic_chatbot_node.cpython-313.pyc +0 -0
  24. src/langgraphagenticai/nodes/__pycache__/chatbot_with_Tool_node.cpython-313.pyc +0 -0
  25. src/langgraphagenticai/nodes/ai_news_node.py +89 -0
  26. src/langgraphagenticai/nodes/basic_chatbot_node.py +14 -0
  27. src/langgraphagenticai/nodes/chatbot_with_Tool_node.py +35 -0
  28. src/langgraphagenticai/state/__init__.py +0 -0
  29. src/langgraphagenticai/state/__pycache__/__init__.cpython-313.pyc +0 -0
  30. src/langgraphagenticai/state/__pycache__/state.cpython-313.pyc +0 -0
  31. src/langgraphagenticai/state/state.py +10 -0
  32. src/langgraphagenticai/tools/__init__.py +0 -0
  33. src/langgraphagenticai/tools/__pycache__/__init__.cpython-313.pyc +0 -0
  34. src/langgraphagenticai/tools/__pycache__/arxiv_tool.cpython-313.pyc +0 -0
  35. src/langgraphagenticai/tools/__pycache__/search_tool.cpython-313.pyc +0 -0
  36. src/langgraphagenticai/tools/arxiv_tool.py +42 -0
  37. src/langgraphagenticai/tools/search_tool.py +28 -0
  38. src/langgraphagenticai/ui/__init__.py +0 -0
  39. src/langgraphagenticai/ui/__pycache__/__init__.cpython-311.pyc +0 -0
  40. src/langgraphagenticai/ui/__pycache__/__init__.cpython-313.pyc +0 -0
  41. src/langgraphagenticai/ui/__pycache__/uiconfigfile.cpython-311.pyc +0 -0
  42. src/langgraphagenticai/ui/__pycache__/uiconfigfile.cpython-313.pyc +0 -0
  43. src/langgraphagenticai/ui/streamlitui/__pycache__/display_result.cpython-313.pyc +0 -0
  44. src/langgraphagenticai/ui/streamlitui/__pycache__/loadui.cpython-313.pyc +0 -0
  45. src/langgraphagenticai/ui/streamlitui/__pycache__/loadui2.cpython-311.pyc +0 -0
  46. src/langgraphagenticai/ui/streamlitui/__pycache__/loadui2.cpython-313.pyc +0 -0
  47. src/langgraphagenticai/ui/streamlitui/__pycache__/loadui3.cpython-313.pyc +0 -0
  48. src/langgraphagenticai/ui/streamlitui/__pycache__/loadui4.cpython-313.pyc +0 -0
  49. src/langgraphagenticai/ui/streamlitui/display_result.py +82 -0
  50. src/langgraphagenticai/ui/streamlitui/loadui.py +36 -0
src/__init__.py ADDED
File without changes
src/__pycache__/__init__.cpython-311.pyc ADDED
Binary file (169 Bytes). View file
 
src/__pycache__/__init__.cpython-313.pyc ADDED
Binary file (157 Bytes). View file
 
src/langgraphagenticai/LLMS/__init__.py ADDED
File without changes
src/langgraphagenticai/LLMS/__pycache__/__init__.cpython-311.pyc ADDED
Binary file (193 Bytes). View file
 
src/langgraphagenticai/LLMS/__pycache__/__init__.cpython-313.pyc ADDED
Binary file (181 Bytes). View file
 
src/langgraphagenticai/LLMS/__pycache__/groqllm.cpython-311.pyc ADDED
Binary file (1.52 kB). View file
 
src/langgraphagenticai/LLMS/__pycache__/groqllm.cpython-313.pyc ADDED
Binary file (1.41 kB). View file
 
src/langgraphagenticai/LLMS/groqllm.py ADDED
@@ -0,0 +1,20 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ import streamlit as st
3
+ from langchain_groq import ChatGroq
4
+
5
+ class GroqLLM:
6
+ def __init__(self,user_contols_input):
7
+ self.user_controls_input=user_contols_input
8
+
9
+ def get_llm_model(self):
10
+ try:
11
+ groq_api_key=self.user_controls_input["GROQ_API_KEY"]
12
+ selected_groq_model=self.user_controls_input["selected_groq_model"]
13
+ if groq_api_key=='' and os.environ["GROQ_API_KEY"] =='':
14
+ st.error("Please Enter the Groq API KEY")
15
+
16
+ llm=ChatGroq(api_key=groq_api_key,model=selected_groq_model)
17
+
18
+ except Exception as e:
19
+ raise ValueError(f"Error Ocuured With Exception : {e}")
20
+ return llm
src/langgraphagenticai/__init__.py ADDED
File without changes
src/langgraphagenticai/__pycache__/__init__.cpython-311.pyc ADDED
Binary file (188 Bytes). View file
 
src/langgraphagenticai/__pycache__/__init__.cpython-313.pyc ADDED
Binary file (176 Bytes). View file
 
src/langgraphagenticai/__pycache__/main.cpython-311.pyc ADDED
Binary file (2.87 kB). View file
 
src/langgraphagenticai/__pycache__/main.cpython-313.pyc ADDED
Binary file (2.77 kB). View file
 
src/langgraphagenticai/graph/__init__.py ADDED
File without changes
src/langgraphagenticai/graph/__pycache__/__init__.cpython-313.pyc ADDED
Binary file (182 Bytes). View file
 
src/langgraphagenticai/graph/__pycache__/graph_builder.cpython-313.pyc ADDED
Binary file (6.1 kB). View file
 
src/langgraphagenticai/graph/graph_builder.py ADDED
@@ -0,0 +1,122 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from langgraph.graph import StateGraph
2
+ from src.langgraphagenticai.state.state import State
3
+ from langgraph.graph import START,END
4
+ from src.langgraphagenticai.nodes.basic_chatbot_node import BasicChatbotNode
5
+ from src.langgraphagenticai.tools.search_tool import get_tools,create_tool_node,get_tools_by_usecase
6
+ from langgraph.prebuilt import tools_condition,ToolNode
7
+ from src.langgraphagenticai.nodes.chatbot_with_Tool_node import ChatbotWithToolNode
8
+ from src.langgraphagenticai.nodes.ai_news_node import AINewsNode
9
+
10
+ class GraphBuilder:
11
+ def __init__(self,model):
12
+ self.llm=model
13
+ self.graph_builder=StateGraph(State)
14
+
15
+ def basic_chatbot_build_graph(self):
16
+ """
17
+ Builds a basic chatbot graph using LangGraph.
18
+ This method initializes a chatbot node using the `BasicChatbotNode` class
19
+ and integrates it into the graph. The chatbot node is set as both the
20
+ entry and exit point of the graph.
21
+ """
22
+
23
+ self.basic_chatbot_node=BasicChatbotNode(self.llm)
24
+
25
+ self.graph_builder.add_node("chatbot",self.basic_chatbot_node.process)
26
+ self.graph_builder.add_edge(START,"chatbot")
27
+ self.graph_builder.add_edge("chatbot",END)
28
+
29
+
30
+ def chatbot_with_tools_build_graph(self):
31
+
32
+ """
33
+ Builds an advanced chatbot graph with tool integration.
34
+ This method creates a chatbot graph that includes both a chatbot node
35
+ and a tool node. It defines tools, initializes the chatbot with tool
36
+ capabilities, and sets up conditional and direct edges between nodes.
37
+ The chatbot node is set as the entry point.
38
+ """
39
+ ## Define the tool and tool node
40
+ tools=get_tools()
41
+ tool_node=create_tool_node(tools)
42
+
43
+ ## Define the LLM
44
+ llm=self.llm
45
+
46
+ ## Define the chatbot node
47
+
48
+ obj_chatbot_with_node=ChatbotWithToolNode(llm)
49
+ chatbot_node=obj_chatbot_with_node.create_chatbot(tools)
50
+
51
+
52
+ ## Add nodes
53
+ self.graph_builder.add_node("chatbot", chatbot_node)
54
+ self.graph_builder.add_node("tools",tool_node)
55
+ # Define conditional and direct edges
56
+ self.graph_builder.add_edge(START,"chatbot")
57
+ self.graph_builder.add_conditional_edges("chatbot",tools_condition)
58
+ self.graph_builder.add_edge("tools","chatbot")
59
+ # self.graph_builder.add_edge("chatbot",END)
60
+
61
+ def research_assistant_build_graph(self):
62
+ """
63
+ Builds a research assistant graph with ArXiv and web search tools.
64
+ This method creates a chatbot graph specifically designed for academic
65
+ research, integrating ArXiv search capabilities alongside web search
66
+ to provide comprehensive research assistance.
67
+ """
68
+ ## Define the research tools (ArXiv + Web search)
69
+ tools = get_tools_by_usecase("Research Assistant")
70
+ tool_node = create_tool_node(tools)
71
+
72
+ ## Define the LLM
73
+ llm = self.llm
74
+
75
+ ## Define the chatbot node with research capabilities
76
+ obj_chatbot_with_node = ChatbotWithToolNode(llm)
77
+ chatbot_node = obj_chatbot_with_node.create_chatbot(tools)
78
+
79
+ ## Add nodes
80
+ self.graph_builder.add_node("chatbot", chatbot_node)
81
+ self.graph_builder.add_node("tools", tool_node)
82
+
83
+ # Define conditional and direct edges
84
+ self.graph_builder.add_edge(START, "chatbot")
85
+ self.graph_builder.add_conditional_edges("chatbot", tools_condition)
86
+ self.graph_builder.add_edge("tools", "chatbot")
87
+
88
+
89
+ def ai_news_builder_graph(self):
90
+
91
+ ai_news_node=AINewsNode(self.llm)
92
+
93
+ ## added the nodes
94
+
95
+ self.graph_builder.add_node("fetch_news",ai_news_node.fetch_news)
96
+ self.graph_builder.add_node("summarize_news",ai_news_node.summarize_news)
97
+ self.graph_builder.add_node("save_result",ai_news_node.save_result)
98
+
99
+ #added the edges
100
+
101
+ self.graph_builder.set_entry_point("fetch_news")
102
+ self.graph_builder.add_edge("fetch_news","summarize_news")
103
+ self.graph_builder.add_edge("summarize_news","save_result")
104
+ self.graph_builder.add_edge("save_result", END)
105
+
106
+ def setup_graph(self, usecase: str):
107
+ """
108
+ Sets up the graph for the selected use case.
109
+ """
110
+ if usecase == "Basic Chatbot":
111
+ self.basic_chatbot_build_graph()
112
+ elif usecase == "Chatbot with Web Search":
113
+ self.chatbot_with_tools_build_graph()
114
+ elif usecase == "Research Assistant":
115
+ self.research_assistant_build_graph()
116
+ elif usecase == "AI News":
117
+ self.ai_news_builder_graph()
118
+ else:
119
+ # Default to basic chatbot if usecase is not recognized
120
+ self.basic_chatbot_build_graph()
121
+
122
+ return self.graph_builder.compile()
src/langgraphagenticai/main.py ADDED
@@ -0,0 +1,60 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import streamlit as st
from src.langgraphagenticai.ui.streamlitui.loadui4 import LoadStreamlitUI
from src.langgraphagenticai.LLMS.groqllm import GroqLLM
from src.langgraphagenticai.graph.graph_builder import GraphBuilder
from src.langgraphagenticai.ui.streamlitui.display_result import DisplayResultStreamlit

def load_langgraph_agenticai_app():
    """
    This function launches and manages the LangGraph AgenticAI application with a Streamlit user interface.
    It initializes the UI, collects user input, configures the selected LLM model,
    and sets up the agentic graph workflow based on the chosen use case. Throughout execution,
    it displays results interactively and incorporates robust exception handling to ensure reliability and a smooth user experience.
    """

    ## Load UI
    ui = LoadStreamlitUI()
    user_input = ui.load_streamlit_ui()

    if not user_input:
        st.error("Error: Failed to load user input from the UI.")
        return

    # Text input for user message.
    # .get() avoids an AttributeError when the session key has not been
    # created yet (e.g. first run before the fetch button exists).
    if st.session_state.get("IsFetchButtonClicked"):
        user_message = st.session_state.timeframe
    else:
        user_message = st.chat_input("Enter your message:")

    if user_message:
        try:
            ## Configure the LLM
            obj_llm_config = GroqLLM(user_contols_input=user_input)
            model = obj_llm_config.get_llm_model()

            if not model:
                st.error("Error: LLM model could not be initialized")
                return

            # Initialize and set up the graph based on use case
            usecase = user_input.get("selected_usecase")

            if not usecase:
                st.error("Error: No use case selected.")
                return

            ## Graph Builder
            graph_builder = GraphBuilder(model)
            try:
                graph = graph_builder.setup_graph(usecase)
                DisplayResultStreamlit(usecase, graph, user_message).display_result_on_ui()
            except Exception as e:
                st.error(f"Error: Graph set up failed- {e}")
                return

        except Exception as e:
            # Distinct message from the inner handler so failures in LLM
            # configuration are not mislabeled as graph-setup failures.
            st.error(f"Error: Application setup failed- {e}")
            return
src/langgraphagenticai/nodes/__init__.py ADDED
File without changes
src/langgraphagenticai/nodes/__pycache__/__init__.cpython-313.pyc ADDED
Binary file (182 Bytes). View file
 
src/langgraphagenticai/nodes/__pycache__/ai_news_node.cpython-313.pyc ADDED
Binary file (4.31 kB). View file
 
src/langgraphagenticai/nodes/__pycache__/basic_chatbot_node.cpython-313.pyc ADDED
Binary file (1.06 kB). View file
 
src/langgraphagenticai/nodes/__pycache__/chatbot_with_Tool_node.cpython-313.pyc ADDED
Binary file (1.98 kB). View file
 
src/langgraphagenticai/nodes/ai_news_node.py ADDED
@@ -0,0 +1,89 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import os

from tavily import TavilyClient
from langchain_core.prompts import ChatPromptTemplate


class AINewsNode:
    def __init__(self, llm):
        """
        Initialize the AINewsNode with API keys for Tavily and GROQ.
        """
        # TavilyClient() with no argument reads its API key from the
        # environment — presumably TAVILY_API_KEY; verify deployment config.
        self.tavily = TavilyClient()
        self.llm = llm
        # this is used to capture various steps in this file so that later can be use for steps shown
        self.state = {}

    def fetch_news(self, state: dict) -> dict:
        """
        Fetch AI news based on the specified frequency.

        Args:
            state (dict): The state dictionary containing 'frequency'.

        Returns:
            dict: Updated state with 'news_data' key containing fetched news.
        """

        # The first message's content carries the requested frequency
        # ("daily" / "weekly" / "monthly" / "year"); any other value will
        # raise KeyError on the map lookups below.
        frequency = state['messages'][0].content.lower()
        self.state['frequency'] = frequency
        time_range_map = {'daily': 'd', 'weekly': 'w', 'monthly': 'm', 'year': 'y'}
        days_map = {'daily': 1, 'weekly': 7, 'monthly': 30, 'year': 366}

        response = self.tavily.search(
            query="Top Artificial Intelligence (AI) technology news globally",
            topic="news",
            time_range=time_range_map[frequency],
            include_answer="advanced",
            max_results=20,
            days=days_map[frequency],
            # include_domains=["techcrunch.com", "venturebeat.com/ai", ...] # Uncomment and add domains if needed
        )

        state['news_data'] = response.get('results', [])
        self.state['news_data'] = state['news_data']
        return state


    def summarize_news(self, state: dict) -> dict:
        """
        Summarize the fetched news using an LLM.

        Args:
            state (dict): The state dictionary containing 'news_data'.

        Returns:
            dict: Updated state with 'summary' key containing the summarized news.
        """

        news_items = self.state['news_data']

        prompt_template = ChatPromptTemplate.from_messages([
            ("system", """Summarize AI news articles into markdown format. For each item include:
            - Date in **YYYY-MM-DD** format in IST timezone
            - Concise sentences summary from latest news
            - Sort news by date wise (latest first)
            - Source URL as link
            Use format:
            ### [Date]
            - [Summary](URL)"""),
            ("user", "Articles:\n{articles}")
        ])

        articles_str = "\n\n".join([
            f"Content: {item.get('content', '')}\nURL: {item.get('url', '')}\nDate: {item.get('published_date', '')}"
            for item in news_items
        ])

        response = self.llm.invoke(prompt_template.format(articles=articles_str))
        state['summary'] = response.content
        self.state['summary'] = state['summary']
        # NOTE(review): returns self.state (the internal capture dict) rather
        # than the graph's state dict — save_result only reads self.state, so
        # this works, but confirm it is intentional before relying on it.
        return self.state

    def save_result(self, state):
        """
        Write the summary to ./AINews/<frequency>_summary.md and record the path.

        Returns:
            dict: the internal capture dict, with 'filename' added.
        """
        frequency = self.state['frequency']
        summary = self.state['summary']
        # Create the output directory up front; without this, open() fails
        # with FileNotFoundError on a fresh checkout.
        os.makedirs("./AINews", exist_ok=True)
        filename = f"./AINews/{frequency}_summary.md"
        with open(filename, 'w', encoding='utf-8') as f:
            f.write(f"# {frequency.capitalize()} AI News Summary\n\n")
            f.write(summary)
        self.state['filename'] = filename
        return self.state
src/langgraphagenticai/nodes/basic_chatbot_node.py ADDED
@@ -0,0 +1,14 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from src.langgraphagenticai.state.state import State
2
+
3
class BasicChatbotNode:
    """
    Basic Chatbot logic implementation
    """
    def __init__(self,model):
        # LLM used to answer the conversation.
        self.llm=model

    def process(self,state:State)->dict:
        """
        Processes the input state and generates a chatbot response.

        Invokes the LLM on the full message history and returns a partial
        state update ({"messages": <response>}) for LangGraph to merge.
        """
        return {"messages":self.llm.invoke(state['messages'])}
src/langgraphagenticai/nodes/chatbot_with_Tool_node.py ADDED
@@ -0,0 +1,35 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from src.langgraphagenticai.state.state import State
2
+
3
class ChatbotWithToolNode:
    """
    Chatbot logic enhanced with tool integration.
    """
    def __init__(self,model):
        # Base LLM; tools are bound to it lazily in create_chatbot().
        self.llm = model

    def process(self, state: State) -> dict:
        """
        Processes the input state and generates a response with tool integration.

        NOTE(review): graph_builder wires nodes through create_chatbot(), not
        this method, so this path appears unused. Also, state["messages"][-1]
        is passed directly as the "content" value — if messages hold message
        objects rather than strings, confirm this is the intended payload.
        """
        user_input = state["messages"][-1] if state["messages"] else ""
        llm_response = self.llm.invoke([{"role": "user", "content": user_input}])

        # Simulate tool-specific logic
        tools_response = f"Tool integration for: '{user_input}'"

        return {"messages": [llm_response, tools_response]}


    def create_chatbot(self, tools):
        """
        Returns a chatbot node function.

        Binds the given tools to the LLM so the model can emit tool calls,
        then returns a closure suitable for StateGraph.add_node().
        """
        llm_with_tools = self.llm.bind_tools(tools)

        def chatbot_node(state: State):
            """
            Chatbot logic for processing the input state and returning a response.
            """
            return {"messages": [llm_with_tools.invoke(state["messages"])]}

        return chatbot_node
src/langgraphagenticai/state/__init__.py ADDED
File without changes
src/langgraphagenticai/state/__pycache__/__init__.cpython-313.pyc ADDED
Binary file (182 Bytes). View file
 
src/langgraphagenticai/state/__pycache__/state.cpython-313.pyc ADDED
Binary file (733 Bytes). View file
 
src/langgraphagenticai/state/state.py ADDED
@@ -0,0 +1,10 @@
 
 
 
 
 
 
 
 
 
 
 
1
+ from typing_extensions import TypedDict,List
2
+ from langgraph.graph.message import add_messages
3
+ from typing import Annotated
4
+
5
+
6
class State(TypedDict):
    """
    Represent the structure of the state used in graph
    """
    # Conversation history; the add_messages reducer appends new messages
    # to the list instead of replacing it on each state update.
    messages: Annotated[List,add_messages]
src/langgraphagenticai/tools/__init__.py ADDED
File without changes
src/langgraphagenticai/tools/__pycache__/__init__.cpython-313.pyc ADDED
Binary file (182 Bytes). View file
 
src/langgraphagenticai/tools/__pycache__/arxiv_tool.cpython-313.pyc ADDED
Binary file (1.33 kB). View file
 
src/langgraphagenticai/tools/__pycache__/search_tool.cpython-313.pyc ADDED
Binary file (1.15 kB). View file
 
src/langgraphagenticai/tools/arxiv_tool.py ADDED
@@ -0,0 +1,42 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from langchain_community.tools import ArxivQueryRun
2
+ from langchain_community.utilities import ArxivAPIWrapper
3
+ from langgraph.prebuilt import ToolNode
4
+
5
def get_arxiv_tools():
    """
    Return the list of ArXiv tools for research assistance
    """
    # Wrapper configured for the 5 most relevant papers, each truncated
    # to 5000 characters of content.
    wrapper = ArxivAPIWrapper(
        top_k_results=5,
        doc_content_chars_max=5000,
    )
    return [ArxivQueryRun(api_wrapper=wrapper)]
20
+
21
def create_arxiv_tool_node(tools):
    """
    Creates and returns a tool node for ArXiv research tools
    """
    node = ToolNode(tools=tools)
    return node
26
+
27
def get_research_assistant_tools():
    """
    Return combined tools for research assistant (ArXiv + web search if needed)
    """
    # Imported lazily to avoid a circular import with search_tool.
    from .search_tool import get_tools as get_web_tools

    # ArXiv tools first, then web-search tools — same ordering as before.
    return get_arxiv_tools() + get_web_tools()
src/langgraphagenticai/tools/search_tool.py ADDED
@@ -0,0 +1,28 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from langchain_community.tools.tavily_search import TavilySearchResults
2
+ from langgraph.prebuilt import ToolNode
3
+
4
def get_tools():
    """
    Return the list of tools to be used in the chatbot
    """
    web_search = TavilySearchResults(max_results=2)
    return [web_search]
10
+
11
def create_tool_node(tools):
    """
    creates and returns a tool node for the graph
    """
    tool_node = ToolNode(tools=tools)
    return tool_node
16
+
17
def get_tools_by_usecase(usecase):
    """
    Return tools based on the specific use case
    """
    if usecase == "Research Assistant":
        # Imported lazily to avoid a circular import with arxiv_tool.
        from .arxiv_tool import get_research_assistant_tools
        return get_research_assistant_tools()
    # "Chatbot with Web Search" and any unrecognized use case both fall
    # back to the default web-search tools.
    return get_tools()
src/langgraphagenticai/ui/__init__.py ADDED
File without changes
src/langgraphagenticai/ui/__pycache__/__init__.cpython-311.pyc ADDED
Binary file (191 Bytes). View file
 
src/langgraphagenticai/ui/__pycache__/__init__.cpython-313.pyc ADDED
Binary file (179 Bytes). View file
 
src/langgraphagenticai/ui/__pycache__/uiconfigfile.cpython-311.pyc ADDED
Binary file (2.05 kB). View file
 
src/langgraphagenticai/ui/__pycache__/uiconfigfile.cpython-313.pyc ADDED
Binary file (1.89 kB). View file
 
src/langgraphagenticai/ui/streamlitui/__pycache__/display_result.cpython-313.pyc ADDED
Binary file (5.77 kB). View file
 
src/langgraphagenticai/ui/streamlitui/__pycache__/loadui.cpython-313.pyc ADDED
Binary file (2.7 kB). View file
 
src/langgraphagenticai/ui/streamlitui/__pycache__/loadui2.cpython-311.pyc ADDED
Binary file (7.76 kB). View file
 
src/langgraphagenticai/ui/streamlitui/__pycache__/loadui2.cpython-313.pyc ADDED
Binary file (7.18 kB). View file
 
src/langgraphagenticai/ui/streamlitui/__pycache__/loadui3.cpython-313.pyc ADDED
Binary file (9.68 kB). View file
 
src/langgraphagenticai/ui/streamlitui/__pycache__/loadui4.cpython-313.pyc ADDED
Binary file (14.8 kB). View file
 
src/langgraphagenticai/ui/streamlitui/display_result.py ADDED
@@ -0,0 +1,82 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import streamlit as st
2
+ from langchain_core.messages import HumanMessage,AIMessage,ToolMessage
3
+ import json
4
+
5
+
6
class DisplayResultStreamlit:
    """Renders the output of a compiled graph in the Streamlit chat UI,
    with a rendering strategy per use case."""

    def __init__(self,usecase,graph,user_message):
        self.usecase= usecase
        self.graph = graph
        self.user_message = user_message

    def display_result_on_ui(self):
        """
        Run the graph on the stored user message and render the result.

        Dispatches on self.usecase: "Basic Chatbot" streams events;
        the tool-using use cases invoke once and replay the message list;
        "AI News" reads the generated markdown file from ./AINews.
        """
        usecase= self.usecase
        graph = self.graph
        user_message = self.user_message
        print(user_message)  # NOTE(review): debug print — consider removing/logging
        if usecase =="Basic Chatbot":
            # Stream graph events; each event maps node name -> state update.
            for event in graph.stream({'messages':("user",user_message)}):
                print(event.values())
                for value in event.values():
                    print(value['messages'])
                    with st.chat_message("user"):
                        st.write(user_message)
                    with st.chat_message("assistant"):
                        # value["messages"] is a single message here (see
                        # BasicChatbotNode.process), hence .content directly.
                        st.write(value["messages"].content)

        elif usecase=="Chatbot with Web Search":
            # Prepare state and invoke the graph
            initial_state = {"messages": [user_message]}
            res = graph.invoke(initial_state)
            # Replay the full message list, styling by message type.
            for message in res['messages']:
                if type(message) == HumanMessage:
                    with st.chat_message("user"):
                        st.write(message.content)
                elif type(message)==ToolMessage:
                    with st.chat_message("ai"):
                        st.write("Tool Call Start")
                        st.write(message.content)
                        st.write("Tool Call End")
                elif type(message)==AIMessage and message.content:
                    with st.chat_message("assistant"):
                        st.write(message.content)

        elif usecase=="Research Assistant":
            # Prepare state and invoke the graph for research assistance
            initial_state = {"messages": [user_message]}
            res = graph.invoke(initial_state)
            for message in res['messages']:
                if type(message) == HumanMessage:
                    with st.chat_message("user"):
                        st.write(message.content)
                elif type(message)==ToolMessage:
                    with st.chat_message("ai"):
                        # Enhanced display for research results
                        # NOTE(review): message.name may be None for some tool
                        # messages, which would make .lower() raise — confirm.
                        if "arxiv" in message.name.lower():
                            st.write("🔬 **ArXiv Research Results:**")
                        else:
                            st.write("🌐 **Web Search Results:**")

                        # Display the tool result content
                        st.write(message.content)
                        st.write("---")
                elif type(message)==AIMessage and message.content:
                    with st.chat_message("assistant"):
                        st.write(message.content)

        elif usecase == "AI News":
            # Here user_message carries the frequency (e.g. "Daily"), set by
            # main.py from st.session_state.timeframe.
            frequency = self.user_message
            with st.spinner("Fetching and summarizing news... ⏳"):
                # The graph's save_result node writes the markdown file read below.
                result = graph.invoke({"messages": frequency})
                try:
                    # Read the markdown file
                    AI_NEWS_PATH = f"./AINews/{frequency.lower()}_summary.md"
                    with open(AI_NEWS_PATH, "r") as file:
                        markdown_content = file.read()

                    # Display the markdown content in Streamlit
                    st.markdown(markdown_content, unsafe_allow_html=True)
                except FileNotFoundError:
                    st.error(f"News Not Generated or File not found: {AI_NEWS_PATH}")
                except Exception as e:
                    st.error(f"An error occurred: {str(e)}")
src/langgraphagenticai/ui/streamlitui/loadui.py ADDED
@@ -0,0 +1,36 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import streamlit as st
2
+ import os
3
+
4
+ from src.langgraphagenticai.ui.uiconfigfile import Config
5
+
6
class LoadStreamlitUI:
    """Builds the Streamlit sidebar controls and collects user selections."""

    def __init__(self):
        # Config supplies page title and the option lists for the selectboxes.
        self.config=Config()
        # Accumulates every user selection; returned to the caller (main.py).
        self.user_controls={}

    def load_streamlit_ui(self):
        """
        Render the page header and sidebar widgets.

        Returns:
            dict: user selections — "selected_llm", "selected_usecase", and,
            when Groq is chosen, "selected_groq_model" and "GROQ_API_KEY".
        """
        st.set_page_config(page_title= "🤖 " + self.config.get_page_title(), layout="wide")
        st.header("🤖 " + self.config.get_page_title())


        with st.sidebar:
            # Get options from config
            llm_options = self.config.get_llm_options()
            usecase_options = self.config.get_usecase_options()

            # LLM selection
            self.user_controls["selected_llm"] = st.selectbox("Select LLM", llm_options)

            if self.user_controls["selected_llm"] == 'Groq':
                # Model selection
                model_options = self.config.get_groq_model_options()
                self.user_controls["selected_groq_model"] = st.selectbox("Select Model", model_options)
                # The key is mirrored into session_state so other parts of the
                # app can read it without re-rendering this widget.
                self.user_controls["GROQ_API_KEY"] = st.session_state["GROQ_API_KEY"]=st.text_input("API Key",type="password")
                # Validate API key
                if not self.user_controls["GROQ_API_KEY"]:
                    st.warning("⚠️ Please enter your GROQ API key to proceed. Don't have? refer : https://console.groq.com/keys ")

            ## USecase selection
            self.user_controls["selected_usecase"]=st.selectbox("Select Usecases",usecase_options)

        return self.user_controls