genaitiwari committed on
Commit
40fd4e5
·
1 Parent(s): d1bb0ce

with llamaIndex tool wikipedia

Browse files
app.py CHANGED
@@ -3,6 +3,7 @@ import streamlit as st
3
  from configfile import Config
4
  from src.streamlitui.loadui import LoadStreamlitUI
5
  from src.usecases.multiagentschat import MultiAgentChat
 
6
  from src.LLMS.groqllm import GroqLLM
7
 
8
 
@@ -26,6 +27,14 @@ if __name__ == "__main__":
26
 
27
  if problem:
28
  # start multichat
29
- obj_usecases = MultiAgentChat(assistant_name='Assistant', user_proxy_name='Userproxy', llm_config=llm_config,
30
- problem=problem)
31
- obj_usecases.run()
 
 
 
 
 
 
 
 
 
3
  from configfile import Config
4
  from src.streamlitui.loadui import LoadStreamlitUI
5
  from src.usecases.multiagentschat import MultiAgentChat
6
+ from src.usecases.withllamaIndex import WithLlamaIndexMultiAgentChat
7
  from src.LLMS.groqllm import GroqLLM
8
 
9
 
 
27
 
28
  if problem:
29
  # start multichat
30
+ if user_input['selected_usecase'] == "MultiAgent Chat":
31
+ obj_usecases_multichat = MultiAgentChat(assistant_name='Assistant', user_proxy_name='Userproxy', llm_config=llm_config,
32
+ problem=problem)
33
+ obj_usecases_multichat.run()
34
+
35
+ elif user_input['selected_usecase'] == "With LLamaIndex Tool":
36
+ obj_usecases_with_llamaIndex_multichat = WithLlamaIndexMultiAgentChat(assistant_name='Assistant', user_proxy_name='Userproxy',
37
+ llm_config=llm_config,
38
+ problem=problem,user_input=user_input)
39
+ obj_usecases_with_llamaIndex_multichat.run()
40
+
configfile.ini CHANGED
@@ -2,5 +2,5 @@
2
  PAGE_TITLE = AUTOGEN IN ACTION
3
  LLM_OPTIONS = Groq, Huggingface
4
  USECASE_OPTIONS = MultiAgent Chat, RAG Chat, With LLamaIndex Tool, Teachable Agent, With Langchain
5
- GROQ_MODEL_OPTIONS = Mixtral 8x7b, llama3-8b-8192, llama3-70b-8192, gemma-7b-i
6
 
 
2
  PAGE_TITLE = AUTOGEN IN ACTION
3
  LLM_OPTIONS = Groq, Huggingface
4
  USECASE_OPTIONS = MultiAgent Chat, RAG Chat, With LLamaIndex Tool, Teachable Agent, With Langchain
5
+ GROQ_MODEL_OPTIONS = mixtral-8x7b-32768, llama3-8b-8192, llama3-70b-8192, gemma-7b-it
6
 
requirements.txt CHANGED
@@ -1,3 +1,8 @@
1
  streamlit
2
  pyautogen
3
- groq
 
 
 
 
 
 
1
  streamlit
2
  pyautogen
3
+ groq
4
+ llama-index
5
+ llama-index-tools-wikipedia
6
+ llama-index-readers-wikipedia
7
+ wikipedia
8
+ llama-index-llms-groq
src/LLMS/groqllm.py CHANGED
@@ -15,6 +15,6 @@ class GroqLLM:
15
  "cache_seed": None}
16
  ]
17
 
18
- llm_config = {"config_list": config_list}
19
  st.session_state['llm_config'] = llm_config
20
  return llm_config
 
15
  "cache_seed": None}
16
  ]
17
 
18
+ llm_config = {"config_list": config_list, "request_timeout": 60}
19
  st.session_state['llm_config'] = llm_config
20
  return llm_config
src/agents/assistantagent.py CHANGED
@@ -1,5 +1,6 @@
1
  from autogen import AssistantAgent
2
  import streamlit as st
 
3
 
4
 
5
  class TrackableAssistantAgent(AssistantAgent):
@@ -7,3 +8,13 @@ class TrackableAssistantAgent(AssistantAgent):
7
  with st.chat_message(sender.name):
8
  st.write(message)
9
  return super()._process_received_message(message, sender, silent)
 
 
 
 
 
 
 
 
 
 
 
1
  from autogen import AssistantAgent
2
  import streamlit as st
3
+ from autogen.agentchat.contrib.llamaindex_conversable_agent import LLamaIndexConversableAgent
4
 
5
 
6
  class TrackableAssistantAgent(AssistantAgent):
 
8
  with st.chat_message(sender.name):
9
  st.write(message)
10
  return super()._process_received_message(message, sender, silent)
11
+
12
+
13
class TrackableLLamaIndexConversableAgent(LLamaIndexConversableAgent):
    """LlamaIndex-backed conversable agent that mirrors every received
    message into the Streamlit chat UI before normal processing."""

    def _process_received_message(self, message, sender, silent):
        # Render the incoming message inside a chat bubble labelled with
        # the sender's name, then defer to the base-class handling.
        with st.chat_message(sender.name):
            st.write(message)
        return super()._process_received_message(message, sender, silent)
18
+
19
+
20
+
src/agents/wikipediaagent.py ADDED
@@ -0,0 +1,45 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # from some_module import WikipediaToolSpec, ReActAgent # Ensure you have the correct imports
2
+ # from some_llm_module import llm # Import the LLM you're using
3
+
4
+ from llama_index.core import Settings
5
+ from llama_index.core.agent import ReActAgent
6
+ from llama_index.embeddings.openai import OpenAIEmbedding
7
+ from llama_index.llms.openai import OpenAI
8
+ from llama_index.tools.wikipedia import WikipediaToolSpec
9
+
10
+
11
+
12
+
13
+
14
class WikipediaAgent:
    """Factory for a LlamaIndex ReActAgent equipped with Wikipedia search.

    Usage:
        wiki_agent = WikipediaAgent(llm)
        agent = wiki_agent.create_agent()
    """

    def __init__(self, llm):
        # llm: any LlamaIndex-compatible LLM driving the ReAct loop.
        self.llm = llm
        self.wikipedia_tool = None
        self.agent = None

    def create_agent(self, max_iterations=10, verbose=True):
        """Create (and cache on the instance) a ReActAgent wired to the
        Wikipedia search tool.

        Args:
            max_iterations: Cap on ReAct reason/act cycles before the
                agent stops trying.
            verbose: Whether the agent prints its intermediate reasoning.

        Returns:
            The constructed ReActAgent.
        """
        wiki_spec = WikipediaToolSpec()
        tools = wiki_spec.to_tool_list()

        # Select the search tool by its metadata name rather than the
        # fragile positional index [1]; fall back to index 1 (the original
        # behavior) if the name is not found.
        self.wikipedia_tool = next(
            (t for t in tools if t.metadata.name == "search_data"),
            tools[1],
        )

        self.agent = ReActAgent.from_tools(
            tools=[self.wikipedia_tool],
            llm=self.llm,
            max_iterations=max_iterations,
            verbose=verbose,
        )
        return self.agent
36
+
37
+
38
+
39
+
40
+ # Usage
41
+ # wiki_agent = WikipediaAgent(llm)
42
+ # agent = wiki_agent.create_agent()
43
+
44
+
45
+
src/usecases/withllamaIndex.py ADDED
@@ -0,0 +1,70 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import asyncio
2
+
3
+ from autogen.agentchat.contrib.llamaindex_conversable_agent import LLamaIndexConversableAgent
4
+ from llama_index.tools.wikipedia import WikipediaToolSpec
5
+
6
+ from src.agents.assistantagent import TrackableAssistantAgent, TrackableLLamaIndexConversableAgent
7
+ from src.agents.userproxyagent import TrackableUserProxyAgent
8
+ from src.agents.wikipediaagent import WikipediaAgent
9
+ from llama_index.core import Settings
10
+ from llama_index.core.agent import ReActAgent
11
+ from llama_index.embeddings.openai import OpenAIEmbedding
12
+ from llama_index.llms.openai import OpenAI
13
+ from llama_index.tools.wikipedia import WikipediaToolSpec
14
+ from llama_index.llms.groq import Groq
15
+
16
+ import streamlit as st
17
+
18
+
19
class WithLlamaIndexMultiAgentChat:
    """Multi-agent chat use case backed by a LlamaIndex ReAct agent with
    the Wikipedia search tool.

    A LlamaIndex-wrapped assistant ("trip_specialist") is paired with a
    user-proxy agent; the proxy drives the async AutoGen conversation
    until a reply ending in "TERMINATE" arrives.
    """

    def __init__(self, assistant_name, user_proxy_name, llm_config, problem, user_input):
        # NOTE(review): assistant_name is accepted for interface parity with
        # MultiAgentChat but is currently unused — the LlamaIndex assistant
        # has the fixed name "trip_specialist".
        self.llm_config = llm_config
        self.user_input = user_input

        self.trip_assistant = TrackableLLamaIndexConversableAgent(
            "trip_specialist",
            llama_index_agent=self.get_location_specialist(),
            system_message='''You help customers finding more about places they would like to
            visit. You can use external resources to provide more details as you engage with
            the customer. Reply 'TERMINATE' if all task is done ''',
            description="This agent helps customers discover locations to visit, things to do, and other details about a location. It can use external resources to provide more details. This agent helps in finding attractions, history and all that there is to know about a place",
        )

        self.user_proxy = TrackableUserProxyAgent(
            name=user_proxy_name,
            system_message="You are Admin",
            human_input_mode="NEVER",
            llm_config=llm_config,
            code_execution_config=False,
            # Stop the conversation once an agent signs off with TERMINATE.
            is_termination_msg=lambda x: x.get("content", "").strip().endswith("TERMINATE"),
        )
        self.problem = problem
        # AutoGen's a_initiate_chat is async; keep a dedicated event loop
        # so run() can block on it from Streamlit's synchronous context.
        self.loop = asyncio.new_event_loop()
        asyncio.set_event_loop(self.loop)

    async def initiate_chat(self):
        """Kick off the async AutoGen conversation about self.problem."""
        await self.user_proxy.a_initiate_chat(
            self.trip_assistant,
            message=self.problem,
            clear_history=st.session_state["chat_with_history"],
        )

    def run(self):
        """Block until the multi-agent conversation completes."""
        self.loop.run_until_complete(self.initiate_chat())

    def get_location_specialist(self):
        """Build the LlamaIndex ReActAgent that answers via Wikipedia.

        Returns a ReActAgent driven by the Groq model the user selected
        in the Streamlit UI (API key read from session state).
        """
        wiki_spec = WikipediaToolSpec()
        tools = wiki_spec.to_tool_list()
        # Select the search tool by its metadata name rather than the
        # fragile positional index [1]; fall back to index 1 (the original
        # behavior) if the name is not found.
        wikipedia_tool = next(
            (t for t in tools if t.metadata.name == "search_data"),
            tools[1],
        )

        llm = Groq(
            model=self.user_input['selected_groq_model'],
            api_key=st.session_state["GROQ_API_KEY"],
        )

        location_specialist = ReActAgent.from_tools(
            tools=[wikipedia_tool],
            llm=llm,
            max_iterations=3,
            verbose=True,
        )
        return location_specialist
src/utils/utility.py ADDED
File without changes