genaitiwari committed on
Commit
ac85c1d
·
1 Parent(s): 3328745

refinement of llamaindex tool

Browse files
README.md CHANGED
@@ -70,6 +70,11 @@ docs or filename path : https://github.com/microsoft/autogen/blob/main/python/sa
70
  ![alt text](rag_chat.png)
71
 
72
  #### With LLamaIndex Tool
 
 
 
 
 
73
  #### AgentChat Sql Spider
74
 
75
  ### GROQ_MODEL_OPTIONS
 
70
  ![alt text](rag_chat.png)
71
 
72
  #### With LLamaIndex Tool
73
+ prompt: What can I find in Tokyo related to Hayao Miyazaki and his movies like Spirited Away?
74
+ ![alt text](with_llamatool.png)
75
+
76
+
77
+
78
  #### AgentChat Sql Spider
79
 
80
  ### GROQ_MODEL_OPTIONS
app.py CHANGED
@@ -47,18 +47,19 @@ if __name__ == "__main__":
47
  obj_usecases_rag_multichat.run()
48
 
49
  elif user_input['selected_usecase'] == "With LLamaIndex Tool":
 
50
  obj_usecases_with_llamaIndex_multichat = WithLlamaIndexMultiAgentChat(assistant_name='Assistant', user_proxy_name='Userproxy',
51
  llm_config=llm_config,
52
  problem=problem,user_input=user_input)
53
  obj_usecases_with_llamaIndex_multichat.run()
54
 
55
 
56
- elif user_input['selected_usecase'] == "AgentChat Sql Spider":
57
- obj_sql_spider = AgentChatSqlSpider(assistant_name="Assistant", user_proxy_name='Userproxy',
58
- llm_config=llm_config,
59
- problem=problem)
60
 
61
- obj_sql_spider.run()
62
 
63
  elif user_input['selected_usecase'] == "Basic Example":
64
  obj_basic_example = BasicExample(assistant_name="Assistant", user_proxy_name='Userproxy',
 
47
  obj_usecases_rag_multichat.run()
48
 
49
  elif user_input['selected_usecase'] == "With LLamaIndex Tool":
50
+
51
  obj_usecases_with_llamaIndex_multichat = WithLlamaIndexMultiAgentChat(assistant_name='Assistant', user_proxy_name='Userproxy',
52
  llm_config=llm_config,
53
  problem=problem,user_input=user_input)
54
  obj_usecases_with_llamaIndex_multichat.run()
55
 
56
 
57
+ # elif user_input['selected_usecase'] == "AgentChat Sql Spider":
58
+ # obj_sql_spider = AgentChatSqlSpider(assistant_name="Assistant", user_proxy_name='Userproxy',
59
+ # llm_config=llm_config,
60
+ # problem=problem)
61
 
62
+ # obj_sql_spider.run()
63
 
64
  elif user_input['selected_usecase'] == "Basic Example":
65
  obj_basic_example = BasicExample(assistant_name="Assistant", user_proxy_name='Userproxy',
configfile.ini CHANGED
@@ -1,6 +1,6 @@
1
  [DEFAULT]
2
  PAGE_TITLE = AUTOGEN IN ACTION
3
  LLM_OPTIONS = Groq, Huggingface
4
- USECASE_OPTIONS = Basic Example, MultiAgent Chat, MultiAgent Code Execution, RAG Chat, With LLamaIndex Tool, AgentChat Sql Spider
5
- GROQ_MODEL_OPTIONS = mixtral-8x7b-32768, llama3-8b-8192, llama3-70b-8192, gemma-7b-i
6
 
 
1
  [DEFAULT]
2
  PAGE_TITLE = AUTOGEN IN ACTION
3
  LLM_OPTIONS = Groq, Huggingface
4
+ USECASE_OPTIONS = Basic Example, MultiAgent Chat, MultiAgent Code Execution, RAG Chat, With LLamaIndex Tool
5
+ GROQ_MODEL_OPTIONS = llama-3.3-70b-versatile, mixtral-8x7b-32768, llama3-8b-8192, llama3-70b-8192, gemma2-9b-it
6
 
src/agents/assistantagent.py CHANGED
@@ -15,8 +15,14 @@ class TrackableAssistantAgent(AssistantAgent):
15
 
16
  class TrackableLLamaIndexConversableAgent(LLamaIndexConversableAgent):
17
  def _process_received_message(self, message, sender, silent):
18
- with st.chat_message(sender.name):
19
- st.write(message)
 
 
 
 
 
 
20
  return super()._process_received_message(message, sender, silent)
21
 
22
 
 
15
 
16
  class TrackableLLamaIndexConversableAgent(LLamaIndexConversableAgent):
17
  def _process_received_message(self, message, sender, silent):
18
+ if message and type(message)== str and sender.name =="Userproxy":
19
+ with st.chat_message("user"):
20
+ st.write(message)
21
+
22
+ else:
23
+ with st.chat_message("ai"):
24
+ st.write(message['content'])
25
+
26
  return super()._process_received_message(message, sender, silent)
27
 
28
 
src/agents/userproxyagent.py CHANGED
@@ -5,5 +5,8 @@ import streamlit as st
5
  class TrackableUserProxyAgent(UserProxyAgent):
6
  def _process_received_message(self, message, sender, silent):
7
  with st.chat_message(sender.name.lower()):
8
- st.write(message['content'])
 
 
 
9
  return super()._process_received_message(message, sender, silent)
 
5
  class TrackableUserProxyAgent(UserProxyAgent):
6
  def _process_received_message(self, message, sender, silent):
7
  with st.chat_message(sender.name.lower()):
8
+ if type(message)==str:
9
+ st.write(message)
10
+ else :
11
+ st.write(message['content'])
12
  return super()._process_received_message(message, sender, silent)
src/agents/wikipediaagent.py CHANGED
@@ -17,16 +17,16 @@ class WikipediaAgent:
17
  self.wikipedia_tool = None
18
  self.agent = None
19
 
20
- def create_agent(self, max_iterations=10, verbose=True):
21
  # Create the Wikipedia tool specification
22
  wiki_spec = WikipediaToolSpec()
23
 
24
  # Get the search Wikipedia tool
25
- self.wikipedia_tool = wiki_spec.to_tool_list()[1]
26
 
27
  # Create the ReActAgent with the Wikipedia tool
28
  self.agent = ReActAgent.from_tools(
29
- tools=[self.wikipedia_tool],
30
  llm=self.llm,
31
  max_iterations=max_iterations,
32
  verbose=verbose
 
17
  self.wikipedia_tool = None
18
  self.agent = None
19
 
20
+ def create_agent(self, max_iterations=8, verbose=True):
21
  # Create the Wikipedia tool specification
22
  wiki_spec = WikipediaToolSpec()
23
 
24
  # Get the search Wikipedia tool
25
+ self.wikipedia_tool = wiki_spec.to_tool_list()
26
 
27
  # Create the ReActAgent with the Wikipedia tool
28
  self.agent = ReActAgent.from_tools(
29
+ tools=self.wikipedia_tool,
30
  llm=self.llm,
31
  max_iterations=max_iterations,
32
  verbose=verbose
src/streamlitui/loadui.py CHANGED
@@ -31,7 +31,12 @@ class LoadStreamlitUI:
31
  if self.user_controls['selected_usecase'] == "RAG Chat":
32
  st.session_state["docs_path"] = st.text_input("Enter Docs path or filename")
33
 
 
 
34
 
35
  st.session_state["chat_with_history"] = st.sidebar.toggle("Chat With History")
 
 
 
36
 
37
  return self.user_controls
 
31
  if self.user_controls['selected_usecase'] == "RAG Chat":
32
  st.session_state["docs_path"] = st.text_input("Enter Docs path or filename")
33
 
34
+
35
+
36
 
37
  st.session_state["chat_with_history"] = st.sidebar.toggle("Chat With History")
38
+
39
+ if self.user_controls['selected_usecase'] == "With LLamaIndex Tool":
40
+ st.subheader("🏝️ Trip Advisor Specialist using wikipedia")
41
 
42
  return self.user_controls
src/usecases/withllamaIndex.py CHANGED
@@ -29,7 +29,7 @@ class WithLlamaIndexMultiAgentChat:
29
  self.user_input = user_input
30
 
31
  self.trip_assistant = TrackableLLamaIndexConversableAgent(
32
- "trip_specialist",
33
  llama_index_agent=self.get_location_specialist(),
34
  system_message='''You help customers finding more about places they would like to
35
  visit. You can use external resources to provide more details as you engage with
@@ -49,7 +49,7 @@ class WithLlamaIndexMultiAgentChat:
49
  asyncio.set_event_loop(self.loop)
50
 
51
  async def initiate_chat(self):
52
- await self.user_proxy.a_initiate_chat(self.trip_assistant, message=self.problem,
53
  clear_history=st.session_state["chat_with_history"])
54
 
55
  def run(self):
@@ -59,12 +59,11 @@ class WithLlamaIndexMultiAgentChat:
59
  # create a react agent to use wikipedia tool
60
  wiki_spec = WikipediaToolSpec()
61
  # Get the search wikipedia tool
62
- wikipedia_tool = wiki_spec.to_tool_list()[1]
63
 
64
  llm = Groq(model=self.user_input['selected_groq_model'], api_key=st.session_state["GROQ_API_KEY"])
65
- llm_70b = Groq(model="llama3-70b-8192")
66
 
67
- location_specialist = ReActAgent.from_tools(tools=[wikipedia_tool], llm=llm, max_iterations=1,
68
  verbose=True)
69
 
70
  return location_specialist
 
29
  self.user_input = user_input
30
 
31
  self.trip_assistant = TrackableLLamaIndexConversableAgent(
32
+ assistant_name,
33
  llama_index_agent=self.get_location_specialist(),
34
  system_message='''You help customers finding more about places they would like to
35
  visit. You can use external resources to provide more details as you engage with
 
49
  asyncio.set_event_loop(self.loop)
50
 
51
  async def initiate_chat(self):
52
+ await self.user_proxy.a_initiate_chat(self.trip_assistant, message=self.problem,max_turns=2,
53
  clear_history=st.session_state["chat_with_history"])
54
 
55
  def run(self):
 
59
  # create a react agent to use wikipedia tool
60
  wiki_spec = WikipediaToolSpec()
61
  # Get the search wikipedia tool
62
+ wikipedia_tool = wiki_spec.to_tool_list()
63
 
64
  llm = Groq(model=self.user_input['selected_groq_model'], api_key=st.session_state["GROQ_API_KEY"])
 
65
 
66
+ location_specialist = ReActAgent.from_tools(tools=wikipedia_tool, llm=llm,
67
  verbose=True)
68
 
69
  return location_specialist
with_llamatool.png ADDED