File size: 3,600 Bytes
40fd4e5
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
ac85c1d
40fd4e5
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
ac85c1d
40fd4e5
 
 
 
 
 
 
 
 
ac85c1d
40fd4e5
 
 
ac85c1d
40fd4e5
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
import asyncio

from autogen.agentchat.contrib.llamaindex_conversable_agent import LLamaIndexConversableAgent
from llama_index.tools.wikipedia import WikipediaToolSpec

from src.agents.assistantagent import TrackableAssistantAgent, TrackableLLamaIndexConversableAgent
from src.agents.userproxyagent import TrackableUserProxyAgent
from src.agents.wikipediaagent import WikipediaAgent
from llama_index.core import Settings
from llama_index.core.agent import ReActAgent
from llama_index.embeddings.openai import OpenAIEmbedding
from llama_index.llms.openai import OpenAI
from llama_index.tools.wikipedia import WikipediaToolSpec
from llama_index.llms.groq import Groq

import streamlit as st


class WithLlamaIndexMultiAgentChat:
    """Two-agent AutoGen chat: a user proxy conversing with a LlamaIndex
    ReAct agent that can consult Wikipedia for location information.

    A dedicated asyncio event loop is created at construction time so the
    async chat can be driven from synchronous code (e.g. a Streamlit
    callback) via :meth:`run`.
    """

    def __init__(self, assistant_name, user_proxy_name, llm_config, problem, user_input):
        """Build both agents and a private event loop.

        Args:
            assistant_name: Display name for the trip-assistant agent.
            user_proxy_name: Display name for the user-proxy agent.
            llm_config: AutoGen LLM configuration for the user proxy.
            problem: Initial message that starts the conversation.
            user_input: Dict of UI selections; must contain
                'selected_groq_model' (read by get_location_specialist).
        """
        self.llm_config = llm_config
        self.user_input = user_input

        # The assistant wraps a LlamaIndex ReAct agent so it can invoke the
        # Wikipedia tool while participating in the AutoGen conversation.
        self.trip_assistant = TrackableLLamaIndexConversableAgent(
            assistant_name,
            llama_index_agent=self.get_location_specialist(),
            system_message='''You help customers finding more about places they would like to 
            visit. You can use external resources to provide more details as you engage with 
            the customer. Reply 'TERMINATE' if all task is done ''',
            description="This agents helps customers discover locations to visit, things to do, and other details about a location. It can use external resources to provide more details. This agent helps in finding attractions, history and all that there si to know about a place",
        )

        # The proxy never prompts the human and never executes code; the chat
        # ends when a message's content finishes with the literal "TERMINATE".
        self.user_proxy = TrackableUserProxyAgent(
            name=user_proxy_name,
            system_message="You are Admin",
            human_input_mode="NEVER",
            llm_config=llm_config,
            code_execution_config=False,
            # `content` can be None (e.g. tool/function messages); the original
            # `x.get("content", "")` only covers a *missing* key and would raise
            # AttributeError on None, so coalesce explicitly before stripping.
            is_termination_msg=lambda x: (x.get("content") or "").strip().endswith("TERMINATE"),
        )
        self.problem = problem

        # Own event loop so run() can block on the async chat without
        # clashing with any loop the host framework may manage.
        self.loop = asyncio.new_event_loop()
        asyncio.set_event_loop(self.loop)

    async def initiate_chat(self):
        """Start the proxy→assistant conversation, capped at two turns."""
        # NOTE(review): the "chat_with_history" flag is passed as
        # `clear_history`, which reads inverted (history ON would *clear*
        # history) — confirm intent against the Streamlit state producer.
        await self.user_proxy.a_initiate_chat(
            self.trip_assistant,
            message=self.problem,
            max_turns=2,
            clear_history=st.session_state["chat_with_history"],
        )

    def run(self):
        """Synchronously drive the async chat to completion on our loop."""
        self.loop.run_until_complete(self.initiate_chat())

    def get_location_specialist(self):
        """Create a LlamaIndex ReAct agent equipped with the Wikipedia tool,
        backed by the Groq model selected in the UI.

        Returns:
            ReActAgent: agent able to search/read Wikipedia pages.
        """
        wiki_spec = WikipediaToolSpec()
        # to_tool_list() yields the search/load tools exposed by the spec.
        wikipedia_tool = wiki_spec.to_tool_list()

        llm = Groq(model=self.user_input['selected_groq_model'], api_key=st.session_state["GROQ_API_KEY"])

        return ReActAgent.from_tools(tools=wikipedia_tool, llm=llm, verbose=True)