Spaces:
Sleeping
Sleeping
| import asyncio | |
| from autogen.agentchat.contrib.llamaindex_conversable_agent import LLamaIndexConversableAgent | |
| from llama_index.tools.wikipedia import WikipediaToolSpec | |
| from src.agents.assistantagent import TrackableAssistantAgent, TrackableLLamaIndexConversableAgent | |
| from src.agents.userproxyagent import TrackableUserProxyAgent | |
| from src.agents.wikipediaagent import WikipediaAgent | |
| from llama_index.core import Settings | |
| from llama_index.core.agent import ReActAgent | |
| from llama_index.embeddings.openai import OpenAIEmbedding | |
| from llama_index.llms.openai import OpenAI | |
| from llama_index.tools.wikipedia import WikipediaToolSpec | |
| from llama_index.llms.groq import Groq | |
| import streamlit as st | |
class WithLlamaIndexMultiAgentChat:
    """Two-agent AutoGen chat: a user-proxy (admin) agent drives a trip-planning
    assistant whose reasoning is delegated to a llama-index ReAct agent equipped
    with the Wikipedia tool and a Groq-hosted LLM.
    """

    def __init__(self, assistant_name, user_proxy_name, llm_config, problem, user_input):
        """
        Args:
            assistant_name: display name for the trip-assistant agent.
            user_proxy_name: display name for the user-proxy (admin) agent.
            llm_config: AutoGen llm_config passed to the user proxy.
            problem: initial message that kicks off the conversation.
            user_input: dict of UI selections; must contain 'selected_groq_model'.
        """
        self.llm_config = llm_config
        self.user_input = user_input
        # The assistant wraps a llama-index ReAct agent so it can consult
        # Wikipedia while conversing.
        self.trip_assistant = TrackableLLamaIndexConversableAgent(
            assistant_name,
            llama_index_agent=self.get_location_specialist(),
            system_message='''You help customers finding more about places they would like to
            visit. You can use external resources to provide more details as you engage with
            the customer. Reply 'TERMINATE' if all task is done ''',
            description="This agent helps customers discover locations to visit, things to do, and other details about a location. It can use external resources to provide more details. This agent helps in finding attractions, history and all that there is to know about a place",
        )
        self.user_proxy = TrackableUserProxyAgent(
            name=user_proxy_name,
            system_message="You are Admin",
            human_input_mode="NEVER",
            llm_config=llm_config,
            code_execution_config=False,
            # (x.get("content") or "") guards against an explicit None content
            # value, for which dict.get's default would NOT fire and .strip()
            # would raise AttributeError.
            is_termination_msg=lambda x: (x.get("content") or "").strip().endswith("TERMINATE"),
        )
        self.problem = problem
        # Dedicated event loop so run() can drive the async chat from sync
        # (Streamlit) code.
        self.loop = asyncio.new_event_loop()
        asyncio.set_event_loop(self.loop)

    async def initiate_chat(self):
        """Start the async chat between the user proxy and the trip assistant."""
        await self.user_proxy.a_initiate_chat(
            self.trip_assistant,
            message=self.problem,
            max_turns=2,
            clear_history=st.session_state["chat_with_history"],
        )

    def run(self):
        """Synchronous entry point: run the chat to completion on our own loop."""
        self.loop.run_until_complete(self.initiate_chat())

    def get_location_specialist(self):
        """Build and return a llama-index ReAct agent armed with the Wikipedia
        tool, using the Groq model selected in the UI.
        """
        wiki_spec = WikipediaToolSpec()
        # to_tool_list() yields the list of FunctionTools exposed by the spec.
        wikipedia_tool = wiki_spec.to_tool_list()
        llm = Groq(model=self.user_input['selected_groq_model'],
                   api_key=st.session_state["GROQ_API_KEY"])
        location_specialist = ReActAgent.from_tools(tools=wikipedia_tool, llm=llm,
                                                    verbose=True)
        return location_specialist