# agents.py — GAIA agent pipeline (exported from a Hugging Face Space, commit 137447e; 4.52 kB)
import os
#from langchain.agents import load_tools
from openai import OpenAI
#from openinference.instrumentation.smolagents import SmolagentsInstrumentor
#from phoenix.otel import register
from smolagents import (
CodeAgent,
ToolCallingAgent,
OpenAIServerModel,
Tool,
DuckDuckGoSearchTool,
WikipediaSearchTool
)
from tools import VisitWebpageTool
###
from crewai import Agent
from langchain_openai import ChatOpenAI
from tools import scrape_tool, search_tool, today_tool
###
# OpenAI model IDs used by the agents below.
MODEL_ID_1 = "gpt-4o-mini"  # lightweight model: used to distill the final answer
MODEL_ID_2 = "gpt-4o"       # stronger model: drives the manager CodeAgent
MODEL_ID_3 = "o4-mini"      # NOTE(review): not referenced anywhere in this file
# --- Disabled Arize Phoenix tracing setup, kept for reference ---
#PHOENIX_PROJECT_NAME = "gaia"
#os.environ["OTEL_EXPORTER_OTLP_HEADERS"] = "api_key = " + os.environ["PHOENIX_API_KEY"];
#os.environ["PHOENIX_CLIENT_HEADERS"] = "api_key = " + os.environ["PHOENIX_API_KEY"];
#os.environ["PHOENIX_COLLECTOR_ENDPOINT"] = "https://app.phoenix.arize.com";
#tracer_provider = register(
#    auto_instrument = True,
#    endpoint = "https://app.phoenix.arize.com/v1/traces",
#    project_name = PHOENIX_PROJECT_NAME
#)
#SmolagentsInstrumentor().instrument(tracer_provider = tracer_provider)
def run_gaia(question, file_name):
    """Answer a GAIA benchmark question with a tool-equipped CodeAgent.

    A single manager ``CodeAgent`` (re-planning every 3 steps) answers the
    question using web search, page visits, and Wikipedia lookups; its raw
    output is then distilled into GAIA's short exact-match format.

    Args:
        question: The question text to answer.
        file_name: Name of an attachment referenced by the question.
            NOTE(review): currently unused — the agent never sees the file;
            confirm whether attachments should be downloaded/injected here.

    Returns:
        The distilled final-answer string from ``get_final_answer``.
    """
    manager_agent = CodeAgent(
        model=OpenAIServerModel(model_id=MODEL_ID_2),
        planning_interval=3,
        tools=[DuckDuckGoSearchTool(), VisitWebpageTool(), WikipediaSearchTool()],
        verbosity_level=1,
    )
    # The agent's answer is often verbose; GAIA scoring is exact-match,
    # so post-process it into a terse final answer.
    answer = manager_agent.run(question)
    return get_final_answer(question, answer)
def get_final_answer(question, answer):
    """Distill a verbose agent answer into a short, precise answer.

    Sends the question plus the agent's raw answer to a lightweight model
    with a few-shot prompt that demands a minimal exact reply (GAIA scoring
    is exact-match, so brevity matters).

    Args:
        question: The original question.
        answer: The raw, possibly verbose answer produced by the agent.

    Returns:
        The model's distilled final-answer string.
    """
    # Few-shot prompt steering the model toward terse, exact answers.
    # (Fixed a stray double colon after the final "**Answer:**" cue.)
    prompt = f"""
You are an expert in precise question answering. You are given a question and context. You must **precisely** answer the question based on the context and then stop.
**Question:** {question}
**Context:** {answer}
**Example 1:** What is the capital of France? Paris
**Example 2:** What is the superlative of good? Best
**Example 3:** What is the opposite of left? Right
**Answer:**
"""
    client = OpenAI()
    completion = client.chat.completions.create(
        messages=[{"role": "user", "content": prompt}],
        model=MODEL_ID_1,
    )
    final_answer = completion.choices[0].message.content
    print(f"Question: {question}")
    print(f"Answer: {answer}")
    print(f"Final answer: {final_answer}")
    return final_answer
###
def get_researcher_agent(model, verbose):
    """Build the CrewAI Researcher agent backed by the given OpenAI model name."""
    researcher = Agent(
        role="Researcher",
        goal="Research content on topic: {topic}.",
        backstory=(
            "You're working on researching content on topic: {topic}. "
            "Your work is the basis for the Writer to write on this topic."
        ),
        llm=ChatOpenAI(model=model),
        tools=[search_tool(), scrape_tool()],
        allow_delegation=False,
        verbose=verbose,
    )
    return researcher
def get_writer_agent(model, verbose):
    """Build the CrewAI Writer agent backed by the given OpenAI model name."""
    writer = Agent(
        role="Writer",
        goal="Write an article on topic: {topic}.",
        backstory=(
            "You're working on writing an article on topic: {topic}. "
            "You base your writing on the work of the Researcher, who provides context on this topic."
        ),
        llm=ChatOpenAI(model=model),
        tools=[today_tool()],
        allow_delegation=False,
        verbose=verbose,
    )
    return writer
###