Spaces:
Sleeping
Sleeping
File size: 1,977 Bytes
import os
import dotenv
import yaml
from smolagents import CodeAgent, LiteLLMModel, OpenAIServerModel, DuckDuckGoSearchTool, VisitWebpageTool
from tools import ToolReadFiles, ToolReverseString, ToolDownloadImage, FinalAnswerTool
dotenv.load_dotenv()
# Create prompt template
# with open("prompts.yaml", 'r') as stream:
# prompt_templates = yaml.safe_load(stream)
class Agent:
    """Question-answering agent: a smolagents CodeAgent backed by a Gemini
    model accessed through LiteLLM.

    Requires the ``GEMINI_API_KEY`` environment variable (loaded from .env
    by the module-level ``dotenv.load_dotenv()`` call).
    """

    def __init__(self):
        # Key is read from the environment; None here will surface as an
        # auth error on the first model call rather than at construction.
        self.gemini_key = os.getenv("GEMINI_API_KEY")

        self.model = LiteLLMModel(
            model_id="gemini/gemini-2.0-flash-lite",
            api_key=self.gemini_key,
        )

        # NOTE(review): ToolReadFiles / ToolReverseString / ToolDownloadImage
        # are passed without "()" while FinalAnswerTool is instantiated —
        # confirm the tools module exports ready-made instances; otherwise
        # these three also need to be constructed.
        self.agent = CodeAgent(
            model=self.model,
            add_base_tools=True,
            tools=[ToolReadFiles, ToolReverseString, ToolDownloadImage, FinalAnswerTool(self.model)],
            max_steps=10,
            # Entries must be *import names*: BeautifulSoup is imported as
            # "bs4" (the PyPI name "beautifulsoup4" would never match).
            # NOTE(review): "wiki" is not a standard package import name —
            # presumably "wikipedia" was intended; verify.
            additional_authorized_imports=['numpy', 'bs4', 'wiki', 're', 'pandas'],
        )

    def __call__(self, query: str, file: str = "") -> str:
        """Run the agent on *query*.

        If *file* is non-empty, it is appended to the query as a
        ``[FILE] <path>`` marker so tools can locate the attachment.
        Returns the agent's final answer as a string.
        """
        if file != "":
            query = f"{query} [FILE] {file}"
        return self.ask(query)

    def ask(self, query: str) -> str:
        """Forward *query* to the underlying CodeAgent and return its result."""
        result = self.agent.run(query)
        return result
def _main() -> None:
    """Ad-hoc smoke test: ask the agent one factual question and print the answer."""
    otto = Agent()
    response = otto.ask("Who was President of the United States in 1957?")
    print(response)


if __name__ == "__main__":
    _main()