# NOTE(review): removed non-code residue (HuggingFace Spaces page UI text /
# file-size banner) that was captured when this file was scraped.
import os
import dotenv
import yaml
from smolagents import CodeAgent, LiteLLMModel, OpenAIServerModel, HfApiModel, DuckDuckGoSearchTool, VisitWebpageTool
from tools import ToolReadFiles, ToolReverseString, ToolDownloadImage, FinalAnswerTool
dotenv.load_dotenv()
# HF_API_KEY = os.getenv("HF_API_KEY")
# OPENAI_KEY = os.getenv("OPEN_AI_KEY")
# Create prompt template
# with open("prompts.yaml", 'r') as stream:
# prompt_templates = yaml.safe_load(stream)
class Agent:
    """Question-answering agent that wraps a smolagents ``CodeAgent``.

    The agent runs a Qwen3 model hosted on the HuggingFace Inference API and
    is equipped with custom file/string/image tools plus the smolagents base
    tools.
    """

    def __init__(self):
        # HF_API_KEY must be present in the environment (loaded via
        # dotenv.load_dotenv() at module import time).
        self.hf_token = os.getenv("HF_API_KEY")
        self.model = HfApiModel(
            model_id="Qwen/Qwen3-235B-A22B",
            token=self.hf_token,
        )
        self.agent = CodeAgent(
            model=self.model,
            add_base_tools=True,
            # CodeAgent expects tool *instances*. The original passed three
            # tool classes alongside one instance (FinalAnswerTool()); all
            # four are instantiated here for consistency.
            # assumes ToolReadFiles/ToolReverseString/ToolDownloadImage are
            # Tool subclasses, not pre-built instances — TODO confirm in tools.py
            tools=[
                ToolReadFiles(),
                ToolReverseString(),
                ToolDownloadImage(),
                FinalAnswerTool(),
            ],
            max_steps=10,
            # 'bs4' is the importable module name; the original listed the
            # PyPI distribution name 'beautifulsoup4', which can never be
            # imported and so authorized nothing.
            # NOTE(review): 'wiki' looks suspect too — possibly meant
            # 'wikipedia'; confirm against the tools actually used.
            additional_authorized_imports=['numpy', 'bs4', 'wiki', 're', 'pandas'],
        )

    def __call__(self, query: str, file: str = "") -> str:
        """Answer *query*, appending a ``[FILE]`` hint when *file* is non-empty.

        ``file`` now defaults to ``""`` so file-less calls need only one
        argument; existing two-argument callers are unaffected.
        """
        if file:
            query = f"{query} [FILE] {file}"
        return self.ask(query)

    def ask(self, query: str) -> str:
        """Run the underlying CodeAgent on *query* and return its result."""
        return self.agent.run(query)
if __name__ == "__main__":
    # Smoke test: build the agent and run a single factual question through it.
    qa_agent = Agent()
    answer = qa_agent.ask("Who was President of the United States in 1957?")
    print(answer)