Amalia
committed on
Commit
·
8a4e9dd
1
Parent(s):
574f383
try litellmmodel
Browse files
app.py
CHANGED
|
@@ -5,7 +5,7 @@ import requests
|
|
| 5 |
import inspect
|
| 6 |
import pandas as pd
|
| 7 |
from typing import Dict, List
|
| 8 |
-
from smolagents import CodeAgent, tool, InferenceClientModel
|
| 9 |
from langchain_community.tools.tavily_search import TavilySearchResults
|
| 10 |
from langchain_community.document_loaders import WikipediaLoader, ArxivLoader
|
| 11 |
|
|
@@ -93,13 +93,49 @@ def wikipedia_search(query: str, load_max_docs: int=3) -> str:
|
|
| 93 |
# ----- THIS IS WERE YOU CAN BUILD WHAT YOU WANT ------
|
| 94 |
class BasicAgent:
|
| 95 |
def __init__(self):
|
| 96 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 97 |
self.agent = CodeAgent(
|
| 98 |
-
|
| 99 |
-
|
| 100 |
-
|
| 101 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 102 |
)
|
|
|
|
| 103 |
print("BasicAgent initialized.")
|
| 104 |
def __call__(self, question: str) -> str:
|
| 105 |
print(f"Agent received question (first 50 chars): {question[:50]}...")
|
|
|
|
| 5 |
import inspect
|
| 6 |
import pandas as pd
|
| 7 |
from typing import Dict, List
|
| 8 |
+
from smolagents import CodeAgent, LiteLLMModel, tool, InferenceClientModel, WikipediaSearchTool, VisitWebpageTool, DuckDuckGoSearchTool
|
| 9 |
from langchain_community.tools.tavily_search import TavilySearchResults
|
| 10 |
from langchain_community.document_loaders import WikipediaLoader, ArxivLoader
|
| 11 |
|
|
|
|
| 93 |
# ----- THIS IS WERE YOU CAN BUILD WHAT YOU WANT ------
|
| 94 |
class BasicAgent:
|
| 95 |
def __init__(self):
|
| 96 |
+
|
| 97 |
+
model = LiteLLMModel(model_id=config.CLAUDE_3_5_SONNET_MODEL_ID, api_key=ANTHROPIC_API_KEY,
|
| 98 |
+
temperature=DEFAULT_TEMPERATURE,
|
| 99 |
+
max_retries=3)
|
| 100 |
+
|
| 101 |
+
# System prompt recommended by GAIA benchmark instruction https://huggingface.co/spaces/gaia-benchmark/leaderboard
|
| 102 |
+
# Emitting the part of final answer, as instructed by the HF course
|
| 103 |
+
system_prompt = """
|
| 104 |
+
You are a general AI assistant.
|
| 105 |
+
I will ask you a question.
|
| 106 |
+
Report your thoughts.
|
| 107 |
+
YOUR FINAL ANSWER should be a number OR as few words as possible OR a comma separated list of numbers and/or strings.
|
| 108 |
+
If you are asked for a number, don't use comma to write your number neither use units such as $ or percent sign unless specified otherwise.
|
| 109 |
+
If you are asked for a string, don't use articles, neither abbreviations (e.g. for cities), and write the digits in plain text unless specified otherwise.
|
| 110 |
+
If you are asked for a comma separated list, apply the above rules depending of whether the element to be put in the list is a number or a string.
|
| 111 |
+
"""
|
| 112 |
+
|
| 113 |
+
|
| 114 |
+
# # Init agent with InferenceClientModel
|
| 115 |
+
# model_id = "Qwen/Qwen3-32B"
|
| 116 |
+
# self.agent = CodeAgent(
|
| 117 |
+
# model=InferenceClientModel(model_id=model_id, token=os.getenv("HF_TOKEN")),
|
| 118 |
+
# tools=[multiply, add, subtract, modulus, web_search, arxiv_search, wikipedia_search],
|
| 119 |
+
# max_steps=10,
|
| 120 |
+
# verbosity_level=2,
|
| 121 |
+
# )
|
| 122 |
+
|
| 123 |
+
# Init agent with LiteLLMModel
|
| 124 |
+
model = LiteLLMModel(model_id="openai/gpt-4o-mini", api_key=os.getenv("OPENAI_API_KEY"),
|
| 125 |
+
# temperature=DEFAULT_TEMPERATURE,
|
| 126 |
+
max_retries=3)
|
| 127 |
self.agent = CodeAgent(
|
| 128 |
+
tools=[
|
| 129 |
+
multiply, add, subtract, modulus, web_search, arxiv_search, wikipedia_search,
|
| 130 |
+
WikipediaSearchTool(),
|
| 131 |
+
DuckDuckGoSearchTool(),
|
| 132 |
+
VisitWebpageTool()
|
| 133 |
+
],
|
| 134 |
+
model=model,
|
| 135 |
+
# planning_interval=DEFAULT_PLANNING_INTERVAL,
|
| 136 |
+
# step_callbacks=[generation_error_interrupter]
|
| 137 |
)
|
| 138 |
+
|
| 139 |
print("BasicAgent initialized.")
|
| 140 |
def __call__(self, question: str) -> str:
|
| 141 |
print(f"Agent received question (first 50 chars): {question[:50]}...")
|