Spaces:
Runtime error
Runtime error
Add DuckDuckGoSearchTool to SmolAgent
Browse files
app.py
CHANGED
|
@@ -3,7 +3,7 @@ import gradio as gr
|
|
| 3 |
import requests
|
| 4 |
import inspect
|
| 5 |
import pandas as pd
|
| 6 |
-
from smolagents import CodeAgent, InferenceClientModel
|
| 7 |
|
| 8 |
# (Keep Constants as is)
|
| 9 |
# --- Constants ---
|
|
@@ -19,6 +19,9 @@ class SmolAgent:
|
|
| 19 |
if not hf_token:
|
| 20 |
raise ValueError("Hugging Face token not found. Please set HF_TOKEN environment variable in HF Spaces settings.")
|
| 21 |
|
|
|
|
|
|
|
|
|
|
| 22 |
# 3. Initialize the SmolLM model
|
| 23 |
model = InferenceClientModel(
|
| 24 |
model_id="HuggingFaceTB/SmolLM-360M-Instruct",
|
|
@@ -27,11 +30,11 @@ class SmolAgent:
|
|
| 27 |
|
| 28 |
# 4. Replace your current BasicAgent with a smolagents.CodeAgent
|
| 29 |
self._agent = CodeAgent(
|
| 30 |
-
tools=[],
|
| 31 |
model=model,
|
| 32 |
instructions=SYSTEM_PROMPT,
|
| 33 |
)
|
| 34 |
-
print("SmolAgent initialized.")
|
| 35 |
|
| 36 |
def __call__(self, question: str) -> str:
|
| 37 |
print(f"Agent received question (first 50 chars): {question[:50]}...")
|
|
@@ -196,7 +199,7 @@ with gr.Blocks() as demo:
|
|
| 196 |
**Model Information:**
|
| 197 |
- Using: HuggingFaceTB/SmolLM-360M-Instruct
|
| 198 |
- Framework: smolagents CodeAgent
|
| 199 |
-
-
|
| 200 |
|
| 201 |
**Disclaimers:**
|
| 202 |
Once you click the "Submit" button, it can take quite some time (this is the time needed for the agent to go through all the questions).
|
|
|
|
| 3 |
import requests
|
| 4 |
import inspect
|
| 5 |
import pandas as pd
|
| 6 |
+
from smolagents import CodeAgent, InferenceClientModel, DuckDuckGoSearchTool
|
| 7 |
|
| 8 |
# (Keep Constants as is)
|
| 9 |
# --- Constants ---
|
|
|
|
| 19 |
if not hf_token:
|
| 20 |
raise ValueError("Hugging Face token not found. Please set HF_TOKEN environment variable in HF Spaces settings.")
|
| 21 |
|
| 22 |
+
# Initialize the tool
|
| 23 |
+
search_tool = DuckDuckGoSearchTool()
|
| 24 |
+
|
| 25 |
# 3. Initialize the SmolLM model
|
| 26 |
model = InferenceClientModel(
|
| 27 |
model_id="HuggingFaceTB/SmolLM-360M-Instruct",
|
|
|
|
| 30 |
|
| 31 |
# 4. Replace your current BasicAgent with a smolagents.CodeAgent
|
| 32 |
self._agent = CodeAgent(
|
| 33 |
+
tools=[search_tool],
|
| 34 |
model=model,
|
| 35 |
instructions=SYSTEM_PROMPT,
|
| 36 |
)
|
| 37 |
+
print("SmolAgent initialized with DuckDuckGoSearchTool.")
|
| 38 |
|
| 39 |
def __call__(self, question: str) -> str:
|
| 40 |
print(f"Agent received question (first 50 chars): {question[:50]}...")
|
|
|
|
| 199 |
**Model Information:**
|
| 200 |
- Using: HuggingFaceTB/SmolLM-360M-Instruct
|
| 201 |
- Framework: smolagents CodeAgent
|
| 202 |
+
- Tools: DuckDuckGo Search (for web access)
|
| 203 |
|
| 204 |
**Disclaimers:**
|
| 205 |
Once you click the "Submit" button, it can take quite some time (this is the time needed for the agent to go through all the questions).
|