Use a local transformers model instead of InferenceClientModel
Browse files
app.py
CHANGED
|
@@ -1,14 +1,15 @@
|
|
| 1 |
import os
|
| 2 |
import gradio as gr
|
| 3 |
from huggingface_hub import login
|
| 4 |
-
from smolagents import DuckDuckGoSearchTool, ToolCallingAgent, InferenceClientModel
|
| 5 |
|
| 6 |
login(os.environ.get('HF_TOKEN'))
|
| 7 |
|
| 8 |
def create_agent():
|
|
|
|
| 9 |
tools = [DuckDuckGoSearchTool()]
|
| 10 |
-
model =
|
| 11 |
-
return ToolCallingAgent(tools=tools,
|
| 12 |
|
| 13 |
|
| 14 |
|
|
|
|
| 1 |
import os
|
| 2 |
import gradio as gr
|
| 3 |
from huggingface_hub import login
|
| 4 |
+
from smolagents import DuckDuckGoSearchTool, ToolCallingAgent, InferenceClientModel, TransformersModel
|
| 5 |
|
| 6 |
login(os.environ.get('HF_TOKEN'))
|
| 7 |
|
| 8 |
def create_agent(model_id="meta-llama/Llama-3.2-3B-Instruct"):
    """Build a ToolCallingAgent backed by a locally loaded transformers model.

    Args:
        model_id: Hugging Face model repo id to load with ``TransformersModel``.
            Defaults to the previously hard-coded Llama 3.2 3B Instruct model,
            so existing callers are unaffected.

    Returns:
        A ``smolagents.ToolCallingAgent`` equipped with DuckDuckGo search and
        the smolagents base tools (``add_base_tools=True``).
    """
    tools = [DuckDuckGoSearchTool()]
    # TransformersModel runs the weights locally, replacing the hosted
    # inference API path (InferenceClientModel) this code used before.
    model = TransformersModel(model_id=model_id)
    return ToolCallingAgent(tools=tools, model=model, add_base_tools=True)
|
| 13 |
|
| 14 |
|
| 15 |
|