alialhaddad commited on
Commit
53da617
·
verified ·
1 Parent(s): 4e6198d

Use a local transformers model instead of InferenceClientModel

Browse files
Files changed (1) hide show
  1. app.py +4 -3
app.py CHANGED
@@ -1,14 +1,15 @@
1
  import os
2
  import gradio as gr
3
  from huggingface_hub import login
4
- from smolagents import DuckDuckGoSearchTool, ToolCallingAgent, InferenceClientModel
5
 
6
  login(os.environ.get('HF_TOKEN'))
7
 
8
  def create_agent():
 
9
  tools = [DuckDuckGoSearchTool()]
10
- model = InferenceClientModel(tool_choice="auto")
11
- return ToolCallingAgent(tools=tools, model=model)
12
 
13
 
14
 
 
1
  import os
2
  import gradio as gr
3
  from huggingface_hub import login
4
+ from smolagents import DuckDuckGoSearchTool, ToolCallingAgent, InferenceClientModel, TransformersModel
5
 
6
  login(os.environ.get('HF_TOKEN'))
7
 
8
def create_agent(model_id="meta-llama/Llama-3.2-3B-Instruct"):
    """Build a tool-calling agent backed by a locally-run transformers model.

    Args:
        model_id: Hugging Face model identifier loaded via ``TransformersModel``.
            Defaults to the Llama-3.2-3B instruct checkpoint, preserving the
            original hard-coded behavior.

    Returns:
        A ``ToolCallingAgent`` equipped with DuckDuckGo search and, via
        ``add_base_tools=True``, the smolagents default base tools.
    """
    tools = [DuckDuckGoSearchTool()]
    # TransformersModel runs the checkpoint locally instead of calling the
    # hosted inference API (the previous InferenceClientModel approach).
    model = TransformersModel(model_id=model_id)
    return ToolCallingAgent(tools=tools, model=model, add_base_tools=True)
13
 
14
 
15