faizaltkl commited on
Commit
8122399
·
verified ·
1 Parent(s): 1b4285d

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +10 -11
app.py CHANGED
@@ -1,9 +1,8 @@
1
- from smolagents import CodeAgent,DuckDuckGoSearchTool, HfApiModel,load_tool,tool
2
  import datetime
3
  import requests
4
  import pytz
5
  import yaml
6
- from tools.final_answer import FinalAnswerTool
7
 
8
  from Gradio_UI import GradioUI
9
 
@@ -35,24 +34,25 @@ def get_current_time_in_timezone(timezone: str) -> str:
35
 
36
 
37
  final_answer = FinalAnswerTool()
38
- model = HfApiModel(
39
- max_tokens=2096,
40
- temperature=0.5,
41
- model_id='https://wxknx1kg971u7k1n.us-east-1.aws.endpoints.huggingface.cloud',# it is possible that this model may be overloaded
42
- #model_id='deepseek-ai/DeepSeek-R1',
43
- custom_role_conversions=None,
44
  )
45
 
46
 
 
47
  # Import tool from Hub ok
48
  image_generation_tool = load_tool("agents-course/text-to-image", trust_remote_code=True)
49
 
50
  with open("prompts.yaml", 'r') as stream:
51
  prompt_templates = yaml.safe_load(stream)
52
 
 
53
  agent = CodeAgent(
54
  model=model,
55
- tools=[final_answer,get_current_time_in_timezone,image_generation_tool], ## add your tools here (don't remove final answer)
56
  max_steps=6,
57
  verbosity_level=1,
58
  grammar=None,
@@ -62,5 +62,4 @@ agent = CodeAgent(
62
  prompt_templates=prompt_templates
63
  )
64
 
65
-
66
- GradioUI(agent).launch()
 
1
+ from smolagents import CodeAgent, DuckDuckGoSearchTool, FinalAnswerTool, InferenceClientModel, load_tool, tool
2
  import datetime
3
  import requests
4
  import pytz
5
  import yaml
 
6
 
7
  from Gradio_UI import GradioUI
8
 
 
34
 
35
 
36
# The final-answer tool must always be present: it is how the agent
# terminates a run and returns its result to the caller.
final_answer = FinalAnswerTool()

# Serverless inference client for the agent's LLM.
# NOTE(review): max_tokens=2096 is the value shipped by the course template;
# confirm it is intentional (2048 would be the usual power of two).
model = InferenceClientModel(
    model_id='Qwen/Qwen2.5-Coder-32B-Instruct',
    max_tokens=2096,
    temperature=0.5,
    custom_role_conversions=None,
)
43
 
44
 
45
+
46
# Fetch the text-to-image tool from the Hugging Face Hub.
# trust_remote_code=True is required because the tool ships executable code;
# only enable it for repositories you trust.
image_generation_tool = load_tool("agents-course/text-to-image", trust_remote_code=True)

# Load the agent's prompt templates from the bundled YAML file.
with open("prompts.yaml", 'r') as fh:
    prompt_templates = yaml.safe_load(fh)
51
 
52
+ # We're creating our CodeAgent
53
  agent = CodeAgent(
54
  model=model,
55
+ tools=[final_answer], # add your tools here (don't remove final_answer)
56
  max_steps=6,
57
  verbosity_level=1,
58
  grammar=None,
 
62
  prompt_templates=prompt_templates
63
  )
64
 
65
# Wrap the configured agent in the Gradio front-end and start serving.
ui = GradioUI(agent)
ui.launch()