Zanqi committed on
Commit
b85f271
·
1 Parent(s): fe734ca

Refactor app.py for code style consistency and enhance tool integration

Browse files

- Improved formatting and spacing in function definitions and imports for better readability.
- Added DuckDuckGoSearchTool to the agent's tools list.
- Updated comments for clarity and consistency.

Files changed (1) hide show
  1. app.py +20 -11
app.py CHANGED
@@ -1,4 +1,4 @@
1
- from smolagents import CodeAgent,DuckDuckGoSearchTool, HfApiModel,load_tool,tool
2
  import datetime
3
  import requests
4
  import pytz
@@ -7,17 +7,21 @@ from tools.final_answer import FinalAnswerTool
7
 
8
  from Gradio_UI import GradioUI
9
 
 
10
  # Below is an example of a tool that does nothing. Amaze us with your creativity !
11
  @tool
12
- def my_custom_tool(arg1:str, arg2:int)-> str: #it's import to specify the return type
13
- #Keep this format for the description / args / args description but feel free to modify the tool
14
- """A tool that does nothing yet
 
 
15
  Args:
16
  arg1: the first argument
17
  arg2: the second argument
18
  """
19
  return "What magic will you build ?"
20
 
 
21
  @tool
22
  def get_current_time_in_timezone(timezone: str) -> str:
23
  """A tool that fetches the current local time in a specified timezone.
@@ -33,15 +37,16 @@ def get_current_time_in_timezone(timezone: str) -> str:
33
  except Exception as e:
34
  return f"Error fetching time for timezone '{timezone}': {str(e)}"
35
 
 
36
  final_answer = FinalAnswerTool()
37
 
38
  # If the agent does not answer, the model is overloaded, please use another model or the following Hugging Face Endpoint that also contains qwen2.5 coder:
39
- # model_id='https://pflgm2locj2t89co.us-east-1.aws.endpoints.huggingface.cloud'
40
 
41
  model = HfApiModel(
42
  max_tokens=2096,
43
  temperature=0.5,
44
- model_id='Qwen/Qwen2.5-Coder-32B-Instruct',# it is possible that this model may be overloaded
45
  custom_role_conversions=None,
46
  )
47
 
@@ -49,20 +54,24 @@ model = HfApiModel(
49
  # Import tool from Hub
50
  image_generation_tool = load_tool("agents-course/text-to-image", trust_remote_code=True)
51
 
52
- with open("prompts.yaml", 'r') as stream:
53
  prompt_templates = yaml.safe_load(stream)
54
-
55
  agent = CodeAgent(
56
  model=model,
57
- tools=[final_answer, get_current_time_in_timezone], ## add your tools here (don't remove final answer)
 
 
 
 
58
  max_steps=6,
59
  verbosity_level=1,
60
  grammar=None,
61
  planning_interval=None,
62
  name=None,
63
  description=None,
64
- prompt_templates=prompt_templates
65
  )
66
 
67
 
68
- GradioUI(agent).launch()
 
1
+ from smolagents import CodeAgent, DuckDuckGoSearchTool, HfApiModel, load_tool, tool
2
  import datetime
3
  import requests
4
  import pytz
 
7
 
8
  from Gradio_UI import GradioUI
9
 
10
+
11
  # Below is an example of a tool that does nothing. Amaze us with your creativity !
12
  @tool
13
+ def my_custom_tool(
14
+ arg1: str, arg2: int
15
+ ) -> str: # it's important to specify the return type
16
+ # Keep this format for the description / args / args description but feel free to modify the tool
17
+ """A tool that does nothing yet
18
  Args:
19
  arg1: the first argument
20
  arg2: the second argument
21
  """
22
  return "What magic will you build ?"
23
 
24
+
25
  @tool
26
  def get_current_time_in_timezone(timezone: str) -> str:
27
  """A tool that fetches the current local time in a specified timezone.
 
37
  except Exception as e:
38
  return f"Error fetching time for timezone '{timezone}': {str(e)}"
39
 
40
+
41
  final_answer = FinalAnswerTool()
42
 
43
  # If the agent does not answer, the model is overloaded, please use another model or the following Hugging Face Endpoint that also contains qwen2.5 coder:
44
+ # model_id='https://pflgm2locj2t89co.us-east-1.aws.endpoints.huggingface.cloud'
45
 
46
  model = HfApiModel(
47
  max_tokens=2096,
48
  temperature=0.5,
49
+ model_id="Qwen/Qwen2.5-Coder-32B-Instruct", # it is possible that this model may be overloaded
50
  custom_role_conversions=None,
51
  )
52
 
 
54
  # Import tool from Hub
55
  image_generation_tool = load_tool("agents-course/text-to-image", trust_remote_code=True)
56
 
57
+ with open("prompts.yaml", "r") as stream:
58
  prompt_templates = yaml.safe_load(stream)
59
+
60
  agent = CodeAgent(
61
  model=model,
62
+ tools=[
63
+ final_answer,
64
+ get_current_time_in_timezone,
65
+ DuckDuckGoSearchTool(),
66
+ ], ## add your tools here (don't remove final answer)
67
  max_steps=6,
68
  verbosity_level=1,
69
  grammar=None,
70
  planning_interval=None,
71
  name=None,
72
  description=None,
73
+ prompt_templates=prompt_templates,
74
  )
75
 
76
 
77
+ GradioUI(agent).launch()