NikitaBaramiia committed on
Commit
e96076b
·
verified ·
1 Parent(s): db85cf2

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +9 -38
app.py CHANGED
@@ -1,20 +1,18 @@
1
  import os
2
- import datetime
3
- import requests
4
- import pytz
5
  import yaml
 
6
 
7
- from smolagents import CodeAgent, InferenceClientModel, load_tool, tool
8
-
9
- from tools.web_search import DuckDuckGoSearchTool
10
- from tools.final_answer import FinalAnswerTool
11
 
12
  from Gradio_UI import GradioUI
13
 
14
 
15
  # Below is an example of a tool that does nothing. Amaze us with your creativity !
16
  @tool
17
- def my_custom_tool(arg1:str, arg2:int)-> str: #it's important to specify the return type
18
  #Keep this format for the description / args / args description but feel free to modify the tool
19
  """A tool that does nothing yet
20
  Args:
@@ -23,34 +21,7 @@ def my_custom_tool(arg1:str, arg2:int)-> str: #it's import to specify the return
23
  """
24
  return "What magic will you build ?"
25
 
26
- @tool
27
- def get_current_time_in_timezone(timezone: str) -> str:
28
- """A tool that fetches the current local time in a specified timezone.
29
- Args:
30
- timezone: A string representing a valid timezone (e.g., 'America/New_York').
31
- """
32
- try:
33
- # Create timezone object
34
- tz = pytz.timezone(timezone)
35
- # Get current time in that timezone
36
- local_time = datetime.datetime.now(tz).strftime("%Y-%m-%d %H:%M:%S")
37
- return f"The current local time in {timezone} is: {local_time}"
38
- except Exception as e:
39
- return f"Error fetching time for timezone '{timezone}': {str(e)}"
40
-
41
-
42
- final_answer = FinalAnswerTool()
43
-
44
- # If the agent does not answer, the model is overloaded, please use another model or the following Hugging Face Endpoint that also contains qwen2.5 coder:
45
- # model_id='https://pflgm2locj2t89co.us-east-1.aws.endpoints.huggingface.cloud'
46
-
47
- # model = HfApiModel(
48
- # max_tokens=2096,
49
- # temperature=0.5,
50
- # model_id='Qwen/Qwen2.5-Coder-32B-Instruct',# it is possible that this model may be overloaded
51
- # custom_role_conversions=None,
52
- # )
53
-
54
  model = InferenceClientModel(
55
  model_name='Qwen/Qwen2.5-Coder-32B-Instruct',
56
  max_tokens=2096,
@@ -65,9 +36,10 @@ image_generation_tool = load_tool("agents-course/text-to-image", trust_remote_co
65
  with open("prompts.yaml", 'r') as stream:
66
  prompt_templates = yaml.safe_load(stream)
67
 
 
68
  agent = CodeAgent(
69
  model=model,
70
- tools=[DuckDuckGoSearchTool(), get_current_time_in_timezone, final_answer], ## add your tools here (don't remove final answer)
71
  max_steps=6,
72
  verbosity_level=1,
73
  grammar=None,
@@ -77,5 +49,4 @@ agent = CodeAgent(
77
  prompt_templates=prompt_templates
78
  )
79
 
80
-
81
  GradioUI(agent).launch()
 
1
  import os
 
 
 
2
  import yaml
3
+ import requests
4
 
5
+ from smolagents import (
6
+ DuckDuckGoSearchTool, WebSearchTool, FinalAnswerTool,
7
+ CodeAgent, InferenceClientModel, load_tool, tool
8
+ )
9
 
10
  from Gradio_UI import GradioUI
11
 
12
 
13
  # Below is an example of a tool that does nothing. Amaze us with your creativity !
14
  @tool
15
+ def my_custom_tool(arg1: str, arg2: int) -> str: #it's important to specify the return type
16
  #Keep this format for the description / args / args description but feel free to modify the tool
17
  """A tool that does nothing yet
18
  Args:
 
21
  """
22
  return "What magic will you build ?"
23
 
24
+ # Model init
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
25
  model = InferenceClientModel(
26
  model_name='Qwen/Qwen2.5-Coder-32B-Instruct',
27
  max_tokens=2096,
 
36
  with open("prompts.yaml", 'r') as stream:
37
  prompt_templates = yaml.safe_load(stream)
38
 
39
+
40
  agent = CodeAgent(
41
  model=model,
42
+ tools=[DuckDuckGoSearchTool(), WebSearchTool(), FinalAnswerTool()], ## add your tools here (don't remove final answer)
43
  max_steps=6,
44
  verbosity_level=1,
45
  grammar=None,
 
49
  prompt_templates=prompt_templates
50
  )
51
 
 
52
  GradioUI(agent).launch()