huaholou committed on
Commit
256f653
·
verified ·
1 Parent(s): 43c713d
Files changed (1) hide show
  1. app.py +70 -42
app.py CHANGED
@@ -1,66 +1,94 @@
1
- from smolagents import CodeAgent,DuckDuckGoSearchTool, HfApiModel,load_tool,tool
2
- import datetime
3
- import requests
4
- import pytz
5
- import yaml
6
  from tools.final_answer import FinalAnswerTool
7
-
8
  from Gradio_UI import GradioUI
9
 
10
- # Below is an example of a tool that does nothing. Amaze us with your creativity !
11
- @tool
12
- def my_custom_tool(arg1:str, arg2:int)-> str: #it's import to specify the return type
13
- #Keep this format for the description / args / args description but feel free to modify the tool
14
- """A tool that does nothing yet
15
- Args:
16
- arg1: the first argument
17
- arg2: the second argument
18
- """
19
- return "What magic will you build ?"
20
 
 
 
 
21
  @tool
22
- def get_current_time_in_timezone(timezone: str) -> str:
23
- """A tool that fetches the current local time in a specified timezone.
24
- Args:
25
- timezone: A string representing a valid timezone (e.g., 'America/New_York').
26
  """
 
27
  try:
28
- # Create timezone object
29
- tz = pytz.timezone(timezone)
30
- # Get current time in that timezone
31
- local_time = datetime.datetime.now(tz).strftime("%Y-%m-%d %H:%M:%S")
32
- return f"The current local time in {timezone} is: {local_time}"
33
- except Exception as e:
34
- return f"Error fetching time for timezone '{timezone}': {str(e)}"
35
 
 
 
 
 
36
 
37
- final_answer = FinalAnswerTool()
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
38
 
39
- # If the agent does not answer, the model is overloaded, please use another model or the following Hugging Face Endpoint that also contains qwen2.5 coder:
40
- # model_id='https://pflgm2locj2t89co.us-east-1.aws.endpoints.huggingface.cloud'
41
 
42
- model = HfApiModel(
43
- max_tokens=2096,
44
- temperature=0.5,
45
- model_id='Qwen/Qwen2.5-Coder-32B-Instruct',# it is possible that this model may be overloaded
46
- custom_role_conversions=None,
47
- )
48
 
49
 
50
- # Import tool from Hub
 
 
51
  image_generation_tool = load_tool("agents-course/text-to-image", trust_remote_code=True)
52
 
53
- with open("prompts.yaml", 'r') as stream:
 
 
 
 
54
  prompt_templates = yaml.safe_load(stream)
55
-
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
56
  agent = CodeAgent(
57
  model=model,
58
- tools=[final_answer, get_current_time_in_timezone, my_custom_tool, image_generation_tool],
59
  max_steps=6,
60
  verbosity_level=1,
61
  prompt_templates=prompt_templates
62
  )
63
 
64
 
65
-
66
- GradioUI(agent).launch()
 
 
 
1
+ from smolagents import CodeAgent, InferenceClientModel, load_tool, tool
 
 
 
 
2
  from tools.final_answer import FinalAnswerTool
 
3
  from Gradio_UI import GradioUI
4
 
5
+ import requests
6
+ import yaml
 
 
 
 
 
 
 
 
7
 
8
+ # ----------------------------
9
+ # 1️⃣ OUTIL : MÉTÉO À PARIS
10
+ # ----------------------------
11
@tool
def get_weather_paris() -> str:
    """Return the current weather in Paris (temperature + short description).

    Takes no arguments. Queries the free Open-Meteo forecast API (no API
    key required) at fixed coordinates for Paris and maps the returned WMO
    weather code to a short French description.

    Returns:
        A French sentence with the current temperature and conditions,
        or a French error message if the request fails.
    """
    try:
        # Free Open-Meteo API (no key needed); coordinates hard-coded to Paris.
        url = (
            "https://api.open-meteo.com/v1/forecast"
            "?latitude=48.8566&longitude=2.3522&current_weather=true"
        )
        # Explicit timeout: without it a stalled connection would hang the
        # agent step forever (requests has no default timeout).
        response = requests.get(url, timeout=10).json()

        weather = response["current_weather"]
        temperature = weather["temperature"]
        weather_code = weather["weathercode"]

        # Minimal mapping of WMO weather codes to French descriptions;
        # unknown codes fall back to a generic message below.
        descriptions = {
            0: "ciel clair",
            1: "principalement clair",
            2: "partiellement nuageux",
            3: "couvert",
            45: "brouillard",
            48: "brouillard givrant",
            51: "bruine légère",
            53: "bruine",
            55: "bruine forte",
            61: "pluie légère",
            63: "pluie",
            65: "pluie forte",
            71: "neige légère",
            73: "neige",
            75: "neige forte",
        }

        description = descriptions.get(weather_code, "conditions météo inconnues")

        return f"À Paris, il fait {temperature}°C avec un {description}."

    except Exception as e:
        # Best-effort tool: report the failure as text so the agent can react
        # instead of crashing the run.
        return f"Erreur lors de la récupération de la météo : {str(e)}"
 
 
52
 
53
 
54
# ---------------------------------------------------
# 2) TOOL: TEXT-TO-IMAGE GENERATION
# ---------------------------------------------------
# Loaded from the Hub; trust_remote_code is required because the tool ships
# its own implementation code.
image_generation_tool = load_tool("agents-course/text-to-image", trust_remote_code=True)


# ---------------------------------
# 3) LOAD THE SYSTEM PROMPTS
# ---------------------------------
# Explicit UTF-8: the prompts contain non-ASCII (French) text, and the
# platform default encoding is not guaranteed to be UTF-8 (e.g. on Windows).
with open("prompts.yaml", "r", encoding="utf-8") as stream:
    prompt_templates = yaml.safe_load(stream)


# -------------------------
# 4) MODEL
# -------------------------
model = InferenceClientModel(
    max_tokens=2048,
    temperature=0.5,
    model_id="Qwen/Qwen2.5-Coder-32B-Instruct"
)


# -------------------------
# 5) AGENT CREATION
# -------------------------
final_answer = FinalAnswerTool()

agent = CodeAgent(
    model=model,
    tools=[final_answer, get_weather_paris, image_generation_tool],
    max_steps=6,
    verbosity_level=1,
    prompt_templates=prompt_templates
)


# -------------------------
# 6) LAUNCH THE GRADIO UI
# -------------------------
GradioUI(agent).launch()