Abdelkarim Bengrine committed on
Commit
3051d77
·
1 Parent(s): 05f7d77

fix: oauth

Browse files
Files changed (1) hide show
  1. echo_server.py +184 -1
echo_server.py CHANGED
@@ -1,10 +1,190 @@
1
  import requests
2
  from bot_discord import trigger_message
3
  from mcp.server.fastmcp import FastMCP
 
 
 
 
4
 
5
  mcp = FastMCP("Chess Analysis Server")
6
 
7
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
8
  def _get_games_json(archive_url: str) -> dict:
9
  r = requests.get(archive_url, headers={"User-Agent": "python-chess-data/1.0"})
10
  r.raise_for_status()
@@ -27,10 +207,13 @@ def analyze_latest_game(username: str, player_color: str):
27
  return {"player_username": username, "pgn": game["games"][-1]["pgn"]}
28
 
29
 
30
- @mcp.tool(description='Envoie un message sur Discord pour annoncer qu\'un participant rejoint un tournoi.')
 
 
31
  def send_message_discord(nom: str, tournoi: str, lien: str, date: str, lieu: str):
32
  trigger_message(nom, tournoi, lien, date, lieu)
33
 
 
34
  # @mcp.tool(description="Extracts FEN from a Chess.com game URL")
35
  # def get_game_pgn_from_url(game_url: str, player_username: str):
36
  # game_id = game_url.split("/")[-1]
 
1
  import requests
2
  from bot_discord import trigger_message
3
  from mcp.server.fastmcp import FastMCP
4
+ from typing import List, Dict
5
+ import re
6
+ from concurrent.futures import ThreadPoolExecutor, as_completed
7
+ from bs4 import BeautifulSoup
8
 
9
  mcp = FastMCP("Chess Analysis Server")
10
 
11
 
12
def _fetch_tournaments_page() -> str:
    """Fetch the raw HTML of the echecsfrance.com tournaments listing page.

    Returns:
        The page body as text, or "" when the request fails.
    """
    url = "https://www.echecsfrance.com/en/tournaments"
    try:
        # A timeout keeps the MCP tool from hanging forever on a stalled
        # connection (the original call had no timeout at all).
        response = requests.get(url, timeout=10)
        response.raise_for_status()
        return response.text
    except requests.RequestException as e:
        print(f"Error fetching the page: {e}")
        return ""
22
+
23
+
24
+ def _extract_tournament_references(html_content: str) -> List[str]:
25
+ """Extract all FicheTournoi.aspx?Ref=XXXXX patterns from HTML."""
26
+ # Regex pattern to match FicheTournoi.aspx?Ref= followed by digits
27
+ pattern = r"FicheTournoi\.aspx\?Ref=\d+"
28
+
29
+ # Find all matches
30
+ matches = re.findall(pattern, html_content)
31
+
32
+ # Remove duplicates while preserving order
33
+ unique_matches = list(dict.fromkeys(matches))
34
+
35
+ return unique_matches
36
+
37
+
38
def _fetch_tournament_details(tournament_ref: str) -> Dict[str, str]:
    """Fetch the full tournament record from its FFE (echecs.asso.fr) page.

    Args:
        tournament_ref: Relative reference such as "FicheTournoi.aspx?Ref=12345".

    Returns:
        A dict with the tournament fields (name, dates, nombre_rondes, cadence,
        organisateur, arbitre, adresse, contact, premier_prix,
        inscription_senior, inscription_jeunes, annonce) plus "url".
        Every field is set to "Error" when the page cannot be fetched or parsed.
    """
    base_url = "https://www.echecs.asso.fr/"
    full_url = base_url + tournament_ref

    # Result key -> span id suffix on the FFE page. Single source of truth
    # for both the success-path and the error-path dicts, so the two key
    # sets can never drift apart.
    fields = {
        "name": "LabelNom",
        "dates": "LabelDates",
        "nombre_rondes": "LabelNbrRondes",
        "cadence": "LabelCadence",
        "organisateur": "LabelOrganisateur",
        "arbitre": "LabelArbitre",
        "adresse": "LabelAdresse",
        "contact": "LabelContact",
        "premier_prix": "LabelPrix1",
        "inscription_senior": "LabelInscriptionSenior",
        "inscription_jeunes": "LabelInscriptionJeune",
        "annonce": "LabelAnnonce",
    }

    try:
        # Context manager guarantees the HTTP session is closed; the
        # original created a Session and leaked it on every call.
        with requests.Session() as session:
            session.headers.update(
                {
                    "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36"
                }
            )
            response = session.get(full_url, timeout=10)
            response.raise_for_status()

        soup = BeautifulSoup(response.text, "html.parser")

        def extract_field(field_id: str) -> str:
            """Return the stripped text of the span with *field_id*, or "N/A"."""
            elem = soup.find("span", {"id": field_id})
            return elem.get_text().strip() if elem else "N/A"

        details = {
            key: extract_field(f"ctl00_ContentPlaceHolderMain_{suffix}")
            for key, suffix in fields.items()
        }
        details["url"] = full_url
        return details
    except Exception as e:
        print(f"Error fetching tournament {tournament_ref}: {e}")
        error_details = {key: "Error" for key in fields}
        error_details["url"] = full_url
        return error_details
105
+
106
+
107
@mcp.tool(
    description="Récupère les informations des tournois d'échecs à venir (nom, dates, nombre de rondes, organisateur)"
)
def get_tournaments_upcoming():
    """Quickly fetch information about upcoming chess tournaments.

    Optimized variant that only retrieves the main fields for a faster
    response; fetches every tournament referenced on the
    echecsfrance.com listing page.

    Returns:
        A dict with "total_tournaments" (int) and "tournaments" (list of
        dicts with keys: name, dates, nombre_rondes, organisateur, url).
    """
    html_content = _fetch_tournaments_page()

    if not html_content:
        return {"total_tournaments": 0, "tournaments": []}

    tournament_refs = _extract_tournament_references(html_content)

    def _fetch_basic_details(tournament_ref: str) -> Dict[str, str]:
        """Fetch only the basic tournament fields from the FFE page."""
        base_url = "https://www.echecs.asso.fr/"
        full_url = base_url + tournament_ref

        # Result key -> span id on the FFE page (basic subset only).
        # Shared by the success and error paths so the key sets match.
        fields = {
            "name": "ctl00_ContentPlaceHolderMain_LabelNom",
            "dates": "ctl00_ContentPlaceHolderMain_LabelDates",
            "nombre_rondes": "ctl00_ContentPlaceHolderMain_LabelNbrRondes",
            "organisateur": "ctl00_ContentPlaceHolderMain_LabelOrganisateur",
        }

        try:
            # Context manager closes the HTTP session; the original
            # created a Session per call and never closed it.
            with requests.Session() as session:
                session.headers.update(
                    {
                        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36"
                    }
                )
                # Short timeout: this is the "fast" variant of the fetch.
                response = session.get(full_url, timeout=5)
                response.raise_for_status()

            soup = BeautifulSoup(response.text, "html.parser")

            def extract_field(field_id: str) -> str:
                elem = soup.find("span", {"id": field_id})
                return elem.get_text().strip() if elem else "N/A"

            details = {key: extract_field(fid) for key, fid in fields.items()}
            details["url"] = full_url
            return details
        except Exception as e:
            # Log the failure instead of silently discarding it
            # (the original bound `e` but never used it).
            print(f"Error fetching tournament {tournament_ref}: {e}")
            details = {key: "Error" for key in fields}
            details["url"] = full_url
            return details

    tournament_details = []

    # Fan out the per-tournament fetches across threads: the work is
    # network-bound, so a thread pool gives near-linear speedup.
    with ThreadPoolExecutor(max_workers=15) as executor:
        future_to_ref = {
            executor.submit(_fetch_basic_details, ref): ref for ref in tournament_refs
        }

        for future in as_completed(future_to_ref):
            ref = future_to_ref[future]
            try:
                tournament_details.append(future.result())
            except Exception as e:
                print(f"Error processing tournament {ref}: {e}")

    return {
        "total_tournaments": len(tournament_details),
        "tournaments": tournament_details,
    }
186
+
187
+
188
  def _get_games_json(archive_url: str) -> dict:
189
  r = requests.get(archive_url, headers={"User-Agent": "python-chess-data/1.0"})
190
  r.raise_for_status()
 
207
  return {"player_username": username, "pgn": game["games"][-1]["pgn"]}
208
 
209
 
210
@mcp.tool(
    description="Envoie un message sur Discord pour annoncer qu'un participant rejoint un tournoi."
)
def send_message_discord(nom: str, tournoi: str, lien: str, date: str, lieu: str):
    """Announce on Discord that a participant has joined a tournament.

    Forwards all arguments unchanged to ``bot_discord.trigger_message``.

    Args:
        nom: Participant name.
        tournoi: Tournament name.
        lien: Tournament link.
        date: Tournament date.
        lieu: Tournament location.
    """
    trigger_message(nom, tournoi, lien, date, lieu)
215
 
216
+
217
  # @mcp.tool(description="Extracts FEN from a Chess.com game URL")
218
  # def get_game_pgn_from_url(game_url: str, player_username: str):
219
  # game_id = game_url.split("/")[-1]