sghorbal committed on
Commit
dd21b8c
·
1 Parent(s): f9e3089

add "wake up" capability

Browse files
Files changed (3) hide show
  1. app.py +7 -3
  2. requirements.txt +1 -0
  3. utils.py +63 -0
app.py CHANGED
@@ -7,7 +7,7 @@ import threading
7
  import gradio as gr
8
  from fastapi.responses import JSONResponse, RedirectResponse
9
  from fastapi import FastAPI, Request, Response, status
10
- from utils import manage_urls
11
 
12
  # Charger les variables d'environnement
13
  load_dotenv()
@@ -64,8 +64,12 @@ def ping_loop():
64
  while True:
65
  for url in url_list:
66
  try:
67
- requests.get(url, timeout=5)
68
- print(f"[ping] {url}")
 
 
 
 
69
  except Exception as e:
70
  print(f"[ping] ❌ {url} | {e}")
71
  time.sleep(3600) # 1h
 
7
  import gradio as gr
8
  from fastapi.responses import JSONResponse, RedirectResponse
9
  from fastapi import FastAPI, Request, Response, status
10
+ from utils import manage_urls, wake_up_space, get_wake_up_action_name
11
 
12
  # Charger les variables d'environnement
13
  load_dotenv()
 
64
  while True:
65
  for url in url_list:
66
  try:
67
+ if get_wake_up_action_name(url):
68
+ print(f"[wake-up] 🚀 {url}")
69
+ wake_up_space(url)
70
+ else:
71
+ requests.get(url, timeout=5)
72
+ print(f"[ping] ✅ {url}")
73
  except Exception as e:
74
  print(f"[ping] ❌ {url} | {e}")
75
  time.sleep(3600) # 1h
requirements.txt CHANGED
@@ -3,3 +3,4 @@ fastapi==0.115.12
3
  uvicorn==0.34.2
4
  requests==2.32.3
5
  python-dotenv==1.1.0
 
 
3
  uvicorn==0.34.2
4
  requests==2.32.3
5
  python-dotenv==1.1.0
6
+ beautifulsoup4==4.13.4
utils.py CHANGED
@@ -1,3 +1,9 @@
 
 
 
 
 
 
1
  # Fonction Gradio pour ajouter / retirer des URLs
2
  def manage_urls(action, url):
3
  global url_list
@@ -18,3 +24,60 @@ def manage_urls(action, url):
18
 
19
  # Toujours retourner la liste actuelle formatée
20
  return message, "\n".join(url_list)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import requests
2
+ from typing import Optional
3
+ from bs4 import BeautifulSoup
4
+ import time
5
+ import re
6
+
7
  # Fonction Gradio pour ajouter / retirer des URLs
8
  def manage_urls(action, url):
9
  global url_list
 
24
 
25
  # Toujours retourner la liste actuelle formatée
26
  return message, "\n".join(url_list)
27
+
28
def wake_up_space(space_url: str):
    """Wake a sleeping Hugging Face Space by submitting its restart form.

    Fetches the Space page; if the wake-up form (``<form action=".../start">``)
    is present, the Space is asleep and a POST with the form's CSRF token is
    sent to trigger a restart, followed by a 60s wait for it to come back up.

    Args:
        space_url: Public ``*.hf.space`` URL of the Space.

    Returns:
        None. Progress and errors are reported via ``print``.
    """
    action_name = get_wake_up_action_name(space_url)

    if not action_name:
        print(f"[wake-up] ❌ Invalid URL: {space_url}")
        return

    try:
        session = requests.Session()
        response = session.get(space_url, timeout=10)
        response.raise_for_status()

        # Vérifier si le formulaire de réveil est présent
        soup = BeautifulSoup(response.text, "html.parser")
        form = soup.find("form", action=action_name)

        if form:
            print("[wake-up] 💤 Space is sleeping, trying to restart it...")
            # The restart form carries a CSRF token that must be echoed back.
            csrf_token_input = form.find("input", {"name": "csrf"})
            csrf_token = csrf_token_input["value"] if csrf_token_input else ""

            # NOTE: the session already sends its own cookies; no need to pass
            # cookies=session.cookies explicitly.
            post_response = session.post(
                f"https://huggingface.co{action_name}",
                headers={
                    "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.3"
                },
                data={"csrf": csrf_token},
                timeout=10
            )

            if post_response.status_code == 200:
                print("[wake-up] 🚀 Restart triggered successfully. Waiting for the Space to be ready...")
                time.sleep(60)  # attendre que le Space redémarre
            else:
                print(f"[wake-up] ❌ Failed to restart Space: {post_response.status_code}")
        else:
            print("[wake-up] ✅ Space is already awake.")

    except requests.HTTPError as e:
        # A 403 on the initial GET means the Space is up but refuses anonymous
        # scraping — that counts as "awake". The previous substring check
        # ("403 Forbidden" in str(e)) never matched, because requests formats
        # HTTPError as "403 Client Error: Forbidden for url: ...".
        if e.response is not None and e.response.status_code == 403:
            # This is ok, the space is already awake
            print("[wake-up] ✅ Space is already awake.")
        else:
            print(f"[wake-up] ❌ Error waking up space: {e}")
    except Exception as e:
        print(f"[wake-up] ❌ Error waking up space: {e}")
74
def get_wake_up_action_name(space_url: str) -> Optional[str]:
    """Derive the Hugging Face restart-form action path from a Space URL.

    Matches URLs of the form ``https://<owner>-<space_name>.hf.space`` and
    maps them to ``/spaces/<owner>/<space_name>/start``. The owner is taken
    as everything up to the FIRST hyphen; the remainder (which may itself
    contain hyphens) is the space name.

    Args:
        space_url: Candidate Space URL; a trailing path after the host is
            tolerated (prefix match).

    Returns:
        The ``/spaces/<owner>/<space_name>/start`` action path, or ``None``
        if the URL is not a ``*.hf.space`` URL.
    """
    # Raw string with escaped dots: the original pattern left '.' unescaped,
    # so any character between "hf" and "space" (e.g. "hfxspace") matched.
    space_pattern = r'https://(\w+)-([\w-]+)\.hf\.space'
    pattern = re.compile(space_pattern, flags=re.IGNORECASE)

    if match := pattern.match(space_url):
        # Extract the owner and the space name
        owner = match.group(1)
        space_name = match.group(2)

        return f'/spaces/{owner}/{space_name}/start'
    return None