Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
|
@@ -19,26 +19,27 @@ HISTORY_DURATION_SECONDS = 60 * 60
|
|
| 19 |
# Structure: { "id": "uuid", "url": "string", "status": "pending/ok/error/checking",
|
| 20 |
# "ip": "string", "responseTime": float_ms, "lastChecked": "iso_string_utc",
|
| 21 |
# "history": [{"timestamp": float_unix_ts_seconds, "status": "ok/error"}],
|
| 22 |
-
# "userId": "string_user_uuid",
|
| 23 |
# "_thread": threading.Thread_object, "_stop_event": threading.Event_object }
|
| 24 |
monitored_urls_store = {}
|
| 25 |
-
lock = threading.Lock()
|
| 26 |
|
| 27 |
# --- Helper Functions ---
|
| 28 |
def save_data_to_json():
|
| 29 |
-
|
| 30 |
-
|
| 31 |
-
|
| 32 |
-
|
| 33 |
-
|
| 34 |
-
|
| 35 |
-
|
| 36 |
-
|
| 37 |
-
|
| 38 |
-
|
| 39 |
-
|
| 40 |
-
|
| 41 |
-
|
|
|
|
| 42 |
|
| 43 |
def load_data_from_json():
|
| 44 |
global monitored_urls_store
|
|
@@ -57,7 +58,7 @@ def load_data_from_json():
|
|
| 57 |
data_item.setdefault('responseTime', None)
|
| 58 |
data_item.setdefault('lastChecked', None)
|
| 59 |
data_item.setdefault('history', data_item.get('history', []))
|
| 60 |
-
data_item.setdefault('userId', None)
|
| 61 |
temp_store[current_id] = data_item
|
| 62 |
|
| 63 |
with lock:
|
|
@@ -75,8 +76,6 @@ def load_data_from_json():
|
|
| 75 |
|
| 76 |
url_ids_to_start_monitoring = []
|
| 77 |
with lock:
|
| 78 |
-
# We don't know the user context here, so all threads for loaded URLs are started.
|
| 79 |
-
# The GET endpoint will filter by user.
|
| 80 |
url_ids_to_start_monitoring = list(monitored_urls_store.keys())
|
| 81 |
|
| 82 |
for url_id in url_ids_to_start_monitoring:
|
|
@@ -170,7 +169,7 @@ def execute_url_check(url_id_to_check):
|
|
| 170 |
live_url_data['history'] = current_history_list
|
| 171 |
prune_url_history(live_url_data)
|
| 172 |
|
| 173 |
-
save_data_to_json()
|
| 174 |
print(f"Finished check for {live_url_data['url']}: {final_check_status}, {http_response_time_ms} ms")
|
| 175 |
|
| 176 |
def pinger_thread_function(url_id_param, stop_event_param):
|
|
@@ -224,13 +223,12 @@ def serve_index():
|
|
| 224 |
def get_all_urls_for_user():
|
| 225 |
user_id = request.headers.get('X-User-ID')
|
| 226 |
if not user_id:
|
| 227 |
-
# Return empty list if no user ID is provided, or could be 400 error
|
| 228 |
return jsonify([]), 200
|
| 229 |
|
| 230 |
with lock:
|
| 231 |
response_list = []
|
| 232 |
for data_item in monitored_urls_store.values():
|
| 233 |
-
if data_item.get('userId') == user_id:
|
| 234 |
display_item = data_item.copy()
|
| 235 |
display_item.pop("_thread", None)
|
| 236 |
display_item.pop("_stop_event", None)
|
|
@@ -263,7 +261,6 @@ def add_new_url_for_user():
|
|
| 263 |
with lock:
|
| 264 |
normalized_new_url = input_url.rstrip('/').lower()
|
| 265 |
for existing_url_data in monitored_urls_store.values():
|
| 266 |
-
# Check for duplicates only for the current user
|
| 267 |
if existing_url_data.get('userId') == user_id and \
|
| 268 |
existing_url_data['url'].rstrip('/').lower() == normalized_new_url:
|
| 269 |
return jsonify({"error": "URL already monitored by you"}), 409
|
|
@@ -274,12 +271,12 @@ def add_new_url_for_user():
|
|
| 274 |
url_entry_to_add = {
|
| 275 |
"id": new_url_id, "url": input_url, "status": 'pending',
|
| 276 |
"ip": resolved_ip, "responseTime": None, "lastChecked": None, "history": [],
|
| 277 |
-
"userId": user_id
|
| 278 |
}
|
| 279 |
|
| 280 |
response_payload = url_entry_to_add.copy()
|
| 281 |
monitored_urls_store[new_url_id] = url_entry_to_add
|
| 282 |
-
save_data_to_json()
|
| 283 |
|
| 284 |
start_url_monitoring_thread(new_url_id)
|
| 285 |
return jsonify(response_payload), 201
|
|
@@ -295,14 +292,12 @@ def delete_existing_url_for_user(target_url_id):
|
|
| 295 |
if target_url_id in monitored_urls_store:
|
| 296 |
url_entry_to_check = monitored_urls_store[target_url_id]
|
| 297 |
|
| 298 |
-
# Check if the URL belongs to the requesting user
|
| 299 |
if url_entry_to_check.get('userId') != user_id:
|
| 300 |
-
# Do not reveal existence of URL; treat as if not found for this user
|
| 301 |
return jsonify({"error": "URL not found or permission denied"}), 404
|
| 302 |
|
| 303 |
-
stop_url_monitoring_thread(target_url_id)
|
| 304 |
removed_url_entry = monitored_urls_store.pop(target_url_id)
|
| 305 |
-
save_data_to_json()
|
| 306 |
|
| 307 |
response_data = removed_url_entry.copy()
|
| 308 |
response_data.pop("_thread", None)
|
|
|
|
| 19 |
# Structure: { "id": "uuid", "url": "string", "status": "pending/ok/error/checking",
|
| 20 |
# "ip": "string", "responseTime": float_ms, "lastChecked": "iso_string_utc",
|
| 21 |
# "history": [{"timestamp": float_unix_ts_seconds, "status": "ok/error"}],
|
| 22 |
+
# "userId": "string_user_uuid",
|
| 23 |
# "_thread": threading.Thread_object, "_stop_event": threading.Event_object }
|
| 24 |
monitored_urls_store = {}
|
| 25 |
+
lock = threading.Lock() # Non-reentrant lock
|
| 26 |
|
| 27 |
# --- Helper Functions ---
|
| 28 |
def save_data_to_json():
    """Serialize the monitored-URLs store to DATA_FILE as pretty-printed JSON.

    Must be called with the module-level ``lock`` already held by the
    caller; this function performs no locking of its own (the ``with lock:``
    was removed here because every caller acquires it first).
    """
    # Drop the runtime-only fields (thread handle and stop event) from each
    # entry before writing, since they are not JSON-serializable.
    serializable_data = {
        url_id: {key: value for key, value in data.items()
                 if key not in ("_thread", "_stop_event")}
        for url_id, data in monitored_urls_store.items()
    }
    try:
        with open(DATA_FILE, 'w') as f:
            json.dump(serializable_data, f, indent=2)
        print(f"Data saved to {DATA_FILE}")
    except IOError as e:
        print(f"Error saving data to {DATA_FILE}: {e}")
|
| 43 |
|
| 44 |
def load_data_from_json():
|
| 45 |
global monitored_urls_store
|
|
|
|
| 58 |
data_item.setdefault('responseTime', None)
|
| 59 |
data_item.setdefault('lastChecked', None)
|
| 60 |
data_item.setdefault('history', data_item.get('history', []))
|
| 61 |
+
data_item.setdefault('userId', None)
|
| 62 |
temp_store[current_id] = data_item
|
| 63 |
|
| 64 |
with lock:
|
|
|
|
| 76 |
|
| 77 |
url_ids_to_start_monitoring = []
|
| 78 |
with lock:
|
|
|
|
|
|
|
| 79 |
url_ids_to_start_monitoring = list(monitored_urls_store.keys())
|
| 80 |
|
| 81 |
for url_id in url_ids_to_start_monitoring:
|
|
|
|
| 169 |
live_url_data['history'] = current_history_list
|
| 170 |
prune_url_history(live_url_data)
|
| 171 |
|
| 172 |
+
save_data_to_json() # Called while lock is held
|
| 173 |
print(f"Finished check for {live_url_data['url']}: {final_check_status}, {http_response_time_ms} ms")
|
| 174 |
|
| 175 |
def pinger_thread_function(url_id_param, stop_event_param):
|
|
|
|
| 223 |
def get_all_urls_for_user():
|
| 224 |
user_id = request.headers.get('X-User-ID')
|
| 225 |
if not user_id:
|
|
|
|
| 226 |
return jsonify([]), 200
|
| 227 |
|
| 228 |
with lock:
|
| 229 |
response_list = []
|
| 230 |
for data_item in monitored_urls_store.values():
|
| 231 |
+
if data_item.get('userId') == user_id:
|
| 232 |
display_item = data_item.copy()
|
| 233 |
display_item.pop("_thread", None)
|
| 234 |
display_item.pop("_stop_event", None)
|
|
|
|
| 261 |
with lock:
|
| 262 |
normalized_new_url = input_url.rstrip('/').lower()
|
| 263 |
for existing_url_data in monitored_urls_store.values():
|
|
|
|
| 264 |
if existing_url_data.get('userId') == user_id and \
|
| 265 |
existing_url_data['url'].rstrip('/').lower() == normalized_new_url:
|
| 266 |
return jsonify({"error": "URL already monitored by you"}), 409
|
|
|
|
| 271 |
url_entry_to_add = {
|
| 272 |
"id": new_url_id, "url": input_url, "status": 'pending',
|
| 273 |
"ip": resolved_ip, "responseTime": None, "lastChecked": None, "history": [],
|
| 274 |
+
"userId": user_id
|
| 275 |
}
|
| 276 |
|
| 277 |
response_payload = url_entry_to_add.copy()
|
| 278 |
monitored_urls_store[new_url_id] = url_entry_to_add
|
| 279 |
+
save_data_to_json() # Called while lock is held
|
| 280 |
|
| 281 |
start_url_monitoring_thread(new_url_id)
|
| 282 |
return jsonify(response_payload), 201
|
|
|
|
| 292 |
if target_url_id in monitored_urls_store:
|
| 293 |
url_entry_to_check = monitored_urls_store[target_url_id]
|
| 294 |
|
|
|
|
| 295 |
if url_entry_to_check.get('userId') != user_id:
|
|
|
|
| 296 |
return jsonify({"error": "URL not found or permission denied"}), 404
|
| 297 |
|
| 298 |
+
stop_url_monitoring_thread(target_url_id) # Must be called with lock held
|
| 299 |
removed_url_entry = monitored_urls_store.pop(target_url_id)
|
| 300 |
+
save_data_to_json() # Called while lock is held
|
| 301 |
|
| 302 |
response_data = removed_url_entry.copy()
|
| 303 |
response_data.pop("_thread", None)
|