diff --git "a/Optomshoptxt.txt" "b/Optomshoptxt.txt"
new file mode 100644
--- /dev/null
+++ "b/Optomshoptxt.txt"
@@ -0,0 +1,2092 @@
+
+# START OF FILE app (24)_updated.py
+
+import os
+import hmac
+import hashlib
+import json
+from urllib.parse import unquote, parse_qsl, urlencode
+from flask import Flask, request, jsonify, Response, send_file
+from flask_caching import Cache
+import logging
+import threading
+import time
+from datetime import datetime
+from huggingface_hub import HfApi, hf_hub_download, utils as hf_utils
+from werkzeug.utils import secure_filename
+import requests
+from io import BytesIO
+import uuid
+from typing import Union, Optional, Dict, Any, Tuple, List
+
+
+app = Flask(__name__)
+app.secret_key = os.getenv("FLASK_SECRET_KEY", "supersecretkey_mini_app_unique_v2")
+BOT_TOKEN = os.getenv('TELEGRAM_BOT_TOKEN', '6750208873:AAE2hvPlJ99dBdhGa_Brre0IIpUdOvXxHt4') # MUST be set
+DATA_FILE = 'cloudeng_mini_app_data_v2.json'
+REPO_ID = "Eluza133/Z1e1u" # Same HF Repo
+HF_TOKEN_WRITE = os.getenv("HF_TOKEN")
+HF_TOKEN_READ = os.getenv("HF_TOKEN_READ") or HF_TOKEN_WRITE
+UPLOAD_FOLDER = 'uploads_mini_app_v2'
+os.makedirs(UPLOAD_FOLDER, exist_ok=True)
+
+
+cache = Cache(app, config={'CACHE_TYPE': 'simple'})
+logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
+
+
+AUTH_DATA_LIFETIME = 3600 # 1 hour
+
+
def find_node_by_id(filesystem: Optional[Dict[str, Any]], node_id: str) -> Tuple[Optional[Dict[str, Any]], Optional[Dict[str, Any]]]:
    """Breadth-first search of the virtual filesystem tree for a node id.

    Args:
        filesystem: root node of the tree (dict with 'id', 'type', 'children').
        node_id: id to look for.

    Returns:
        (node, parent) when found; the root itself is returned with parent
        None.  (None, None) when the id is absent or the tree is invalid.
    """
    if not filesystem or not isinstance(filesystem, dict):
        return None, None
    if filesystem.get('id') == node_id:
        return filesystem, None

    queue = [(filesystem, None)]
    # Track visited folder ids to survive accidental cycles in stored data.
    visited = {filesystem.get('id')}

    while queue:
        current_node, parent = queue.pop(0)
        if current_node.get('type') == 'folder' and 'children' in current_node:
            for child in current_node.get('children', []):
                # Guard BEFORE calling dict methods: a malformed (non-dict)
                # child previously raised AttributeError on child.get(...).
                if not isinstance(child, dict):
                    continue
                child_id = child.get('id')
                if not child_id:
                    continue
                if child_id == node_id:
                    return child, current_node
                if child_id not in visited and child.get('type') == 'folder':
                    visited.add(child_id)
                    queue.append((child, current_node))
    return None, None
+
def add_node(filesystem: Dict[str, Any], parent_id: str, node_data: Dict[str, Any]) -> bool:
    """Attach node_data as a child of the folder parent_id.

    Returns True when the parent folder exists (even if a child with the same
    id is already present, in which case nothing is appended); False when the
    parent is missing or is not a folder.
    """
    parent, _ = find_node_by_id(filesystem, parent_id)
    if not parent or parent.get('type') != 'folder':
        return False

    children = parent.get('children')
    if not isinstance(children, list):
        children = []
        parent['children'] = children

    duplicate = any(
        isinstance(entry, dict) and entry.get('id') == node_data.get('id')
        for entry in children
    )
    if not duplicate:
        children.append(node_data)
    return True
+
def remove_node(filesystem: Dict[str, Any], node_id: str) -> bool:
    """Detach the node with node_id from its parent's children list.

    Returns True only when an entry was actually removed.  The root node is
    never removed (a warning is logged instead).
    """
    target, parent = find_node_by_id(filesystem, node_id)
    siblings = parent.get('children') if parent and 'children' in parent else None

    if target and isinstance(siblings, list):
        before = len(siblings)
        kept = [entry for entry in siblings
                if not isinstance(entry, dict) or entry.get('id') != node_id]
        parent['children'] = kept
        return len(kept) < before

    if target and node_id == filesystem.get('id'):
        logging.warning("Attempted to remove root node directly.")
        return False
    return False
+
def get_node_path_list(filesystem: Dict[str, Any], node_id: str) -> List[Dict[str, str]]:
    """Build breadcrumb entries ({'id', 'name'}) from the root down to node_id.

    Walks upward from node_id via repeated lookups, guards against loops and
    runaway depth, guarantees a root entry, then reverses and de-duplicates.
    """
    max_depth = 20
    crumbs: List[Dict[str, str]] = []
    walked = set()
    cursor = node_id
    hops = 0

    # Climb toward the root, one parent lookup per step.
    while cursor and cursor not in walked and hops < max_depth:
        walked.add(cursor)
        hops += 1
        node, parent = find_node_by_id(filesystem, cursor)
        if not node:
            logging.warning(f"Node {cursor} not found during path generation.")
            break
        crumbs.append({
            'id': node.get('id', ''),
            'name': node.get('name', node.get('original_filename', 'Unknown'))
        })
        if not parent:
            if node.get('id') != 'root':
                logging.warning(f"Node {cursor} found but has no parent (and is not root).")
            break
        next_id = parent.get('id')
        if next_id == cursor:
            logging.error(f"Filesystem loop detected at node {cursor}")
            break
        cursor = next_id

    # Guarantee a root breadcrumb even when the climb stopped early.
    if all(entry['id'] != 'root' for entry in crumbs):
        root_node, _ = find_node_by_id(filesystem, 'root')
        crumbs.append({'id': 'root',
                       'name': root_node.get('name', 'Root') if root_node else 'Root'})

    # Reverse (root first) while dropping duplicate ids.
    ordered: List[Dict[str, str]] = []
    emitted = set()
    for entry in reversed(crumbs):
        entry_id = entry.get('id')
        if entry_id and entry_id not in emitted:
            emitted.add(entry_id)
            ordered.append(entry)
    return ordered
+
+
def initialize_user_filesystem(user_data: Dict[str, Any]) -> None:
    """Ensure user_data carries a well-formed 'filesystem' tree (in place).

    A valid tree is a dict whose id is 'root' and whose 'children' is a list.
    Anything else is replaced by a fresh empty root; a valid root merely
    missing its children list gets one added.
    """
    fs = user_data.get('filesystem')
    # `.get('id') != 'root'` replaces the old `not ... == 'root'` anti-idiom.
    if not isinstance(fs, dict) or fs.get('id') != 'root':
        logging.warning("Filesystem missing or invalid for user. Initializing.")
        user_data['filesystem'] = {
            "type": "folder",
            "id": "root",
            "name": "Root",
            "children": []
        }
    elif not isinstance(fs.get('children'), list):
        logging.warning("Root node missing 'children' list. Initializing.")
        fs['children'] = []
+
+
@cache.memoize(timeout=60)
def load_data() -> Dict[str, Any]:
    """Load the app database, preferring the Hugging Face copy over local.

    Resolution order:
      1. Parse the local DATA_FILE if present and a dict.
      2. If HF_TOKEN_READ is set, force-download the repo copy; on success it
         wins over the local file and is promoted to the DATA_FILE path.
      3. If neither yields a dict, start from an empty {'users': {}} and try
         to persist that skeleton locally.

    Every user entry is then normalized via initialize_user_filesystem().
    Result is memoized for 60s; save_data() clears the cache after writes.
    """
    local_data = None
    if os.path.exists(DATA_FILE):
        try:
            with open(DATA_FILE, 'r', encoding='utf-8') as file:
                local_data = json.load(file)
            if not isinstance(local_data, dict):
                logging.warning(f"Local data file {DATA_FILE} is not a dict. Will try HF download.")
                local_data = None
            else:
                logging.info(f"Successfully loaded local data from {DATA_FILE}")
        except json.JSONDecodeError:
            logging.error(f"Error decoding JSON from local file {DATA_FILE}. Will try HF download.")
            local_data = None
        except Exception as e:
            logging.error(f"Error loading local data from {DATA_FILE}: {e}. Will try HF download.")
            local_data = None

    hf_data = None
    if HF_TOKEN_READ:
        try:
            # force_download=True: always fetch the freshest repo copy.
            download_path = hf_hub_download(
                repo_id=REPO_ID, filename=DATA_FILE, repo_type="dataset",
                token=HF_TOKEN_READ, local_dir=".", local_dir_use_symlinks=False,
                force_download=True, etag_timeout=10
            )
            with open(download_path, 'r', encoding='utf-8') as file:
                hf_data = json.load(file)
            if not isinstance(hf_data, dict):
                logging.warning(f"Downloaded HF data file {DATA_FILE} is not a dict. Discarding.")
                hf_data = None
            else:
                logging.info("Successfully downloaded and loaded data from Hugging Face.")
                # Make downloaded file the main file
                if download_path != DATA_FILE:
                    os.replace(download_path, DATA_FILE)

        except hf_utils.RepositoryNotFoundError:
            logging.error(f"Repository {REPO_ID} not found on Hugging Face.")
        except hf_utils.EntryNotFoundError:
            logging.warning(f"{DATA_FILE} not found in repo {REPO_ID}. Using local version if available.")
        except requests.exceptions.ConnectionError as e:
            logging.error(f"Connection error downloading DB from HF: {e}. Using local version if available.")
        except json.JSONDecodeError:
            logging.error(f"Error decoding JSON from downloaded HF file {DATA_FILE}.")
        except Exception as e:
            logging.error(f"Generic error downloading/processing database from HF: {e}")
    else:
        logging.warning("HF_TOKEN_READ not set, skipping database download.")

    # HF copy wins when both sources parsed successfully.
    data_to_use = hf_data if hf_data is not None else local_data

    if data_to_use is None:
        logging.warning(f"Neither local nor HF data could be loaded. Initializing empty data structure.")
        data_to_use = {'users': {}}
        # Attempt to save this initial structure locally
        try:
            with open(DATA_FILE, 'w', encoding='utf-8') as f:
                json.dump(data_to_use, f, ensure_ascii=False, indent=4)
        except Exception as e:
            logging.error(f"Failed to write initial empty data file: {e}")
    else:
        logging.info("Using loaded data (HF preferred over local).")

    # Normalize every user record so callers can rely on a valid filesystem.
    data_to_use.setdefault('users', {})
    for user_id, user_data in data_to_use['users'].items():
        if isinstance(user_data, dict):
            initialize_user_filesystem(user_data)
        else:
            logging.warning(f"User data for ID {user_id} is not a dictionary. Skipping filesystem check.")

    logging.info("Data loading complete.")
    return data_to_use
+
+_save_lock = threading.Lock()
+
def save_data(data: Dict[str, Any]) -> bool:
    """Atomically persist the database locally, then schedule an HF backup.

    Writes to DATA_FILE + '.tmp' and os.replace()s it into place so readers
    never observe a half-written file.  Clears the flask_caching cache (so
    load_data() re-reads) and kicks off the async Hugging Face upload.

    Returns True on successful local save, False otherwise.
    """
    if not isinstance(data, dict) or 'users' not in data:
        logging.error("Attempted to save invalid data structure. Aborting save.")
        return False

    # Bind the temp path BEFORE the try block: previously it was assigned
    # inside `try`, so a failure on that first line made the except handler
    # itself raise NameError when referencing temp_data_file.
    temp_data_file = DATA_FILE + ".tmp"

    with _save_lock:
        try:
            # Write to a temporary file first
            with open(temp_data_file, 'w', encoding='utf-8') as file:
                json.dump(data, file, ensure_ascii=False, indent=4)

            # If write successful, replace original file
            os.replace(temp_data_file, DATA_FILE)
            logging.info(f"Data successfully saved locally to {DATA_FILE}.")

            # Clear cache after successful local save
            cache.clear()
            logging.info("Cache cleared after successful save.")

            # Schedule HF upload
            upload_db_to_hf_async()
            return True

        except Exception as e:
            logging.error(f"Error saving data locally: {e}")
            # Clean up temp file if it exists
            if os.path.exists(temp_data_file):
                try:
                    os.remove(temp_data_file)
                except OSError as remove_err:
                    logging.error(f"Error removing temporary save file {temp_data_file}: {remove_err}")
            return False
+
+
def upload_db_to_hf_async():
    """Schedule a background (daemon-thread) upload of the local DB to HF.

    Skips silently-with-a-warning when no write token is configured or the
    local data file does not exist yet.
    """
    if not HF_TOKEN_WRITE:
        logging.warning("HF_TOKEN_WRITE not set, skipping database upload.")
        return
    if not os.path.exists(DATA_FILE):
        logging.warning(f"Local data file {DATA_FILE} not found for upload.")
        return

    worker = threading.Thread(target=upload_db_to_hf_worker, daemon=True)
    worker.start()
+
def upload_db_to_hf_worker():
    """Upload DATA_FILE to the HF dataset repo (runs in a daemon thread).

    Best-effort: any failure is logged and swallowed so the background
    thread never crashes the process.
    """
    try:
        api = HfApi()
        api.upload_file(
            path_or_fileobj=DATA_FILE,
            path_in_repo=DATA_FILE,
            repo_id=REPO_ID,
            repo_type="dataset",
            token=HF_TOKEN_WRITE,
            commit_message=f"Backup MiniApp {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}",
        )
        logging.info("Database upload to Hugging Face completed.")
    except Exception as e:
        logging.error(f"Error during background database upload: {e}")
+
+
def get_file_type(filename: Optional[str]) -> str:
    """Classify a filename into a coarse category by its extension.

    Returns one of 'video', 'image', 'pdf', 'text', 'document',
    'spreadsheet', 'presentation', 'archive', 'audio', or 'other' for
    anything unrecognized (including None / extension-less names).
    """
    if not filename or '.' not in filename:
        return 'other'

    categories = {
        'video': ('mp4', 'mov', 'avi', 'webm', 'mkv'),
        'image': ('jpg', 'jpeg', 'png', 'gif', 'bmp', 'webp', 'svg', 'heic', 'avif'),
        'pdf': ('pdf',),
        'text': ('txt', 'md', 'log', 'csv', 'json', 'xml', 'html', 'css', 'js', 'py', 'sh'),
        'document': ('doc', 'docx', 'rtf'),
        'spreadsheet': ('xls', 'xlsx'),
        'presentation': ('ppt', 'pptx'),
        'archive': ('zip', 'rar', '7z', 'gz', 'tar'),
        'audio': ('mp3', 'wav', 'ogg', 'flac', 'aac', 'm4a'),
    }

    extension = filename.lower().rsplit('.', 1)[-1]
    for category, extensions in categories.items():
        if extension in extensions:
            return category
    return 'other'
+
+
def check_telegram_authorization(auth_data: str, bot_token: str, max_age: Optional[int] = None) -> Optional[Dict[str, Any]]:
    """Validate a Telegram WebApp initData string per Telegram's HMAC scheme.

    Args:
        auth_data: raw (URL-encoded) initData query string from the client.
        bot_token: bot token used to derive the signing secret.
        max_age: maximum accepted age of auth_date in seconds; defaults to
            AUTH_DATA_LIFETIME when None (backward-compatible addition).

    Returns:
        The parsed 'user' dict with 'id' coerced to str on success,
        None on any validation failure (expired, bad hash, malformed).
    """
    if not auth_data or not bot_token or bot_token == 'YOUR_BOT_TOKEN':
        logging.warning("Validation skipped: Missing auth_data or valid BOT_TOKEN.")
        return None
    if max_age is None:
        max_age = AUTH_DATA_LIFETIME
    try:
        parsed_data = dict(parse_qsl(unquote(auth_data)))
        if "hash" not in parsed_data:
            logging.error("Hash not found in auth data")
            return None

        telegram_hash = parsed_data.pop('hash')
        auth_date_ts = int(parsed_data.get('auth_date', 0))
        current_ts = int(time.time())

        if abs(current_ts - auth_date_ts) > max_age:
            logging.warning(f"Auth data expired (Auth: {auth_date_ts}, Now: {current_ts}, Diff: {current_ts - auth_date_ts})")
            return None

        # data_check_string = sorted "k=v" lines, signed with
        # HMAC-SHA256(secret = HMAC-SHA256("WebAppData", bot_token)).
        data_check_string = "\n".join(sorted([f"{k}={v}" for k, v in parsed_data.items()]))
        secret_key = hmac.new("WebAppData".encode(), bot_token.encode(), hashlib.sha256).digest()
        calculated_hash = hmac.new(secret_key, data_check_string.encode(), hashlib.sha256).hexdigest()

        # compare_digest resists timing attacks; plain == leaks via timing.
        if hmac.compare_digest(calculated_hash, telegram_hash):
            user_data_str = parsed_data.get('user')
            if user_data_str:
                try:
                    user_info = json.loads(user_data_str)
                    if 'id' not in user_info:
                        logging.error("Validated user data missing 'id'")
                        return None
                    user_info['id'] = str(user_info['id'])  # Ensure ID is string
                    return user_info
                except json.JSONDecodeError:
                    logging.error("Failed to decode user JSON from auth data")
                    return None
            else:
                logging.warning("No 'user' field in validated auth data")
                return None
        else:
            logging.warning("Hash mismatch during validation")
            return None
    except Exception as e:
        logging.error(f"Exception during validation: {e}")
        return None
+
+
# Single-page HTML shell served at '/'.
# NOTE(review): the markup below appears truncated/garbled by the export this
# file came from (tags seem to have been stripped) -- confirm against the
# original template before deploying.
HTML_TEMPLATE = """




 Zeus Cloud






Загрузка...




 Zeus Cloud






Действия














Содержимое






















"""
+
+
@app.route('/')
def index():
    """Serve the static mini-app shell; all data is fetched via JSON routes."""
    return Response(HTML_TEMPLATE, mimetype='text/html')
+
@app.route('/validate_init_data', methods=['POST'])
def validate_init_data():
    """Validate Telegram initData and ensure the caller has a user record.

    Expects JSON {"initData": "..."}.  On first contact a user record plus an
    empty filesystem are created; on later calls stale profile fields
    (username / first / last name) are refreshed.  Responds with the parsed
    user dict on success, 403 on bad auth, 400 on missing payload.
    """
    data = request.get_json()
    if not data or 'initData' not in data:
        return jsonify({"status": "error", "message": "Отсутствует initData"}), 400

    init_data = data['initData']
    user_info = check_telegram_authorization(init_data, BOT_TOKEN)

    if user_info and 'id' in user_info:
        tg_user_id = str(user_info['id'])
        needs_save = False
        try:
            db_data = load_data()
        except Exception as e:
            logging.critical(f"FATAL: Failed to load initial data: {e}")
            return jsonify({"status": "error", "message": "Критическая ошибка загрузки данных."}), 500

        users = db_data.setdefault('users', {})

        if tg_user_id not in users or not isinstance(users.get(tg_user_id), dict):
            logging.info(f"New user detected or invalid user data: {tg_user_id}. Initializing.")
            users[tg_user_id] = {
                'user_info': user_info,
                'created_at': datetime.now().strftime('%Y-%m-%d %H:%M:%S')
            }
            initialize_user_filesystem(users[tg_user_id])
            needs_save = True
        else:
            # Ensure filesystem exists and update user info if changed
            if 'filesystem' not in users[tg_user_id]:
                initialize_user_filesystem(users[tg_user_id])
                needs_save = True
            current_user_info = users[tg_user_id].get('user_info', {})
            if current_user_info.get('username') != user_info.get('username') or \
               current_user_info.get('first_name') != user_info.get('first_name') or \
               current_user_info.get('last_name') != user_info.get('last_name'):
                users[tg_user_id]['user_info'] = user_info
                needs_save = True

        if needs_save:
            if not save_data(db_data):
                # Log error but proceed if possible, user already exists or created in memory
                logging.error(f"Failed initial save for user {tg_user_id}, proceeding with in-memory data.")


        return jsonify({"status": "ok", "user": user_info})
    else:
        logging.warning(f"Validation failed for initData.")
        return jsonify({"status": "error", "message": "Недействительные данные авторизации Telegram."}), 403
+
+
@app.route('/get_dashboard_data', methods=['POST'])
def get_dashboard_data():
    """Return one folder's items plus breadcrumbs for the authorized caller.

    Expects JSON {"initData": ..., "folder_id": ...}.  Falls back to 'root'
    when the requested folder is missing, and attempts to re-initialize the
    filesystem if even the root node cannot be found.
    """
    data = request.get_json()
    if not data or 'initData' not in data or 'folder_id' not in data:
        return jsonify({"status": "error", "message": "Неполный запрос"}), 400

    user_info = check_telegram_authorization(data['initData'], BOT_TOKEN)
    if not user_info or 'id' not in user_info:
        return jsonify({"status": "error", "message": "Не авторизован"}), 403

    tg_user_id = str(user_info['id'])
    folder_id = data['folder_id']

    try:
        db_data = load_data()
    except Exception as e:
        logging.error(f"Failed to load data in get_dashboard_data for user {tg_user_id}: {e}")
        return jsonify({"status": "error", "message": "Ошибка загрузки данных."}), 500

    user_data = db_data.get('users', {}).get(tg_user_id)

    if not user_data or 'filesystem' not in user_data:
        logging.error(f"User data or filesystem missing for validated user {tg_user_id}")
        # Attempt to re-initialize if missing, maybe a race condition?
        if user_data and 'filesystem' not in user_data:
            initialize_user_filesystem(user_data)
            if not save_data(db_data): # Try saving the fix
                logging.error(f"Failed to save re-initialized filesystem for user {tg_user_id}")
                # Continue with the initialized data if save failed but initialization worked
        else:
            return jsonify({"status": "error", "message": "Ошибка данных пользователя"}), 500


    current_folder, _ = find_node_by_id(user_data['filesystem'], folder_id)

    if not current_folder or current_folder.get('type') != 'folder':
        logging.warning(f"Folder {folder_id} not found or invalid for user {tg_user_id}. Defaulting to root.")
        folder_id = 'root'
        current_folder, _ = find_node_by_id(user_data['filesystem'], folder_id)
        if not current_folder:
            logging.error(f"CRITICAL: Root folder not found for user {tg_user_id}")
            # Attempt recovery: Re-initialize and save
            initialize_user_filesystem(user_data)
            if save_data(db_data):
                current_folder, _ = find_node_by_id(user_data['filesystem'], folder_id)
            if not current_folder: # Still not found after recovery attempt
                return jsonify({"status": "error", "message": "Критическая ошибка: Корневая папка отсутствует, восстановление не удалось."}), 500


    items_in_folder = current_folder.get('children', [])
    # Filter out potentially invalid children (though add_node should prevent this)
    items_in_folder = [item for item in items_in_folder if isinstance(item, dict) and item.get('id')]

    breadcrumbs = get_node_path_list(user_data['filesystem'], folder_id)

    current_folder_info = {
        'id': current_folder.get('id'),
        'name': current_folder.get('name', 'Root')
    }

    return jsonify({
        "status": "ok",
        "items": items_in_folder,
        "breadcrumbs": breadcrumbs,
        "current_folder": current_folder_info
    })
+
+
@app.route('/upload', methods=['POST'])
def upload_files():
    """Upload one or more files to a folder of the caller's virtual tree.

    Multipart form: 'initData' (Telegram auth), 'current_folder_id' (defaults
    to 'root'), and 'files'.  Each file is saved to a local temp path, pushed
    to the HF dataset repo, then recorded as a 'file' node in the user's
    filesystem.  On HF/DB inconsistencies the code attempts to delete the
    orphaned HF object.  Returns ok / partial_error / error with a summary.
    """
    init_data = request.form.get('initData')
    current_folder_id = request.form.get('current_folder_id', 'root')
    files = request.files.getlist('files')

    user_info = check_telegram_authorization(init_data, BOT_TOKEN)
    if not user_info or 'id' not in user_info:
        return jsonify({"status": "error", "message": "Не авторизован"}), 403

    tg_user_id = str(user_info['id'])

    if not HF_TOKEN_WRITE:
        logging.error("HF_TOKEN_WRITE not configured. Upload disabled.")
        return jsonify({'status': 'error', 'message': 'Загрузка невозможна: Ошибка конфигурации сервера.'}), 503

    if not files or all(not f.filename for f in files):
        return jsonify({'status': 'error', 'message': 'Файлы для загрузки не выбраны.'}), 400

    try:
        db_data = load_data()
    except Exception as e:
        logging.error(f"Failed to load data during upload for user {tg_user_id}: {e}")
        return jsonify({"status": "error", "message": "Ошибка загрузки данных пользователя."}), 500

    user_data = db_data.get('users', {}).get(tg_user_id)
    if not user_data or 'filesystem' not in user_data:
        logging.error(f"User data or filesystem missing for upload user {tg_user_id}")
        return jsonify({"status": "error", "message": "Ошибка данных пользователя"}), 500

    target_folder_node, _ = find_node_by_id(user_data['filesystem'], current_folder_id)
    if not target_folder_node or target_folder_node.get('type') != 'folder':
        logging.warning(f"Target folder {current_folder_id} not found for upload by user {tg_user_id}. Defaulting to root.")
        current_folder_id = 'root' # Try to recover by uploading to root
        target_folder_node, _ = find_node_by_id(user_data['filesystem'], current_folder_id)
        if not target_folder_node: # Still can't find root? Major issue.
            logging.error(f"CRITICAL: Cannot find root folder during upload for user {tg_user_id}")
            return jsonify({'status': 'error', 'message': 'Ошибка: Не удается найти корневую папку.'}), 500


    api = HfApi()
    uploaded_count = 0
    errors = []
    needs_save = False
    temp_files_to_clean = []

    for file in files:
        if file and file.filename:
            original_filename = secure_filename(file.filename)
            if not original_filename:
                logging.warning(f"Skipping file with invalid name from user {tg_user_id}")
                errors.append("Пропущен файл с недопустимым именем.")
                continue

            name_part, ext_part = os.path.splitext(original_filename)
            unique_suffix = uuid.uuid4().hex[:8]
            # Ensure unique_filename doesn't exceed typical path limits, though unlikely
            safe_name_part = name_part[:100] # Limit base name length
            unique_filename = f"{safe_name_part}_{unique_suffix}{ext_part}"
            file_id = uuid.uuid4().hex

            hf_path = f"cloud_files/{tg_user_id}/{current_folder_id}/{unique_filename}"
            temp_path = os.path.join(UPLOAD_FOLDER, f"{file_id}_{unique_filename}")
            temp_files_to_clean.append(temp_path)
            file_added_to_db = False

            try:
                file.save(temp_path)

                api.upload_file(
                    path_or_fileobj=temp_path, path_in_repo=hf_path,
                    repo_id=REPO_ID, repo_type="dataset", token=HF_TOKEN_WRITE,
                    commit_message=f"User {tg_user_id} uploaded {original_filename} to {current_folder_id}"
                )
                logging.info(f"Successfully uploaded {original_filename} to HF path {hf_path} for user {tg_user_id}")

                file_info = {
                    'type': 'file', 'id': file_id,
                    'original_filename': original_filename, 'unique_filename': unique_filename,
                    'path': hf_path, 'file_type': get_file_type(original_filename),
                    'upload_date': datetime.now().strftime('%Y-%m-%d %H:%M:%S')
                }

                if add_node(user_data['filesystem'], current_folder_id, file_info):
                    uploaded_count += 1
                    needs_save = True
                    file_added_to_db = True
                    logging.info(f"Successfully added file metadata {file_id} to DB for user {tg_user_id}")
                else:
                    # This case should be rare if target_folder_node was found correctly
                    error_msg = f"Критическая ошибка: Не удалось добавить метаданные для {original_filename} в БД."
                    errors.append(error_msg)
                    logging.error(f"Failed add_node for {file_id} to {current_folder_id} for {tg_user_id} even after finding parent.")
                    # Attempt to delete the orphaned HF file
                    try:
                        api.delete_file(path_in_repo=hf_path, repo_id=REPO_ID, repo_type="dataset", token=HF_TOKEN_WRITE)
                        logging.warning(f"Deleted orphaned HF file {hf_path} after DB add failure.")
                    except Exception as del_err:
                        logging.error(f"Failed to delete orphaned HF file {hf_path}: {del_err}")


            except Exception as e:
                error_msg = f"Ошибка загрузки файла {original_filename}: {e}"
                errors.append(error_msg)
                logging.error(f"Upload or processing error for {original_filename} (user {tg_user_id}): {e}")
                # If DB entry was already added (shouldn't happen with current logic, but defensively)
                # or if HF upload succeeded but DB add failed, try cleaning up HF file
                # NOTE(review): str(e.__traceback__) renders as '<traceback object at 0x...>'
                # and can never contain 'api.upload_file', so this branch is effectively
                # dead and control always falls to the elif/else below -- confirm intent.
                if not file_added_to_db and 'api.upload_file' in str(e.__traceback__): # Check if error was likely during HF upload
                    pass # HF upload failed, no need to delete
                elif file_added_to_db: # DB add succeeded but something else failed? Rollback DB? Complex.
                    logging.warning(f"File {file_id} added to DB but error occurred later. State might be inconsistent.")
                else: # HF upload likely succeeded, but DB add failed or error after HF success
                    try:
                        # Check if file exists on HF before attempting delete
                        api.file_exists(repo_id=REPO_ID, repo_type="dataset", filename=hf_path, token=HF_TOKEN_READ or HF_TOKEN_WRITE)
                        api.delete_file(path_in_repo=hf_path, repo_id=REPO_ID, repo_type="dataset", token=HF_TOKEN_WRITE)
                        logging.warning(f"Attempted cleanup of HF file {hf_path} due to error: {e}")
                    except hf_utils.EntryNotFoundError:
                        logging.info(f"HF file {hf_path} not found, no cleanup needed.")
                    except Exception as del_err:
                        logging.error(f"Failed attempt to cleanup HF file {hf_path} after error: {del_err}")


    # Cleanup all temporary files
    for temp_file in temp_files_to_clean:
        if os.path.exists(temp_file):
            try:
                os.remove(temp_file)
            except OSError as e:
                logging.error(f"Error removing temp file {temp_file}: {e}")

    # Save the database IF any uploads were successfully added
    if needs_save:
        if not save_data(db_data):
            logging.error(f"CRITICAL: Failed to save DB after successful uploads for user {tg_user_id}. Data inconsistency likely.")
            errors.append("Критическая ошибка: Не удалось сохранить изменения в базе данных после загрузки.")
            # Don't totally fail the response, report partial success but warn user
            final_status = "error"
            final_message = f"{uploaded_count} файл(ов) загружено, НО ОШИБКА СОХРАНЕНИЯ БАЗЫ ДАННЫХ. Данные могут быть неактуальны."
            if errors:
                final_message += " Другие ошибки: " + "; ".join(errors)
            return jsonify({"status": final_status, "message": final_message }), 500


    final_status = "ok" if not errors else "partial_error"
    final_message = f"{uploaded_count} из {len(files)} файл(ов) успешно загружено."
    if errors:
        final_status = "error" if uploaded_count == 0 else "partial_error"
        final_message = f"{uploaded_count} из {len(files)} файл(ов) загружено. Ошибки: " + "; ".join(errors)


    return jsonify({
        "status": final_status,
        "message": final_message
    })
+
+
@app.route('/create_folder', methods=['POST'])
def create_folder():
    """Create a named subfolder under a parent folder of the caller's tree.

    Expects JSON {"initData", "parent_folder_id", "folder_name"}.  The name
    is validated (non-empty, no slashes, not '.'/'..', <= 100 chars) and must
    not collide with an existing sibling folder (409).  On DB save failure
    the freshly added node is rolled back best-effort.
    """
    data = request.get_json()
    if not data or 'initData' not in data or 'parent_folder_id' not in data or 'folder_name' not in data:
        return jsonify({"status": "error", "message": "Неполный запрос"}), 400

    user_info = check_telegram_authorization(data['initData'], BOT_TOKEN)
    if not user_info or 'id' not in user_info:
        return jsonify({"status": "error", "message": "Не авторизован"}), 403

    tg_user_id = str(user_info['id'])
    parent_folder_id = data['parent_folder_id']
    folder_name = data['folder_name'].strip()

    if not folder_name:
        return jsonify({'status': 'error', 'message': 'Имя папки не может быть пустым.'}), 400
    if '/' in folder_name or '\\' in folder_name:
        return jsonify({'status': 'error', 'message': 'Имя папки содержит недопустимые символы (/ или \\).'}), 400
    if folder_name == '.' or folder_name == '..':
        return jsonify({'status': 'error', 'message': 'Недопустимое имя папки.'}), 400
    if len(folder_name) > 100:
        return jsonify({'status': 'error', 'message': 'Имя папки слишком длинное (макс 100 симв).'}), 400


    try:
        db_data = load_data()
    except Exception as e:
        logging.error(f"Failed to load data during create_folder for user {tg_user_id}: {e}")
        return jsonify({"status": "error", "message": "Ошибка загрузки данных пользователя."}), 500

    user_data = db_data.get('users', {}).get(tg_user_id)
    if not user_data or 'filesystem' not in user_data:
        logging.error(f"User data or filesystem missing for create_folder user {tg_user_id}")
        return jsonify({"status": "error", "message": "Ошибка данных пользователя"}), 500

    # Check if folder with the same name already exists in the parent
    parent_node, _ = find_node_by_id(user_data['filesystem'], parent_folder_id)
    if parent_node and parent_node.get('type') == 'folder' and 'children' in parent_node:
        if any(child.get('name') == folder_name and child.get('type') == 'folder' for child in parent_node.get('children', []) if isinstance(child, dict)):
            return jsonify({'status': 'error', 'message': f'Папка с именем "{folder_name}" уже существует здесь.'}), 409 # Conflict


    folder_id = uuid.uuid4().hex
    folder_data = {
        'type': 'folder', 'id': folder_id,
        'name': folder_name, 'children': []
    }

    if add_node(user_data['filesystem'], parent_folder_id, folder_data):
        if save_data(db_data):
            logging.info(f"Folder '{folder_name}' (id: {folder_id}) created successfully for user {tg_user_id} under {parent_folder_id}.")
            return jsonify({'status': 'ok', 'message': f'Папка "{folder_name}" создана.'})
        else:
            logging.error(f"Create folder DB save error for user {tg_user_id}. Attempting to rollback DB change.")
            # Attempt rollback (remove the added node) - best effort
            remove_node(user_data['filesystem'], folder_id)
            return jsonify({'status': 'error', 'message': 'Ошибка сохранения данных после создания папки.'}), 500
    else:
        # This implies parent_folder_id was not found or wasn't a folder
        logging.error(f"Failed to add folder node '{folder_name}' for user {tg_user_id}. Parent folder {parent_folder_id} likely invalid.")
        return jsonify({'status': 'error', 'message': 'Не удалось найти родительскую папку или добавить узел.'}), 400
+
+
@app.route('/download/<file_id>')  # FIX: route previously lacked the <file_id> converter the view requires
def download_file_route(file_id):
    """Stream a stored file from the HF dataset repo back to the client.

    Looks the file id up across ALL users' trees (possession of the id acts
    as authorization -- no ownership check here), then proxies the HF
    'resolve' URL with a streamed response and a proper Content-Disposition.
    """
    try:
        db_data = load_data()
    except Exception as e:
        logging.error(f"Failed to load data for download request file_id {file_id}: {e}")
        return Response("Ошибка сервера при получении данных файла", status=500)

    file_node = None
    owner_user_id = None

    # Scan every user's filesystem for the requested file node.
    for user_id, user_data in db_data.get('users', {}).items():
        if isinstance(user_data, dict) and 'filesystem' in user_data:
            node, _ = find_node_by_id(user_data['filesystem'], file_id)
            if node and isinstance(node, dict) and node.get('type') == 'file':
                file_node = node
                owner_user_id = user_id
                break

    if not file_node or not isinstance(file_node, dict):
        return Response("Файл не найден", status=404)

    hf_path = file_node.get('path')
    original_filename = file_node.get('original_filename', f'{file_id}_download')

    if not hf_path:
        logging.error(f"Missing HF path for file ID {file_id} (owner: {owner_user_id})")
        return Response("Ошибка: Путь к файлу не определен", status=500)

    file_url = f"https://huggingface.co/datasets/{REPO_ID}/resolve/main/{hf_path}?download=true"
    logging.info(f"Attempting download for user (implicit) file {file_id} from HF URL: {file_url}")

    try:
        headers = {}
        if HF_TOKEN_READ:
            headers["authorization"] = f"Bearer {HF_TOKEN_READ}"

        # Use GET with stream=True for potentially large files
        response = requests.get(file_url, headers=headers, stream=True, timeout=60) # Increased timeout
        response.raise_for_status()

        # Correct Content-Disposition encoding (RFC 6266)
        try:
            from urllib.parse import quote  # local import; file-level import list unchanged
            # Simple ASCII fallback
            ascii_filename = original_filename.encode('ascii', 'ignore').decode('ascii')
            disposition = f'attachment; filename="{ascii_filename}"'
            # If filename contains non-ASCII, use filename*
            if ascii_filename != original_filename:
                # FIX: urlencode()[9:] emitted '+' for spaces, which is invalid
                # inside filename*; quote() yields RFC 5987 percent-encoding.
                encoded_filename = quote(original_filename, safe='')
                disposition += f"; filename*=UTF-8''{encoded_filename}"
        except Exception as e:
            logging.warning(f"Error encoding filename '{original_filename}' for Content-Disposition: {e}. Using simplified name.")
            disposition = f'attachment; filename="downloaded_file"'

        # Get content type from HF response, default to octet-stream
        content_type = response.headers.get('Content-Type', 'application/octet-stream')

        return Response(response.iter_content(chunk_size=65536), # 64KB chunk
                        mimetype=content_type,
                        headers={"Content-Disposition": disposition})

    except requests.exceptions.HTTPError as e:
        status_code = e.response.status_code
        logging.error(f"HTTP Error {status_code} downloading file from HF ({hf_path}, owner: {owner_user_id}): {e}")
        message = "Ошибка скачивания файла"
        if status_code == 404:
            message = "Файл не найден на сервере хранения."
        elif status_code == 401 or status_code == 403:
            message = "Ошибка доступа к файлу на сервере хранения."
        return Response(f"{message} (Код: {status_code})", status=status_code)
    except requests.exceptions.Timeout:
        logging.error(f"Timeout downloading file from HF ({hf_path}, owner: {owner_user_id})")
        return Response("Тайм-аут при скачивании файла с сервера хранения.", status=504)
    except requests.exceptions.RequestException as e:
        logging.error(f"Network error downloading file from HF ({hf_path}, owner: {owner_user_id}): {e}")
        return Response("Сетевая ошибка при скачивании файла.", status=502) # Bad Gateway
    except Exception as e:
        logging.error(f"Unexpected error during download ({hf_path}, owner: {owner_user_id}): {e}", exc_info=True)
        return Response("Внутренняя ошибка сервера при подготовке файла к скачиванию.", status=500)
+
@app.route('/delete_file/<file_id>', methods=['POST'])
def delete_file_route(file_id):
    """Delete a user's file: first from the HF Hub storage repo, then from the user's DB tree.

    Expects a JSON body with 'initData' (Telegram WebApp auth payload) and
    'current_folder_id'. Returns JSON {'status': 'ok'|'error', 'message': ...}.

    NOTE: the route previously lacked the '<file_id>' URL variable, so Flask
    could never bind the view's parameter; restored here.
    """
    data = request.get_json()
    # current_folder_id is not used by the deletion itself, but the API contract requires it
    if not data or 'initData' not in data or 'current_folder_id' not in data:
        return jsonify({"status": "error", "message": "Неполный запрос"}), 400

    user_info = check_telegram_authorization(data['initData'], BOT_TOKEN)
    if not user_info or 'id' not in user_info:
        return jsonify({"status": "error", "message": "Не авторизован"}), 403

    tg_user_id = str(user_info['id'])

    # Deletion needs write access to the HF repo; refuse early if not configured.
    if not HF_TOKEN_WRITE:
        logging.error("HF_TOKEN_WRITE not configured. Delete disabled.")
        return jsonify({'status': 'error', 'message': 'Удаление невозможно: Ошибка конфигурации сервера.'}), 503

    try:
        db_data = load_data()
    except Exception as e:
        logging.error(f"Failed to load data during delete_file for user {tg_user_id}: {e}")
        return jsonify({"status": "error", "message": "Ошибка загрузки данных пользователя."}), 500

    user_data = db_data.get('users', {}).get(tg_user_id)
    if not user_data or 'filesystem' not in user_data:
        logging.error(f"User data or filesystem missing for delete_file user {tg_user_id}")
        return jsonify({"status": "error", "message": "Ошибка данных пользователя"}), 500

    file_node, parent_node = find_node_by_id(user_data['filesystem'], file_id)

    # Only real file nodes with a known parent may be deleted (the root has no parent).
    if not file_node or not isinstance(file_node, dict) or file_node.get('type') != 'file' or not parent_node:
        logging.warning(f"File node {file_id} not found or invalid for deletion by user {tg_user_id}")
        return jsonify({'status': 'error', 'message': 'Файл не найден или не может быть удален.'}), 404

    hf_path = file_node.get('path')
    original_filename = file_node.get('original_filename', 'файл')
    db_removed = False
    hf_deleted_or_missing = False

    # 1. Try deleting from Hugging Face Hub.
    if hf_path:
        try:
            api = HfApi()
            api.delete_file(
                path_in_repo=hf_path, repo_id=REPO_ID, repo_type="dataset", token=HF_TOKEN_WRITE,
                commit_message=f"User {tg_user_id} deleted file {original_filename} (id: {file_id})"
            )
            logging.info(f"Deleted file {hf_path} from HF Hub for user {tg_user_id}")
            hf_deleted_or_missing = True
        except hf_utils.EntryNotFoundError:
            logging.warning(f"File {hf_path} not found on HF Hub during delete attempt by {tg_user_id}. Assuming deleted or never existed.")
            hf_deleted_or_missing = True  # Treat as success for DB removal purposes
        except Exception as e:
            logging.error(f"Error deleting file from HF Hub ({hf_path}, user {tg_user_id}): {e}")
            # Leave hf_deleted_or_missing False: DB stays untouched, error reported below.
    else:
        logging.warning(f"File node {file_id} is missing 'path' attribute for user {tg_user_id}. Skipping HF delete.")
        hf_deleted_or_missing = True  # Nothing to delete on HF; proceed with DB removal

    # 2. Remove from the DB only if the HF delete succeeded or was skipped/not found.
    if hf_deleted_or_missing:
        if remove_node(user_data['filesystem'], file_id):
            logging.info(f"Removed file node {file_id} from DB for user {tg_user_id}")
            db_removed = True
        else:
            # Unexpected: find_node_by_id located the node, yet removal failed.
            logging.error(f"CRITICAL: Failed to remove file node {file_id} from DB structure for {tg_user_id} after successful find.")
            # Don't save data if the structure seems inconsistent.
            return jsonify({'status': 'error', 'message': 'Критическая ошибка при обновлении базы данных.'}), 500

    # 3. Persist DB changes if the node was removed.
    if db_removed:
        if save_data(db_data):
            return jsonify({'status': 'ok', 'message': f'Файл "{original_filename}" успешно удален.'})
        else:
            logging.error(f"CRITICAL: Delete file DB save error for user {tg_user_id} after successful removal from structure. State inconsistent.")
            # Node was removed in memory but the save failed: storage and DB now disagree.
            return jsonify({'status': 'error', 'message': 'Файл удален, но ПРОИЗОШЛА КРИТИЧЕСКАЯ ОШИБКА сохранения базы данных.'}), 500
    elif not hf_deleted_or_missing:
        # HF delete failed; DB was not touched.
        return jsonify({'status': 'error', 'message': f'Ошибка при удалении файла "{original_filename}" с сервера хранения. База данных не изменена.'}), 500
    else:
        # Defensive: HF delete ok/skipped but DB remove failed (unreachable given the early return above).
        return jsonify({'status': 'error', 'message': 'Не удалось удалить файл из базы данных после операции на сервере.'}), 500
+
+
@app.route('/delete_folder/<folder_id>', methods=['POST'])
def delete_folder_route(folder_id):
    """Delete an EMPTY folder from the authenticated user's filesystem tree.

    Expects a JSON body with 'initData' (Telegram WebApp auth payload).
    Returns JSON {'status': 'ok'|'error', 'message': ...}.

    NOTE: the route previously lacked the '<folder_id>' URL variable, so Flask
    could never bind the view's parameter; restored here.
    """
    # The root folder is structural and must never be removed.
    if folder_id == 'root':
        return jsonify({'status': 'error', 'message': 'Нельзя удалить корневую папку!'}), 400

    data = request.get_json()
    if not data or 'initData' not in data:
        return jsonify({"status": "error", "message": "Неполный запрос"}), 400

    user_info = check_telegram_authorization(data['initData'], BOT_TOKEN)
    if not user_info or 'id' not in user_info:
        return jsonify({"status": "error", "message": "Не авторизован"}), 403

    tg_user_id = str(user_info['id'])

    try:
        db_data = load_data()
    except Exception as e:
        logging.error(f"Failed to load data during delete_folder for user {tg_user_id}: {e}")
        return jsonify({"status": "error", "message": "Ошибка загрузки данных пользователя."}), 500

    user_data = db_data.get('users', {}).get(tg_user_id)
    if not user_data or 'filesystem' not in user_data:
        logging.error(f"User data or filesystem missing for delete_folder user {tg_user_id}")
        return jsonify({"status": "error", "message": "Ошибка данных пользователя"}), 500

    folder_node, parent_node = find_node_by_id(user_data['filesystem'], folder_id)

    # Only real folder nodes with a known parent may be deleted (root has no parent).
    if not folder_node or not isinstance(folder_node, dict) or folder_node.get('type') != 'folder' or not parent_node:
        logging.warning(f"Folder node {folder_id} not found or invalid for deletion by user {tg_user_id}")
        return jsonify({'status': 'error', 'message': 'Папка не найдена или не может быть удалена.'}), 404

    folder_name = folder_node.get('name', 'папка')

    # Refuse to delete non-empty folders: no recursive delete is implemented.
    if folder_node.get('children'):
        logging.warning(f"Attempt to delete non-empty folder {folder_id} by user {tg_user_id}")
        return jsonify({'status': 'error', 'message': f'Папку "{folder_name}" можно удалить только если она пуста.'}), 400

    # Remove the folder node from the filesystem structure, then persist.
    if remove_node(user_data['filesystem'], folder_id):
        logging.info(f"Removed empty folder node {folder_id} ('{folder_name}') from DB for user {tg_user_id}")
        if save_data(db_data):
            return jsonify({'status': 'ok', 'message': f'Папка "{folder_name}" успешно удалена.'})
        else:
            # Removed in memory but not persisted: inconsistent state, report loudly.
            logging.error(f"CRITICAL: Delete folder DB save error for user {tg_user_id} after successful removal. State inconsistent.")
            return jsonify({'status': 'error', 'message': 'Папка удалена из структуры, но ПРОИЗОШЛА КРИТИЧЕСКАЯ ОШИБКА сохранения базы данных.'}), 500
    else:
        # Unexpected: find_node_by_id located the node, yet removal failed.
        logging.error(f"CRITICAL: Failed to remove empty folder node {folder_id} from DB for {tg_user_id} after successful find.")
        return jsonify({'status': 'error', 'message': 'Не удалось удалить папку из базы данных (внутренняя ошибка).'}), 500
+
+
@app.route('/get_text_content/<file_id>')
def get_text_content_route(file_id):
    """Return the decoded text content of a stored text file (always served as UTF-8).

    Searches all users' filesystems for a file node with file_type == 'text',
    fetches it from the HF dataset repo, enforces a 2 MB preview limit and
    tries several encodings before falling back to lossy UTF-8 decoding.

    NOTE: the route previously lacked the '<file_id>' URL variable, so Flask
    could never bind the view's parameter; restored here.
    """
    try:
        db_data = load_data()
    except Exception as e:
        logging.error(f"Failed to load data for text content request file_id {file_id}: {e}")
        return Response("Ошибка сервера при получении данных файла", status=500)

    file_node = None
    owner_user_id = None

    # File IDs are globally unique, so scan every user's tree until found.
    for user_id, user_data in db_data.get('users', {}).items():
        if isinstance(user_data, dict) and 'filesystem' in user_data:
            node, _ = find_node_by_id(user_data['filesystem'], file_id)
            if node and isinstance(node, dict) and node.get('type') == 'file' and node.get('file_type') == 'text':
                file_node = node
                owner_user_id = user_id
                break

    if not file_node:
        return Response("Текстовый файл не найден", status=404)

    hf_path = file_node.get('path')
    if not hf_path:
        logging.error(f"Missing HF path for text file ID {file_id} (owner: {owner_user_id})")
        return Response("Ошибка: путь к файлу отсутствует", status=500)

    file_url = f"https://huggingface.co/datasets/{REPO_ID}/resolve/main/{hf_path}?download=true"
    logging.info(f"Fetching text content for file {file_id} from {file_url}")

    try:
        headers = {}
        if HF_TOKEN_READ:
            headers["authorization"] = f"Bearer {HF_TOKEN_READ}"

        # Use a reasonable timeout for potentially small text files.
        response = requests.get(file_url, headers=headers, timeout=20)
        response.raise_for_status()

        # Limit preview size to prevent memory issues with huge text files.
        max_preview_size = 2 * 1024 * 1024  # 2 MB limit for text preview
        content_length = response.headers.get('Content-Length')
        if content_length and int(content_length) > max_preview_size:
            logging.warning(f"Text file {file_id} ({hf_path}) is too large for preview ({content_length} bytes).")
            return Response(f"Файл слишком большой для предпросмотра (>{max_preview_size // 1024 // 1024}MB).", status=413)  # Payload Too Large

        content = response.content
        # Double check the actual body size in case Content-Length was missing.
        if len(content) > max_preview_size:
            logging.warning(f"Text file {file_id} ({hf_path}) content length exceeds preview limit ({len(content)} bytes).")
            return Response(f"Файл слишком большой для предпросмотра (>{max_preview_size // 1024 // 1024}MB).", status=413)

        text_content = None
        # Try common encodings in order of likelihood for this user base.
        encodings_to_try = ['utf-8', 'cp1251', 'latin-1', 'utf-16']
        for enc in encodings_to_try:
            try:
                text_content = content.decode(enc)
                logging.info(f"Decoded text file {file_id} using {enc}")
                break
            except UnicodeDecodeError:
                continue
            except Exception as decode_err:  # e.g. utf-16 BOM/odd-length issues
                logging.warning(f"Error decoding {file_id} with {enc}: {decode_err}")
                continue

        if text_content is None:
            logging.error(f"Could not decode text file {file_id} ({hf_path}) with tried encodings.")
            # Last resort: lossy decode so the user at least sees something.
            try:
                text_content = content.decode('utf-8', errors='ignore')
                logging.warning(f"Decoded text file {file_id} using utf-8 with ignored errors.")
            except Exception:
                return Response("Не удалось определить кодировку файла или произошла ошибка декодирования.", status=500)

        return Response(text_content, mimetype='text/plain; charset=utf-8')  # Always serve as UTF-8

    except requests.exceptions.HTTPError as e:
        status_code = e.response.status_code
        logging.error(f"HTTP Error {status_code} fetching text content from HF ({hf_path}, owner {owner_user_id}): {e}")
        message = "Ошибка загрузки содержимого"
        if status_code == 404: message = "Файл не найден на сервере."
        return Response(f"{message} ({status_code})", status=status_code)
    except requests.exceptions.Timeout:
        logging.error(f"Timeout fetching text content from HF ({hf_path}, owner {owner_user_id})")
        return Response("Тайм-аут при загрузке содержимого.", status=504)
    except requests.exceptions.RequestException as e:
        logging.error(f"Network error fetching text content from HF ({hf_path}, owner {owner_user_id}): {e}")
        return Response("Сетевая ошибка при загрузке содержимого.", status=502)
    except Exception as e:
        logging.error(f"Unexpected error fetching text content ({hf_path}, owner {owner_user_id}): {e}", exc_info=True)
        return Response("Внутренняя ошибка сервера при обработке текстового файла.", status=500)
+
+
@app.route('/preview_thumb/<file_id>')
def preview_thumb_route(file_id):
    """Stream an image file from the HF dataset repo as a preview thumbnail.

    Searches all users' filesystems for a file node with file_type == 'image'
    and proxies the HF response in 64 KB chunks without buffering it server-side.

    NOTE: the route previously lacked the '<file_id>' URL variable, so Flask
    could never bind the view's parameter; restored here.
    """
    try:
        db_data = load_data()
    except Exception as e:
        logging.error(f"Failed to load data for preview thumb request file_id {file_id}: {e}")
        return Response("Ошибка сервера", status=500)

    file_node = None
    owner_user_id = None

    # File IDs are globally unique, so scan every user's tree until found.
    for user_id, user_data in db_data.get('users', {}).items():
        if isinstance(user_data, dict) and 'filesystem' in user_data:
            node, _ = find_node_by_id(user_data['filesystem'], file_id)
            if node and isinstance(node, dict) and node.get('type') == 'file' and node.get('file_type') == 'image':
                file_node = node
                owner_user_id = user_id
                break

    if not file_node: return Response("Изображение не найдено", status=404)
    hf_path = file_node.get('path')
    if not hf_path: return Response("Путь к файлу не найден", status=500)

    # Use the non-download link for potential browser caching / direct rendering.
    file_url = f"https://huggingface.co/datasets/{REPO_ID}/resolve/main/{hf_path}"
    logging.info(f"Fetching image preview for {file_id} from {file_url}")

    try:
        headers = {}
        if HF_TOKEN_READ: headers["authorization"] = f"Bearer {HF_TOKEN_READ}"
        # stream=True avoids loading large images fully into memory server-side.
        response = requests.get(file_url, headers=headers, stream=True, timeout=30)
        response.raise_for_status()

        content_type = response.headers.get('Content-Type', 'image/jpeg')  # Default guess
        if not content_type.startswith('image/'):
            logging.warning(f"Unexpected Content-Type '{content_type}' for image preview {file_id} ({hf_path}).")
            # Serve it anyway; the browser will decide what to do with it.

        return Response(response.iter_content(chunk_size=65536), mimetype=content_type)

    except requests.exceptions.HTTPError as e:
        status_code = e.response.status_code
        logging.error(f"HTTP Error {status_code} fetching preview from HF ({hf_path}, owner: {owner_user_id}): {e}")
        message = "Ошибка загрузки превью"
        if status_code == 404: message = "Превью не найдено."
        return Response(f"{message} ({status_code})", status=status_code)
    except requests.exceptions.Timeout:
        logging.error(f"Timeout fetching preview from HF ({hf_path}, owner: {owner_user_id})")
        return Response("Тайм-аут загрузки превью.", status=504)
    except requests.exceptions.RequestException as e:
        logging.error(f"Network error fetching preview from HF ({hf_path}, owner: {owner_user_id}): {e}")
        return Response("Сетевая ошибка загрузки превью.", status=502)
    except Exception as e:
        logging.error(f"Unexpected error during preview ({hf_path}, owner: {owner_user_id}): {e}", exc_info=True)
        return Response("Внутренняя ошибка сервера при загрузке превью.", status=500)
+
+
if __name__ == '__main__':
    # Startup self-check: report which credentials are configured before serving.
    print("*"*60)
    if not BOT_TOKEN or BOT_TOKEN == 'YOUR_BOT_TOKEN':
        logging.critical("CRITICAL: TELEGRAM_BOT_TOKEN is not set properly.")
        print(" CRITICAL: TELEGRAM_BOT_TOKEN is not set or is default.")
        print(" Telegram authentication WILL FAIL. Set the environment variable.")
    else:
        print(f" BOT_TOKEN detected (Length: {len(BOT_TOKEN)}).")

    if not HF_TOKEN_WRITE:
        logging.warning("HF_TOKEN_WRITE (write access) is not set. File uploads/deletions will fail.")
        print(" WARNING: HF_TOKEN_WRITE is not set. Uploads/deletes disabled.")
    else:
        print(f" HF_TOKEN_WRITE detected (Length: {len(HF_TOKEN_WRITE)}). Uploads/deletes enabled.")

    if not HF_TOKEN_READ:
        logging.warning("HF_TOKEN_READ is not set. Will use HF_TOKEN_WRITE if available, else downloads/previews might fail for private repos.")
        print(" WARNING: HF_TOKEN_READ is not set. Downloads/previews might fail for private repos.")
    else:
        print(f" HF_TOKEN_READ detected (Length: {len(HF_TOKEN_READ)}).")
    print("*"*60)

    # Fail fast if the database cannot be loaded at all.
    logging.info("Attempting initial database load/download...")
    try:
        initial_data = load_data()
        user_count = len(initial_data.get('users', {}))
        logging.info(f"Initial data load complete. Found {user_count} user(s).")
    except Exception as e:
        logging.critical(f"FATAL: Could not perform initial data load: {e}", exc_info=True)
        print("\nFATAL ERROR DURING INITIAL DATA LOAD. Check logs. Exiting.")
        # raise SystemExit instead of site-provided exit(): works even under `python -S`.
        raise SystemExit(1)

    print("Starting Flask server on 0.0.0.0:7860...")
    # Use waitress or gunicorn in production instead of app.run(debug=False).
    # For simplicity here, we keep app.run.
    app.run(debug=False, host='0.0.0.0', port=7860)
+
+# END OF FILE