diff --git "a/app.py" "b/app.py" --- "a/app.py" +++ "b/app.py" @@ -1,268 +1,143 @@ - -# --- START OF FILE app (24).py --- - -import os -import hmac -import hashlib -import json -from urllib.parse import unquote, parse_qsl, urlencode -from flask import Flask, request, jsonify, Response, send_file +# --- START OF FILE app.py --- +from flask import Flask, render_template_string, request, redirect, url_for, session, flash, send_file, jsonify, Response from flask_caching import Cache +import json +import os import logging import threading import time -from datetime import datetime +from datetime import datetime, timedelta, timezone from huggingface_hub import HfApi, hf_hub_download, utils as hf_utils from werkzeug.utils import secure_filename import requests from io import BytesIO import uuid -from typing import Union, Optional -import shutil +import hmac +import hashlib +import urllib.parse +from functools import wraps app = Flask(__name__) -app.secret_key = os.getenv("FLASK_SECRET_KEY", "supersecretkey_mini_app_unique_v2") -BOT_TOKEN = os.getenv('TELEGRAM_BOT_TOKEN', '6750208873:AAE2hvPlJ99dBdhGa_Brre0IIpUdOvXxHt4') -DATA_FILE = 'cloudeng_mini_app_data.json' -DATA_FILE_TEMP = DATA_FILE + '.tmp' -DATA_FILE_BACKUP = DATA_FILE + '.bak' +app.secret_key = os.getenv("FLASK_SECRET_KEY", "supersecretkey_folders_tma_unique") +DATA_FILE = 'cloudeng_tma_data.json' REPO_ID = "Eluza133/Z1e1u" HF_TOKEN_WRITE = os.getenv("HF_TOKEN") HF_TOKEN_READ = os.getenv("HF_TOKEN_READ") or HF_TOKEN_WRITE -UPLOAD_FOLDER = 'uploads_mini_app' +TELEGRAM_BOT_TOKEN = os.getenv("TELEGRAM_BOT_TOKEN") +ADMIN_TELEGRAM_IDS_STR = os.getenv("ADMIN_TELEGRAM_IDS", "") +ADMIN_TELEGRAM_IDS = {int(admin_id.strip()) for admin_id in ADMIN_TELEGRAM_IDS_STR.split(',') if admin_id.strip().isdigit()} + +UPLOAD_FOLDER = 'uploads' os.makedirs(UPLOAD_FOLDER, exist_ok=True) cache = Cache(app, config={'CACHE_TYPE': 'simple'}) -logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s') 
+logging.basicConfig(level=logging.INFO) -AUTH_DATA_LIFETIME = 3600 +# --- Data Handling --- def find_node_by_id(filesystem, node_id): - if not filesystem or not isinstance(filesystem, dict): - return None, None + if not filesystem: return None, None if filesystem.get('id') == node_id: return filesystem, None queue = [(filesystem, None)] - visited = {filesystem.get('id')} - while queue: current_node, parent = queue.pop(0) - node_type = current_node.get('type') - node_children = current_node.get('children') - - if node_type == 'folder' and isinstance(node_children, list): - for child in node_children: - if not isinstance(child, dict): continue - child_id = child.get('id') - if not child_id: continue - - if child_id == node_id: + if current_node.get('type') == 'folder' and 'children' in current_node: + for i, child in enumerate(current_node['children']): + if child.get('id') == node_id: return child, current_node - if child_id not in visited and child.get('type') == 'folder': - visited.add(child_id) + if child.get('type') == 'folder': queue.append((child, current_node)) return None, None def add_node(filesystem, parent_id, node_data): parent_node, _ = find_node_by_id(filesystem, parent_id) if parent_node and parent_node.get('type') == 'folder': - if 'children' not in parent_node or not isinstance(parent_node['children'], list): + if 'children' not in parent_node: parent_node['children'] = [] - existing_ids = {child.get('id') for child in parent_node['children'] if isinstance(child, dict)} - if node_data.get('id') not in existing_ids: - parent_node['children'].append(node_data) - return True + parent_node['children'].append(node_data) + return True return False def remove_node(filesystem, node_id): node_to_remove, parent_node = find_node_by_id(filesystem, node_id) - if node_to_remove and parent_node and 'children' in parent_node and isinstance(parent_node['children'], list): - original_length = len(parent_node['children']) - parent_node['children'] = [child for child 
in parent_node['children'] if not isinstance(child, dict) or child.get('id') != node_id] - return len(parent_node['children']) < original_length - if node_to_remove and node_id == filesystem.get('id'): - logging.warning("Attempted to remove root node directly.") - return False + if node_to_remove and parent_node and 'children' in parent_node: + parent_node['children'] = [child for child in parent_node['children'] if child.get('id') != node_id] + return True + # Handle removing from root if parent is root + elif node_to_remove and filesystem and filesystem.get('id') == 'root' and node_id != 'root': + if 'children' in filesystem: + original_len = len(filesystem['children']) + filesystem['children'] = [child for child in filesystem['children'] if child.get('id') != node_id] + return len(filesystem['children']) < original_len return False -def get_node_path_list(filesystem, node_id): + +def get_node_path_string(filesystem, node_id): path_list = [] current_id = node_id - processed_ids = set() - max_depth = 20 - depth = 0 - while current_id and current_id not in processed_ids and depth < max_depth: - processed_ids.add(current_id) - depth += 1 + while current_id: node, parent = find_node_by_id(filesystem, current_id) - - if not node or not isinstance(node, dict): - logging.error(f"Path traversal failed: Node not found or invalid for ID {current_id}") - break - - path_list.append({ - 'id': node.get('id'), - 'name': node.get('name', node.get('original_filename', 'Unknown')) - }) - - if not parent or not isinstance(parent, dict): - if node.get('id') != 'root': - logging.warning(f"Node {current_id} has no parent, stopping path traversal.") + if not node: break + if node.get('id') != 'root': + path_list.append(node.get('name', node.get('original_filename', ''))) + if not parent and node.get('id') != 'root': # Reached root's child without finding root explicitly break - - parent_id = parent.get('id') - if parent_id == current_id: - logging.error(f"Filesystem loop detected at 
node {current_id}") + if not parent and node.get('id') == 'root': # Is root break - current_id = parent_id + current_id = parent.get('id') if parent else None - if not any(p['id'] == 'root' for p in path_list): - root_node, _ = find_node_by_id(filesystem, 'root') - if root_node: - path_list.append({'id': 'root', 'name': root_node.get('name', 'Root')}) - else: - path_list.append({'id': 'root', 'name': 'Root'}) - - - final_path = [] - seen_ids = set() - for item in reversed(path_list): - if item['id'] not in seen_ids: - final_path.append(item) - seen_ids.add(item['id']) - return final_path + return " / ".join(reversed(path_list)) or "Root" def initialize_user_filesystem(user_data): - if not isinstance(user_data, dict): - logging.error("Invalid user_data passed to initialize_user_filesystem") - return - if 'filesystem' not in user_data or not isinstance(user_data.get('filesystem'), dict) or not user_data['filesystem'].get('id') == 'root': - logging.warning(f"Initializing/Resetting filesystem for user data fragment: {str(user_data)[:100]}") + if 'filesystem' not in user_data: user_data['filesystem'] = { "type": "folder", "id": "root", - "name": "Root", + "name": "root", "children": [] } - elif 'children' not in user_data['filesystem'] or not isinstance(user_data['filesystem']['children'], list): - logging.warning(f"Fixing missing/invalid children array for root filesystem: {str(user_data)[:100]}") - user_data['filesystem']['children'] = [] - -def load_data_from_file(filepath): +@cache.memoize(timeout=300) +def load_data(): try: - with open(filepath, 'r', encoding='utf-8') as file: + download_db_from_hf() + with open(DATA_FILE, 'r', encoding='utf-8') as file: data = json.load(file) if not isinstance(data, dict): - logging.warning(f"Data in {filepath} is not a dict, using empty.") + logging.warning("Data is not in dict format, initializing empty database") return {'users': {}} data.setdefault('users', {}) - # Deep check and initialization - users_copy = data.get('users', 
{}) - if not isinstance(users_copy, dict): - logging.warning(f"Users field in {filepath} is not a dict, resetting users.") - data['users'] = {} - return data - - for user_id, user_data in list(users_copy.items()): # Use list to allow potential removal during iteration - if not isinstance(user_data, dict): - logging.warning(f"Invalid user data structure for user {user_id} in {filepath}, removing entry.") - del data['users'][user_id] - continue - initialize_user_filesystem(user_data) - logging.info(f"Data loaded successfully from {filepath}") + # Ensure keys are strings for JSON, but represent integers + data['users'] = {str(k): v for k, v in data['users'].items()} + for user_id_str, user_data in data['users'].items(): + initialize_user_filesystem(user_data) + logging.info("Data successfully loaded and initialized") return data - except FileNotFoundError: - logging.warning(f"{filepath} not found.") - return None - except json.JSONDecodeError: - logging.error(f"Error decoding JSON from {filepath}.") - return None except Exception as e: - logging.error(f"Error loading data from {filepath}: {e}") - return None - -@cache.memoize(timeout=60) -def load_data(): - logging.info("Attempting to load data...") - # 1. Try to download from HF - download_success = download_db_from_hf() - - # 2. Try loading the main file - data = load_data_from_file(DATA_FILE) - if data is not None: - logging.info("Using main data file.") - return data - - # 3. If main file failed or didn't exist (and download might have failed), try backup - logging.warning("Main data file failed to load or not found, trying backup.") - data = load_data_from_file(DATA_FILE_BACKUP) - if data is not None: - logging.info("Using backup data file.") - # Attempt to restore main file from backup - try: - shutil.copy(DATA_FILE_BACKUP, DATA_FILE) - logging.info(f"Restored {DATA_FILE} from {DATA_FILE_BACKUP}") - except Exception as e: - logging.error(f"Failed to restore main file from backup: {e}") - return data - - # 4. 
If both fail, initialize empty structure - logging.error("Both main and backup data files are missing or corrupt. Initializing empty data.") - return {'users': {}} - + logging.error(f"Error loading data: {e}") + return {'users': {}} def save_data(data): - if not isinstance(data, dict) or not isinstance(data.get('users'), dict): - logging.critical(f"CRITICAL: Attempted to save invalid data structure: {str(data)[:200]}. Aborting save.") - # Optionally raise an exception or handle more gracefully - return False # Indicate save failure - try: - # Write to temporary file first - with open(DATA_FILE_TEMP, 'w', encoding='utf-8') as file: + # Ensure user keys are strings for JSON dump + data['users'] = {str(k): v for k, v in data['users'].items()} + with open(DATA_FILE, 'w', encoding='utf-8') as file: json.dump(data, file, ensure_ascii=False, indent=4) - - # If temporary write succeeded, create backup and then rename - if os.path.exists(DATA_FILE): - try: - shutil.copy(DATA_FILE, DATA_FILE_BACKUP) # More robust than rename for backup - logging.info(f"Created backup: {DATA_FILE_BACKUP}") - except Exception as e: - logging.warning(f"Could not create backup file {DATA_FILE_BACKUP}: {e}") - - shutil.move(DATA_FILE_TEMP, DATA_FILE) # Atomic rename/move - - cache.clear() # Clear cache after successful save - logging.info("Data saved successfully to " + DATA_FILE) - - # Schedule HF upload (run_as_future makes it non-blocking) - upload_thread = threading.Thread(target=upload_db_to_hf) - upload_thread.start() - return True # Indicate save success - + upload_db_to_hf() + cache.clear() + logging.info("Data saved and uploaded to HF") except Exception as e: logging.error(f"Error saving data: {e}") - # Clean up temp file if it exists - if os.path.exists(DATA_FILE_TEMP): - try: - os.remove(DATA_FILE_TEMP) - except OSError as e_rm: - logging.error(f"Error removing temporary save file {DATA_FILE_TEMP}: {e_rm}") - return False # Indicate save failure - + raise def upload_db_to_hf(): if not 
HF_TOKEN_WRITE: logging.warning("HF_TOKEN_WRITE not set, skipping database upload.") return - if not os.path.exists(DATA_FILE): - logging.warning(f"Data file {DATA_FILE} not found for upload.") - return - try: api = HfApi() api.upload_file( @@ -271,2137 +146,1694 @@ def upload_db_to_hf(): repo_id=REPO_ID, repo_type="dataset", token=HF_TOKEN_WRITE, - commit_message=f"Backup MiniApp {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}", - # run_as_future=True # Already running in a separate thread from save_data + commit_message=f"Backup {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}" ) - logging.info("Database upload to Hugging Face completed.") + logging.info("Database uploaded to Hugging Face") except Exception as e: - logging.error(f"Error during database upload: {e}") - + logging.error(f"Error uploading database: {e}") def download_db_from_hf(): if not HF_TOKEN_READ: logging.warning("HF_TOKEN_READ not set, skipping database download.") - return False - - local_path_tmp = DATA_FILE + ".hf_download" + if not os.path.exists(DATA_FILE): + with open(DATA_FILE, 'w', encoding='utf-8') as f: + json.dump({'users': {}}, f) + return try: - logging.info(f"Attempting download of {DATA_FILE} from {REPO_ID}") hf_hub_download( repo_id=REPO_ID, filename=DATA_FILE, repo_type="dataset", token=HF_TOKEN_READ, local_dir=".", - local_dir_use_symlinks=False, - force_download=True, # Ensure we get the latest - etag_timeout=10, - resume_download=False, - cache_dir=None, # Don't use HF cache, write directly - local_path=local_path_tmp # Download to temp file first + local_dir_use_symlinks=False ) - - # Verify downloaded file is valid JSON before replacing - if load_data_from_file(local_path_tmp) is not None: - shutil.move(local_path_tmp, DATA_FILE) - logging.info("Database downloaded successfully from Hugging Face and verified.") - cache.clear() # Clear cache as data might have changed - return True - else: - logging.error("Downloaded database file is invalid JSON. 
Discarding download.") - os.remove(local_path_tmp) - return False - - except hf_utils.RepositoryNotFoundError: - logging.error(f"Repository {REPO_ID} not found on Hugging Face.") - return False - except hf_utils.EntryNotFoundError: - logging.warning(f"{DATA_FILE} not found in repo {REPO_ID}. Using local/backup if available.") - return False - except requests.exceptions.RequestException as e: - logging.error(f"Connection error downloading DB from HF: {e}. Using local/backup.") - return False + logging.info("Database downloaded from Hugging Face") + except (hf_utils.RepositoryNotFoundError, hf_utils.EntryNotFoundError) as e: + logging.warning(f"Database file not found on HF Hub ({e}). Initializing empty database if local file doesn't exist.") + if not os.path.exists(DATA_FILE): + with open(DATA_FILE, 'w', encoding='utf-8') as f: + json.dump({'users': {}}, f) except Exception as e: - logging.error(f"Generic error downloading database: {e}") - # Clean up potentially partial download - if os.path.exists(local_path_tmp): - try: os.remove(local_path_tmp) - except OSError: pass - return False + logging.error(f"Error downloading database: {e}") + if not os.path.exists(DATA_FILE): + with open(DATA_FILE, 'w', encoding='utf-8') as f: + json.dump({'users': {}}, f) + +def periodic_backup(): + while True: + time.sleep(1800) + try: + # Reload data before backup to ensure consistency if save_data wasn't called recently + current_data = load_data() + upload_db_to_hf() # Uses the local file which might be slightly older, but save_data triggers upload anyway + logging.info("Periodic backup check complete.") + except Exception as e: + logging.error(f"Error during periodic backup: {e}") def get_file_type(filename): - if not filename or '.' 
not in filename: return 'other' - ext = filename.lower().split('.')[-1] - if ext in ['mp4', 'mov', 'avi', 'webm', 'mkv', 'm4v', 'quicktime']: return 'video' - if ext in ['jpg', 'jpeg', 'png', 'gif', 'bmp', 'webp', 'svg', 'heic', 'heif']: return 'image' - if ext == 'pdf': return 'pdf' - if ext in ['txt', 'md', 'log', 'csv', 'json', 'xml', 'html', 'css', 'js', 'py', 'java', 'c', 'cpp']: return 'text' - if ext in ['doc', 'docx', 'rtf']: return 'doc' - if ext in ['xls', 'xlsx']: return 'sheet' - if ext in ['ppt', 'pptx']: return 'slides' - if ext in ['zip', 'rar', '7z', 'gz', 'tar']: return 'archive' - if ext in ['mp3', 'wav', 'ogg', 'flac', 'aac', 'm4a']: return 'audio' + filename_lower = filename.lower() + if filename_lower.endswith(('.mp4', '.mov', '.avi', '.webm', '.mkv')): + return 'video' + elif filename_lower.endswith(('.jpg', '.jpeg', '.png', '.gif', '.bmp', '.webp', '.svg')): + return 'image' + elif filename_lower.endswith('.pdf'): + return 'pdf' + elif filename_lower.endswith('.txt'): + return 'text' return 'other' +# --- Telegram Auth --- + +def verify_telegram_data(init_data_str, bot_token): + if not init_data_str or not bot_token: + return None, "No initData or bot token provided" -def check_telegram_authorization(auth_data: str, bot_token: str) -> Optional[dict]: - if not auth_data or not bot_token or bot_token == 'YOUR_BOT_TOKEN': - logging.warning("Validation skipped: Missing auth_data or valid BOT_TOKEN.") - return None # Consider returning a specific error? 
try: - parsed_data = dict(parse_qsl(unquote(auth_data))) - if "hash" not in parsed_data: - logging.error("Hash not found in auth data") - return None - - telegram_hash = parsed_data.pop('hash') - auth_date_ts = int(parsed_data.get('auth_date', 0)) - current_ts = int(time.time()) - - if abs(current_ts - auth_date_ts) > AUTH_DATA_LIFETIME: - logging.warning(f"Auth data expired (Auth: {auth_date_ts}, Now: {current_ts}, Diff: {current_ts - auth_date_ts})") - # return None # Temporarily disable expiration check for easier testing if needed - pass # Allow expired data for now, maybe add strict mode later - - data_check_string = "\n".join(sorted([f"{k}={v}" for k, v in parsed_data.items()])) - secret_key = hmac.new("WebAppData".encode(), bot_token.encode(), hashlib.sha256).digest() - calculated_hash = hmac.new(secret_key, data_check_string.encode(), hashlib.sha256).hexdigest() - - if hmac.compare_digest(calculated_hash, telegram_hash): - user_data_str = parsed_data.get('user') - if user_data_str: - try: - user_info = json.loads(user_data_str) - if 'id' not in user_info: - logging.error("Validated user data missing 'id'") - return None - logging.info(f"Validation successful for user ID: {user_info.get('id')}") - return user_info - except json.JSONDecodeError: - logging.error("Failed to decode user JSON from auth data") - return None - else: - logging.warning("No 'user' field in validated auth data") - return None - else: - logging.warning("Hash mismatch during validation") - return None + parsed_data = urllib.parse.parse_qs(init_data_str) except Exception as e: - logging.error(f"Exception during validation: {e}", exc_info=True) - return None - - -HTML_TEMPLATE = """ - - - - - - Cloud Eng - - - - -
- -

Loading...

-
- - -
-

Cloud Eng

-
- - - -
-
- - -
- -
- - - -
-
-
- -

Files

-
- -
- -
- - - -
-
-

Actions

-
- -
- -
- -
+ try: + user_data = json.loads(parsed_data['user'][0]) + # Ensure essential fields are present + if 'id' not in user_data: + return None, "User data missing 'id'" + # Make sure id is an integer + user_data['id'] = int(user_data['id']) + return user_data, None # Success + except (json.JSONDecodeError, KeyError, ValueError) as e: + return None, f"Could not parse user data: {e}" + +# --- Decorators --- + +def login_required(f): + @wraps(f) + def decorated_function(*args, **kwargs): + if 'telegram_user' not in session: + return jsonify({"status": "error", "message": "Authentication required", "action": "reload"}), 401 + return f(*args, **kwargs) + return decorated_function + +def admin_required(f): + @wraps(f) + def decorated_function(*args, **kwargs): + if 'telegram_user' not in session: + flash('Authentication required.', 'error') + return redirect(url_for('index')) # Redirect to main page for re-auth attempt + user_id = session['telegram_user'].get('id') + if not user_id or user_id not in ADMIN_TELEGRAM_IDS: + flash('Admin access required.', 'error') + return redirect(url_for('dashboard')) # Redirect non-admins to their dashboard + return f(*args, **kwargs) + return decorated_function + + +# --- Styling --- +BASE_STYLE = ''' +:root { + --primary: #ff4d6d; --secondary: #00ddeb; --accent: #8b5cf6; + --background-light: #f5f6fa; --background-dark: #1a1625; + --card-bg: rgba(255, 255, 255, 0.95); --card-bg-dark: rgba(40, 35, 60, 0.95); + --text-light: #2a1e5a; --text-dark: #e8e1ff; --shadow: 0 10px 30px rgba(0, 0, 0, 0.2); + --glass-bg: rgba(255, 255, 255, 0.15); --transition: all 0.3s ease; --delete-color: #ff4444; + --folder-color: #ffc107; + --tg-theme-bg-color: var(--background-light); + --tg-theme-text-color: var(--text-light); + --tg-theme-button-color: var(--primary); + --tg-theme-button-text-color: #ffffff; + --tg-theme-hint-color: #aaa; +} +html.dark-mode { + --tg-theme-bg-color: var(--background-dark); + --tg-theme-text-color: var(--text-dark); +} + +* { 
margin: 0; padding: 0; box-sizing: border-box; } +body { font-family: 'Inter', sans-serif; background: var(--tg-theme-bg-color); color: var(--tg-theme-text-color); line-height: 1.6; padding-bottom: 60px; /* Space for potential main button */ } +.container { margin: 10px auto; max-width: 1200px; padding: 15px; background: var(--card-bg); border-radius: 15px; box-shadow: var(--shadow); overflow-x: hidden; } +html.dark-mode .container { background: var(--card-bg-dark); } +h1 { font-size: 1.8em; font-weight: 800; text-align: center; margin-bottom: 20px; background: linear-gradient(135deg, var(--primary), var(--accent)); -webkit-background-clip: text; color: transparent; } +h2 { font-size: 1.4em; margin-top: 25px; margin-bottom: 10px; color: var(--tg-theme-text-color); } +h4 { font-size: 1.1em; margin-top: 15px; margin-bottom: 5px; color: var(--accent); } +ol, ul { margin-left: 20px; margin-bottom: 15px; } +li { margin-bottom: 5px; } +input, textarea { width: 100%; padding: 12px; margin: 10px 0; border: none; border-radius: 12px; background: var(--glass-bg); color: var(--tg-theme-text-color); font-size: 1em; box-shadow: inset 0 2px 8px rgba(0, 0, 0, 0.1); } +html.dark-mode input, html.dark-mode textarea { background: rgba(255, 255, 255, 0.1); } +input:focus, textarea:focus { outline: none; box-shadow: 0 0 0 3px var(--primary); } +.btn { padding: 12px 24px; background: var(--tg-theme-button-color); color: var(--tg-theme-button-text-color); border: none; border-radius: 12px; cursor: pointer; font-size: 1em; font-weight: 600; transition: var(--transition); box-shadow: var(--shadow); display: inline-block; text-decoration: none; margin-top: 5px; margin-right: 5px; } +.btn:hover { filter: brightness(1.1); transform: scale(1.03); } +.download-btn { background: var(--secondary); color: #fff; } +.download-btn:hover { background: #00b8c5; } +.delete-btn { background: var(--delete-color); color: #fff; } +.delete-btn:hover { background: #cc3333; } +.folder-btn { background: 
var(--folder-color); color: #fff; } +.folder-btn:hover { background: #e6a000; } +.flash { color: var(--secondary); text-align: center; margin-bottom: 15px; padding: 10px; background: rgba(0, 221, 235, 0.1); border-radius: 10px; font-size: 0.9em; } +.flash.error { color: var(--delete-color); background: rgba(255, 68, 68, 0.1); } +.file-grid { display: grid; grid-template-columns: repeat(auto-fill, minmax(160px, 1fr)); gap: 15px; margin-top: 15px; } +.user-list { margin-top: 20px; } +.user-item { padding: 15px; background: var(--card-bg); border-radius: 16px; margin-bottom: 10px; box-shadow: var(--shadow); transition: var(--transition); display: flex; align-items: center; gap: 15px;} +html.dark-mode .user-item { background: var(--card-bg-dark); } +.user-item:hover { transform: translateY(-3px); } +.user-item img.avatar { width: 50px; height: 50px; border-radius: 50%; object-fit: cover; } +.user-item .user-info { flex-grow: 1; } +.user-item a { color: var(--primary); text-decoration: none; font-weight: 600; } +.user-item a:hover { color: var(--accent); } +.item { background: var(--card-bg); padding: 12px; border-radius: 14px; box-shadow: var(--shadow); text-align: center; transition: var(--transition); display: flex; flex-direction: column; justify-content: space-between; } +html.dark-mode .item { background: var(--card-bg-dark); } +.item:hover { transform: translateY(-4px); } +.item-preview { max-width: 100%; height: 110px; object-fit: cover; border-radius: 8px; margin-bottom: 8px; cursor: pointer; display: block; margin-left: auto; margin-right: auto;} +.item.folder .item-preview { object-fit: contain; font-size: 50px; color: var(--folder-color); line-height: 110px; } +.item p { font-size: 0.85em; margin: 4px 0; word-break: break-all; } +.item a { color: var(--primary); text-decoration: none; } +.item a:hover { color: var(--accent); } +.item-actions { margin-top: 8px; display: flex; flex-wrap: wrap; gap: 4px; justify-content: center; } +.item-actions .btn { 
font-size: 0.8em; padding: 4px 8px; } +.modal { display: none; position: fixed; top: 0; left: 0; width: 100%; height: 100%; background: rgba(0, 0, 0, 0.85); z-index: 2000; justify-content: center; align-items: center; } +.modal-content { max-width: 95%; max-height: 95%; background: var(--tg-theme-bg-color); padding: 10px; border-radius: 15px; overflow: auto; position: relative; } +.modal img, .modal video, .modal iframe, .modal pre { max-width: 100%; max-height: 85vh; display: block; margin: auto; border-radius: 10px; } +.modal iframe { width: 90vw; height: 85vh; border: none; background: #fff; } +html.dark-mode .modal iframe { background: #333; } +.modal pre { background: #eee; color: #333; padding: 15px; border-radius: 8px; white-space: pre-wrap; word-wrap: break-word; text-align: left; max-height: 85vh; overflow-y: auto; font-size: 0.9em;} +html.dark-mode .modal pre { background: #2b2a33; color: var(--text-dark); } +.modal-close-btn { position: absolute; top: 10px; right: 15px; font-size: 24px; color: #aaa; cursor: pointer; background: rgba(0,0,0,0.5); border-radius: 50%; width: 25px; height: 25px; line-height: 25px; text-align: center; } +html.dark-mode .modal-close-btn { color: #555; background: rgba(255,255,255,0.2); } +#progress-container { width: 100%; background: var(--glass-bg); border-radius: 8px; margin: 12px 0; display: none; position: relative; height: 18px; } +html.dark-mode #progress-container { background: rgba(255, 255, 255, 0.1); } +#progress-bar { width: 0%; height: 100%; background: var(--primary); border-radius: 8px; transition: width 0.3s ease; } +#progress-text { position: absolute; width: 100%; text-align: center; line-height: 18px; color: white; font-size: 0.8em; font-weight: bold; text-shadow: 1px 1px 1px rgba(0,0,0,0.5); } +.breadcrumbs { margin-bottom: 15px; font-size: 1em; color: var(--tg-theme-hint-color); } +.breadcrumbs a { color: var(--accent); text-decoration: none; } +.breadcrumbs a:hover { text-decoration: underline; } 
+.breadcrumbs span { margin: 0 4px; } +.folder-actions { margin-top: 15px; margin-bottom: 10px; display: flex; gap: 8px; align-items: center; flex-wrap: wrap; } +.folder-actions input[type=text] { width: auto; flex-grow: 1; margin: 0; min-width: 150px; } +.folder-actions .btn { margin: 0; flex-shrink: 0;} +#auth-status { text-align: center; padding: 20px; font-size: 1.1em; } +.user-info-header { display: flex; align-items: center; gap: 10px; margin-bottom: 15px; padding: 10px; background: var(--glass-bg); border-radius: 10px;} +html.dark-mode .user-info-header { background: rgba(255, 255, 255, 0.1); } +.user-info-header img.avatar { width: 40px; height: 40px; border-radius: 50%; } +.user-info-header p { margin: 0; font-weight: 600; } + +@media (max-width: 768px) { + .file-grid { grid-template-columns: repeat(auto-fill, minmax(140px, 1fr)); gap: 12px; } + .item-preview { height: 90px; } + .item.folder .item-preview { font-size: 40px; line-height: 90px; } + h1 { font-size: 1.6em; } + .btn { padding: 10px 20px; font-size: 0.95em; } + .item-actions .btn { padding: 3px 6px; font-size: 0.75em;} +} +@media (max-width: 480px) { + .container { padding: 10px; } + .file-grid { grid-template-columns: repeat(auto-fill, minmax(120px, 1fr)); gap: 10px; } + .item-preview { height: 80px; } + .item.folder .item-preview { font-size: 35px; line-height: 80px; } + .item p { font-size: 0.8em;} + .breadcrumbs { font-size: 0.9em; } + .btn { padding: 9px 18px; } + .folder-actions { flex-direction: column; align-items: stretch; } + .folder-actions input[type=text] { width: 100%; } +} +''' + +# --- Routes --- +@app.route('/') +def index(): + init_script = ''' + + ''' + html_base = ''' + +Zeus Cloud +''' + init_script + ''' +

Загрузка приложения...

+ +''' + + # If user is already authenticated via session, redirect to dashboard directly + if 'telegram_user' in session: + return redirect(url_for('dashboard')) + + # Otherwise, show the initial loading/auth page + return render_template_string(html_base) + + +@app.route('/verify_telegram_auth', methods=['POST']) +def verify_auth(): + if not TELEGRAM_BOT_TOKEN: + logging.error("TELEGRAM_BOT_TOKEN is not set!") + return jsonify({"status": "error", "message": "Server configuration error (no bot token)."}), 500 + + req_data = request.get_json() + init_data_str = req_data.get('initData') + + if not init_data_str: + return jsonify({"status": "error", "message": "No initData received."}), 400 + + user_data, error_message = verify_telegram_data(init_data_str, TELEGRAM_BOT_TOKEN) + + if user_data: + session['telegram_user'] = user_data + session.permanent = True # Make session persistent + + # --- User creation/update in DB --- + data = load_data() + user_id = user_data['id'] + user_id_str = str(user_id) + + if user_id_str not in data['users']: + logging.info(f"New user detected: {user_id}, Username: {user_data.get('username', 'N/A')}") + data['users'][user_id_str] = { + 'telegram_id': user_id, + 'username': user_data.get('username'), + 'first_name': user_data.get('first_name'), + 'last_name': user_data.get('last_name'), + 'photo_url': user_data.get('photo_url'), + 'created_at': datetime.now().strftime('%Y-%m-%d %H:%M:%S'), + 'filesystem': { + "type": "folder", "id": "root", "name": "root", "children": [] } - if (responseData.status !== 'ok') { - // Handle cases where HTTP status is 200 but API reports an error - throw new Error(responseData.message || 'API operation failed'); - } - - return responseData; // Return the parsed JSON data - } catch (error) { - console.error(`API call to ${endpoint} failed:`, error); - showFlash(`Network or server error: ${error.message}`, 'error'); - throw error; // Re-throw to allow calling function to handle } - } + else: + # Update user 
info if changed + data['users'][user_id_str]['username'] = user_data.get('username') + data['users'][user_id_str]['first_name'] = user_data.get('first_name') + data['users'][user_id_str]['last_name'] = user_data.get('last_name') + data['users'][user_id_str]['photo_url'] = user_data.get('photo_url') + initialize_user_filesystem(data['users'][user_id_str]) # Ensure filesystem exists - // --- UI Rendering --- - function showLoadingScreen() { - loadingEl.style.display = 'flex'; - errorViewEl.style.display = 'none'; - appContentEl.style.display = 'none'; - } + try: + save_data(data) + return jsonify({"status": "success", "user": user_data}) + except Exception as e: + logging.error(f"Failed to save data after user verification/creation for {user_id}: {e}") + # Decide if auth should fail if DB save fails. For now, let auth succeed but log error. + return jsonify({"status": "success", "user": user_data, "warning": "DB save failed"}) - function showError(message) { - loadingEl.style.display = 'none'; - errorViewEl.innerHTML = `

Error

${message}

`; - errorViewEl.style.display = 'flex'; - appContentEl.style.display = 'none'; - if (tg.HapticFeedback) tg.HapticFeedback.notificationOccurred('error'); - } + else: + logging.warning(f"Telegram auth verification failed: {error_message}") + return jsonify({"status": "error", "message": error_message}), 403 + +@app.route('/dashboard', methods=['GET', 'POST']) +@login_required +def dashboard(): + telegram_user = session['telegram_user'] + user_id = telegram_user['id'] + user_id_str = str(user_id) + + data = load_data() + if user_id_str not in data['users']: + session.pop('telegram_user', None) + flash('Пользователь не найден в базе данных! Попробуйте перезапустить приложение.', 'error') + # In TMA context, can't easily redirect to login, show error or trigger reload + return render_template_string('Ошибка: Пользователь не найден. Перезагрузить'), 404 + + user_data = data['users'][user_id_str] + # Filesystem should be initialized on login/verify, but check again just in case + initialize_user_filesystem(user_data) + + current_folder_id = request.args.get('folder_id', 'root') + current_folder, parent_folder = find_node_by_id(user_data['filesystem'], current_folder_id) - function showAppContent() { - loadingEl.style.display = 'none'; - errorViewEl.style.display = 'none'; - appContentEl.style.display = 'flex'; - } + if not current_folder or current_folder.get('type') != 'folder': + flash('Папка не найдена!', 'error') + current_folder_id = 'root' + # Refetch root node + current_folder, parent_folder = find_node_by_id(user_data['filesystem'], current_folder_id) + if not current_folder: + logging.error(f"CRITICAL: Root folder not found for user {user_id}") + flash('Критическая ошибка: корневая папка не найдена.', 'error') + session.pop('telegram_user', None) + return render_template_string('Критическая ошибка файловой системы. 
Перезагрузить'), 500 - let flashTimeout; - function showFlash(message, type = 'success') { - clearTimeout(flashTimeout); // Clear existing timeout if any - - const flashDiv = document.createElement('div'); - flashDiv.className = `flash ${type}`; - flashDiv.textContent = message; - flashDiv.onclick = () => { // Allow dismissing by clicking - flashDiv.style.opacity = '0'; - setTimeout(() => { - if (flashDiv.parentNode === flashContainerEl) { - flashContainerEl.removeChild(flashDiv); - } - }, 300); - }; - - flashContainerEl.innerHTML = ''; // Clear previous messages - flashContainerEl.appendChild(flashDiv); - - // Trigger fade in - requestAnimationFrame(() => { - flashDiv.classList.add('show'); - }); - - - flashTimeout = setTimeout(() => { - flashDiv.style.opacity = '0'; - setTimeout(() => { - if (flashDiv.parentNode === flashContainerEl) { - flashContainerEl.removeChild(flashDiv); - } - }, 300); // Wait for fade out transition - }, 5000); - - if (type === 'success' && tg.HapticFeedback) tg.HapticFeedback.notificationOccurred('success'); - if (type === 'error' && tg.HapticFeedback) tg.HapticFeedback.notificationOccurred('error'); - } - function renderBreadcrumbs(breadcrumbs) { - breadcrumbsContainerEl.innerHTML = ''; - if (!breadcrumbs || breadcrumbs.length === 0) return; + items_in_folder = sorted(current_folder.get('children', []), key=lambda x: (x['type'] != 'folder', x.get('name', x.get('original_filename', '')).lower())) - breadcrumbs.forEach((crumb, index) => { - if (index > 0) { - const separator = document.createElement('span'); - separator.className = 'separator'; - separator.textContent = '›'; // iOS style separator - breadcrumbsContainerEl.appendChild(separator); - } - if (index === breadcrumbs.length - 1) { - const span = document.createElement('span'); - span.className = 'current-folder'; - span.textContent = crumb.name; - breadcrumbsContainerEl.appendChild(span); - currentFolderTitleEl.textContent = crumb.name === 'Root' ? 
'Files' : crumb.name; // Set section title - } else { - const link = document.createElement('a'); - link.href = '#'; - link.textContent = crumb.name; - link.onclick = (e) => { e.preventDefault(); loadFolderContent(crumb.id); }; - breadcrumbsContainerEl.appendChild(link); - } - }); + if request.method == 'POST': + if not HF_TOKEN_WRITE: + flash('Загрузка невозможна: токен для записи не настроен.', 'error') + return redirect(url_for('dashboard', folder_id=current_folder_id)) - // Show/hide back button - if (breadcrumbs.length > 1) { - tg.BackButton.show(); - } else { - tg.BackButton.hide(); - } - } + files = request.files.getlist('files') + if not files or all(not f.filename for f in files): + # This case might be handled by JS now, but keep server-side check + # flash('Файлы для загрузки не выбраны.', 'error') # Avoid flash, return JSON for XHR + return jsonify({'status': 'error', 'message': 'Файлы для загрузки не выбраны.'}), 400 - function getItemIcon(item) { - if (item.type === 'folder') return '📁'; - switch (item.file_type) { - case 'image': return '🖼️'; // Or use preview directly - case 'video': return '▶️'; - case 'pdf': return '📄'; - case 'text': return '📝'; - case 'doc': return '📄'; // Could use a specific Word icon if available - case 'sheet': return '📊'; - case 'slides': return '🖥️'; - case 'archive': return '📦'; - case 'audio': return '🎵'; - default: return '���'; - } - } + if len(files) > 20: + # flash('Максимум 20 файлов за раз!', 'error') + return jsonify({'status': 'error', 'message': 'Максимум 20 файлов за раз!'}), 400 - function renderItems(items) { - fileGridContainerEl.innerHTML = ''; // Clear previous items - if (!items || items.length === 0) { - emptyFolderPlaceholder.style.display = 'block'; - fileGridContainerEl.style.display = 'none'; - return; - } - emptyFolderPlaceholder.style.display = 'none'; - fileGridContainerEl.style.display = 'grid'; - - // Sort: folders first, then alphabetically by name - items.sort((a, b) => { - if (a.type === 
'folder' && b.type !== 'folder') return -1; - if (a.type !== 'folder' && b.type === 'folder') return 1; - const nameA = a.name || a.original_filename || ''; - const nameB = b.name || b.original_filename || ''; - return nameA.localeCompare(nameB); - }); + target_folder_id = request.form.get('current_folder_id', 'root') + target_folder_node, _ = find_node_by_id(user_data['filesystem'], target_folder_id) - items.forEach(item => { - const itemDiv = document.createElement('div'); - itemDiv.className = `item ${item.type}` + (item.type === 'file' ? ` type-${item.file_type}` : ''); - const filenameDisplay = item.original_filename || item.name || 'Unnamed'; - - let previewHtml = ''; - let mainAction = () => {}; // Function to execute on primary tap - - const previewContainer = document.createElement('div'); - previewContainer.className = 'item-preview-container'; - - if (item.type === 'folder') { - previewContainer.innerHTML = `${getItemIcon(item)}`; - mainAction = () => loadFolderContent(item.id); - } else { // File - const iconHtml = `${getItemIcon(item)}`; - const dlUrl = `${apiBaseUrl}/download/${item.id}`; - const previewUrl = `${apiBaseUrl}/preview_thumb/${item.id}`; - const textContentUrl = `${apiBaseUrl}/get_text_content/${item.id}`; - - if (item.file_type === 'image') { - previewContainer.innerHTML = `${filenameDisplay}`; - mainAction = () => openModal(dlUrl, 'image', item.id); - } else if (item.file_type === 'video') { - previewContainer.innerHTML = iconHtml; // Show icon initially - mainAction = () => openModal(dlUrl, 'video', item.id); - } else if (item.file_type === 'pdf') { - previewContainer.innerHTML = iconHtml; - mainAction = () => openModal(dlUrl, 'pdf', item.id); - } else if (item.file_type === 'text') { - previewContainer.innerHTML = iconHtml; - mainAction = () => openModal(textContentUrl, 'text', item.id); - } else { - previewContainer.innerHTML = iconHtml; - mainAction = () => { showFlash('Preview not available for this file type.', 'error'); 
if(tg.HapticFeedback) tg.HapticFeedback.impactOccurred('light'); }; // Maybe trigger download? - } - } - previewContainer.onclick = mainAction; - itemDiv.appendChild(previewContainer); - - // Info section - const infoDiv = document.createElement('div'); - infoDiv.className = 'item-info'; - infoDiv.innerHTML = ` -

${filenameDisplay}

- ${item.upload_date ? `

${item.upload_date.split(' ')[0]}

` : '

 

'} - `; - itemDiv.appendChild(infoDiv); - - // Actions Trigger (ellipsis button) - const trigger = document.createElement('div'); - trigger.className = 'item-actions-trigger'; - trigger.innerHTML = '...'; // Or use an SVG icon - trigger.onclick = (e) => { - e.stopPropagation(); // Prevent triggering mainAction - openActionsMenu(item); - if (tg.HapticFeedback) tg.HapticFeedback.impactOccurred('light'); - }; - previewContainer.appendChild(trigger); // Append trigger to preview container - - fileGridContainerEl.appendChild(itemDiv); - }); - } + if not target_folder_node or target_folder_node.get('type') != 'folder': + # flash('Целевая папка для загрузки не найдена!', 'error') + return jsonify({'status': 'error', 'message': 'Целевая папка для загрузки не найдена!'}), 404 - // --- Actions Menu --- - function openActionsMenu(item) { - actionsMenuTitle.textContent = item.name || item.original_filename || 'Item'; - actionsMenuButtons.innerHTML = ''; // Clear previous buttons - - const filenameDisplay = item.original_filename || item.name || 'Unnamed'; - - if (item.type === 'folder') { - addButtonToActionMenu('Open', () => loadFolderContent(item.id), 'btn'); - addButtonToActionMenu('Delete', () => deleteFolder(item.id, filenameDisplay), 'btn delete'); - } else { // File - const dlUrl = `${apiBaseUrl}/download/${item.id}`; - const textContentUrl = `${apiBaseUrl}/get_text_content/${item.id}`; - const previewable = ['image', 'video', 'pdf', 'text'].includes(item.file_type); - - if (previewable) { - let previewAction = () => {}; - if (item.file_type === 'image') previewAction = () => openModal(dlUrl, 'image', item.id); - else if (item.file_type === 'video') previewAction = () => openModal(dlUrl, 'video', item.id); - else if (item.file_type === 'pdf') previewAction = () => openModal(dlUrl, 'pdf', item.id); - else if (item.file_type === 'text') previewAction = () => openModal(textContentUrl, 'text', item.id); - addButtonToActionMenu('Preview', previewAction, 'btn'); - } - - 
addButtonToActionMenu('Download', () => window.open(dlUrl, '_blank'), 'btn'); // Open in new tab for download - addButtonToActionMenu('Delete', () => deleteFile(item.id, filenameDisplay), 'btn delete'); - } - actionsMenu.style.display = 'block'; - actionsMenuBackdrop.style.display = 'block'; - } - - function addButtonToActionMenu(text, onClickAction, className = 'btn') { - const button = document.createElement('button'); - button.className = className; - button.textContent = text; - button.onclick = () => { - closeActionsMenu(); - onClickAction(); - if (tg.HapticFeedback) tg.HapticFeedback.impactOccurred('light'); - }; - actionsMenuButtons.appendChild(button); - } + api = HfApi() + uploaded_count = 0 + errors = [] - function closeActionsMenu() { - // Add closing animation if desired - actionsMenu.style.display = 'none'; - actionsMenuBackdrop.style.display = 'none'; - } + for file in files: + if file and file.filename: + original_filename = secure_filename(file.filename) + name_part, ext_part = os.path.splitext(original_filename) + unique_suffix = uuid.uuid4().hex[:8] + unique_filename = f"{name_part}_{unique_suffix}{ext_part}" + file_id = uuid.uuid4().hex - // --- Modal Logic --- - async function openModal(srcOrUrl, type, itemId) { - modalContent.innerHTML = '

Loading...

'; // Show loading state - mediaModal.style.display = 'flex'; - // Trigger fade-in animation - requestAnimationFrame(() => { - mediaModal.classList.add('show'); - }); - - if (tg.HapticFeedback) tg.HapticFeedback.impactOccurred('light'); - - try { - // Construct absolute URL if needed - const absoluteUrl = srcOrUrl.startsWith('/') ? window.location.origin + srcOrUrl : srcOrUrl; - - if (type === 'pdf') { - // Use Telegram's built-in viewer or a reliable external one - // Option 1: Telegram's viewer (preferred) - if (tg.openLink) { - tg.openLink(absoluteUrl); - closeModalManual(); // Close our modal as TG will handle it - return; - } else { - // Fallback: Google Docs viewer in iframe (might be blocked) - modalContent.innerHTML = ``; - } - } else if (type === 'image') { - modalContent.innerHTML = `Image Preview`; - } else if (type === 'video') { - modalContent.innerHTML = ``; - } else if (type === 'text') { - // Fetch text content using apiCall or standard fetch - const response = await fetch(absoluteUrl); // Use the dedicated text route URL directly - if (!response.ok) throw new Error(`Failed to load text content: ${response.statusText}`); - const text = await response.text(); - const escapedText = text.replace(//g, ">"); // Basic HTML escaping - modalContent.innerHTML = `
${escapedText}
`; - } else { - modalContent.innerHTML = '

Preview not supported for this file type.

'; - } - } catch (error) { - console.error("Error loading modal content:", error); - modalContent.innerHTML = `

Could not load preview. ${error.message}

`; - if (tg.HapticFeedback) tg.HapticFeedback.notificationOccurred('error'); - } - } + # Use telegram_id in the path + hf_path = f"cloud_files/{user_id}/{target_folder_id}/{unique_filename}" + temp_path = os.path.join(UPLOAD_FOLDER, f"{file_id}_{unique_filename}") + try: + file.save(temp_path) + + api.upload_file( + path_or_fileobj=temp_path, + path_in_repo=hf_path, + repo_id=REPO_ID, + repo_type="dataset", + token=HF_TOKEN_WRITE, + commit_message=f"User {user_id} uploaded {original_filename} to folder {target_folder_id}" + ) + + file_info = { + 'type': 'file', + 'id': file_id, + 'original_filename': original_filename, + 'unique_filename': unique_filename, + 'path': hf_path, + 'file_type': get_file_type(original_filename), + 'upload_date': datetime.now().strftime('%Y-%m-%d %H:%M:%S') + } - function closeModalManual() { - mediaModal.classList.remove('show'); // Trigger fade-out - setTimeout(() => { - mediaModal.style.display = 'none'; - const video = mediaModal.querySelector('video'); - if (video) { video.pause(); video.src = ''; } - const iframe = mediaModal.querySelector('iframe'); - if (iframe) iframe.src = 'about:blank'; - modalContent.innerHTML = ''; // Clear content after fade out - }, 300); // Match CSS transition duration - } + if add_node(user_data['filesystem'], target_folder_id, file_info): + uploaded_count += 1 + else: + errors.append(f"Ошибка добавления метаданных для {original_filename}.") + logging.error(f"Failed to add node metadata for file {file_id} to folder {target_folder_id} for user {user_id}") + try: + api.delete_file(path_in_repo=hf_path, repo_id=REPO_ID, repo_type="dataset", token=HF_TOKEN_WRITE) + except Exception as del_err: + logging.error(f"Failed to delete orphaned file {hf_path} from HF Hub: {del_err}") + + except Exception as e: + logging.error(f"Error uploading file {original_filename} for {user_id}: {e}") + errors.append(f"Ошибка загрузки файла {original_filename}: {e}") + finally: + if os.path.exists(temp_path): + 
os.remove(temp_path) + + response_message = "" + final_status = "success" + + if uploaded_count > 0: + try: + save_data(data) + response_message += f'{uploaded_count} файл(ов) успешно загружено! ' + except Exception as e: + response_message += 'Файлы загружены на сервер, но произошла ошибка сохранения метаданных. ' + logging.error(f"Error saving data after upload for {user_id}: {e}") + final_status = "warning" # Or "error" depending on severity + + if errors: + response_message += "Ошибки: " + "; ".join(errors) + final_status = "error" if uploaded_count == 0 else "warning" + + # Return JSON response for XHR request + return jsonify({'status': final_status, 'message': response_message.strip()}) + + + # --- Breadcrumbs Calculation (for GET request) --- + breadcrumbs = [] + temp_id = current_folder_id + # Limit depth to avoid infinite loops in case of data corruption + for _ in range(20): # Max 20 levels deep + node, parent = find_node_by_id(user_data['filesystem'], temp_id) + if not node: break + is_link = (node['id'] != current_folder_id) + breadcrumbs.append({'id': node['id'], 'name': node.get('name', 'Root'), 'is_link': is_link}) + if not parent and node.get('id') == 'root': break # Stop at root + if not parent: break # Stop if parent somehow not found before root + temp_id = parent.get('id') + if not temp_id: break # Stop if parent has no id + breadcrumbs.reverse() + + # --- HTML Template for GET request --- + html = ''' + +Панель управления - Zeus Cloud + + + + +
+ +
+ {% if telegram_user.photo_url %} + Avatar + {% endif %} +

Привет, {{ telegram_user.first_name or telegram_user.username or 'Пользователь' }}!

+ {% if is_admin %} + Админ + {% endif %} +
+ +

Zeus Cloud

+
+{% with messages = get_flashed_messages(with_categories=true) %} + {% if messages %} + {% for category, message in messages %} +
{{ message }}
+ {% endfor %} + {% endif %} +{% endwith %} +
+ + + +
+
+ + + +
+
+ +
+ + + +
+
0%
+ +

Содержимое папки: {{ current_folder.name if current_folder_id != 'root' else 'Главная' }}

+
+ {% for item in items %} +
+ {% if item.type == 'folder' %} + 📁 +

{{ item.name | truncate(25, True) }}

+
+ Открыть +
+ + +
+
+ {% elif item.type == 'file' %} + {% set previewable = item.file_type in ['image', 'video', 'pdf', 'text'] %} + {% if item.file_type == 'image' %} + {{ item.original_filename }} + {% elif item.file_type == 'video' %} + + {% elif item.file_type == 'pdf' %} +
📄
+ {% elif item.file_type == 'text' %} +
📝
+ {% else %} +
+ {% endif %} +

{{ item.original_filename | truncate(25, True) }}

+

{{ item.upload_date }}

+
+ Скачать + {% if previewable %} + + {% endif %} +
+ + +
+
+ {% endif %} +
+ {% endfor %} + {% if not items %}

Эта папка пуста.

{% endif %} +
- // --- Folder Operations --- - async function loadFolderContent(folderId) { - currentFolderId = folderId; - console.log(`Loading folder: ${folderId}`); - // Optionally show a loading indicator over the grid - fileGridContainerEl.innerHTML = '

Loading...

'; // Simple text indicator - emptyFolderPlaceholder.style.display = 'none'; - fileGridContainerEl.style.display = 'block'; // Ensure grid container is visible for loading message +
- try { - const data = await apiCall('/get_dashboard_data', 'POST', { folder_id: folderId }); - currentItems = data.items || []; - renderBreadcrumbs(data.breadcrumbs || [{'id': 'root', 'name': 'Root'}]); - renderItems(currentItems); - } catch (error) { - // Error is already handled and shown by apiCall - fileGridContainerEl.innerHTML = '

Failed to load folder content.

'; - } - } + + + - - -""" - - -@app.route('/') -def index(): - return Response(HTML_TEMPLATE, mimetype='text/html') - -@app.route('/validate_init_data', methods=['POST']) -def validate_init_data(): - data = request.get_json() - if not data or 'initData' not in data: - return jsonify({"status": "error", "message": "Missing initData"}), 400 - - init_data = data['initData'] - user_info = check_telegram_authorization(init_data, BOT_TOKEN) - - if user_info and 'id' in user_info: - tg_user_id = str(user_info['id']) - db_data = load_data() - users = db_data.setdefault('users', {}) - save_needed = False - - user_entry = users.get(tg_user_id) - - if not user_entry or not isinstance(user_entry, dict): - logging.info(f"New user detected or invalid entry: {tg_user_id}. Initializing.") - users[tg_user_id] = { - 'user_info': user_info, - 'created_at': datetime.now().isoformat() # Use ISO format + } catch (e) { + console.error("Failed to parse upload response:", e, xhr.responseText); + flashContainer.innerHTML = `
Неожиданный ответ от сервера.
`; + tg.showAlert('Неожиданный ответ от сервера после загрузки.'); } - initialize_user_filesystem(users[tg_user_id]) - save_needed = True - else: - # Check if filesystem needs initialization or repair - if 'filesystem' not in user_entry or not isinstance(user_entry.get('filesystem'), dict): - logging.warning(f"Filesystem missing or invalid for user {tg_user_id}. Re-initializing.") - initialize_user_filesystem(user_entry) - save_needed = True - - # Optionally update user info if changed (e.g., username) - if user_entry.get('user_info', {}).get('username') != user_info.get('username'): - user_entry['user_info'] = user_info # Update stored info - save_needed = True - - if save_needed: - if not save_data(db_data): - logging.error(f"Failed to save data for user {tg_user_id} during validation.") - # Avoid returning 500 if possible, user might still be usable with loaded data - # return jsonify({"status": "error", "message": "Error saving user data."}), 500 - pass # Logged the error, proceed with current (possibly unsaved) state - - return jsonify({"status": "ok", "user": user_info}) - else: - logging.warning(f"Validation failed for initData prefix: {init_data[:100]}...") - return jsonify({"status": "error", "message": "Invalid authorization data."}), 403 - - -@app.route('/get_dashboard_data', methods=['POST']) -def get_dashboard_data(): - data = request.get_json() - if not data or 'initData' not in data or 'folder_id' not in data: - return jsonify({"status": "error", "message": "Incomplete request"}), 400 - - user_info = check_telegram_authorization(data['initData'], BOT_TOKEN) - if not user_info or 'id' not in user_info: - return jsonify({"status": "error", "message": "Unauthorized"}), 403 - - tg_user_id = str(user_info['id']) - folder_id = data['folder_id'] - db_data = load_data() - user_data = db_data.get('users', {}).get(tg_user_id) - - if not user_data or 'filesystem' not in user_data or not isinstance(user_data['filesystem'], dict): - logging.error(f"User data or 
filesystem missing/invalid for validated user {tg_user_id}") - # Attempt recovery if filesystem is bad but user_data exists - if isinstance(user_data, dict): - logging.warning(f"Attempting to re-initialize filesystem for user {tg_user_id}") - initialize_user_filesystem(user_data) - if not save_data(db_data): - logging.error(f"Failed to save re-initialized filesystem for user {tg_user_id}") - # Continue with the newly initialized filesystem if save failed but init worked - else: - return jsonify({"status": "error", "message": "User data error"}), 500 - - - current_folder, _ = find_node_by_id(user_data['filesystem'], folder_id) - - if not current_folder or current_folder.get('type') != 'folder': - logging.warning(f"Folder {folder_id} not found or invalid for user {tg_user_id}. Defaulting to root.") - folder_id = 'root' - current_folder, _ = find_node_by_id(user_data['filesystem'], folder_id) - if not current_folder: - logging.critical(f"CRITICAL: Root folder cannot be found for user {tg_user_id} even after check.") - # Attempt recovery again - initialize_user_filesystem(user_data) - if not save_data(db_data): - logging.error(f"Failed to save re-initialized filesystem after root recovery attempt for {tg_user_id}") - - current_folder, _ = find_node_by_id(user_data['filesystem'], 'root') - if not current_folder: # Still failing - return jsonify({"status": "error", "message": "Critical error: Root folder missing."}), 500 - - items_in_folder = current_folder.get('children', []) - if not isinstance(items_in_folder, list): - logging.warning(f"Invalid 'children' in folder {folder_id} for user {tg_user_id}. 
Resetting to empty list.") - items_in_folder = [] - current_folder['children'] = [] - # Consider saving data here if you want to persist this fix immediately - # save_data(db_data) - - breadcrumbs = get_node_path_list(user_data['filesystem'], folder_id) - - current_folder_info = { - 'id': current_folder.get('id'), - 'name': current_folder.get('name', 'Root') + // Clear file input after upload + fileInput.value = ''; + }); + + xhr.addEventListener('error', function() { + handleUploadEnd('Произошла ошибка во время загрузки.'); + }); + + xhr.addEventListener('abort', function() { + handleUploadEnd('Загрузка отменена.'); + }); + + function handleUploadEnd(message) { + uploadBtn.disabled = false; + uploadBtn.textContent = 'Загрузить файлы сюда'; + progressContainer.style.display = 'none'; + tg.MainButton.hideProgress(); + tg.MainButton.setText('Загрузить файлы сюда'); + tg.MainButton.enable(); + tg.showAlert(message); + flashContainer.innerHTML = `
${message}
`; + fileInput.value = ''; + } + + xhr.open('POST', form.action, true); + // Add headers if needed, e.g., CSRF token if implemented + xhr.send(formData); + }); + + +''' + + template_context = { + 'telegram_user': telegram_user, + 'items': items_in_folder, + 'current_folder_id': current_folder_id, + 'current_folder': current_folder, + 'breadcrumbs': breadcrumbs, + 'repo_id': REPO_ID, + 'HF_TOKEN_READ': HF_TOKEN_READ, + 'hf_file_url': lambda path, download=False: f"https://huggingface.co/datasets/{REPO_ID}/resolve/main/{path}{'?download=true' if download else ''}", + 'os': os, + 'is_admin': user_id in ADMIN_TELEGRAM_IDS } - - return jsonify({ - "status": "ok", - "items": items_in_folder, - "breadcrumbs": breadcrumbs, - "current_folder": current_folder_info - }) - - -@app.route('/upload', methods=['POST']) -def upload_files(): - init_data = request.form.get('initData') - current_folder_id = request.form.get('current_folder_id', 'root') - files = request.files.getlist('files') - - user_info = check_telegram_authorization(init_data, BOT_TOKEN) - if not user_info or 'id' not in user_info: - return jsonify({"status": "error", "message": "Unauthorized"}), 403 - - tg_user_id = str(user_info['id']) - - if not HF_TOKEN_WRITE: - return jsonify({'status': 'error', 'message': 'Upload configuration error.'}), 500 - - if not files or all(not f.filename for f in files): - return jsonify({'status': 'error', 'message': 'No files selected for upload.'}), 400 - - if len(files) > 20: - return jsonify({'status': 'error', 'message': 'Maximum 20 files per upload.'}), 400 - - db_data = load_data() - user_data = db_data.get('users', {}).get(tg_user_id) - - if not user_data or 'filesystem' not in user_data or not isinstance(user_data['filesystem'], dict): - logging.error(f"Upload error: User data or filesystem missing/invalid for {tg_user_id}") - return jsonify({"status": "error", "message": "User data error during upload."}), 500 - - target_folder_node, _ = 
find_node_by_id(user_data['filesystem'], current_folder_id) - if not target_folder_node or target_folder_node.get('type') != 'folder': - logging.error(f"Upload error: Target folder {current_folder_id} not found for user {tg_user_id}") - return jsonify({'status': 'error', 'message': 'Target folder not found!'}), 404 - - api = HfApi() - uploaded_count = 0 - errors = [] - nodes_added = [] # Keep track of nodes added in this request - - for file in files: - if file and file.filename: - original_filename = secure_filename(file.filename) - if not original_filename: - logging.warning(f"Skipping file with potentially insecure name: {file.filename}") - errors.append(f"Skipped file with invalid name: {file.filename}") - continue - - name_part, ext_part = os.path.splitext(original_filename) - unique_suffix = uuid.uuid4().hex[:8] - # Ensure filename doesn't become excessively long - max_len = 100 - safe_name_part = name_part[:max_len] - unique_filename = f"{safe_name_part}_{unique_suffix}{ext_part}" - file_id = uuid.uuid4().hex - - # Define path relative to user/folder for organization - hf_path = f"cloud_files/{tg_user_id}/{file_id[:2]}/{file_id}_{unique_filename}" # Add subfolder based on ID start - temp_path = os.path.join(UPLOAD_FOLDER, f"{file_id}_{unique_filename}") - - file_info = { - 'type': 'file', 'id': file_id, - 'original_filename': original_filename, - 'unique_filename': unique_filename, # Store the unique name used on HF - 'path': hf_path, - 'file_type': get_file_type(original_filename), - 'upload_date': datetime.now().isoformat() # Use ISO format - } - - try: - file.save(temp_path) - logging.info(f"Attempting HF upload to: {hf_path}") - api.upload_file( - path_or_fileobj=temp_path, path_in_repo=hf_path, - repo_id=REPO_ID, repo_type="dataset", token=HF_TOKEN_WRITE, - commit_message=f"User {tg_user_id} uploaded {original_filename}" - ) - logging.info(f"HF upload successful for {original_filename} ({file_id})") - - # Add node to filesystem structure *after* 
successful HF upload - if add_node(user_data['filesystem'], current_folder_id, file_info): - uploaded_count += 1 - nodes_added.append(file_info) # Track success - else: - # This case is critical - file is on HF, but not in DB structure - error_msg = f"Failed to add metadata for {original_filename} after upload." - errors.append(error_msg) - logging.error(f"{error_msg} User: {tg_user_id}, FileID: {file_id}, TargetFolder: {current_folder_id}") - # Attempt to delete the orphaned HF file - try: - logging.warning(f"Attempting cleanup of orphaned HF file: {hf_path}") - api.delete_file(path_in_repo=hf_path, repo_id=REPO_ID, repo_type="dataset", token=HF_TOKEN_WRITE) - logging.info(f"Orphaned file {hf_path} deleted from HF.") - except Exception as del_err: - logging.error(f"CRITICAL: Failed to delete orphaned HF file {hf_path}: {del_err}") - - except Exception as e: - logging.error(f"Upload error for {original_filename} (User: {tg_user_id}, FileID: {file_id}): {e}", exc_info=True) - errors.append(f"Error uploading {original_filename}") - # Ensure node wasn't partially added if error occurred during add_node or before - if file_info in nodes_added: nodes_added.remove(file_info) - finally: - # Clean up local temporary file - if os.path.exists(temp_path): - try: os.remove(temp_path) - except OSError as e_rm: logging.warning(f"Error removing temp file {temp_path}: {e_rm}") - - # Save data only if at least one file was successfully uploaded AND added to structure - if uploaded_count > 0 and nodes_added: - logging.info(f"Saving DB for user {tg_user_id} after {uploaded_count} successful uploads.") - if not save_data(db_data): - # If save fails, we have inconsistency: files on HF, maybe some nodes added in memory, but not persisted. - logging.error(f"CRITICAL: Failed to save DB after successful uploads for user {tg_user_id}.") - errors.append("Critical error saving file metadata after upload.") - # Attempt to revert the in-memory additions? Very complex. Logging is key here. 
- # Rollback: Remove nodes that were added in this request from the in-memory structure - for node_info in nodes_added: - remove_node(user_data['filesystem'], node_info['id']) - uploaded_count = 0 # Reflect that the save failed - # Do NOT try to delete the HF files here, could lead to data loss if DB save fails intermittently - - final_message = f"{uploaded_count} file(s) uploaded." - if errors: - final_message += f" Errors occurred with {len(errors)} file(s)." - # Consider logging the specific errors to the user if appropriate - # final_message += " Details: " + "; ".join(errors) - - return jsonify({ - "status": "ok" if uploaded_count > 0 else "error", # Status based on successful *persisted* uploads - "message": final_message - }) + return render_template_string(html, **template_context) @app.route('/create_folder', methods=['POST']) +@login_required def create_folder(): - data = request.get_json() - if not data or 'initData' not in data or 'parent_folder_id' not in data or 'folder_name' not in data: - return jsonify({"status": "error", "message": "Incomplete request"}), 400 + telegram_user = session['telegram_user'] + user_id = telegram_user['id'] + user_id_str = str(user_id) - user_info = check_telegram_authorization(data['initData'], BOT_TOKEN) - if not user_info or 'id' not in user_info: - return jsonify({"status": "error", "message": "Unauthorized"}), 403 + data = load_data() + user_data = data['users'].get(user_id_str) + if not user_data: + flash('Пользователь не найден!', 'error') + return redirect(url_for('dashboard')) # Redirect to root dashboard - tg_user_id = str(user_info['id']) - parent_folder_id = data['parent_folder_id'] - folder_name = data['folder_name'].strip() + parent_folder_id = request.form.get('parent_folder_id', 'root') + folder_name = request.form.get('folder_name', '').strip() if not folder_name: - return jsonify({'status': 'error', 'message': 'Folder name cannot be empty.'}), 400 - if len(folder_name) > 100: - return jsonify({'status': 
'error', 'message': 'Folder name is too long.'}), 400 - # Basic validation for problematic characters - if /[<>:"/\\|?*]/.test(folder_name): - return jsonify({'status': 'error', 'message': 'Folder name contains invalid characters.'}), 400 - - - db_data = load_data() - user_data = db_data.get('users', {}).get(tg_user_id) - - if not user_data or 'filesystem' not in user_data or not isinstance(user_data['filesystem'], dict): - logging.error(f"Create folder error: User data or filesystem missing/invalid for {tg_user_id}") - return jsonify({"status": "error", "message": "User data error."}), 500 + flash('Имя папки не может быть пустым!', 'error') + return redirect(url_for('dashboard', folder_id=parent_folder_id)) + # Allow more characters, sanitize later if needed + # if not folder_name.isalnum() and '_' not in folder_name and ' ' not in folder_name: + # flash('Имя папки может содержать буквы, цифры, пробелы и подчеркивания.', 'error') + # return redirect(url_for('dashboard', folder_id=parent_folder_id)) + folder_name = secure_filename(folder_name.replace(' ', '_')) # Basic sanitization - # Check if folder with the same name already exists in the parent - parent_node, _ = find_node_by_id(user_data['filesystem'], parent_folder_id) - if parent_node and 'children' in parent_node and isinstance(parent_node['children'], list): - for child in parent_node['children']: - if isinstance(child, dict) and child.get('type') == 'folder' and child.get('name') == folder_name: - return jsonify({'status': 'error', 'message': f'A folder named "{folder_name}" already exists here.'}), 409 # 409 Conflict + if not folder_name: # If secure_filename removes everything + flash('Недопустимое имя папки.', 'error') + return redirect(url_for('dashboard', folder_id=parent_folder_id)) folder_id = uuid.uuid4().hex folder_data = { - 'type': 'folder', 'id': folder_id, - 'name': folder_name, 'children': [] + 'type': 'folder', + 'id': folder_id, + 'name': folder_name, + 'children': [] } if 
add_node(user_data['filesystem'], parent_folder_id, folder_data): - if save_data(db_data): - return jsonify({'status': 'ok', 'message': f'Folder "{folder_name}" created.'}) - else: - logging.error(f"Create folder save error ({tg_user_id}) after adding node {folder_id}.") - # Attempt to rollback the in-memory addition - remove_node(user_data['filesystem'], folder_id) - return jsonify({'status': 'error', 'message': 'Error saving data after creating folder.'}), 500 + try: + save_data(data) + flash(f'Папка "{folder_name}" успешно создана.') + except Exception as e: + flash('Ошибка сохранения данных при создании папки.', 'error') + logging.error(f"Create folder save error for user {user_id}: {e}") + # Attempt to remove the added node if save failed? Complex. else: - # This implies parent folder wasn't found or wasn't a folder type - logging.error(f"Create folder error: Failed add_node. User: {tg_user_id}, Parent: {parent_folder_id}") - return jsonify({'status': 'error', 'message': 'Could not find parent folder to add new folder.'}), 400 + flash('Не удалось найти родительскую папку для создания новой.', 'error') + + return redirect(url_for('dashboard', folder_id=parent_folder_id)) @app.route('/download/') -def download_file_route(file_id): - # Note: This route has NO BUILT-IN AUTHENTICATION. - # It relies on the obscurity of file_id and HF path. - # For sensitive data, proper auth (e.g., checking initData passed as query param, - # or session-based auth) would be needed here, which complicates direct linking/previewing. 
- db_data = load_data() # Use cached data if possible - file_node = None - owner_user_id = None +@login_required # Require login even for download link access initially +def download_file(file_id): + current_user_id = session['telegram_user']['id'] + is_current_user_admin = current_user_id in ADMIN_TELEGRAM_IDS - # Find the file node across all users - for user_id_scan, user_data_scan in db_data.get('users', {}).items(): - if 'filesystem' in user_data_scan and isinstance(user_data_scan['filesystem'], dict): - node, _ = find_node_by_id(user_data_scan['filesystem'], file_id) - if node and isinstance(node, dict) and node.get('type') == 'file': - file_node = node - owner_user_id = user_id_scan - break + data = load_data() + file_node = None + owner_user_id_str = None + + # 1. Check if the file belongs to the current user + current_user_data = data['users'].get(str(current_user_id)) + if current_user_data: + file_node, _ = find_node_by_id(current_user_data.get('filesystem', {}), file_id) + if file_node and file_node.get('type') == 'file': + owner_user_id_str = str(current_user_id) + + # 2. 
If not found for current user AND current user is admin, search all users + if not file_node and is_current_user_admin: + logging.info(f"Admin {current_user_id} searching for file ID {file_id} across all users.") + for uid_str, udata in data.get('users', {}).items(): + node, _ = find_node_by_id(udata.get('filesystem', {}), file_id) + if node and node.get('type') == 'file': + file_node = node + owner_user_id_str = uid_str + logging.info(f"Admin {current_user_id} found file ID {file_id} belonging to user {owner_user_id_str}") + break if not file_node: - logging.warning(f"Download request for unknown file_id: {file_id}") - return Response("File not found", status=404, mimetype='text/plain') + flash('Файл не найден!', 'error') + # Redirect back to user's dashboard or admin panel depending on who requested + if is_current_user_admin and request.referrer and 'admhosto' in request.referrer: + return redirect(request.referrer) + return redirect(url_for('dashboard')) + hf_path = file_node.get('path') - original_filename = file_node.get('original_filename', f'{file_id}_download') + original_filename = file_node.get('original_filename', 'downloaded_file') if not hf_path: - logging.error(f"Download error: Missing HF path for file ID {file_id} (Owner: {owner_user_id})") - return Response("Error: File path configuration missing", status=500, mimetype='text/plain') + flash('Ошибка: Путь к файлу не найден в метаданных.', 'error') + if is_current_user_admin and request.referrer and 'admhosto' in request.referrer: + return redirect(request.referrer) + return redirect(url_for('dashboard')) - # Construct the direct download URL - # Using /info/refs might be faster for checking existence before redirecting, but resolve/main is simpler file_url = f"https://huggingface.co/datasets/{REPO_ID}/resolve/main/{hf_path}?download=true" - logging.info(f"Attempting to serve file via redirect/proxy from: {file_url}") try: headers = {} if HF_TOKEN_READ: headers["authorization"] = f"Bearer 
{HF_TOKEN_READ}" - # Use requests to stream the file from HF - # Timeout set for initial connection and read chunks - response = requests.get(file_url, headers=headers, stream=True, timeout=(10, 30)) # (connect_timeout, read_timeout) - response.raise_for_status() # Check for 4xx/5xx errors from HF - - # Prepare Flask response headers - resp_headers = {} - content_type = response.headers.get('Content-Type', 'application/octet-stream') - resp_headers['Content-Type'] = content_type - - # Create a safe filename for Content-Disposition - # Simple approach: replace potentially problematic chars - safe_filename = "".join(c if c.isalnum() or c in ['.', '-', '_'] else '_' for c in original_filename) - # Encode for header value (URL encoding for filename*=UTF-8'') - encoded_filename = urlencode({'filename': original_filename}, encoding='utf-8')[9:] - resp_headers['Content-Disposition'] = f"attachment; filename=\"{safe_filename}\"; filename*=UTF-8''{encoded_filename}" - - # Add Content-Length if provided by HF - if 'Content-Length' in response.headers: - resp_headers['Content-Length'] = response.headers['Content-Length'] + response = requests.get(file_url, headers=headers, stream=True, timeout=60) # Add timeout + response.raise_for_status() - # Stream the response body - return Response(response.iter_content(chunk_size=8192), status=response.status_code, headers=resp_headers) + # Stream download if needed for large files, but send_file handles BytesIO well too + file_content = BytesIO(response.content) + return send_file( + file_content, + as_attachment=True, + download_name=original_filename, + mimetype='application/octet-stream' + ) except requests.exceptions.Timeout: - logging.error(f"Timeout downloading file from HF: {hf_path}") - return Response("Error: Timed out connecting to file storage", status=504, mimetype='text/plain') # 504 Gateway Timeout + logging.error(f"Timeout downloading file from HF ({hf_path})") + flash(f'Ошибка скачивания файла {original_filename}: 
Тайм-аут соединения с сервером.', 'error') except requests.exceptions.RequestException as e: - status_code = e.response.status_code if e.response is not None else 502 # 502 Bad Gateway if no response - logging.error(f"Error downloading file from HF ({hf_path}, Owner: {owner_user_id}): {e} (Status: {status_code})") - # Don't expose detailed error message to client - return Response(f"Error retrieving file ({status_code})", status=status_code, mimetype='text/plain') + logging.error(f"Error downloading file from HF ({hf_path}): {e}") + flash(f'Ошибка скачивания файла {original_filename}! ({e})', 'error') except Exception as e: - logging.error(f"Unexpected error during download proxy ({hf_path}, Owner: {owner_user_id}): {e}", exc_info=True) - return Response("Internal server error during file download", status=500, mimetype='text/plain') - + logging.error(f"Unexpected error during download ({hf_path}): {e}") + flash('Произошла непредвиденная ошибка при скачивании файла.', 'error') -@app.route('/delete_file/', methods=['POST']) -def delete_file_route(file_id): - data = request.get_json() - if not data or 'initData' not in data: # current_folder_id might not be strictly necessary - return jsonify({"status": "error", "message": "Incomplete request"}), 400 - - user_info = check_telegram_authorization(data['initData'], BOT_TOKEN) - if not user_info or 'id' not in user_info: - return jsonify({"status": "error", "message": "Unauthorized"}), 403 + # Fallback redirect + if is_current_user_admin and request.referrer and 'admhosto' in request.referrer: + return redirect(request.referrer) + return redirect(url_for('dashboard')) - tg_user_id = str(user_info['id']) - if not HF_TOKEN_WRITE: - return jsonify({'status': 'error', 'message': 'Deletion configuration error.'}), 500 - - db_data = load_data() - user_data = db_data.get('users', {}).get(tg_user_id) - - if not user_data or 'filesystem' not in user_data or not isinstance(user_data['filesystem'], dict): - logging.error(f"Delete 
file error: User data or filesystem missing/invalid for {tg_user_id}") - # Don't reveal file existence, just say user data error - return jsonify({"status": "error", "message": "User data error."}), 500 +@app.route('/delete_file/', methods=['POST']) +@login_required +def delete_file(file_id): + telegram_user = session['telegram_user'] + user_id = telegram_user['id'] + user_id_str = str(user_id) + + data = load_data() + user_data = data['users'].get(user_id_str) + if not user_data: + flash('Пользователь не найден!', 'error') + session.pop('telegram_user', None) + return redirect(url_for('index')) # Force re-auth file_node, parent_node = find_node_by_id(user_data['filesystem'], file_id) + # Determine the folder to redirect back to + current_view_folder_id = request.form.get('current_view_folder_id') + if not current_view_folder_id and parent_node: + current_view_folder_id = parent_node.get('id', 'root') + elif not current_view_folder_id: + current_view_folder_id = 'root' + - if not file_node or file_node.get('type') != 'file' or not parent_node: - # File not found *for this user*. Do not confirm non-existence. - logging.warning(f"Delete request for non-existent/invalid file ID {file_id} by user {tg_user_id}") - return jsonify({'status': 'error', 'message': 'File not found.'}), 404 + if not file_node or file_node.get('type') != 'file': # Parent check removed, root files possible + flash('Файл не найден или не может быть удален.', 'error') + return redirect(url_for('dashboard', folder_id=current_view_folder_id)) hf_path = file_node.get('path') - original_filename = file_node.get('original_filename', 'file') - db_removed = False - hf_deleted = False - save_error = False + original_filename = file_node.get('original_filename', 'файл') - # 1. 
Attempt to delete from Hugging Face Hub - if hf_path: + # Attempt to remove from DB first + if remove_node(user_data['filesystem'], file_id): try: - api = HfApi() - logging.info(f"Attempting HF delete for: {hf_path} by user {tg_user_id}") - api.delete_file( - path_in_repo=hf_path, repo_id=REPO_ID, repo_type="dataset", token=HF_TOKEN_WRITE, - commit_message=f"User {tg_user_id} deleted {original_filename}" - ) - hf_deleted = True - logging.info(f"Successfully deleted file {hf_path} from HF Hub for user {tg_user_id}") - except hf_utils.EntryNotFoundError: - logging.warning(f"File {hf_path} already deleted or never existed on HF Hub for delete attempt by {tg_user_id}.") - hf_deleted = True # Treat as success for the purpose of DB removal + save_data(data) + logging.info(f"Removed file {file_id} ({original_filename}) from DB for user {user_id}.") + # Now attempt to delete from HF + if hf_path and HF_TOKEN_WRITE: + try: + api = HfApi() + api.delete_file( + path_in_repo=hf_path, + repo_id=REPO_ID, + repo_type="dataset", + token=HF_TOKEN_WRITE, + commit_message=f"User {user_id} deleted file {original_filename} (ID: {file_id})" + ) + logging.info(f"Deleted file {hf_path} from HF Hub for user {user_id}") + flash(f'Файл {original_filename} успешно удален!') + except hf_utils.EntryNotFoundError: + logging.warning(f"File {hf_path} not found on HF Hub during delete for user {user_id}, but removed from DB.") + flash(f'Файл {original_filename} удален из базы (не найден на сервере).') + except Exception as e: + logging.error(f"Error deleting file {hf_path} from HF Hub for user {user_id} (DB entry removed): {e}") + flash(f'Файл {original_filename} удален из базы, но ошибка при удалении с сервера: {e}', 'error') + elif not hf_path: + flash(f'Файл {original_filename} удален из базы (путь не найден).') + elif not HF_TOKEN_WRITE: + flash(f'Файл {original_filename} удален из базы (удаление с сервера невозможно - токен отсутствует).', 'warning') + except Exception as e: - 
logging.error(f"Error deleting file from HF Hub ({hf_path}, User: {tg_user_id}): {e}") - # Do not stop here; still try to remove from DB if HF delete fails, - # but report the overall operation as potentially failed. - # A background cleanup job might be needed for such inconsistencies. + # This is bad - removed from structure in memory, but failed to save + logging.critical(f"CRITICAL: Failed to save DB after removing file {file_id} for user {user_id}. Data inconsistency possible! Error: {e}") + flash('Критическая ошибка: не удалось сохранить базу данных после удаления файла. Перезагрузите данные.', 'error') + # Force cache clear and maybe reload? + cache.clear() + # Don't attempt HF delete if DB save failed else: - logging.warning(f"File node {file_id} for user {tg_user_id} has no HF path. Skipping HF deletion.") - hf_deleted = True # No path means nothing to delete on HF - - # 2. Attempt to remove from DB structure *if HF deletion was successful or skipped* - if hf_deleted: - if remove_node(user_data['filesystem'], file_id): - db_removed = True - logging.info(f"Removed file node {file_id} from DB for user {tg_user_id}") - # 3. Attempt to save the updated DB structure - if not save_data(db_data): - logging.error(f"CRITICAL: Delete file DB save error for user {tg_user_id} after removing node {file_id}.") - save_error = True - # Attempt to rollback the in-memory removal? Very risky. Better to log. - # Re-adding the node might fail if parent was modified etc. 
- # add_node(user_data['filesystem'], parent_node['id'], file_node) # Risky rollback attempt + flash('Не удалось найти файл в структуре для удаления.', 'error') - else: - # This shouldn't happen if find_node_by_id found it initially - logging.error(f"Failed to remove file node {file_id} from DB structure for {tg_user_id} after it was found.") - - # Determine final status - if db_removed and not save_error: - return jsonify({'status': 'ok', 'message': f'File "{original_filename}" deleted.'}) - elif hf_deleted and db_removed and save_error: - return jsonify({'status': 'error', 'message': f'File deleted from storage, but failed to update database.'}), 500 - elif hf_deleted and not db_removed: - return jsonify({'status': 'error', 'message': f'File deleted from storage, but failed to remove from database structure.'}), 500 - else: # hf_deleted is False (meaning HF delete failed) - return jsonify({'status': 'error', 'message': f'Failed to delete file from storage.'}), 500 + return redirect(url_for('dashboard', folder_id=current_view_folder_id)) @app.route('/delete_folder/', methods=['POST']) -def delete_folder_route(folder_id): - if folder_id == 'root': - return jsonify({'status': 'error', 'message': 'Cannot delete the root folder.'}), 400 - - data = request.get_json() - if not data or 'initData' not in data: - return jsonify({"status": "error", "message": "Incomplete request"}), 400 +@login_required +def delete_folder(folder_id): + telegram_user = session['telegram_user'] + user_id = telegram_user['id'] + user_id_str = str(user_id) - user_info = check_telegram_authorization(data['initData'], BOT_TOKEN) - if not user_info or 'id' not in user_info: - return jsonify({"status": "error", "message": "Unauthorized"}), 403 - - tg_user_id = str(user_info['id']) - - db_data = load_data() - user_data = db_data.get('users', {}).get(tg_user_id) + if folder_id == 'root': + flash('Нельзя удалить корневую папку!', 'error') + return redirect(url_for('dashboard')) - if not user_data or 
'filesystem' not in user_data or not isinstance(user_data['filesystem'], dict): - logging.error(f"Delete folder error: User data or filesystem missing/invalid for {tg_user_id}") - return jsonify({"status": "error", "message": "User data error."}), 500 + data = load_data() + user_data = data['users'].get(user_id_str) + if not user_data: + flash('Пользователь не найден!', 'error') + session.pop('telegram_user', None) + return redirect(url_for('index')) folder_node, parent_node = find_node_by_id(user_data['filesystem'], folder_id) + current_view_folder_id = request.form.get('current_view_folder_id') # Where user was viewing from + redirect_to_folder_id = 'root' # Default redirect target - if not folder_node or folder_node.get('type') != 'folder' or not parent_node: - logging.warning(f"Delete request for non-existent/invalid folder ID {folder_id} by user {tg_user_id}") - return jsonify({'status': 'error', 'message': 'Folder not found.'}), 404 + if parent_node: + redirect_to_folder_id = parent_node.get('id', 'root') + elif current_view_folder_id: # Fallback to where user clicked delete + redirect_to_folder_id = current_view_folder_id - folder_name = folder_node.get('name', 'folder') + if not folder_node or folder_node.get('type') != 'folder': + flash('Папка не найдена или не может быть удалена.', 'error') + return redirect(url_for('dashboard', folder_id=redirect_to_folder_id)) - # Check if folder is empty (safer to check 'children' array directly) - if 'children' in folder_node and isinstance(folder_node['children'], list) and folder_node['children']: - return jsonify({'status': 'error', 'message': f'Folder "{folder_name}" is not empty. 
Please delete its contents first.'}), 400 + folder_name = folder_node.get('name', 'папка') - # Attempt to remove the folder node + if folder_node.get('children'): + flash(f'Папку "{folder_name}" можно удалить только если она пуста.', 'error') + return redirect(url_for('dashboard', folder_id=current_view_folder_id or folder_id)) # Stay in current view or folder itself + + # Proceed with deletion if remove_node(user_data['filesystem'], folder_id): - # Attempt to save the change - if save_data(db_data): - logging.info(f"Folder {folder_id} ('{folder_name}') deleted by user {tg_user_id}") - return jsonify({'status': 'ok', 'message': f'Folder "{folder_name}" deleted.'}) - else: - logging.error(f"Delete folder save error for user {tg_user_id} after removing node {folder_id}.") - # Attempt rollback (risky) - # add_node(user_data['filesystem'], parent_node['id'], folder_node) - return jsonify({'status': 'error', 'message': 'Error saving database after deleting folder.'}), 500 + try: + save_data(data) + flash(f'Пустая папка "{folder_name}" успешно удалена.') + except Exception as e: + flash('Ошибка сохранения данных после удаления папки.', 'error') + logging.error(f"Delete empty folder save error for user {user_id}: {e}") + # Data inconsistency - folder removed in memory, not saved. 
+ cache.clear() # Clear cache to force reload on next request else: - # This indicates an internal logic error if the node was found before - logging.error(f"Failed to remove empty folder node {folder_id} from DB for {tg_user_id} after it was found.") - return jsonify({'status': 'error', 'message': 'Could not remove folder from database structure.'}), 500 + flash('Не удалось удалить папку из базы данных (не найдена?).', 'error') + + return redirect(url_for('dashboard', folder_id=redirect_to_folder_id)) @app.route('/get_text_content/') -def get_text_content_route(file_id): - # NO AUTHENTICATION - relies on file_id obscurity - db_data = load_data() - file_node = None - owner_user_id = None +@login_required # Require login +def get_text_content(file_id): + current_user_id = session['telegram_user']['id'] + is_current_user_admin = current_user_id in ADMIN_TELEGRAM_IDS - for user_id_scan, user_data_scan in db_data.get('users', {}).items(): - if 'filesystem' in user_data_scan and isinstance(user_data_scan['filesystem'], dict): - node, _ = find_node_by_id(user_data_scan['filesystem'], file_id) - # Allow preview only for 'text' type files - if node and isinstance(node, dict) and node.get('type') == 'file' and node.get('file_type') == 'text': + data = load_data() + file_node = None + owner_user_id_str = None + + # 1. Check current user's files + current_user_data = data['users'].get(str(current_user_id)) + if current_user_data: + node, _ = find_node_by_id(current_user_data.get('filesystem', {}), file_id) + if node and node.get('type') == 'file' and node.get('file_type') == 'text': + file_node = node + owner_user_id_str = str(current_user_id) + + # 2. 
If admin and not found, check others + if not file_node and is_current_user_admin: + for uid_str, udata in data.get('users', {}).items(): + node, _ = find_node_by_id(udata.get('filesystem', {}), file_id) + if node and node.get('type') == 'file' and node.get('file_type') == 'text': file_node = node - owner_user_id = user_id_scan + owner_user_id_str = uid_str break if not file_node: - logging.warning(f"Text content request for unknown/non-text file_id: {file_id}") - return Response("Text file not found or preview not allowed", status=404, mimetype='text/plain') + return Response("Текстовый файл не найден или доступ запрещен", status=404) hf_path = file_node.get('path') if not hf_path: - logging.error(f"Text content error: Missing HF path for file ID {file_id} (Owner: {owner_user_id})") - return Response("Error: File path configuration missing", status=500, mimetype='text/plain') + return Response("Ошибка: путь к файлу отсутствует в метаданных", status=500) file_url = f"https://huggingface.co/datasets/{REPO_ID}/resolve/main/{hf_path}?download=true" - logging.info(f"Attempting to fetch text content from: {file_url}") try: headers = {} if HF_TOKEN_READ: headers["authorization"] = f"Bearer {HF_TOKEN_READ}" - response = requests.get(file_url, headers=headers, timeout=15) # Shorter timeout for text files + response = requests.get(file_url, headers=headers, timeout=15) # Timeout for text files response.raise_for_status() - # Limit preview size to prevent loading huge files in browser - max_preview_size = 1 * 1024 * 1024 # 1 MB limit - if 'Content-Length' in response.headers and int(response.headers['Content-Length']) > max_preview_size: - logging.warning(f"Text file {file_id} too large for preview ({response.headers['Content-Length']} bytes).") - return Response("File is too large for preview (>1MB). 
Please download.", status=413, mimetype='text/plain') # 413 Payload Too Large - - # If size is unknown or within limits, proceed to read content - content_bytes = response.content - if len(content_bytes) > max_preview_size: - logging.warning(f"Text file {file_id} too large for preview after download ({len(content_bytes)} bytes).") - return Response("File is too large for preview (>1MB). Please download.", status=413, mimetype='text/plain') - - # Attempt to decode the text content - text_content = None - detected_encoding = None - # Try common encodings - encodings_to_try = ['utf-8', 'cp1251', 'latin-1'] - for enc in encodings_to_try: + # Limit preview size + max_preview_size = 1 * 1024 * 1024 # 1MB + if len(response.content) > max_preview_size: + # Return truncated content with a warning + text_content_bytes = response.content[:max_preview_size] + warning_message = "\n\n[Файл слишком большой, показана только первая 1MB]" try: - text_content = content_bytes.decode(enc) - detected_encoding = enc - logging.info(f"Decoded text file {file_id} using {enc}") - break - except UnicodeDecodeError: - continue + text_content = text_content_bytes.decode('utf-8', errors='ignore') + warning_message + except UnicodeDecodeError: # Should be caught by errors='ignore' + text_content = "[Не удалось декодировать начало файла]" + warning_message - if text_content is None: - # Fallback: Try to detect using chardet if installed, or assume UTF-8 lossy - try: - import chardet - result = chardet.detect(content_bytes) - detected_encoding = result['encoding'] - if detected_encoding: - text_content = content_bytes.decode(detected_encoding, errors='replace') - logging.info(f"Decoded text file {file_id} using detected encoding {detected_encoding}") - else: - raise ValueError("Chardet could not detect encoding") - except (ImportError, Exception) as E: - logging.warning(f"Could not decode text file {file_id} with common encodings or chardet ({E}). 
Falling back to utf-8 replace.") - text_content = content_bytes.decode('utf-8', errors='replace') - detected_encoding = 'utf-8 (replaced errors)' - - - # Return decoded text with appropriate content type - return Response(text_content, mimetype=f'text/plain; charset={detected_encoding.split(" ")[0]}') # Use detected/fallback encoding + return Response(text_content, mimetype='text/plain') + # Or return an error: + # return Response("Файл слишком большой для предпросмотра ( > 1MB).", status=413) + + + # Try decoding + try: + text_content = response.content.decode('utf-8') + except UnicodeDecodeError: + try: + # Fallback to latin-1 or common windows encoding + text_content = response.content.decode('latin-1') + except UnicodeDecodeError: + try: + text_content = response.content.decode('cp1251') + except Exception: + return Response("Не удалось определить кодировку файла.", status=500) + + return Response(text_content, mimetype='text/plain; charset=utf-8') # Specify charset except requests.exceptions.Timeout: - logging.error(f"Timeout fetching text content from HF: {hf_path}") - return Response("Error: Timed out connecting to file storage", status=504, mimetype='text/plain') + logging.warning(f"Timeout fetching text content from HF ({hf_path})") + return Response("Тайм-аут при загрузке содержимого файла.", status=504) except requests.exceptions.RequestException as e: - status_code = e.response.status_code if e.response is not None else 502 - logging.error(f"Error fetching text content from HF ({hf_path}, Owner: {owner_user_id}): {e} (Status: {status_code})") - return Response(f"Error retrieving text content ({status_code})", status=status_code, mimetype='text/plain') + logging.error(f"Error fetching text content from HF ({hf_path}): {e}") + return Response(f"Ошибка загрузки содержимого: {e}", status=502) except Exception as e: - logging.error(f"Unexpected error fetching text content ({hf_path}, Owner: {owner_user_id}): {e}", exc_info=True) - return Response("Internal 
server error fetching text content", status=500, mimetype='text/plain') + logging.error(f"Unexpected error fetching text content ({hf_path}): {e}") + return Response("Внутренняя ошибка сервера", status=500) -@app.route('/preview_thumb/') -def preview_thumb_route(file_id): - # NO AUTHENTICATION - db_data = load_data() - file_node = None - owner_user_id = None +# --- Admin Routes --- - for user_id_scan, user_data_scan in db_data.get('users', {}).items(): - if 'filesystem' in user_data_scan and isinstance(user_data_scan['filesystem'], dict): - node, _ = find_node_by_id(user_data_scan['filesystem'], file_id) - if node and isinstance(node, dict) and node.get('type') == 'file' and node.get('file_type') == 'image': - file_node = node - owner_user_id = user_id_scan - break +@app.route('/admhosto') +@admin_required +def admin_panel(): + data = load_data() + users = data.get('users', {}) - if not file_node: return Response("Image not found", status=404, mimetype='text/plain') - hf_path = file_node.get('path') - if not hf_path: return Response("Error: File path missing", status=500, mimetype='text/plain') + user_details = [] + for user_id_str, udata in users.items(): + file_count = 0 + folder_count = 0 + q = [(udata.get('filesystem', {}))] # Start with root object + visited_ids = set() - # Use the /resolve/main path for direct file access - file_url = f"https://huggingface.co/datasets/{REPO_ID}/resolve/main/{hf_path}" - logging.info(f"Attempting to serve image preview via proxy from: {file_url}") + while q: + current_node = q.pop(0) + if not current_node or not isinstance(current_node, dict) or current_node.get('id') in visited_ids: + continue + visited_ids.add(current_node.get('id')) + + if current_node.get('type') == 'file': + file_count += 1 + elif current_node.get('type') == 'folder': + if current_node.get('id') != 'root': # Don't count root itself + folder_count += 1 + if 'children' in current_node and isinstance(current_node['children'], list): + for child in 
current_node['children']: + if isinstance(child, dict): + q.append(child) + + + user_details.append({ + 'telegram_id': int(user_id_str), + 'username': udata.get('username', 'N/A'), + 'first_name': udata.get('first_name', ''), + 'last_name': udata.get('last_name', ''), + 'photo_url': udata.get('photo_url'), + 'created_at': udata.get('created_at', 'N/A'), + 'file_count': file_count, + 'folder_count': folder_count + }) - try: - headers = {} - if HF_TOKEN_READ: headers["authorization"] = f"Bearer {HF_TOKEN_READ}" - response = requests.get(file_url, headers=headers, stream=True, timeout=20) - response.raise_for_status() + user_details.sort(key=lambda x: x.get('created_at', ''), reverse=True) + + html = ''' + +Админ-панель + + + + +

Админ-панель

+Назад в мой кабинет +{% with messages = get_flashed_messages(with_categories=true) %}{% if messages %}{% for category, message in messages %}
{{ message }}
{% endfor %}{% endif %}{% endwith %} +

Пользователи ({{ user_details|length }})

+{% for user in user_details %} +
+ Avatar + +
+ Файлы +
+ +
+
+
+{% else %}

Пользователей нет.

{% endfor %}
+ +''' + return render_template_string(html, user_details=user_details) + +@app.route('/admhosto/user/') +@admin_required +def admin_user_files(user_id): + user_id_str = str(user_id) + data = load_data() + user_data = data.get('users', {}).get(user_id_str) + if not user_data: + flash(f'Пользователь с ID {user_id} не найден.', 'error') + return redirect(url_for('admin_panel')) + + user_info = { + 'telegram_id': user_id, + 'username': user_data.get('username', 'N/A'), + 'first_name': user_data.get('first_name', ''), + 'last_name': user_data.get('last_name', ''), + 'photo_url': user_data.get('photo_url'), + } + + all_files = [] + def collect_files_recursive(folder_node, current_path_str="Root"): + if not folder_node or folder_node.get('type') != 'folder': return + for item in folder_node.get('children', []): + if item.get('type') == 'file': + item['parent_path_str'] = current_path_str + all_files.append(item) + elif item.get('type') == 'folder': + folder_name = item.get('name', 'Unnamed Folder') + new_path = f"{current_path_str} / {folder_name}" + collect_files_recursive(item, new_path) + + collect_files_recursive(user_data.get('filesystem', {})) + all_files.sort(key=lambda x: x.get('upload_date', ''), reverse=True) + + + html = ''' +Файлы {{ user_info.username or user_info.telegram_id }} + + + +
+ +
+ Avatar +
+

Файлы пользователя: {{ user_info.first_name or '' }} {{ user_info.last_name or '' }}

+

{% if user_info.username %}@{{ user_info.username }} | {% endif %}ID: {{ user_info.telegram_id }}

+
+
+ +Назад к пользователям +{% with messages = get_flashed_messages(with_categories=true) %}{% if messages %}{% for category, message in messages %}
{{ message }}
{% endfor %}{% endif %}{% endwith %} + +
+{% for file in files %} +
+
+ {% if file.file_type == 'image' %} + {% elif file.file_type == 'video' %} + {% elif file.file_type == 'pdf' %}
📄
+ {% elif file.file_type == 'text' %}
📝
+ {% else %}
{% endif %} +

{{ file.original_filename | truncate(30) }}

+

В папке: {{ file.parent_path_str }}

+

Загружен: {{ file.upload_date }}

+

ID: {{ file.id }}

+

Path: {{ file.path | truncate(40) }}

+
+
+ Скачать + {% set previewable = file.file_type in ['image', 'video', 'pdf', 'text'] %} + {% if previewable %} + + {% endif %} +
+ +
+
+
+{% else %}

У пользователя нет файлов.

{% endfor %} +
@app.route('/admhosto/delete_user/<int:user_id>', methods=['POST'])
@admin_required
def admin_delete_user(user_id):
    """Admin endpoint: delete a user and all of their stored data.

    The user's files are removed from the Hugging Face dataset repo first;
    the user is deleted from the local database only if the Hub cleanup
    succeeded (or there was nothing to clean up), so orphaned files are
    never left behind on the Hub.
    """
    admin_user_id = session['telegram_user']['id']
    user_id_str = str(user_id)

    if not HF_TOKEN_WRITE:
        flash('Удаление невозможно: токен для записи Hugging Face не настроен.', 'error')
        return redirect(url_for('admin_panel'))

    data = load_data()
    # Guard against a missing 'users' key as well as an unknown user id.
    if user_id_str not in data.get('users', {}):
        flash('Пользователь не найден!', 'error')
        return redirect(url_for('admin_panel'))

    user_data_to_delete = data['users'][user_id_str]
    username_for_log = user_data_to_delete.get('username', user_id_str)
    logging.warning(f"ADMIN ACTION by {admin_user_id}: Attempting to delete user {username_for_log} (ID: {user_id_str}) and all their data.")

    # --- Attempt to delete from Hugging Face first ---
    hf_delete_successful = False
    user_folder_path_on_hf = f"cloud_files/{user_id_str}"
    try:
        api = HfApi()
        logging.info(f"Attempting to delete HF Hub folder: {user_folder_path_on_hf} for user {user_id_str}")
        # delete_folder expects an (effectively) empty folder, so list
        # everything under the user's prefix and delete files individually first.
        objects_to_delete = api.list_repo_tree(repo_id=REPO_ID, repo_type="dataset", path_in_repo=user_folder_path_on_hf, token=HF_TOKEN_READ, recursive=True)
        paths_to_delete = [obj.path for obj in objects_to_delete]

        if paths_to_delete:
            logging.info(f"Found {len(paths_to_delete)} items in {user_folder_path_on_hf} to delete.")
            for path in paths_to_delete:
                if not path.endswith('/'):  # folder entries are handled by delete_folder below
                    try:
                        api.delete_file(path_in_repo=path, repo_id=REPO_ID, repo_type="dataset", token=HF_TOKEN_WRITE)
                        logging.info(f"Admin deleted HF file: {path}")
                    except hf_utils.EntryNotFoundError:
                        logging.warning(f"File {path} not found during bulk delete, skipping.")
                    except Exception as file_del_e:
                        # Best-effort per-file cleanup; the folder delete below
                        # decides whether the overall HF cleanup counts as done.
                        logging.error(f"Error deleting file {path} during user cleanup: {file_del_e}")
            # Now remove the (hopefully empty) folder itself.
            try:
                api.delete_folder(folder_path=user_folder_path_on_hf, repo_id=REPO_ID, repo_type="dataset", token=HF_TOKEN_WRITE,
                                  commit_message=f"ADMIN ACTION by {admin_user_id}: Deleted folder for user {user_id_str}")
                logging.info(f"Successfully deleted folder {user_folder_path_on_hf} on HF Hub.")
                hf_delete_successful = True
            except hf_utils.HfHubHTTPError as e:
                if e.response.status_code == 404 or "is not empty" in str(e):
                    # Folder already gone (or implicitly removed with its files):
                    # treat the Hub cleanup as complete.
                    logging.warning(f"Folder {user_folder_path_on_hf} possibly already gone or non-empty after file deletion attempt. Assuming HF cleanup done.")
                    hf_delete_successful = True
                else:
                    raise  # preserve traceback for other HF errors
            except Exception as folder_del_e:
                logging.error(f"Error deleting folder {user_folder_path_on_hf} after file deletion: {folder_del_e}")
        else:
            logging.info(f"No objects found in HF path {user_folder_path_on_hf}. Assuming HF cleanup not needed or already done.")
            hf_delete_successful = True

    except hf_utils.HfHubHTTPError as e:
        if e.response.status_code == 404:
            # Initial listing failed because the folder never existed.
            logging.warning(f"User folder {user_folder_path_on_hf} not found on HF Hub for user {user_id_str}. Skipping HF deletion.")
            hf_delete_successful = True
        else:
            logging.error(f"Error during HF cleanup for {user_id_str}: {e}")
            flash(f'Ошибка при удалении файлов пользователя {username_for_log} с сервера: {e}. Пользователь НЕ удален из базы.', 'error')
            return redirect(url_for('admin_panel'))
    except Exception as e:
        logging.error(f"Unexpected error during HF Hub data deletion for {user_id_str}: {e}")
        flash(f'Неожиданная ошибка при удалении файлов {username_for_log} с сервера: {e}. Пользователь НЕ удален из базы.', 'error')
        return redirect(url_for('admin_panel'))

    # --- Proceed with DB deletion only if HF deletion was deemed successful or skipped ---
    if hf_delete_successful:
        try:
            del data['users'][user_id_str]
            save_data(data)
            flash(f'Пользователь {username_for_log} (ID: {user_id_str}) и его данные успешно удалены из базы данных!')
            logging.info(f"ADMIN ACTION by {admin_user_id}: Successfully deleted user {user_id_str} from database.")
        except Exception as e:
            logging.error(f"CRITICAL: Error saving data after deleting user {user_id_str} from dict. DB MIGHT BE INCONSISTENT. HF data likely deleted. Error: {e}")
            flash(f'Данные пользователя {username_for_log} удалены с сервера, но произошла КРИТИЧЕСКАЯ ОШИБКА при удалении пользователя из базы данных: {e}', 'error')
        cache.clear()
    else:
        flash(f'Удаление пользователя {username_for_log} из базы отменено из-за ошибки при удалении файлов с сервера.', 'error')

    return redirect(url_for('admin_panel'))
@app.route('/admhosto/delete_file/<int:user_id>/<file_id>', methods=['POST'])
@admin_required
def admin_delete_file(user_id, file_id):
    """Admin endpoint: delete a single file belonging to a user.

    The file node is removed from the user's filesystem tree in the local
    database first, and only then is the underlying object deleted from the
    Hugging Face dataset repo, so the UI never lists a file whose DB entry
    is gone.
    """
    admin_user_id = session['telegram_user']['id']
    user_id_str = str(user_id)

    if not HF_TOKEN_WRITE:
        flash('Удаление невозможно: токен для записи Hugging Face не настроен.', 'error')
        return redirect(url_for('admin_user_files', user_id=user_id))

    data = load_data()
    user_data = data.get('users', {}).get(user_id_str)
    if not user_data:
        flash(f'Пользователь {user_id_str} не найден.', 'error')
        return redirect(url_for('admin_panel'))

    file_node, parent_node = find_node_by_id(user_data.get('filesystem', {}), file_id)

    if not file_node or file_node.get('type') != 'file':
        flash('Файл не найден в структуре пользователя.', 'error')
        return redirect(url_for('admin_user_files', user_id=user_id))

    hf_path = file_node.get('path')
    original_filename = file_node.get('original_filename', 'файл')
    username_for_log = user_data.get('username', user_id_str)

    # Remove from the DB first, then mirror the deletion on the Hub.
    if remove_node(user_data['filesystem'], file_id):
        try:
            save_data(data)
            logging.info(f"ADMIN ACTION by {admin_user_id}: Removed file {file_id} ({original_filename}) from DB for user {username_for_log} ({user_id_str}).")

            if hf_path:
                try:
                    api = HfApi()
                    api.delete_file(
                        path_in_repo=hf_path,
                        repo_id=REPO_ID,
                        repo_type="dataset",
                        token=HF_TOKEN_WRITE,
                        commit_message=f"ADMIN ACTION by {admin_user_id}: Deleted file {original_filename} (ID: {file_id}) for user {user_id_str}"
                    )
                    logging.info(f"ADMIN ACTION by {admin_user_id}: Deleted file {hf_path} from HF Hub for user {user_id_str}")
                    flash(f'Файл {original_filename} успешно удален (админ)!')
                except hf_utils.EntryNotFoundError:
                    # DB entry is already gone; a missing Hub object is not fatal.
                    logging.warning(f"ADMIN ACTION: File {hf_path} not found on HF Hub during delete for user {user_id_str}, but removed from DB.")
                    flash(f'Файл {original_filename} удален из базы (не найден на сервере) (админ).')
                except Exception as e:
                    logging.error(f"ADMIN ACTION: Error deleting file {hf_path} from HF Hub for user {user_id_str} (DB entry removed): {e}")
                    flash(f'Файл {original_filename} удален из базы, но ошибка при удалении с сервера: {e} (админ)', 'error')
            else:
                flash(f'Файл {original_filename} удален из базы (путь не найден) (админ).')

        except Exception as e:
            logging.critical(f"CRITICAL ADMIN ACTION: Failed to save DB after removing file {file_id} for user {user_id_str}. Data inconsistency possible! Error: {e}")
            flash('Критическая ошибка: не удалось сохранить базу данных после удаления файла (админ).', 'error')
        cache.clear()
    else:
        flash('Не удалось найти файл в структуре для удаления (админ).', 'error')

    return redirect(url_for('admin_user_files', user_id=user_id))
# --- Main Execution ---
if __name__ == '__main__':
    # Keep Telegram Mini App sessions alive for a month.
    app.permanent_session_lifetime = timedelta(days=30)

    # Without the bot token we cannot validate Telegram initData at all.
    if not TELEGRAM_BOT_TOKEN:
        logging.critical("FATAL: TELEGRAM_BOT_TOKEN environment variable is not set. Application cannot verify users.")
        exit(1)

    if not ADMIN_TELEGRAM_IDS:
        logging.warning("ADMIN_TELEGRAM_IDS environment variable is not set or empty. Admin panel will not be accessible.")
    else:
        logging.info(f"Admin users configured: {ADMIN_TELEGRAM_IDS}")

    if not HF_TOKEN_WRITE:
        logging.warning("HF_TOKEN (write access) is not set. File uploads, deletions, and backups will fail.")
    if not HF_TOKEN_READ:
        logging.warning("HF_TOKEN_READ is not set. Falling back to HF_TOKEN. File downloads/previews might fail for private repos if HF_TOKEN is also not set.")

    if HF_TOKEN_WRITE:
        # Full read/write mode: pull the DB from the Hub, then keep it backed up.
        logging.info("Performing initial database download before starting background backup.")
        download_db_from_hf()
        backup_thread = threading.Thread(target=periodic_backup, daemon=True)
        backup_thread.start()
        logging.info("Periodic backup thread started.")
    elif HF_TOKEN_READ:
        # Read-only mode: the DB can be downloaded but never pushed back.
        logging.info("Write token not found. Performing initial database download (read-only mode). Backups disabled.")
        download_db_from_hf()
    else:
        # Fully offline mode: operate against a local JSON file only.
        logging.critical("Neither HF_TOKEN nor HF_TOKEN_READ is set. Hugging Face operations disabled. Loading/creating local DB only.")
        if not os.path.exists(DATA_FILE):
            with open(DATA_FILE, 'w', encoding='utf-8') as f:
                json.dump({'users': {}}, f)
            logging.info(f"Created empty local database file: {DATA_FILE}")
        else:
            logging.info(f"Using existing local database file: {DATA_FILE}")

    app.run(debug=False, host='0.0.0.0', port=7860)

# --- END OF FILE app.py ---