def find_node_by_id(filesystem, node_id):
    """Breadth-first search of the virtual filesystem tree for a node id.

    Returns a ``(node, parent)`` tuple. ``parent`` is None when the match is
    the root itself; ``(None, None)`` when the id is absent or the tree is
    malformed.
    """
    if not filesystem or not isinstance(filesystem, dict):
        return None, None
    if filesystem.get('id') == node_id:
        return filesystem, None

    # BFS over folder children; `visited` guards against cycles in the tree.
    queue = [(filesystem, None)]
    visited = {filesystem.get('id')}
    while queue:
        current_node, parent = queue.pop(0)
        if current_node.get('type') == 'folder' and 'children' in current_node:
            for child in current_node.get('children', []):
                if not isinstance(child, dict):
                    # Malformed entry — skip instead of crashing on .get().
                    continue
                child_id = child.get('id')
                if not child_id:
                    continue
                if child_id == node_id:
                    return child, current_node
                if child_id not in visited and child.get('type') == 'folder':
                    visited.add(child_id)
                    queue.append((child, current_node))
    return None, None


def add_node(filesystem, parent_id, node_data):
    """Append ``node_data`` under the folder ``parent_id``.

    Returns True when the node was added; False when the parent is missing,
    is not a folder, or already contains a child with the same id.
    """
    parent_node, _ = find_node_by_id(filesystem, parent_id)
    if parent_node and parent_node.get('type') == 'folder':
        if 'children' not in parent_node or not isinstance(parent_node['children'], list):
            parent_node['children'] = []
        existing_ids = {child.get('id') for child in parent_node['children'] if isinstance(child, dict)}
        if node_data.get('id') not in existing_ids:
            parent_node['children'].append(node_data)
            return True
    return False


def remove_node(filesystem, node_id):
    """Detach ``node_id`` from its parent's children list.

    Returns True when something was actually removed. The root node itself
    can never be removed (it has no parent).
    """
    node_to_remove, parent_node = find_node_by_id(filesystem, node_id)
    if node_to_remove and parent_node and 'children' in parent_node and isinstance(parent_node['children'], list):
        original_length = len(parent_node['children'])
        parent_node['children'] = [
            child for child in parent_node['children']
            if not isinstance(child, dict) or child.get('id') != node_id
        ]
        return len(parent_node['children']) < original_length
    if node_to_remove and node_id == filesystem.get('id'):
        logging.warning("Attempted to remove root node directly.")
        return False
    return False


def get_node_path_list(filesystem, node_id):
    """Build the breadcrumb path (root → node) for ``node_id``.

    Returns a list of ``{'id', 'name'}`` dicts. Loops and missing parents
    terminate the walk; the result is always prefixed with the root entry.
    """
    path_list = []
    current_id = node_id
    processed_ids = set()
    max_depth = 20  # hard bound in addition to cycle detection
    depth = 0
    while current_id and current_id not in processed_ids and depth < max_depth:
        processed_ids.add(current_id)
        depth += 1
        node, parent = find_node_by_id(filesystem, current_id)
        if not node or not isinstance(node, dict):
            logging.warning(f"Path traversal stopped: Node not found or invalid for ID {current_id}")
            break
        path_list.append({
            'id': node.get('id'),
            'name': node.get('name', node.get('original_filename', 'Unknown'))
        })
        if not parent or not isinstance(parent, dict):
            break  # reached the root (or an orphan) — nothing further up
        parent_id = parent.get('id')
        if parent_id == current_id:
            logging.error(f"Filesystem loop detected at node {current_id}")
            break
        current_id = parent_id

    if not any(p['id'] == 'root' for p in path_list) and filesystem and filesystem.get('id') == 'root':
        path_list.append({'id': 'root', 'name': filesystem.get('name', 'Root')})

    # path_list was collected leaf-first; reverse and de-duplicate ids.
    final_path = []
    seen_ids = set()
    for item in reversed(path_list):
        if item['id'] not in seen_ids:
            final_path.append(item)
            seen_ids.add(item['id'])
    if not final_path or final_path[0]['id'] != 'root':
        final_path.insert(0, {'id': 'root', 'name': filesystem.get('name', 'Root') if filesystem else 'Root'})
    return final_path


def initialize_user_filesystem(user_data):
    """Ensure ``user_data['filesystem']`` is a valid root folder dict in place."""
    fs = user_data.get('filesystem')
    if 'filesystem' not in user_data or not isinstance(fs, dict) or fs.get('id') != 'root':
        logging.warning("Initializing/Resetting filesystem for user.")
        user_data['filesystem'] = {
            "type": "folder", "id": "root", "name": "Root", "children": []
        }
    elif 'children' not in fs or not isinstance(fs['children'], list):
        user_data['filesystem']['children'] = []
def load_data_from_file(filepath):
    """Load and sanity-check the JSON database at ``filepath``.

    Returns the parsed dict with ``'users'`` guaranteed present and each
    user's filesystem initialized, or None when the file is missing or
    invalid (non-dict top level, broken JSON, any other read failure).
    """
    try:
        with open(filepath, 'r', encoding='utf-8') as file:
            data = json.load(file)
        if not isinstance(data, dict):
            logging.warning(f"Data file {filepath} is not a dict, treating as invalid.")
            return None
        data.setdefault('users', {})
        for user_id, user_data in data['users'].items():
            if isinstance(user_data, dict):
                # Repairs/initializes the per-user filesystem tree in place.
                initialize_user_filesystem(user_data)
            else:
                logging.warning(f"Invalid user_data structure for user {user_id} in {filepath}, skipping.")
        logging.info(f"Data loaded successfully from {filepath}.")
        return data
    except FileNotFoundError:
        logging.info(f"{filepath} not found locally.")
        return None
    except json.JSONDecodeError:
        logging.error(f"Error decoding JSON from {filepath}.")
        return None
    except Exception as e:
        # Catch-all so a corrupt DB never takes the app down at load time.
        logging.error(f"Error loading data from {filepath}: {e}")
        return None
@cache.memoize(timeout=60)
def load_data():
    """Return the app database dict.

    Load order: primary file → backup file (restoring the primary from it on
    success) → Hugging Face download → fresh empty structure. Result is
    memoized for 60s; save_data() clears the cache after writes.
    """
    with data_lock:
        data = None
        if os.path.exists(DATA_FILE):
            data = load_data_from_file(DATA_FILE)

        if data is None and os.path.exists(DATA_FILE_BACKUP):
            logging.warning(f"Primary data file {DATA_FILE} failed to load or missing, attempting backup.")
            data = load_data_from_file(DATA_FILE_BACKUP)
            if data:
                logging.info("Loaded data from backup. Attempting to restore primary file.")
                try:
                    shutil.copy2(DATA_FILE_BACKUP, DATA_FILE)
                except Exception as e:
                    logging.error(f"Failed to restore primary file from backup: {e}")

        if data is None:
            logging.warning("Both primary and backup data files failed to load or missing. Attempting download from HF.")
            if download_db_from_hf():
                data = load_data_from_file(DATA_FILE)

        if data is None:
            logging.critical("CRITICAL: Could not load data from local files or HF. Initializing empty data structure.")
            data = {'users': {}}

        return data


def save_data(data):
    """Persist ``data`` to DATA_FILE and schedule an HF upload.

    The previous file is copied to DATA_FILE_BACKUP first, and the new
    content is written to a temp file then atomically swapped in with
    os.replace so a crash mid-write cannot corrupt the live database.
    Returns True on success, False on any failure.
    """
    with data_lock:
        try:
            if os.path.exists(DATA_FILE):
                try:
                    shutil.copy2(DATA_FILE, DATA_FILE_BACKUP)
                    logging.info(f"Created backup: {DATA_FILE_BACKUP}")
                except Exception as backup_err:
                    # Best-effort backup: a failure here must not block the save.
                    logging.error(f"Failed to create backup file {DATA_FILE_BACKUP}: {backup_err}")

            tmp_path = DATA_FILE + '.tmp'
            with open(tmp_path, 'w', encoding='utf-8') as file:
                json.dump(data, file, ensure_ascii=False, indent=2)  # indent=2 keeps the file small
            os.replace(tmp_path, DATA_FILE)  # atomic swap — no partial writes

            logging.info(f"Data saved locally to {DATA_FILE}")
            cache.clear()  # invalidate memoized load_data so readers see the new state
            upload_db_to_hf()
            return True
        except Exception as e:
            logging.error(f"CRITICAL: Error saving data to {DATA_FILE}: {e}")
            return False
def upload_db_to_hf():
    """Schedule an async upload of the local DB file to the HF dataset repo.

    No-op (with a warning) when HF_TOKEN_WRITE is unset or the local file is
    missing. The upload runs as a future so the request thread is not blocked.
    """
    if not HF_TOKEN_WRITE:
        logging.warning("HF_TOKEN_WRITE not set, skipping database upload.")
        return
    if not os.path.exists(DATA_FILE):
        logging.warning(f"Local data file {DATA_FILE} not found for upload.")
        return
    try:
        api = HfApi()
        # NOTE(review): path_or_fileobj/path_in_repo/repo_id lines are unchanged
        # diff context not fully visible here — verify against the original file.
        api.upload_file(
            path_or_fileobj=DATA_FILE,
            path_in_repo=DATA_FILE,
            repo_id=REPO_ID,
            repo_type="dataset",
            token=HF_TOKEN_WRITE,
            commit_message=f"Backup MiniApp {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}",
            run_as_future=True  # non-blocking: returns a Future
        )
        logging.info("Database upload to Hugging Face scheduled.")
    except Exception as e:
        logging.error(f"Error scheduling database upload: {e}")
def download_db_from_hf():
    """Download DATA_FILE from the HF dataset repo into the working directory.

    Returns True when the file was downloaded, False otherwise (missing read
    token, repo/file not found, network failure, or any other error).
    """
    if not HF_TOKEN_READ:
        logging.warning("HF_TOKEN_READ not set, skipping database download.")
        return False
    try:
        hf_hub_download(
            repo_id=REPO_ID,
            filename=DATA_FILE,
            repo_type="dataset",
            token=HF_TOKEN_READ,
            local_dir=".",
            local_dir_use_symlinks=False,  # materialize a real file, not a cache symlink
            force_download=True,
            etag_timeout=10
        )
        logging.info(f"Database downloaded from Hugging Face to {DATA_FILE}")
        return True
    except hf_utils.RepositoryNotFoundError:
        logging.error(f"Repository {REPO_ID} not found on Hugging Face.")
        return False
    except hf_utils.EntryNotFoundError:
        logging.warning(f"{DATA_FILE} not found in repo {REPO_ID}. No file downloaded.")
        return False
    except requests.exceptions.RequestException as e:
        # Broadened from ConnectionError: timeouts and HTTP errors are
        # network failures too and should take the same fallback path.
        logging.error(f"Connection error downloading DB from HF: {e}. Using local version if available.")
        return False
    except Exception as e:
        logging.error(f"Generic error downloading database from HF: {e}")
        return False


def get_file_type(filename):
    """Map a filename extension to a coarse UI category.

    Returns one of 'video', 'image', 'pdf', 'text', 'audio', 'archive',
    'document', or 'other' (also used for None / extension-less names).
    """
    if not filename or '.' not in filename:
        return 'other'
    ext = filename.lower().split('.')[-1]
    categories = {
        'video': {'mp4', 'mov', 'avi', 'webm', 'mkv', 'm4v', 'wmv', 'flv'},
        'image': {'jpg', 'jpeg', 'png', 'gif', 'bmp', 'webp', 'svg', 'heic', 'heif'},
        'pdf': {'pdf'},
        'text': {'txt', 'md', 'log', 'csv', 'json', 'xml', 'html', 'css', 'js',
                 'py', 'java', 'c', 'cpp', 'go', 'rs'},
        'audio': {'mp3', 'wav', 'ogg', 'flac', 'aac', 'm4a'},
        'archive': {'zip', 'rar', '7z', 'tar', 'gz'},
        'document': {'doc', 'docx', 'ppt', 'pptx', 'xls', 'xlsx', 'odt', 'odp', 'ods'},
    }
    for category, extensions in categories.items():
        if ext in extensions:
            return category
    return 'other'
def check_telegram_authorization(auth_data: str, bot_token: str) -> Optional[dict]:
    """Validate Telegram WebApp initData using the official HMAC scheme.

    Returns the parsed 'user' dict (with 'id' coerced to str) on success,
    or None when the data is missing, expired, or fails verification.
    """
    if not auth_data or not bot_token or bot_token == 'YOUR_BOT_TOKEN':
        logging.warning("Validation skipped: Missing auth_data or valid BOT_TOKEN.")
        return None
    try:
        parsed_data = dict(parse_qsl(unquote(auth_data)))
        if "hash" not in parsed_data:
            logging.error("Hash not found in auth data")
            return None
        telegram_hash = parsed_data.pop('hash')

        auth_date_ts = int(parsed_data.get('auth_date', 0))
        current_ts = int(time.time())
        if abs(current_ts - auth_date_ts) > AUTH_DATA_LIFETIME:
            logging.warning(f"Auth data expired (Auth: {auth_date_ts}, Now: {current_ts}, Diff: {current_ts - auth_date_ts})")
            return None

        # Per Telegram docs: secret = HMAC_SHA256(key="WebAppData", msg=bot_token),
        # then HMAC the sorted "k=v" lines of every field except 'hash'.
        data_check_string = "\n".join(sorted(f"{k}={v}" for k, v in parsed_data.items()))
        secret_key = hmac.new("WebAppData".encode(), bot_token.encode(), hashlib.sha256).digest()
        calculated_hash = hmac.new(secret_key, data_check_string.encode(), hashlib.sha256).hexdigest()

        # Constant-time comparison to avoid a timing side channel on the hash.
        if not hmac.compare_digest(calculated_hash, telegram_hash):
            logging.warning("Hash mismatch during validation")
            return None

        user_data_str = parsed_data.get('user')
        if not user_data_str:
            logging.warning("No 'user' field in validated auth data")
            return None
        try:
            user_info = json.loads(user_data_str)
        except json.JSONDecodeError:
            logging.error("Failed to decode user JSON from auth data")
            return None
        if 'id' not in user_info:
            logging.error("Validated user data missing 'id'")
            return None
        # Normalize to str: JSON object keys (the users store) are always strings,
        # so an int id would never match a stored user.
        user_info['id'] = str(user_info['id'])
        return user_info
    except Exception as e:
        logging.error(f"Exception during validation: {e}")
        return None
except json.JSONDecodeError as e: - logging.error(f"Failed to decode user JSON from auth data: {e} - Data: {user_data_str}") + except json.JSONDecodeError: + logging.error("Failed to decode user JSON from auth data") return None else: logging.warning("No 'user' field in validated auth data") return None else: - logging.warning(f"Hash mismatch during validation. Received: {telegram_hash}, Calculated: {calculated_hash_hex}") + logging.warning("Hash mismatch during validation") return None except Exception as e: - logging.exception(f"Exception during Telegram validation: {e}") + logging.error(f"Exception during validation: {e}") return None -# --- HTML, CSS, JS Template --- HTML_TEMPLATE = """ - + Zeus Cloud - -
Загрузка...
+
Loading...
- -
-

Zeus Cloud

+
-
- +
-
- - - - +
+

Files

+
    - -
    - -

    Содержимое

    -
    - +
    +
    + + +
    +
    + + +
    +
    @@ -634,14 +545,14 @@ HTML_TEMPLATE = """ const userInfoHeaderEl = document.getElementById('user-info-header'); const flashContainerEl = document.getElementById('flash-container'); const breadcrumbsContainerEl = document.getElementById('breadcrumbs-container'); - const itemGridContainerEl = document.getElementById('item-grid-container'); + const fileListContainerEl = document.getElementById('file-list-container'); const currentFolderTitleEl = document.getElementById('current-folder-title'); - const uploadForm = document.getElementById('upload-form'); // Still needed for FormData + const uploadForm = document.getElementById('upload-form'); const fileInput = document.getElementById('file-input'); - const uploadLabelBtn = document.getElementById('upload-label-btn'); + const uploadBtn = document.getElementById('upload-btn'); const progressContainer = document.getElementById('progress-container'); const progressBar = document.getElementById('progress-bar'); - const progressText = document.getElementById('progress-text'); // Kept in HTML, maybe use later + const progressText = document.getElementById('progress-text'); const newFolderInput = document.getElementById('new-folder-name'); const createFolderBtn = document.getElementById('create-folder-btn'); @@ -649,12 +560,10 @@ HTML_TEMPLATE = """ let validatedInitData = null; let currentUser = null; let currentItems = []; - let isUploading = false; - // --- API Communication --- async function apiCall(endpoint, method = 'POST', body = {}) { if (!validatedInitData) { - showError("Ошибка: Данные авторизации отсутствуют. 
Попробуйте перезапустить."); + showError("Authentication data is missing."); throw new Error("Not authenticated"); } body.initData = validatedInitData; @@ -666,37 +575,33 @@ HTML_TEMPLATE = """ body: JSON.stringify(body) }); if (!response.ok) { - let errorMsg = `Ошибка сервера (${response.status})`; + let errorMsg = `Server error: ${response.status}`; try { const errData = await response.json(); errorMsg = errData.message || errorMsg; - } catch (e) { /* Ignore if error body is not JSON */ } + } catch (e) { /* Ignore */ } throw new Error(errorMsg); } return await response.json(); } catch (error) { console.error(`API call to ${endpoint} failed:`, error); - showFlash(`Ошибка: ${error.message}`, 'error'); + showFlash(`Network or server error: ${error.message}`, 'error'); throw error; } } - // --- UI Rendering --- - function showLoadingScreen(message = 'Загрузка...') { - loadingEl.textContent = message; - loadingEl.style.display = 'flex'; + function showLoadingScreen() { + loadingEl.style.display = 'block'; errorViewEl.style.display = 'none'; appContentEl.style.display = 'none'; } function showError(message) { loadingEl.style.display = 'none'; - errorViewEl.innerHTML = `

    Ошибка

    ${message}

    `; - errorViewEl.style.display = 'flex'; + errorViewEl.innerHTML = `

    Error

    ${message}

    `; + errorViewEl.style.display = 'block'; appContentEl.style.display = 'none'; if (tg.HapticFeedback) tg.HapticFeedback.notificationOccurred('error'); - // Disable main button if error occurs - tg.MainButton.hide(); } function showAppContent() { @@ -709,25 +614,16 @@ HTML_TEMPLATE = """ const flashDiv = document.createElement('div'); flashDiv.className = `flash ${type}`; flashDiv.textContent = message; - flashContainerEl.innerHTML = ''; // Clear previous messages + flashContainerEl.innerHTML = ''; flashContainerEl.appendChild(flashDiv); - - // Use Telegram's popup for important messages? - // tg.showAlert(message); - - const timeout = type === 'error' ? 8000 : 5000; + if (tg.HapticFeedback) tg.HapticFeedback.notificationOccurred(type); setTimeout(() => { if (flashDiv.parentNode === flashContainerEl) { - flashDiv.style.opacity = '0'; - setTimeout(() => { - if (flashDiv.parentNode === flashContainerEl) { - flashContainerEl.removeChild(flashDiv); - } - }, 300); // Transition duration + flashDiv.style.opacity = '0'; + flashDiv.style.transition = 'opacity 0.5s ease-out'; + setTimeout(() => flashContainerEl.removeChild(flashDiv), 500); } - }, timeout); - - if (tg.HapticFeedback) tg.HapticFeedback.notificationOccurred(type); + }, 4500); } function renderBreadcrumbs(breadcrumbs) { @@ -736,6 +632,7 @@ HTML_TEMPLATE = """ if (index > 0) { const separator = document.createElement('span'); separator.textContent = ' / '; + separator.style.color = 'var(--tg-theme-hint-color)'; breadcrumbsContainerEl.appendChild(separator); } if (index === breadcrumbs.length - 1) { @@ -743,7 +640,7 @@ HTML_TEMPLATE = """ span.className = 'current-folder'; span.textContent = crumb.name; breadcrumbsContainerEl.appendChild(span); - currentFolderTitleEl.textContent = `${crumb.name}`; // Simpler title + currentFolderTitleEl.textContent = crumb.name; } else { const link = document.createElement('a'); link.href = '#'; @@ -752,203 +649,194 @@ HTML_TEMPLATE = """ 
breadcrumbsContainerEl.appendChild(link); } }); - // Scroll to the end of breadcrumbs if they overflow - breadcrumbsContainerEl.scrollLeft = breadcrumbsContainerEl.scrollWidth; - - // Update BackButton visibility - if (breadcrumbs.length > 1) { - tg.BackButton.show(); - } else { - tg.BackButton.hide(); - } - } - - function getItemIcon(item) { - if (item.type === 'folder') return '📁'; - switch (item.file_type) { - case 'image': return `preview`; // Preview or fallback icon - case 'video': return '🎬'; - case 'audio': return '🎵'; - case 'pdf': return '📄'; // Specific PDF icon - case 'text': return '📝'; - case 'archive': return '📦'; - case 'document_word': - case 'document_excel': - case 'document_ppt': - return '📊'; // Generic doc icon - default: return '❓'; // Unknown - } - } - - function getItemIconClass(item) { - if (item.type === 'folder') return 'folder'; - return item.file_type || 'other'; + // Scroll to the end of breadcrumbs + setTimeout(() => { breadcrumbsContainerEl.scrollLeft = breadcrumbsContainerEl.scrollWidth; }, 0); } function renderItems(items) { - itemGridContainerEl.innerHTML = ''; // Clear previous items + fileListContainerEl.innerHTML = ''; if (!items || items.length === 0) { - itemGridContainerEl.innerHTML = '

    Папка пуста

    '; + const emptyMsg = document.createElement('li'); + emptyMsg.textContent = 'This folder is empty.'; + emptyMsg.style.padding = '20px 16px'; + emptyMsg.style.textAlign = 'center'; + emptyMsg.style.color = 'var(--tg-theme-hint-color)'; + fileListContainerEl.appendChild(emptyMsg); return; } - - items.sort((a, b) => { - // Folders first - if (a.type === 'folder' && b.type !== 'folder') return -1; - if (a.type !== 'folder' && b.type === 'folder') return 1; - // Then sort by name (case-insensitive) - const nameA = a.name || a.original_filename || ''; - const nameB = b.name || b.original_filename || ''; - return nameA.localeCompare(nameB, undefined, { sensitivity: 'base' }); - }); - - items.forEach(item => { - const itemDiv = document.createElement('div'); - itemDiv.className = `item`; - itemDiv.dataset.itemId = item.id; - itemDiv.dataset.itemType = item.type; - itemDiv.dataset.itemName = item.name || item.original_filename; - - const iconClass = getItemIconClass(item); - const iconHtml = getItemIcon(item); - const name = item.name || item.original_filename || 'Без имени'; - const details = item.upload_date ? item.upload_date.split(' ')[0] : (item.type === 'folder' ? '' : ''); - - itemDiv.innerHTML = ` -
    ${iconHtml}
    -

    ${name}

    - ${details ? `

    ${details}

    ` : ''} - `; - - itemDiv.onclick = () => handleItemClick(item); - itemDiv.oncontextmenu = (e) => { - e.preventDefault(); - showItemContextMenu(item, e.clientX, e.clientY); - if(tg.HapticFeedback) tg.HapticFeedback.impactOccurred('light'); - }; - - itemGridContainerEl.appendChild(itemDiv); + const listItem = document.createElement('li'); + listItem.className = 'list-item'; + listItem.setAttribute('data-id', item.id); + listItem.setAttribute('data-type', item.type); + listItem.setAttribute('data-name', item.name || item.original_filename); + + const iconDiv = document.createElement('div'); + iconDiv.className = 'item-icon'; + + const detailsDiv = document.createElement('div'); + detailsDiv.className = 'item-details'; + + const nameP = document.createElement('p'); + nameP.className = 'item-name'; + nameP.textContent = item.name || item.original_filename || 'Unnamed'; + + const subtitleP = document.createElement('p'); + subtitleP.className = 'item-subtitle'; + + detailsDiv.appendChild(nameP); + detailsDiv.appendChild(subtitleP); + + const actionsDiv = document.createElement('div'); + actionsDiv.className = 'item-actions'; + + if (item.type === 'folder') { + iconDiv.classList.add('icon-folder'); + subtitleP.textContent = `${(item.children || []).length} items`; + listItem.onclick = () => loadFolderContent(item.id); + // Add context menu button for folder actions (like delete) + const menuBtn = createContextMenuButton(item); + actionsDiv.appendChild(menuBtn); + + } else if (item.type === 'file') { + const dlUrl = `/download/${item.id}`; + let canPreview = false; + let previewAction = null; + + if (item.file_type === 'image') { + iconDiv.innerHTML = ``; + previewAction = () => openModal(dlUrl, 'image', item.id); + canPreview = true; + } else if (item.file_type === 'video') { + iconDiv.classList.add('icon-video'); + previewAction = () => openModal(dlUrl, 'video', item.id); + canPreview = true; + } else if (item.file_type === 'pdf') { + 
iconDiv.classList.add('icon-pdf'); + previewAction = () => openModal(dlUrl, 'pdf', item.id); + canPreview = true; + } else if (item.file_type === 'text') { + iconDiv.classList.add('icon-text'); + previewAction = () => openModal(`/get_text_content/${item.id}`, 'text', item.id); + canPreview = true; + } else if (item.file_type === 'audio') { + iconDiv.classList.add('icon-audio'); + // Simple preview for audio might just open it directly + // previewAction = () => window.open(dlUrl, '_blank'); + // canPreview = true; // Or add modal player + } else if (item.file_type === 'archive') { + iconDiv.classList.add('icon-archive'); + } else if (item.file_type === 'document') { + iconDiv.classList.add('icon-document'); + } else { + iconDiv.classList.add('icon-other'); + } + + subtitleP.textContent = item.upload_date ? item.upload_date.split(' ')[0] : 'File'; + if(canPreview && previewAction) { + listItem.onclick = previewAction; + } else { + listItem.onclick = () => tg.openLink(dlUrl); // Fallback to download/open link + } + + const menuBtn = createContextMenuButton(item); + actionsDiv.appendChild(menuBtn); + } + + listItem.appendChild(iconDiv); + listItem.appendChild(detailsDiv); + listItem.appendChild(actionsDiv); + fileListContainerEl.appendChild(listItem); }); } - function handleItemClick(item) { - if (item.type === 'folder') { - loadFolderContent(item.id); - } else if (item.type === 'file') { - // Decide default action: preview or download? Preview seems better. - const previewable = ['image', 'video', 'pdf', 'text'].includes(item.file_type); - if (previewable) { - openModalForItem(item); - } else { - // Offer download directly for non-previewable - const dlUrl = `/download/${item.id}`; - // Maybe show confirm before download? 
- // tg.showConfirm(`Скачать файл "${item.original_filename || item.id}"?`, (confirmed) => { - // if(confirmed) window.open(dlUrl, '_blank'); - // }); - // Or just download: - window.open(dlUrl, '_blank'); - } - } + function createContextMenuButton(item) { + const button = document.createElement('button'); + button.className = 'btn icon-btn context-menu-button'; + button.setAttribute('aria-label', 'Actions'); + button.onclick = (e) => { + e.stopPropagation(); // Prevent li click + showItemActions(item, button); + }; + return button; } - function openModalForItem(item) { - if (item.file_type === 'text') { - openModal(`/get_text_content/${item.id}`, 'text', item.id); - } else { - // For image, video, pdf use the download URL which serves the content - openModal(`/download/${item.id}`, item.file_type, item.id); - } - } - - function showItemContextMenu(item, x, y) { - // Telegram doesn't have a native context menu API. - // We can implement a custom one, but it might feel non-native. - // A simpler approach for Mini Apps is often to use tg.showPopup or tg.showAlert - // with action buttons, triggered by a long press or a dedicated action button. - // For simplicity here, let's use tg.showConfirm for delete actions. 
- - const buttons = []; - if (item.type === 'folder') { - buttons.push({ id: 'open', type: 'default', text: 'Открыть' }); - buttons.push({ id: 'delete', type: 'destructive', text: 'Удалить папку' }); - } else { // File - const previewable = ['image', 'video', 'pdf', 'text'].includes(item.file_type); - if (previewable) { - buttons.push({ id: 'preview', type: 'default', text: 'Просмотр' }); - } - buttons.push({ id: 'download', type: 'default', text: 'Скачать' }); - buttons.push({ id: 'delete', type: 'destructive', text: 'Удалить файл' }); - } - buttons.push({ id: 'cancel', type: 'cancel' }); // Standard cancel - - tg.showPopup({ - title: item.name || item.original_filename, - message: `Выберите действие:`, - buttons: buttons - }, (buttonId) => { - if (!buttonId || buttonId === 'cancel') return; - - if (buttonId === 'open') { - loadFolderContent(item.id); - } else if (buttonId === 'preview') { - openModalForItem(item); - } else if (buttonId === 'download') { - window.open(`/download/${item.id}`, '_blank'); - } else if (buttonId === 'delete') { - if (item.type === 'folder') { - deleteFolder(item.id, item.name || 'папку'); - } else { - deleteFile(item.id, item.original_filename || 'файл'); - } + function showItemActions(item, anchorElement) { + let actions = []; + const itemName = item.name || item.original_filename; + + if (item.type === 'folder') { + actions.push({ text: "Open", action: () => loadFolderContent(item.id) }); + actions.push({ text: "Delete", style: "destructive", action: () => deleteFolder(item.id, itemName) }); + } else { // File + const dlUrl = `/download/${item.id}`; + if (['image', 'video', 'pdf', 'text'].includes(item.file_type)) { + let previewUrl = item.file_type === 'text' ? 
`/get_text_content/${item.id}` : dlUrl; + actions.push({ text: "Preview", action: () => openModal(previewUrl, item.file_type, item.id) }); + } + actions.push({ text: "Download", action: () => tg.openLink(dlUrl, {try_instant_view: false}) }); + actions.push({ text: "Delete", style: "destructive", action: () => deleteFile(item.id, itemName) }); + } + actions.push({ text: "Cancel", style: "cancel" }); + + const popupParams = { + title: itemName, + message: `Type: ${item.type === 'folder' ? 'Folder' : item.file_type || 'File'}`, + buttons: actions.map(a => ({ + type: a.style || 'default', // 'default', 'ok', 'close', 'cancel', 'destructive' + text: a.text, + id: a.text.toLowerCase().replace(/ /g, '_') // Generate an ID for the callback + })) + }; + + tg.showPopup(popupParams, (buttonId) => { + const selectedAction = actions.find(a => a.text.toLowerCase().replace(/ /g, '_') === buttonId); + if (selectedAction && selectedAction.action) { + selectedAction.action(); } - }); - } - + }); + } - // --- Modal Logic --- async function openModal(srcOrUrl, type, itemId) { const modal = document.getElementById('mediaModal'); const modalContent = document.getElementById('modalContent'); - modalContent.innerHTML = '

    Загрузка...

    '; + modalContent.innerHTML = '

    Loading...

    '; modal.style.display = 'flex'; - if (tg.HapticFeedback) tg.HapticFeedback.impactOccurred('light'); try { if (type === 'pdf') { - // PDF handling in iframe can be tricky, especially mobile - // Option 1: Google Docs Viewer (might have CORS issues or need proxy) - // modalContent.innerHTML = ``; - // Option 2: Link to open externally - // modalContent.innerHTML = `

    PDF файлы лучше открывать в отдельном приложении.

    Открыть PDF`; - // Option 3: Basic iframe (might work for some PDFs/browsers) - modalContent.innerHTML = ``; - + if (tg.platform === "ios" || tg.platform === "android") { + tg.openLink(window.location.origin + srcOrUrl, {try_instant_view: true}); + closeModalManual(); + return; + } else { + modalContent.innerHTML = ``; + } } else if (type === 'image') { - modalContent.innerHTML = `Просмотр изображения`; + modalContent.innerHTML = `Image Preview`; } else if (type === 'video') { - modalContent.innerHTML = ``; + modalContent.innerHTML = ``; } else if (type === 'text') { const response = await fetch(srcOrUrl); - if (!response.ok) throw new Error(`Ошибка загрузки текста: ${response.statusText || response.status}`); + if (!response.ok) throw new Error(`Failed to load text: ${response.statusText}`); const text = await response.text(); - // Basic escaping for HTML safety - const escapedText = text.replace(/&/g, "&").replace(//g, ">"); - modalContent.innerHTML = `
    ${escapedText}
    `; + const pre = document.createElement('pre'); + pre.textContent = text; + modalContent.innerHTML = ''; + modalContent.appendChild(pre); } else { - modalContent.innerHTML = '

    Предпросмотр для этого типа файла не поддерживается.

    '; + modalContent.innerHTML = '

    Preview not supported for this file type.

    '; } } catch (error) { console.error("Error loading modal content:", error); - modalContent.innerHTML = `

    Не удалось загрузить содержимое для предпросмотра.
    ${error.message}

    `; - if (tg.HapticFeedback) tg.HapticFeedback.notificationOccurred('error'); + modalContent.innerHTML = `

    Could not load preview. ${error.message}

    `; + if (tg.HapticFeedback) tg.HapticFeedback.notificationOccurred('error'); } } function closeModal(event) { const modal = document.getElementById('mediaModal'); - if (event.target === modal) { // Click outside content + if (event.target === modal) { closeModalManual(); } } @@ -957,56 +845,48 @@ HTML_TEMPLATE = """ const modal = document.getElementById('mediaModal'); modal.style.display = 'none'; const video = modal.querySelector('video'); - if (video) { try { video.pause(); video.src = ''; } catch(e){} } + if (video) { video.pause(); video.src = ''; } const iframe = modal.querySelector('iframe'); - if (iframe) try { iframe.src = 'about:blank'; } catch(e){} - document.getElementById('modalContent').innerHTML = ''; // Clear content - if (tg.HapticFeedback) tg.HapticFeedback.impactOccurred('light'); + if (iframe) iframe.src = 'about:blank'; + document.getElementById('modalContent').innerHTML = ''; } - // --- Folder Operations --- async function loadFolderContent(folderId) { currentFolderId = folderId; console.log(`Loading folder: ${folderId}`); - showLoadingScreen(`Загрузка папки...`); + tg.BackButton.isVisible = (folderId !== 'root'); try { const data = await apiCall('/get_dashboard_data', 'POST', { folder_id: folderId }); if (data.status === 'ok') { currentItems = data.items || []; renderBreadcrumbs(data.breadcrumbs || [{'id': 'root', 'name': 'Root'}]); - renderItems(currentItems); - showAppContent(); // Ensure app is visible after loading + renderItems(currentItems.sort((a, b) => (a.type !== 'folder') - (b.type !== 'folder') || (a.name || a.original_filename || '').localeCompare(b.name || b.original_filename || ''))); } else { - // API call already shows flash on error, but we might need to show error screen - showError(data.message || 'Не удалось загрузить содержимое папки.'); + showFlash(data.message || 'Failed to load folder content.', 'error'); } } catch (error) { - showError(`Не удалось загрузить содержимое папки: ${error.message}`); - } finally { - // 
Hide loading screen if it wasn't hidden by success/error path - if (loadingEl.style.display !== 'none') { - loadingEl.style.display = 'none'; - } + // Error handled by apiCall } } async function handleCreateFolder() { const folderName = newFolderInput.value.trim(); if (!folderName) { - showFlash('Введите имя папки.', 'error'); + showFlash('Please enter a folder name.', 'error'); newFolderInput.focus(); return; } - // Basic validation: avoid problematic chars like / \ : * ? " < > | - if (/[\\/:*?"<>|]/.test(folderName)) { - showFlash('Имя папки содержит недопустимые символы.', 'error'); - newFolderInput.focus(); - return; + if (folderName.length > 100) { + showFlash('Folder name is too long (max 100 chars).', 'error'); + return; } + if (/[\\/:*?"<>|]/.test(folderName)) { + showFlash('Folder name contains invalid characters.', 'error'); + return; + } createFolderBtn.disabled = true; - createFolderBtn.textContent = 'Создание...'; - if(tg.HapticFeedback) tg.HapticFeedback.impactOccurred('light'); + createFolderBtn.textContent = 'Creating...'; try { const data = await apiCall('/create_folder', 'POST', { @@ -1014,85 +894,73 @@ HTML_TEMPLATE = """ folder_name: folderName }); if (data.status === 'ok') { - showFlash(`Папка "${folderName}" создана.`, 'success'); + showFlash(`Folder "${folderName}" created.`, 'success'); newFolderInput.value = ''; - loadFolderContent(currentFolderId); // Refresh content + loadFolderContent(currentFolderId); } else { - showFlash(data.message || 'Не удалось создать папку.', 'error'); + showFlash(data.message || 'Failed to create folder.', 'error'); } } catch (error) { // Error handled by apiCall } finally { createFolderBtn.disabled = false; - createFolderBtn.textContent = 'Создать'; + createFolderBtn.textContent = 'Create'; } } - async function deleteFolder(folderId, folderName) { - tg.showConfirm(`Вы уверены, что хотите удалить папку "${folderName}"? 
Убедитесь, что папка пуста.`, async (confirmed) => { + function deleteFolder(folderId, folderName) { + tg.showConfirm(`Delete the folder "${folderName}"? It must be empty to be deleted.`, async (confirmed) => { if (confirmed) { - showLoadingScreen('Удаление папки...'); - if(tg.HapticFeedback) tg.HapticFeedback.impactOccurred('medium'); try { - const data = await apiCall(`/delete_folder/${folderId}`, 'POST', { current_folder_id: currentFolderId }); // Send current folder for context if needed + const data = await apiCall(`/delete_folder/${folderId}`, 'POST', { current_folder_id: currentFolderId }); if (data.status === 'ok') { - showFlash(`Папка "${folderName}" удалена.`, 'success'); - loadFolderContent(currentFolderId); // Refresh + showFlash(`Folder "${folderName}" deleted.`, 'success'); + loadFolderContent(currentFolderId); } else { - showFlash(data.message || 'Не удалось удалить папку.', 'error'); - showAppContent(); // Hide loading if error occurred + showFlash(data.message || 'Failed to delete folder.', 'error'); } } catch (error) { - showAppContent(); // Hide loading if error occurred + // Error handled by apiCall } } }); } - async function deleteFile(fileId, fileName) { - tg.showConfirm(`Вы уверены, что хотите удалить файл "${fileName}"?`, async (confirmed) => { + function deleteFile(fileId, fileName) { + tg.showConfirm(`Delete the file "${fileName}"? 
This cannot be undone.`, async (confirmed) => { if (confirmed) { - showLoadingScreen('Удаление файла...'); - if(tg.HapticFeedback) tg.HapticFeedback.impactOccurred('medium'); try { const data = await apiCall(`/delete_file/${fileId}`, 'POST', { current_folder_id: currentFolderId }); if (data.status === 'ok') { - showFlash(`Файл "${fileName}" удален.`, 'success'); - loadFolderContent(currentFolderId); // Refresh + showFlash(`File "${fileName}" deleted.`, 'success'); + loadFolderContent(currentFolderId); } else { - showFlash(data.message || 'Не удалось удалить файл.', 'error'); - showAppContent(); // Hide loading if error occurred + showFlash(data.message || 'Failed to delete file.', 'error'); } } catch (error) { - showAppContent(); // Hide loading if error occurred + // Error handled by apiCall } } }); } - // --- File Upload --- - function triggerFileUpload() { - fileInput.click(); - } - - function handleFileSelection() { + function handleFileUpload(event) { + event.preventDefault(); const files = fileInput.files; if (files.length === 0) { - return; // No files selected - } - if (isUploading) { - showFlash('Дождитесь завершения текущей загрузки.', 'error'); + showFlash('Please select files to upload.', 'error'); return; } - isUploading = true; progressContainer.style.display = 'block'; progressBar.style.width = '0%'; - uploadLabelBtn.disabled = true; - uploadLabelBtn.textContent = 'Загрузка...'; - if(tg.HapticFeedback) tg.HapticFeedback.impactOccurred('light'); + progressText.textContent = '0%'; + uploadBtn.disabled = true; + uploadBtn.textContent = 'Uploading...'; + fileInput.disabled = true; + createFolderBtn.disabled = true; - const formData = new FormData(); // Use the hidden form + const formData = new FormData(); for (let i = 0; i < files.length; i++) { formData.append('files', files[i]); } @@ -1105,13 +973,15 @@ HTML_TEMPLATE = """ if (event.lengthComputable) { const percentComplete = Math.round((event.loaded / event.total) * 100); progressBar.style.width = 
percentComplete + '%'; + // progressText.textContent = percentComplete + '%'; // Maybe remove text? } }); xhr.addEventListener('load', function() { - isUploading = false; - uploadLabelBtn.disabled = false; - uploadLabelBtn.textContent = 'Загрузить'; + uploadBtn.disabled = false; + uploadBtn.textContent = 'Upload Files'; + fileInput.disabled = false; + createFolderBtn.disabled = false; progressContainer.style.display = 'none'; fileInput.value = ''; // Clear selection @@ -1119,135 +989,133 @@ HTML_TEMPLATE = """ try { const data = JSON.parse(xhr.responseText); if (data.status === 'ok') { - showFlash(data.message || `${files.length} файл(ов) загружено.`, 'success'); - loadFolderContent(currentFolderId); // Refresh + showFlash(data.message || `${files.length} file(s) uploaded.`, 'success'); + loadFolderContent(currentFolderId); } else { - showFlash(data.message || 'Ошибка при обработке загрузки на сервере.', 'error'); + showFlash(data.message || 'Error processing upload on server.', 'error'); } } catch (e) { - showFlash('Некорректный ответ от сервера после загрузки.', 'error'); + showFlash('Invalid response from server after upload.', 'error'); } } else { - let errorMsg = `Ошибка загрузки: ${xhr.statusText || xhr.status}`; - try { - const errData = JSON.parse(xhr.responseText); - errorMsg = errData.message || errorMsg; - } catch (e) {} + let errorMsg = `Upload failed: ${xhr.statusText || xhr.status}`; + try { + const errData = JSON.parse(xhr.responseText); + errorMsg = errData.message || errorMsg; + } catch(e) { /* ignore */} showFlash(errorMsg, 'error'); } }); xhr.addEventListener('error', function() { - isUploading = false; - showFlash('Ошибка сети во время загрузки.', 'error'); - uploadLabelBtn.disabled = false; - uploadLabelBtn.textContent = 'Загрузить'; + showFlash('Network error during upload.', 'error'); + uploadBtn.disabled = false; + uploadBtn.textContent = 'Upload Files'; + fileInput.disabled = false; + createFolderBtn.disabled = false; 
progressContainer.style.display = 'none'; }); xhr.addEventListener('abort', function() { - isUploading = false; - showFlash('Загрузка отменена.', 'error'); - uploadLabelBtn.disabled = false; - uploadLabelBtn.textContent = 'Загрузить'; + showFlash('Upload aborted.', 'error'); + uploadBtn.disabled = false; + uploadBtn.textContent = 'Upload Files'; + fileInput.disabled = false; + createFolderBtn.disabled = false; progressContainer.style.display = 'none'; }); xhr.open('POST', '/upload', true); + // Consider adding auth token if needed, but initData should suffice for now + // xhr.setRequestHeader('Authorization', 'Bearer ' + your_token); xhr.send(formData); } + function goBack() { + const breadcrumbs = Array.from(breadcrumbsContainerEl.querySelectorAll('a')); + if (breadcrumbs.length > 0) { + const parentLink = breadcrumbs[breadcrumbs.length - 1]; + parentLink.click(); + } else if (currentFolderId !== 'root') { + // Fallback if breadcrumbs aren't fully rendered yet + loadFolderContent('root'); + } + } - // --- Initialization --- function initializeApp() { tg.ready(); tg.expand(); - - // Apply theme colors - document.body.style.backgroundColor = tg.themeParams.bg_color || '#ffffff'; - document.body.style.color = tg.themeParams.text_color || '#000000'; - // Set header color to match secondary background or a specific header color if available - tg.setHeaderColor(tg.themeParams.secondary_bg_color || tg.themeParams.header_bg_color || '#f1f1f1'); - - // Configure Back Button - tg.BackButton.onClick(() => { - if (currentFolderId !== 'root') { - // Find parent ID from breadcrumbs - const breadcrumbs = Array.from(breadcrumbsContainerEl.querySelectorAll('a, span.current-folder')); - if (breadcrumbs.length > 1) { - // Second to last element's ID (if it's a link) - const parentLink = breadcrumbs[breadcrumbs.length - 2]; - if (parentLink && parentLink.tagName === 'A') { - const parentId = parentLink.onclick.toString().match(/'([^']+)'/)[1]; - if (parentId) { - 
loadFolderContent(parentId); - } else { - loadFolderContent('root'); // Fallback - } - } else { - loadFolderContent('root'); // Fallback if structure is odd - } - } else { - loadFolderContent('root'); // Should not happen if BackButton is visible, but safe fallback - } - } - if(tg.HapticFeedback) tg.HapticFeedback.impactOccurred('light'); - }); + tg.enableClosingConfirmation(); + + // Set theme + const themeParams = tg.themeParams; + document.documentElement.style.setProperty('--tg-bg-color', themeParams.bg_color); + document.documentElement.style.setProperty('--tg-text-color', themeParams.text_color); + document.documentElement.style.setProperty('--tg-hint-color', themeParams.hint_color); + document.documentElement.style.setProperty('--tg-link-color', themeParams.link_color); + document.documentElement.style.setProperty('--tg-button-color', themeParams.button_color); + document.documentElement.style.setProperty('--tg-button-text-color', themeParams.button_text_color); + document.documentElement.style.setProperty('--tg-secondary-bg-color', themeParams.secondary_bg_color); + // Try to detect dark mode for better default fallback colors if needed + if (themeParams.bg_color) { + const color = themeParams.bg_color.replace('#',''); + const r = parseInt(color.substring(0,2), 16); + const g = parseInt(color.substring(2,4), 16); + const b = parseInt(color.substring(4,6), 16); + const brightness = (r * 299 + g * 587 + b * 114) / 1000; + if (brightness < 128) { + document.documentElement.style.colorScheme = 'dark'; + } else { + document.documentElement.style.colorScheme = 'light'; + } + } + tg.setHeaderColor(themeParams.secondary_bg_color || '#f0f0f0'); + tg.setBackgroundColor(themeParams.bg_color || '#ffffff'); - if (!tg.initDataUnsafe || !tg.initDataUnsafe.user) { - showError("Не удалось получить данные пользователя Telegram (initDataUnsafe). Попробуйте перезапустить Mini App."); + if (!tg.initData) { + showError("Could not get Telegram authorization data (initData). 
Please try restarting the Mini App."); return; } - // We still need initData for validation server-side - if (!tg.initData) { - showError("Не удалось получить данные авторизации Telegram (initData). Попробуйте перезапустить Mini App."); - return; - } - - validatedInitData = tg.initData; // Send this to backend - - // Show loading screen while validating - showLoadingScreen('Проверка авторизации...'); + validatedInitData = tg.initData; fetch('/validate_init_data', { method: 'POST', headers: { 'Content-Type': 'application/json' }, body: JSON.stringify({ initData: validatedInitData }) }) - .then(response => response.json()) + .then(response => { + if (!response.ok) { + return response.json().then(err => { throw new Error(err.message || `HTTP ${response.status}`) }); + } + return response.json(); + }) .then(data => { if (data.status === 'ok' && data.user) { currentUser = data.user; let userName = currentUser.first_name || ''; - if (currentUser.last_name) userName += ` ${currentUser.last_name}`; - if (!userName && currentUser.username) userName = currentUser.username; - if (!userName) userName = `User ${currentUser.id}`; - userInfoHeaderEl.textContent = `Пользователь: ${userName}`; - // showAppContent(); // Content shown after first folder load - loadFolderContent('root'); // Load initial content + if (currentUser.last_name) userName += ' ' + currentUser.last_name; + if (!userName && currentUser.username) userName = '@' + currentUser.username; + if (!userName) userName = `ID: ${currentUser.id}`; + userInfoHeaderEl.textContent = `User: ${userName}`; + showAppContent(); + loadFolderContent('root'); } else { - throw new Error(data.message || 'Не удалось верифицировать пользователя.'); + throw new Error(data.message || 'User verification failed.'); } }) .catch(error => { - console.error("Validation failed:", error); - showError(`Ошибка авторизации: ${error.message}. 
Попробуйте перезапустить.`); - validatedInitData = null; // Invalidate data on failure + console.error("Validation or initial load failed:", error); + showError(`Authorization Error: ${error.message}. Please try reloading.`); + validatedInitData = null; }); - // Event listeners - fileInput.addEventListener('change', handleFileSelection); - uploadLabelBtn.addEventListener('click', triggerFileUpload); // Trigger hidden input + uploadForm.addEventListener('submit', handleFileUpload); createFolderBtn.addEventListener('click', handleCreateFolder); - newFolderInput.addEventListener('keypress', (e) => { - if (e.key === 'Enter') { - handleCreateFolder(); - } - }); + tg.BackButton.onClick(goBack); + tg.BackButton.hide(); // Initially hidden, shown when navigating into a folder } - // --- Start the App --- initializeApp(); @@ -1255,9 +1123,6 @@ HTML_TEMPLATE = """ """ - -# --- Flask Routes --- - @app.route('/') def index(): return Response(HTML_TEMPLATE, mimetype='text/html') @@ -1266,118 +1131,91 @@ def index(): def validate_init_data(): data = request.get_json() if not data or 'initData' not in data: - return jsonify({"status": "error", "message": "Отсутствуют данные initData"}), 400 + return jsonify({"status": "error", "message": "Missing initData"}), 400 - init_data_str = data['initData'] - user_info = check_telegram_authorization(init_data_str, BOT_TOKEN) + init_data = data['initData'] + user_info = check_telegram_authorization(init_data, BOT_TOKEN) if user_info and 'id' in user_info: tg_user_id = str(user_info['id']) - db_data = load_data() # Load fresh data or from cache + db_data = load_data() users = db_data.setdefault('users', {}) - needs_save = False + if tg_user_id not in users or not isinstance(users.get(tg_user_id), dict): - logging.info(f"New user detected or data invalid: {tg_user_id}. Initializing.") + logging.info(f"New user detected: {tg_user_id}. 
Initializing filesystem.") users[tg_user_id] = { 'user_info': user_info, - 'created_at': datetime.now().strftime('%Y-%m-%d %H:%M:%S'), - 'filesystem': { "type": "folder", "id": "root", "name": "Root", "children": [] } + 'created_at': datetime.now().strftime('%Y-%m-%d %H:%M:%S') } + initialize_user_filesystem(users[tg_user_id]) needs_save = True else: - # Ensure filesystem is initialized and update user info if changed - user_data = users[tg_user_id] - initialize_user_filesystem(user_data) # Check and initialize if needed - # Update user info if different (e.g., username change) - if user_data.get('user_info') != user_info: - user_data['user_info'] = user_info - logging.info(f"Updating user_info for {tg_user_id}") - needs_save = True # Save if user info changed or FS was initialized + if 'user_info' not in users[tg_user_id] or users[tg_user_id]['user_info'].get('username') != user_info.get('username'): + users[tg_user_id]['user_info'] = user_info + needs_save = True + if 'filesystem' not in users[tg_user_id]: + initialize_user_filesystem(users[tg_user_id]) + needs_save = True + elif 'children' not in users[tg_user_id]['filesystem'] or not isinstance(users[tg_user_id]['filesystem']['children'], list): + users[tg_user_id]['filesystem']['children'] = [] + needs_save = True + if needs_save: - try: - save_data(db_data) # Save the updated structure - except Exception as e: - logging.exception(f"Failed to save data for user {tg_user_id} during validation/init: {e}") - # Don't block validation if save fails, but log error - return jsonify({"status": "error", "message": "Ошибка сохранения данных пользователя."}), 500 + if not save_data(db_data): + logging.error(f"Failed to save data for user {tg_user_id} during validation.") + # Avoid failing validation just because save failed, but log it. 
+ # return jsonify({"status": "error", "message": "Error saving user data."}), 500 return jsonify({"status": "ok", "user": user_info}) else: - logging.warning(f"Validation failed for initData starting with: {init_data_str[:100]}...") - return jsonify({"status": "error", "message": "Недействительные данные авторизации Telegram."}), 403 + logging.warning(f"Validation failed for initData: {init_data[:100]}...") + return jsonify({"status": "error", "message": "Invalid authorization data."}), 403 @app.route('/get_dashboard_data', methods=['POST']) def get_dashboard_data(): data = request.get_json() if not data or 'initData' not in data or 'folder_id' not in data: - return jsonify({"status": "error", "message": "Неполный запрос (initData, folder_id)"}), 400 + return jsonify({"status": "error", "message": "Incomplete request"}), 400 user_info = check_telegram_authorization(data['initData'], BOT_TOKEN) if not user_info or 'id' not in user_info: - return jsonify({"status": "error", "message": "Ошибка авторизации"}), 403 + return jsonify({"status": "error", "message": "Not authorized"}), 403 tg_user_id = str(user_info['id']) folder_id = data['folder_id'] - db_data = load_data() # Use cached or load fresh + db_data = load_data() user_data = db_data.get('users', {}).get(tg_user_id) - if not user_data or not isinstance(user_data, dict) or 'filesystem' not in user_data or not isinstance(user_data['filesystem'], dict): - logging.error(f"User data or filesystem structure missing/invalid for validated user {tg_user_id}") - # Attempt to re-initialize and save - if isinstance(user_data, dict): + if not user_data or 'filesystem' not in user_data or not isinstance(user_data['filesystem'], dict): + logging.error(f"User data or filesystem missing/invalid for validated user {tg_user_id}") + # Attempt to re-initialize if possible, might indicate corrupted data + if user_data and isinstance(user_data, dict): initialize_user_filesystem(user_data) - try: - save_data(db_data) - 
logging.info(f"Re-initialized filesystem for user {tg_user_id} on data fetch.") - except Exception as e: - logging.exception(f"Failed to save re-initialized filesystem for {tg_user_id}: {e}") - return jsonify({"status": "error", "message": "Критическая ошибка данных пользователя. Попробуйте перезапустить."}), 500 + if save_data(db_data): + logging.info(f"Re-initialized filesystem for user {tg_user_id}") + else: + logging.error(f"Failed to save re-initialized filesystem for user {tg_user_id}") + return jsonify({"status": "error", "message": "User data error"}), 500 else: - # If user_data itself is not a dict, something is very wrong - db_data['users'][tg_user_id] = {} # Reset user data to empty dict - initialize_user_filesystem(db_data['users'][tg_user_id]) - try: - save_data(db_data) - logging.warning(f"Reset user data structure for user {tg_user_id}.") - except Exception as e: - logging.exception(f"Failed to save reset user data for {tg_user_id}: {e}") - return jsonify({"status": "error", "message": "Критическая ошибка данных пользователя. Попробуйте перезапустить."}), 500 - - # Re-fetch user_data after potential fix - user_data = db_data.get('users', {}).get(tg_user_id) - if not user_data or 'filesystem' not in user_data: # If still bad, give up - return jsonify({"status": "error", "message": "Не удалось восстановить данные пользователя."}), 500 + return jsonify({"status": "error", "message": "User data error"}), 500 current_folder, _ = find_node_by_id(user_data['filesystem'], folder_id) - if not current_folder or not isinstance(current_folder, dict) or current_folder.get('type') != 'folder': - logging.warning(f"Requested folder {folder_id} not found or invalid for user {tg_user_id}. Defaulting to root.") + if not current_folder or current_folder.get('type') != 'folder': + logging.warning(f"Folder {folder_id} not found or invalid for user {tg_user_id}. 
Defaulting to root.") folder_id = 'root' current_folder, _ = find_node_by_id(user_data['filesystem'], folder_id) - if not current_folder or not isinstance(current_folder, dict) or current_folder.get('id') != 'root': - logging.error(f"CRITICAL: Root folder invalid or not found for user {tg_user_id}. Attempting reset.") - initialize_user_filesystem(user_data) - try: - save_data(db_data) - current_folder, _ = find_node_by_id(user_data['filesystem'], 'root') - if not current_folder: raise ValueError("Root still missing after reset") - except Exception as e: - logging.exception(f"Failed to reset/find root for user {tg_user_id}: {e}") - return jsonify({"status": "error", "message": "Критическая ошибка: Корневая папка отсутствует или повреждена."}), 500 + if not current_folder: + logging.error(f"CRITICAL: Root folder not found for user {tg_user_id}") + return jsonify({"status": "error", "message": "Critical error: Root folder missing"}), 500 items_in_folder = current_folder.get('children', []) - # Ensure items are dicts before sending - valid_items = [item for item in items_in_folder if isinstance(item, dict)] - if len(valid_items) != len(items_in_folder): - logging.warning(f"Found non-dict items in folder {folder_id} for user {tg_user_id}. Filtering.") - # Optionally clean the data permanently here? 
- # current_folder['children'] = valid_items - # save_data(db_data) - + # Filter out any potentially invalid children (non-dicts) + items_in_folder = [item for item in items_in_folder if isinstance(item, dict)] breadcrumbs = get_node_path_list(user_data['filesystem'], folder_id) @@ -1388,7 +1226,7 @@ def get_dashboard_data(): return jsonify({ "status": "ok", - "items": valid_items, + "items": items_in_folder, "breadcrumbs": breadcrumbs, "current_folder": current_folder_info }) @@ -1396,40 +1234,36 @@ def get_dashboard_data(): @app.route('/upload', methods=['POST']) def upload_files(): - if 'initData' not in request.form or 'current_folder_id' not in request.form: - return jsonify({"status": "error", "message": "Неполный запрос (initData, current_folder_id)"}), 400 - - init_data = request.form['initData'] - current_folder_id = request.form['current_folder_id'] + init_data = request.form.get('initData') + current_folder_id = request.form.get('current_folder_id', 'root') files = request.files.getlist('files') user_info = check_telegram_authorization(init_data, BOT_TOKEN) if not user_info or 'id' not in user_info: - return jsonify({"status": "error", "message": "Ошибка авторизации"}), 403 + return jsonify({"status": "error", "message": "Not authorized"}), 403 tg_user_id = str(user_info['id']) if not HF_TOKEN_WRITE: - logging.error("Upload failed: HF_TOKEN_WRITE is not configured.") - return jsonify({'status': 'error', 'message': 'Загрузка временно недоступна (ошибка конфигурации сервера).' 
}), 500 + return jsonify({'status': 'error', 'message': 'Upload not configured (missing HF token).'}), 503 if not files or all(not f.filename for f in files): - return jsonify({'status': 'error', 'message': 'Файлы для загрузки не выбраны.'}), 400 + return jsonify({'status': 'error', 'message': 'No files selected for upload.'}), 400 # Removed file count limit db_data = load_data() user_data = db_data.get('users', {}).get(tg_user_id) - if not user_data or 'filesystem' not in user_data: - logging.error(f"Cannot upload: User data or filesystem missing for {tg_user_id}") - return jsonify({"status": "error", "message": "Ошибка данных пользователя. Попробуйте перезайти."}), 500 + if not user_data or 'filesystem' not in user_data or not isinstance(user_data.get('filesystem'), dict): + logging.error(f"Upload failed: User data or filesystem missing/invalid for user {tg_user_id}") + return jsonify({"status": "error", "message": "User data error"}), 500 target_folder_node, _ = find_node_by_id(user_data['filesystem'], current_folder_id) if not target_folder_node or target_folder_node.get('type') != 'folder': - logging.error(f"Upload target folder {current_folder_id} not found for user {tg_user_id}") - return jsonify({'status': 'error', 'message': 'Папка для загрузки не найдена!'}), 404 + logging.error(f"Upload failed: Target folder {current_folder_id} not found for user {tg_user_id}") + return jsonify({'status': 'error', 'message': 'Target folder not found!'}), 404 - api = HfApi(token=HF_TOKEN_WRITE) + api = HfApi() uploaded_count = 0 errors = [] needs_save = False @@ -1438,120 +1272,132 @@ def upload_files(): if file and file.filename: original_filename = secure_filename(file.filename) if not original_filename: - logging.warning(f"Skipping file with invalid secure filename derived from: {file.filename}") - errors.append(f"Пропущено: Недопустимое имя файла ({file.filename})") - continue - + logging.warning(f"Skipping file with invalid secure filename: {file.filename}") + 
errors.append(f"Skipped file with invalid name: {file.filename}") + continue + + name_part, ext_part = os.path.splitext(original_filename) + unique_suffix = uuid.uuid4().hex[:8] + # Ensure filename isn't excessively long after adding suffix + max_name_len = 240 # Arbitrary limit, adjust if needed + unique_filename = f"{name_part[:max_name_len - len(ext_part) - 9]}_{unique_suffix}{ext_part}" file_id = uuid.uuid4().hex - # Use file_id in HF path for uniqueness, keep original name in metadata only - # This avoids issues with weird characters or length limits in HF paths - hf_path_filename = f"{file_id}{os.path.splitext(original_filename)[1]}" # e.g., abcdef12.jpg - hf_repo_path = f"cloud_files/{tg_user_id}/{current_folder_id}/{hf_path_filename}" - # Ensure base dir exists? HF Hub handles this. - temp_path = None # Use BytesIO instead of temp file if possible + # Construct path more carefully to avoid issues with folder IDs like 'root' + path_parts = ["cloud_files", tg_user_id] + if current_folder_id != 'root': + # Potentially get full path if needed, but simpler might be just ID + path_parts.append(current_folder_id) + path_parts.append(unique_filename) + hf_path = "/".join(path_parts) - try: - # Read file into memory (consider chunking for very large files if memory is an issue) - file_content = BytesIO(file.read()) - file_content.seek(0) + temp_path = os.path.join(UPLOAD_FOLDER, f"{file_id}_{unique_filename}") - logging.info(f"Uploading {original_filename} to {hf_repo_path} for user {tg_user_id}") + try: + file.save(temp_path) + logging.info(f"Uploading {original_filename} as {unique_filename} to {hf_path} for user {tg_user_id}") api.upload_file( - path_or_fileobj=file_content, - path_in_repo=hf_repo_path, - repo_id=REPO_ID, - repo_type="dataset", - # token implicitly used from HfApi instance + path_or_fileobj=temp_path, path_in_repo=hf_path, + repo_id=REPO_ID, repo_type="dataset", token=HF_TOKEN_WRITE, commit_message=f"User {tg_user_id} uploaded 
{original_filename}" ) - logging.info(f"Successfully uploaded {original_filename} to HF.") + logging.info(f"Successfully uploaded to HF: {hf_path}") file_info = { 'type': 'file', 'id': file_id, - 'original_filename': original_filename, - 'hf_repo_path': hf_repo_path, # Store the actual HF path - 'file_type': get_file_type(original_filename), + 'original_filename': original_filename, 'unique_filename': unique_filename, + 'path': hf_path, 'file_type': get_file_type(original_filename), 'upload_date': datetime.now().strftime('%Y-%m-%d %H:%M:%S') } - # Add node to the *loaded* db_data structure if add_node(user_data['filesystem'], current_folder_id, file_info): uploaded_count += 1 - needs_save = True # Mark that the DB needs saving - logging.info(f"Added metadata for {original_filename} ({file_id}) to user {tg_user_id}") + needs_save = True + logging.info(f"Added node {file_id} to parent {current_folder_id} in DB for user {tg_user_id}") else: - error_msg = f"Критическая ошибка: Не удалось добавить метаданные для {original_filename} после загрузки. Файл может быть недоступен." 
- errors.append(error_msg) - logging.error(f"Failed add_node for {file_id} to {current_folder_id} for {tg_user_id} AFTER successful HF upload.") - # Attempt to delete the orphaned file from HF + errors.append(f"Error adding metadata for {original_filename}.") + logging.error(f"Failed add_node for {file_id} to {current_folder_id} for {tg_user_id}") try: - logging.warning(f"Attempting to delete orphaned HF file: {hf_repo_path}") - api.delete_file(path_in_repo=hf_repo_path, repo_id=REPO_ID, repo_type="dataset") + logging.warning(f"Attempting to delete orphaned HF file: {hf_path}") + api.delete_file(path_in_repo=hf_path, repo_id=REPO_ID, repo_type="dataset", token=HF_TOKEN_WRITE) + logging.info(f"Deleted orphaned HF file: {hf_path}") except Exception as del_err: - logging.error(f"Failed to delete orphaned HF file {hf_repo_path}: {del_err}") + logging.error(f"Failed deleting orphaned HF file {hf_path}: {del_err}") except Exception as e: - logging.exception(f"Upload error for {original_filename} (user {tg_user_id}): {e}") - errors.append(f"Ошибка загрузки {original_filename}: {str(e)[:100]}") # Keep error brief + logging.exception(f"Upload error for {original_filename} ({tg_user_id}): {e}") + errors.append(f"Error uploading {original_filename}: {str(e)[:100]}") + finally: + if os.path.exists(temp_path): + try: + os.remove(temp_path) + except OSError as e: + logging.error(f"Error removing temp file {temp_path}: {e}") if needs_save: - try: - save_data(db_data) # Save the entire updated db_data - except Exception as e: - logging.exception(f"CRITICAL Error saving DB after upload for {tg_user_id}: {e}") - errors.append("Критическая ошибка: Не удалось сохранить изменения после загрузки.") - # If save fails, the newly added nodes in memory are lost on next request - - if not errors and uploaded_count > 0: - final_message = f"{uploaded_count} файл(ов) успешно загружено." 
- status = "ok" - elif uploaded_count > 0: - final_message = f"{uploaded_count} файл(ов) загружено с ошибками: {'; '.join(errors)}" - status = "ok" # Partial success is still ok-ish for frontend - else: - final_message = f"Не удалось загрузить файлы. Ошибки: {'; '.join(errors)}" - status = "error" + if not save_data(db_data): + logging.error(f"CRITICAL: Error saving DB after successful uploads for {tg_user_id}") + errors.append("Critical error saving database after upload.") + # Consider how to handle this - files are on HF but not in DB list + + final_message = f"{uploaded_count}/{len(files)} file(s) uploaded successfully." + status_code = 200 + if errors: + final_message += f" Errors encountered: {len(errors)}. Check logs." + logging.warning(f"Upload completed for user {tg_user_id} with errors: {'; '.join(errors)}") + if uploaded_count == 0: + status_code = 500 # Treat as complete failure if nothing got through + else: + status_code = 207 # Multi-Status / Partial Success return jsonify({ - "status": status, - "message": final_message - }) + "status": "ok" if not errors else ("partial" if uploaded_count > 0 else "error"), + "message": final_message, + "uploaded_count": uploaded_count, + "error_count": len(errors), + "error_details": errors if errors else [] # Optionally return error details + }), status_code @app.route('/create_folder', methods=['POST']) def create_folder(): data = request.get_json() if not data or 'initData' not in data or 'parent_folder_id' not in data or 'folder_name' not in data: - return jsonify({"status": "error", "message": "Неполный запрос (initData, parent_folder_id, folder_name)"}), 400 + return jsonify({"status": "error", "message": "Incomplete request"}), 400 user_info = check_telegram_authorization(data['initData'], BOT_TOKEN) if not user_info or 'id' not in user_info: - return jsonify({"status": "error", "message": "Ошибка авторизации"}), 403 + return jsonify({"status": "error", "message": "Not authorized"}), 403 tg_user_id = 
str(user_info['id']) parent_folder_id = data['parent_folder_id'] folder_name = data['folder_name'].strip() if not folder_name: - return jsonify({'status': 'error', 'message': 'Имя папки не может быть пустым.'}), 400 - # Validate name (simple check for problematic chars) - if '/' in folder_name or '\\' in folder_name or ':' in folder_name: - return jsonify({'status': 'error', 'message': 'Имя папки содержит недопустимые символы.'}), 400 + return jsonify({'status': 'error', 'message': 'Folder name cannot be empty.'}), 400 + if len(folder_name) > 100: + return jsonify({'status': 'error', 'message': 'Folder name too long (max 100).'}), 400 + if '/' in folder_name or '\\' in folder_name or folder_name == '.' or folder_name == '..': + return jsonify({'status': 'error', 'message': 'Folder name contains invalid characters.'}), 400 + db_data = load_data() user_data = db_data.get('users', {}).get(tg_user_id) - if not user_data or 'filesystem' not in user_data: - logging.error(f"Create folder failed: User data or filesystem missing for {tg_user_id}") - return jsonify({"status": "error", "message": "Ошибка данных пользователя."}), 500 + if not user_data or 'filesystem' not in user_data or not isinstance(user_data.get('filesystem'), dict): + logging.error(f"Create folder failed: User data or filesystem missing/invalid for user {tg_user_id}") + return jsonify({"status": "error", "message": "User data error"}), 500 - # Check if folder with same name already exists in parent parent_node, _ = find_node_by_id(user_data['filesystem'], parent_folder_id) - if parent_node and isinstance(parent_node.get('children'), list): + if not parent_node or parent_node.get('type') != 'folder': + logging.error(f"Create folder failed: Parent folder {parent_folder_id} not found for user {tg_user_id}") + return jsonify({'status': 'error', 'message': 'Parent folder not found.'}), 404 + + # Check for existing folder with the same name in the parent + if 'children' in parent_node and 
isinstance(parent_node['children'], list): for child in parent_node['children']: - if isinstance(child, dict) and child.get('type') == 'folder' and child.get('name') == folder_name: - return jsonify({'status': 'error', 'message': f'Папка с именем "{folder_name}" уже существует здесь.'}), 409 # Conflict + if isinstance(child, dict) and child.get('type') == 'folder' and child.get('name') == folder_name: + return jsonify({'status': 'error', 'message': f'A folder named "{folder_name}" already exists here.'}), 409 folder_id = uuid.uuid4().hex @@ -1561,230 +1407,209 @@ def create_folder(): } if add_node(user_data['filesystem'], parent_folder_id, folder_data): - try: - save_data(db_data) - logging.info(f"Folder '{folder_name}' ({folder_id}) created for user {tg_user_id} under {parent_folder_id}") - return jsonify({'status': 'ok', 'message': f'Папка "{folder_name}" создана.'}) - except Exception as e: - logging.exception(f"Create folder DB save error ({tg_user_id}): {e}") - # Attempt to roll back the add_node? Difficult with current structure. - return jsonify({'status': 'error', 'message': 'Папка создана, но произошла ошибка сохранения данных.'}), 500 + if save_data(db_data): + logging.info(f"Created folder '{folder_name}' ({folder_id}) under {parent_folder_id} for user {tg_user_id}") + return jsonify({'status': 'ok', 'message': f'Folder "{folder_name}" created.'}) + else: + logging.error(f"CRITICAL: Failed to save DB after creating folder node {folder_id} for {tg_user_id}") + # Attempt to rollback the node addition? 
Difficult without deepcopy/transaction + remove_node(user_data['filesystem'], folder_id) # Best effort rollback + return jsonify({'status': 'error', 'message': 'Error saving data after creating folder.'}), 500 else: - # This could happen if parent_folder_id is suddenly invalid - logging.error(f"Failed to add folder node '{folder_name}' to parent {parent_folder_id} for user {tg_user_id}") - return jsonify({'status': 'error', 'message': 'Не удалось добавить папку (возможно, родительская папка была удалена?).'}), 400 - + logging.error(f"Failed add_node for new folder {folder_id} to {parent_folder_id} for {tg_user_id}") + return jsonify({'status': 'error', 'message': 'Could not add folder to parent node.'}), 500 -def find_file_owner_and_node(file_id: str) -> (Optional[str], Optional[Dict[str, Any]]): - """ Helper to find a file node and its owner across all users """ - db_data = load_data() - for user_id, user_data in db_data.get('users', {}).items(): - if isinstance(user_data, dict) and 'filesystem' in user_data: - node, _ = find_node_by_id(user_data['filesystem'], file_id) - if node and isinstance(node, dict) and node.get('type') == 'file': - return user_id, node - return None, None +def find_file_owner_and_node(file_id): + db_data = load_data() + for user_id, user_data in db_data.get('users', {}).items(): + if isinstance(user_data, dict) and 'filesystem' in user_data: + node, _ = find_node_by_id(user_data['filesystem'], file_id) + if node and isinstance(node, dict) and node.get('type') == 'file': + return user_id, node + return None, None @app.route('/download/') def download_file_route(file_id): owner_user_id, file_node = find_file_owner_and_node(file_id) if not file_node: - logging.warning(f"Download request for unknown file_id: {file_id}") - return Response("Файл не найден", status=404, mimetype='text/plain') + return Response("File not found", status=404, mimetype='text/plain') - hf_repo_path = file_node.get('hf_repo_path') + hf_path = file_node.get('path') 
original_filename = file_node.get('original_filename', f'{file_id}_download') - if not hf_repo_path: - logging.error(f"Missing hf_repo_path for file ID {file_id} (owner: {owner_user_id})") - return Response("Ошибка сервера: Путь к файлу не найден в метаданных.", status=500, mimetype='text/plain') + if not hf_path: + logging.error(f"Missing HF path for file ID {file_id} (owner: {owner_user_id})") + return Response("Error: File path information missing", status=500, mimetype='text/plain') - # Construct the direct download URL - # Using /resolve/main/ implicitly assumes the 'main' branch. Fine for datasets usually. - file_url = f"https://huggingface.co/datasets/{REPO_ID}/resolve/main/{hf_repo_path}?download=true" + file_url = f"https://huggingface.co/datasets/{REPO_ID}/resolve/main/{hf_path}?download=true" + logging.info(f"Download request for file_id: {file_id}, owner: {owner_user_id}, hf_path: {hf_path}") try: headers = {} if HF_TOKEN_READ: - headers["Authorization"] = f"Bearer {HF_TOKEN_READ}" - else: - logging.warning(f"Attempting download of {hf_repo_path} without HF_TOKEN_READ. 
May fail for private repos.") + headers["authorization"] = f"Bearer {HF_TOKEN_READ}" - # Stream the download - logging.info(f"Streaming download for file {file_id} ({original_filename}) from {file_url}") - response = requests.get(file_url, headers=headers, stream=True, timeout=60) # Increased timeout - response.raise_for_status() # Raises HTTPError for bad responses (4xx or 5xx) + response = requests.get(file_url, headers=headers, stream=True, timeout=30) + response.raise_for_status() - # Prepare response headers - resp_headers = {} content_type = response.headers.get('Content-Type', 'application/octet-stream') - resp_headers['Content-Type'] = content_type - # Set Content-Disposition for filename + # Prepare Content-Disposition header carefully try: - # More robust encoding for filename - encoded_filename = urlencode({'filename': original_filename}, encoding='utf-8')[9:] - disposition = f"attachment; filename=\"{original_filename.replace('\"', '_')}\"; filename*=UTF-8''{encoded_filename}" - resp_headers['Content-Disposition'] = disposition - except Exception as enc_e: - logging.warning(f"Could not properly encode filename {original_filename} for Content-Disposition: {enc_e}") - resp_headers['Content-Disposition'] = f"attachment; filename=\"{file_id}_download\"" # Fallback + # Try to encode filename safely for header + encoded_name = original_filename.encode('latin-1', 'replace').decode('latin-1') + utf8_encoded_name = urlencode({'filename': original_filename}, encoding='utf-8')[9:] + disposition = f'attachment; filename="{encoded_name}"; filename*=UTF-8\'\'{utf8_encoded_name}' + except Exception: + logging.warning(f"Could not properly encode filename '{original_filename}' for Content-Disposition. 
Using simplified.") + safe_filename = "".join(c for c in original_filename if c.isalnum() or c in ['.', '_', '-']).strip() or "download" + disposition = f'attachment; filename="{safe_filename}"' - # Return streaming response - return Response(response.iter_content(chunk_size=65536), headers=resp_headers) # Larger chunk size + return Response(response.iter_content(chunk_size=65536), + mimetype=content_type, + headers={"Content-Disposition": disposition, + "Content-Length": response.headers.get('Content-Length')}) except requests.exceptions.HTTPError as e: + logging.error(f"HTTP Error downloading file from HF ({hf_path}, owner: {owner_user_id}): {e}") status_code = e.response.status_code - logging.error(f"HTTP error downloading file {file_id} from HF ({hf_repo_path}, owner: {owner_user_id}): {status_code} - {e}") - if status_code == 404: - return Response(f"Ошибка: Файл не найден на сервере хранения (404). Возможно, он был удален.", status=404, mimetype='text/plain') - elif status_code == 401 or status_code == 403: - return Response(f"Ошибка: Доступ запрещен к файлу на сервере хранения ({status_code}).", status=403, mimetype='text/plain') - else: - return Response(f"Ошибка скачивания файла с сервера хранения ({status_code}).", status=status_code, mimetype='text/plain') + error_message = f"Error downloading file ({status_code})" + if status_code == 404: error_message = "File not found on storage." 
+ return Response(error_message, status=status_code, mimetype='text/plain') except requests.exceptions.RequestException as e: - logging.error(f"Network error downloading file {file_id} from HF ({hf_repo_path}, owner: {owner_user_id}): {e}") - return Response(f"Ошибка сети при скачивании файла.", status=504, mimetype='text/plain') # Gateway Timeout + logging.error(f"Network Error downloading file from HF ({hf_path}, owner: {owner_user_id}): {e}") + return Response("Error downloading file (Network Error)", status=502, mimetype='text/plain') except Exception as e: - logging.exception(f"Unexpected error during download ({file_id}, {hf_repo_path}, owner: {owner_user_id}): {e}") - return Response("Внутренняя ошибка сервера при обработке запроса на скачивание.", status=500, mimetype='text/plain') + logging.exception(f"Unexpected error during download ({hf_path}, owner: {owner_user_id}): {e}") + return Response("Internal server error during download", status=500, mimetype='text/plain') @app.route('/delete_file/', methods=['POST']) def delete_file_route(file_id): data = request.get_json() - if not data or 'initData' not in data: # current_folder_id not strictly needed for delete itself - return jsonify({"status": "error", "message": "Неполный запрос (initData)"}), 400 + if not data or 'initData' not in data: # Removed 'current_folder_id' dependency + return jsonify({"status": "error", "message": "Incomplete request"}), 400 user_info = check_telegram_authorization(data['initData'], BOT_TOKEN) if not user_info or 'id' not in user_info: - return jsonify({"status": "error", "message": "Ошибка авторизации"}), 403 + return jsonify({"status": "error", "message": "Not authorized"}), 403 tg_user_id = str(user_info['id']) if not HF_TOKEN_WRITE: - logging.error("Delete failed: HF_TOKEN_WRITE is not configured.") - return jsonify({'status': 'error', 'message': 'Удаление временно недоступно (ошибка конфигурации сервера).' 
}), 500 + return jsonify({'status': 'error', 'message': 'Deletion not configured (missing HF token).'}), 503 db_data = load_data() user_data = db_data.get('users', {}).get(tg_user_id) - if not user_data or 'filesystem' not in user_data: - return jsonify({"status": "error", "message": "Ошибка данных пользователя."}), 500 + if not user_data or 'filesystem' not in user_data or not isinstance(user_data.get('filesystem'), dict): + logging.error(f"Delete file failed: User data or filesystem missing/invalid for user {tg_user_id}") + return jsonify({"status": "error", "message": "User data error"}), 500 file_node, parent_node = find_node_by_id(user_data['filesystem'], file_id) - if not file_node or file_node.get('type') != 'file': - return jsonify({'status': 'error', 'message': 'Файл не найден в ваших данных.'}), 404 - # Parent node check isn't strictly necessary for deletion itself if we have the file_node + if not file_node or file_node.get('type') != 'file' or not parent_node: + logging.warning(f"Delete request for non-existent file {file_id} by user {tg_user_id}") + return jsonify({'status': 'error', 'message': 'File not found or cannot be deleted.'}), 404 - hf_repo_path = file_node.get('hf_repo_path') - original_filename = file_node.get('original_filename', 'файл') + hf_path = file_node.get('path') + original_filename = file_node.get('original_filename', 'file') needs_save = False - hf_deleted = False - error_msg = None + hf_delete_success = False - if hf_repo_path: + if hf_path: try: - api = HfApi(token=HF_TOKEN_WRITE) - logging.info(f"Attempting to delete HF file: {hf_repo_path} for user {tg_user_id}") + api = HfApi() + logging.info(f"Attempting HF delete for {hf_path} by user {tg_user_id}") api.delete_file( - path_in_repo=hf_repo_path, repo_id=REPO_ID, repo_type="dataset", + path_in_repo=hf_path, repo_id=REPO_ID, repo_type="dataset", token=HF_TOKEN_WRITE, commit_message=f"User {tg_user_id} deleted {original_filename}" ) - logging.info(f"Successfully deleted file 
{hf_repo_path} from HF Hub for user {tg_user_id}") - hf_deleted = True + logging.info(f"Successfully deleted file {hf_path} from HF Hub for user {tg_user_id}") + hf_delete_success = True except hf_utils.EntryNotFoundError: - logging.warning(f"File {hf_repo_path} not found on HF Hub during delete attempt for user {tg_user_id}. Assuming already deleted or path mismatch.") - hf_deleted = True # Treat as deleted if not found + logging.warning(f"File {hf_path} not found on HF Hub for delete attempt ({tg_user_id}). Assuming already deleted or inconsistent state.") + hf_delete_success = True # Treat as success for DB removal purpose except Exception as e: - logging.exception(f"Error deleting file from HF Hub ({hf_repo_path}, user {tg_user_id}): {e}") - error_msg = f'Ошибка удаления файла с сервера хранения: {str(e)[:100]}' - # Decide whether to proceed with DB removal if HF delete fails - # Let's proceed: remove from DB even if HF fails, to avoid user seeing a broken file link + logging.error(f"Error deleting file from HF Hub ({hf_path}, {tg_user_id}): {e}") + # Decide whether to proceed with DB removal. Generally yes, to clean up metadata. + # return jsonify({'status': 'error', 'message': f'Error deleting file from storage: {e}'}), 500 else: - logging.warning(f"No hf_repo_path found for file {file_id} (user {tg_user_id}). Skipping HF delete.") - hf_deleted = True # Nothing to delete on HF + logging.warning(f"No HF path found for file node {file_id} during delete for user {tg_user_id}. 
Proceeding with DB removal.") + hf_delete_success = True # Allow DB removal if path is missing - # Remove from DB structure - if remove_node(user_data['filesystem'], file_id): - needs_save = True - logging.info(f"Removed file node {file_id} from DB for user {tg_user_id}") + if hf_delete_success: + if remove_node(user_data['filesystem'], file_id): + needs_save = True + logging.info(f"Removed file node {file_id} from DB for user {tg_user_id}") + else: + logging.error(f"Failed to remove file node {file_id} from DB structure for {tg_user_id} after potential HF delete.") + # This is a potential inconsistency state + return jsonify({'status': 'error', 'message': 'File deleted from storage (or wasn\'t found), but failed to update database.'}), 500 else: - # This is unexpected if find_node_by_id found it earlier - logging.error(f"Failed to remove file node {file_id} from DB structure for {tg_user_id} even though it was found.") - if not error_msg: # Don't overwrite HF error - error_msg = 'Ошибка удаления файла из базы данных.' + # If HF delete failed explicitly (not 404), don't remove from DB yet + return jsonify({'status': 'error', 'message': 'Failed to delete file from storage. Database not updated.'}), 500 - if needs_save: - try: - save_data(db_data) - except Exception as e: - logging.exception(f"Delete file DB save error ({tg_user_id}): {e}") - error_msg = (error_msg + "; " if error_msg else "") + "Ошибка сохранения изменений в базе данных." - if not error_msg: - return jsonify({'status': 'ok', 'message': f'Файл "{original_filename}" удален.'}) + if needs_save: + if save_data(db_data): + return jsonify({'status': 'ok', 'message': f'File "{original_filename}" deleted.'}) + else: + logging.error(f"CRITICAL: Delete file DB save error ({tg_user_id}) after node removal.") + # Data is inconsistent here. Node removed in memory, but not saved. HF file might be gone. 
+ return jsonify({'status': 'error', 'message': 'File deleted, but error saving database update.'}), 500 else: - # Return error but the file might be partially deleted (e.g., from DB but not HF or vice-versa) - return jsonify({'status': 'error', 'message': error_msg}), 500 + # Should not happen if hf_delete_success was true and remove_node failed, as that returns error above. + # This path implies HF delete failed and we didn't proceed. + return jsonify({'status': 'error', 'message': 'File could not be deleted.'}), 500 @app.route('/delete_folder/', methods=['POST']) def delete_folder_route(folder_id): if folder_id == 'root': - return jsonify({'status': 'error', 'message': 'Нельзя удалить корневую папку!'}), 400 + return jsonify({'status': 'error', 'message': 'Cannot delete the root folder.'}), 400 data = request.get_json() - if not data or 'initData' not in data: - return jsonify({"status": "error", "message": "Неполный запрос (initData)"}), 400 + if not data or 'initData' not in data: # Removed 'current_folder_id' dependency + return jsonify({"status": "error", "message": "Incomplete request"}), 400 user_info = check_telegram_authorization(data['initData'], BOT_TOKEN) if not user_info or 'id' not in user_info: - return jsonify({"status": "error", "message": "Ошибка авторизации"}), 403 + return jsonify({"status": "error", "message": "Not authorized"}), 403 tg_user_id = str(user_info['id']) db_data = load_data() user_data = db_data.get('users', {}).get(tg_user_id) - if not user_data or 'filesystem' not in user_data: - return jsonify({"status": "error", "message": "Ошибка данных пользователя."}), 500 + if not user_data or 'filesystem' not in user_data or not isinstance(user_data.get('filesystem'), dict): + logging.error(f"Delete folder failed: User data or filesystem missing/invalid for user {tg_user_id}") + return jsonify({"status": "error", "message": "User data error"}), 500 folder_node, parent_node = find_node_by_id(user_data['filesystem'], folder_id) - if not 
folder_node or folder_node.get('type') != 'folder': - return jsonify({'status': 'error', 'message': 'Папка не найдена в ваших данных.'}), 404 - if not parent_node: - # Should not happen unless it's root, but we check root earlier - logging.error(f"Could not find parent for folder {folder_id} (user {tg_user_id}) during delete.") - return jsonify({'status': 'error', 'message': 'Ошибка структуры данных (родитель папки не найден).'}), 500 - - - folder_name = folder_node.get('name', 'папка') + if not folder_node or folder_node.get('type') != 'folder' or not parent_node: + logging.warning(f"Delete request for non-existent folder {folder_id} by user {tg_user_id}") + return jsonify({'status': 'error', 'message': 'Folder not found or cannot be deleted.'}), 404 - # Check if folder is empty (server-side) - if folder_node.get('children') and len(folder_node.get('children')) > 0: - logging.warning(f"Attempt to delete non-empty folder {folder_id} by user {tg_user_id}") - return jsonify({'status': 'error', 'message': f'Папку "{folder_name}" можно удалить только если она пуста.'}), 400 + folder_name = folder_node.get('name', 'folder') - # Note: This does NOT delete files within the folder on HF Hub. - # A recursive delete would be much more complex and potentially dangerous. - # We rely on the check above. + if folder_node.get('children'): + logging.warning(f"Attempt to delete non-empty folder {folder_id} ('{folder_name}') by user {tg_user_id}") + return jsonify({'status': 'error', 'message': f'Folder "{folder_name}" is not empty. Only empty folders can be deleted.'}), 400 if remove_node(user_data['filesystem'], folder_id): - try: - save_data(db_data) - logging.info(f"Deleted empty folder '{folder_name}' ({folder_id}) for user {tg_user_id}") - return jsonify({'status': 'ok', 'message': f'Папка "{folder_name}" удалена.'}) - except Exception as e: - logging.exception(f"Delete folder save error ({tg_user_id}): {e}") - # Attempt to roll back? Hard. Inform user. 
- return jsonify({'status': 'error', 'message': 'Папка удалена, но произошла ошибка сохранения изменений.'}), 500 + logging.info(f"Removed empty folder node {folder_id} ('{folder_name}') from DB for user {tg_user_id}") + if save_data(db_data): + return jsonify({'status': 'ok', 'message': f'Folder "{folder_name}" deleted.'}) + else: + logging.error(f"CRITICAL: Delete folder save error ({tg_user_id}) after node removal.") + # Inconsistent state: folder removed in memory but not saved. + return jsonify({'status': 'error', 'message': 'Folder removed, but error saving database update.'}), 500 else: logging.error(f"Failed to remove empty folder node {folder_id} from DB for {tg_user_id}") - return jsonify({'status': 'error', 'message': 'Не удалось удалить папку из базы данных (внутренняя ошибка).' }), 500 + return jsonify({'status': 'error', 'message': 'Failed to remove folder from database.'}), 500 @app.route('/get_text_content/') @@ -1792,157 +1617,139 @@ def get_text_content_route(file_id): owner_user_id, file_node = find_file_owner_and_node(file_id) if not file_node or file_node.get('file_type') != 'text': - return Response("Текстовый файл не найден", status=404, mimetype='text/plain') + return Response("Text file not found", status=404, mimetype='text/plain') - hf_repo_path = file_node.get('hf_repo_path') - if not hf_repo_path: - return Response("Ошибка сервера: путь к файлу отсутствует", status=500, mimetype='text/plain') + hf_path = file_node.get('path') + if not hf_path: + return Response("Error: File path missing", status=500, mimetype='text/plain') - file_url = f"https://huggingface.co/datasets/{REPO_ID}/resolve/main/{hf_repo_path}?download=true" + file_url = f"https://huggingface.co/datasets/{REPO_ID}/resolve/main/{hf_path}?download=true" + logging.info(f"Text content request for file_id: {file_id}, owner: {owner_user_id}, hf_path: {hf_path}") try: headers = {} if HF_TOKEN_READ: - headers["Authorization"] = f"Bearer {HF_TOKEN_READ}" + headers["authorization"] = 
f"Bearer {HF_TOKEN_READ}" - logging.info(f"Fetching text content for {file_id} from {file_url}") - response = requests.get(file_url, headers=headers, timeout=20) # Shorter timeout for text + response = requests.get(file_url, headers=headers, timeout=15) response.raise_for_status() - # Limit preview size to prevent browser freezing on huge logs etc. + # Limit preview size to avoid browser issues max_preview_size = 2 * 1024 * 1024 # 2 MB - if 'Content-Length' in response.headers and int(response.headers['Content-Length']) > max_preview_size: - logging.warning(f"Text file {file_id} too large for preview ({response.headers['Content-Length']} bytes).") - return Response(f"Файл слишком большой для предпросмотра (>{max_preview_size // 1024 // 1024}MB).\nСкачайте его для просмотра.", status=413, mimetype='text/plain') # Payload Too Large + content_length = int(response.headers.get('Content-Length', 0)) - # Read content, check size again if Content-Length was missing - content_bytes = response.content - if len(content_bytes) > max_preview_size: - logging.warning(f"Text file {file_id} too large for preview ({len(content_bytes)} bytes), Content-Length was missing or inaccurate.") - return Response(f"Файл слишком большой для предпросмотра (>{max_preview_size // 1024 // 1024}MB).\nСкачайте его для просмотра.", status=413, mimetype='text/plain') + if content_length > max_preview_size: + logging.warning(f"Text file {file_id} too large for preview ({content_length} bytes)") + return Response(f"File is too large for preview (> {max_preview_size // 1024 // 1024}MB).", status=413, mimetype='text/plain') - - # Attempt to decode using common encodings + content = response.content text_content = None detected_encoding = None - encodings_to_try = ['utf-8', 'cp1251', 'latin-1', 'utf-16'] # Add more if needed - - # Try auto-detection from headers first (less common for raw files) - content_type_header = response.headers.get('Content-Type', '') - if 'charset=' in content_type_header: - 
charset = content_type_header.split('charset=')[-1].split(';')[0].strip() - try: - text_content = content_bytes.decode(charset) - detected_encoding = charset - logging.info(f"Decoded text file {file_id} using charset from header: {charset}") - except (LookupError, UnicodeDecodeError): - logging.warning(f"Failed to decode using charset from header: {charset}") - - # Try common encodings if header didn't work or wasn't present + encodings_to_try = ['utf-8', 'cp1251', 'latin-1', 'utf-16'] + + try: + # Use requests' auto-detection first if available + detected_encoding = response.encoding + if detected_encoding: + text_content = content.decode(detected_encoding) + logging.info(f"Decoded text file {file_id} using detected encoding: {detected_encoding}") + except (UnicodeDecodeError, TypeError): + text_content = None # Reset if detected encoding failed + if text_content is None: for enc in encodings_to_try: try: - text_content = content_bytes.decode(enc) + text_content = content.decode(enc) detected_encoding = enc logging.info(f"Decoded text file {file_id} using fallback encoding: {enc}") break except UnicodeDecodeError: continue - except Exception as dec_e: # Catch other potential errors during decode - logging.warning(f"Error decoding {file_id} with {enc}: {dec_e}") - continue if text_content is None: - logging.error(f"Could not determine encoding for text file {file_id}") - return Response("Не удалось определить кодировку файла для предпросмотра.", status=500, mimetype='text/plain') + logging.error(f"Could not decode text file {file_id} with any tried encoding.") + return Response("Could not determine file encoding.", status=500, mimetype='text/plain') - # Return as plain text with UTF-8 charset (browser should handle display) + # Return as plain text with UTF-8 charset specified return Response(text_content, mimetype='text/plain; charset=utf-8') except requests.exceptions.HTTPError as e: - status_code = e.response.status_code - logging.error(f"HTTP error fetching 
text content {file_id} ({hf_repo_path}): {status_code} - {e}") - return Response(f"Ошибка загрузки содержимого файла ({status_code}).", status=status_code, mimetype='text/plain') + logging.error(f"HTTP Error fetching text content from HF ({hf_path}, owner {owner_user_id}): {e}") + status_code = e.response.status_code + error_message = f"Error fetching content ({status_code})" + if status_code == 404: error_message = "File not found on storage." + return Response(error_message, status=status_code, mimetype='text/plain') except requests.exceptions.RequestException as e: - logging.error(f"Network error fetching text content {file_id} ({hf_repo_path}): {e}") - return Response("Ошибка сети при загрузке содержимого.", status=504, mimetype='text/plain') + logging.error(f"Network Error fetching text content from HF ({hf_path}, owner {owner_user_id}): {e}") + return Response("Error fetching content (Network Error)", status=502, mimetype='text/plain') except Exception as e: - logging.exception(f"Unexpected error fetching text content {file_id} ({hf_repo_path}): {e}") - return Response("Внутренняя ошибка сервера при получении содержимого.", status=500, mimetype='text/plain') + logging.exception(f"Unexpected error fetching text content ({hf_path}, owner {owner_user_id}): {e}") + return Response("Internal server error", status=500, mimetype='text/plain') @app.route('/preview_thumb/') def preview_thumb_route(file_id): - # This serves the *full* image, relying on browser resizing/caching and the img tag `loading="lazy"` - # Generating actual thumbnails server-side would require an image library (Pillow) - # and potentially more complex caching/storage. owner_user_id, file_node = find_file_owner_and_node(file_id) if not file_node or file_node.get('file_type') != 'image': - # Return a placeholder or 404? 404 is cleaner. 
- return Response("Превью не найдено", status=404, mimetype='text/plain') + return Response("Image not found", status=404, mimetype='text/plain') - hf_repo_path = file_node.get('hf_repo_path') - if not hf_repo_path: - return Response("Ошибка сервера: путь к файлу превью отсутствует", status=500, mimetype='text/plain') + hf_path = file_node.get('path') + if not hf_path: + return Response("Error: File path missing", status=500, mimetype='text/plain') - # Use the direct file URL (not download=true needed for img src) - file_url = f"https://huggingface.co/datasets/{REPO_ID}/resolve/main/{hf_repo_path}" + # Use the raw file URL, not download=true for browser rendering + file_url = f"https://huggingface.co/datasets/{REPO_ID}/resolve/main/{hf_path}" + logging.info(f"Image preview request for file_id: {file_id}, owner: {owner_user_id}, hf_path: {hf_path}") try: headers = {} if HF_TOKEN_READ: - headers["Authorization"] = f"Bearer {HF_TOKEN_READ}" + headers["authorization"] = f"Bearer {HF_TOKEN_READ}" - logging.info(f"Streaming image preview for {file_id} from {file_url}") - response = requests.get(file_url, headers=headers, stream=True, timeout=30) + # Use a shorter timeout for thumbnails + response = requests.get(file_url, headers=headers, stream=True, timeout=15) response.raise_for_status() - # Pass through necessary headers - resp_headers = { - 'Content-Type': response.headers.get('Content-Type', 'image/jpeg'), - # Add caching headers? 
Let browser handle based on HF response maybe - # 'Cache-Control': 'public, max-age=3600' # Example: Cache for 1 hour - } - if 'Content-Length' in response.headers: - resp_headers['Content-Length'] = response.headers['Content-Length'] + content_type = response.headers.get('Content-Type', 'image/jpeg') + # Basic check for image types + if not content_type.startswith('image/'): + logging.warning(f"Preview requested for non-image content type '{content_type}' at {hf_path}") + return Response("Invalid content type for image preview", status=415, mimetype='text/plain') - return Response(response.iter_content(chunk_size=65536), headers=resp_headers) + # Stream the response directly + return Response(response.iter_content(chunk_size=65536), + mimetype=content_type, + headers={"Content-Length": response.headers.get('Content-Length')}) except requests.exceptions.HTTPError as e: - status_code = e.response.status_code - logging.error(f"HTTP error fetching preview {file_id} ({hf_repo_path}): {status_code} - {e}") - # Return a placeholder image or text? 404 is simpler. - return Response(f"Ошибка загрузки превью ({status_code})", status=status_code, mimetype='text/plain') + logging.error(f"HTTP Error fetching preview from HF ({hf_path}, owner: {owner_user_id}): {e}") + status_code = e.response.status_code + error_message = f"Error loading preview ({status_code})" + if status_code == 404: error_message = "Image not found on storage." 
+ return Response(error_message, status=status_code, mimetype='text/plain') except requests.exceptions.RequestException as e: - logging.error(f"Network error fetching preview {file_id} ({hf_repo_path}): {e}") - return Response("Ошибка сети при загрузке превью", status=504, mimetype='text/plain') + logging.error(f"Network Error fetching preview from HF ({hf_path}, owner: {owner_user_id}): {e}") + return Response("Error loading preview (Network Error)", status=502, mimetype='text/plain') except Exception as e: - logging.exception(f"Unexpected error during preview fetch {file_id} ({hf_repo_path}): {e}") - return Response("Внутренняя ошибка сервера при загрузке превью", status=500, mimetype='text/plain') + logging.exception(f"Unexpected error during preview ({hf_path}, owner: {owner_user_id}): {e}") + return Response("Internal server error loading preview", status=500, mimetype='text/plain') - -# --- Main Execution --- if __name__ == '__main__': - if not BOT_TOKEN or BOT_TOKEN == 'YOUR_BOT_TOKEN' or ':' not in BOT_TOKEN: - logging.critical("\n" + "*"*60 + - "\n CRITICAL: TELEGRAM_BOT_TOKEN is not set or is invalid. " + - "\n Telegram authentication WILL FAIL. Set the environment variable." + - "\n" + "*"*60) + if not BOT_TOKEN or BOT_TOKEN == 'YOUR_BOT_TOKEN': + logging.critical("CRITICAL: TELEGRAM_BOT_TOKEN is not set correctly. Telegram authentication WILL FAIL.") if not HF_TOKEN_WRITE: - logging.warning("HF_TOKEN (write access) is not set. File uploads and deletions will fail.") + logging.warning("HF_TOKEN_WRITE (write access) is not set. File uploads, deletions, and folder creation will fail.") if not HF_TOKEN_READ: - logging.warning("HF_TOKEN_READ is not set (or same as write token). File downloads/previews might fail if the HF dataset is private.") - if app.secret_key == "supersecretkey_mini_app_unique_dev": - logging.warning("Using default Flask secret key. Set FLASK_SECRET_KEY env var for production.") + logging.warning("HF_TOKEN_READ is not set. 
File downloads/previews might fail if the HF dataset is private.") logging.info("Attempting initial database download/load...") - load_data() # Load data into cache on startup - logging.info("Initial data load attempt complete.") - - # Use waitress or gunicorn for production instead of Flask dev server - logging.info("Starting Flask application server...") - app.run(debug=False, host='0.0.0.0', port=7860) - # Example using waitress (install waitress first: pip install waitress) - # from waitress import serve - # serve(app, host='0.0.0.0', port=7860) \ No newline at end of file + initial_data = load_data() + if initial_data == {'users': {}}: + logging.warning("Started with empty or unrecoverable database.") + else: + logging.info(f"Initial load successful. Found {len(initial_data.get('users',{}))} user(s).") + + # Use waitress or gunicorn in production instead of Flask's built-in server + # Example: waitress-serve --host 0.0.0.0 --port 7860 'app (24):app' + app.run(debug=False, host='0.0.0.0', port=7860, threaded=True) # threaded=True for concurrent requests \ No newline at end of file