diff --git "a/app.py" "b/app.py"
--- "a/app.py"
+++ "b/app.py"
@@ -1,6 +1,3 @@
-
-# START OF FILE app (24)_updated.py
-
import os
import hmac
import hashlib
@@ -17,28 +14,26 @@ from werkzeug.utils import secure_filename
import requests
from io import BytesIO
import uuid
-from typing import Union, Optional, Dict, Any, Tuple, List
-
+from typing import Union, Optional, Dict, Any
app = Flask(__name__)
-app.secret_key = os.getenv("FLASK_SECRET_KEY", "supersecretkey_mini_app_unique_v2")
+app.secret_key = os.getenv("FLASK_SECRET_KEY", "supersecretkey_mini_app_unique_dev")
BOT_TOKEN = os.getenv('TELEGRAM_BOT_TOKEN', '6750208873:AAE2hvPlJ99dBdhGa_Brre0IIpUdOvXxHt4') # MUST be set
-DATA_FILE = 'cloudeng_mini_app_data_v2.json'
-REPO_ID = "Eluza133/Z1e1u" # Same HF Repo
+DATA_FILE = 'cloudeng_mini_app_data.json'
+DATA_FILE_BACKUP = 'cloudeng_mini_app_data.json.bak'
+REPO_ID = "Eluza133/Z1e1u"
HF_TOKEN_WRITE = os.getenv("HF_TOKEN")
HF_TOKEN_READ = os.getenv("HF_TOKEN_READ") or HF_TOKEN_WRITE
-UPLOAD_FOLDER = 'uploads_mini_app_v2'
+UPLOAD_FOLDER = 'uploads_mini_app'
os.makedirs(UPLOAD_FOLDER, exist_ok=True)
-
cache = Cache(app, config={'CACHE_TYPE': 'simple'})
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
-
AUTH_DATA_LIFETIME = 3600 # 1 hour
-
-def find_node_by_id(filesystem: Optional[Dict[str, Any]], node_id: str) -> Tuple[Optional[Dict[str, Any]], Optional[Dict[str, Any]]]:
+# --- Filesystem Utilities ---
+def find_node_by_id(filesystem: Dict[str, Any], node_id: str) -> (Optional[Dict[str, Any]], Optional[Dict[str, Any]]):
if not filesystem or not isinstance(filesystem, dict):
return None, None
if filesystem.get('id') == node_id:
@@ -50,7 +45,10 @@ def find_node_by_id(filesystem: Optional[Dict[str, Any]], node_id: str) -> Tuple
while queue:
current_node, parent = queue.pop(0)
if current_node.get('type') == 'folder' and 'children' in current_node:
- for child in current_node.get('children', []):
+ for i, child in enumerate(current_node.get('children', [])):
+ if not isinstance(child, dict):
+ logging.warning(f"Invalid child found in node {current_node.get('id')}: {child}")
+ continue
child_id = child.get('id')
if not child_id: continue
@@ -63,48 +61,68 @@ def find_node_by_id(filesystem: Optional[Dict[str, Any]], node_id: str) -> Tuple
def add_node(filesystem: Dict[str, Any], parent_id: str, node_data: Dict[str, Any]) -> bool:
    """Attach ``node_data`` as a child of the folder ``parent_id``.

    As a side effect, malformed children of the parent (non-dicts or
    entries without an ``id``) are pruned with a warning.  Returns True
    when the node was appended; False when the parent is missing, is not
    a folder, or already contains a child with the same id.
    """
    parent, _ = find_node_by_id(filesystem, parent_id)
    if not (parent and isinstance(parent, dict) and parent.get('type') == 'folder'):
        return False

    raw_children = parent.get('children')
    if not isinstance(raw_children, list):
        raw_children = []

    # Collect only well-formed children, remembering their ids for the
    # duplicate check below.
    kept = []
    known_ids = set()
    for entry in raw_children:
        if isinstance(entry, dict) and 'id' in entry:
            known_ids.add(entry['id'])
            kept.append(entry)
        else:
            logging.warning(f"Found invalid child structure in parent {parent_id}, removing: {entry}")
    parent['children'] = kept  # Clean up invalid entries

    if node_data.get('id') in known_ids:
        logging.warning(f"Node with id {node_data.get('id')} already exists in parent {parent_id}")
        return False  # Indicate node already exists, maybe update instead?

    parent['children'].append(node_data)
    return True
def remove_node(filesystem: Dict[str, Any], node_id: str) -> bool:
    """Delete the node ``node_id`` from the tree rooted at ``filesystem``.

    The root node itself can never be removed.  Returns True when a child
    entry was actually deleted; False otherwise (root id, unknown id,
    invalid tree, or a node whose parent could not be located).
    """
    # Guard: tolerate a missing/corrupt tree instead of raising
    # AttributeError on filesystem.get() (find_node_by_id already guards
    # this, but the root-id check below runs before it).
    if not isinstance(filesystem, dict):
        logging.error("remove_node called with an invalid filesystem structure.")
        return False

    if node_id == filesystem.get('id'):
        logging.error("Attempted to remove the root node.")
        return False

    node_to_remove, parent_node = find_node_by_id(filesystem, node_id)

    if node_to_remove and parent_node and isinstance(parent_node, dict) \
            and isinstance(parent_node.get('children'), list):
        original_length = len(parent_node['children'])
        # Keep every child except the (dict) one whose id matches.
        parent_node['children'] = [
            child for child in parent_node['children']
            if not (isinstance(child, dict) and child.get('id') == node_id)
        ]
        return len(parent_node['children']) < original_length
    elif node_to_remove:
        # Node exists somewhere in the tree but its parent link is broken.
        logging.error(f"Found node {node_id} but its parent was not found or invalid.")
    return False
-def get_node_path_list(filesystem: Dict[str, Any], node_id: str) -> List[Dict[str, str]]:
+
+def get_node_path_list(filesystem: Dict[str, Any], node_id: str) -> list:
path_list = []
current_id = node_id
processed_ids = set()
- max_depth = 20
+ max_depth = 50
depth = 0
while current_id and current_id not in processed_ids and depth < max_depth:
processed_ids.add(current_id)
depth += 1
node, parent = find_node_by_id(filesystem, current_id)
- if not node:
- logging.warning(f"Node {current_id} not found during path generation.")
- break
+
+ if not node or not isinstance(node, dict):
+ logging.error(f"Path traversal failed: Node not found or invalid for ID {current_id}")
+ break
+
path_list.append({
- 'id': node.get('id', ''),
+ 'id': node.get('id'),
'name': node.get('name', node.get('original_filename', 'Unknown'))
})
- if not parent:
- if node.get('id') != 'root':
- logging.warning(f"Node {current_id} found but has no parent (and is not root).")
+
+ if not parent or not isinstance(parent, dict):
break
+
parent_id = parent.get('id')
if parent_id == current_id:
logging.error(f"Filesystem loop detected at node {current_id}")
@@ -112,157 +130,161 @@ def get_node_path_list(filesystem: Dict[str, Any], node_id: str) -> List[Dict[st
current_id = parent_id
if not any(p['id'] == 'root' for p in path_list):
+ # Check if root exists before adding it
root_node, _ = find_node_by_id(filesystem, 'root')
- root_name = root_node.get('name', 'Root') if root_node else 'Root'
- path_list.append({'id': 'root', 'name': root_name})
+ if root_node and isinstance(root_node, dict):
+ path_list.append({'id': 'root', 'name': root_node.get('name', 'Root')})
+ else:
+ logging.error("Root node ('root') not found in filesystem during path generation.")
+
final_path = []
seen_ids = set()
for item in reversed(path_list):
- item_id = item.get('id')
- if item_id and item_id not in seen_ids:
+ if item.get('id') not in seen_ids:
final_path.append(item)
- seen_ids.add(item_id)
- return final_path
+ seen_ids.add(item.get('id'))
+
+ if not final_path or final_path[0].get('id') != 'root':
+ logging.warning(f"Path generation for {node_id} resulted in unexpected structure. Correcting.")
+ # Attempt to reconstruct from root if possible
+ root_node, _ = find_node_by_id(filesystem, 'root')
+ if root_node:
+ corrected_path = [{'id': 'root', 'name': root_node.get('name', 'Root')}]
+ # We might not be able to fully reconstruct the broken path here easily
+ # Return at least the root if the target node path failed badly
+ return corrected_path
+ else:
+ return [] # Return empty if root is also missing
+ return final_path
+
def initialize_user_filesystem(user_data: Dict[str, Any]):
    """Ensure ``user_data['filesystem']`` is a usable root folder.

    Resets the whole tree when the filesystem is missing, not a dict, or
    not rooted at id ``'root'``.  When the root exists but its
    ``children`` list is missing or corrupt, only that list is repaired
    in place (this repair existed before and was lost in a refactor —
    without it a children-less root slips through untouched).
    """
    fs = user_data.get('filesystem')
    if not isinstance(fs, dict) or fs.get('id') != 'root':
        logging.warning(f"Initializing/resetting filesystem for user data: {user_data.get('user_info', {}).get('id', 'UNKNOWN')}")
        user_data['filesystem'] = {
            "type": "folder",
            "id": "root",
            "name": "Root",
            "children": []
        }
    elif not isinstance(fs.get('children'), list):
        logging.warning("Root node missing 'children' list. Initializing.")
        fs['children'] = []
-@cache.memoize(timeout=60)
+# --- Data Loading/Saving ---
def load_data() -> Dict[str, Any]:
    """Load the application database from DATA_FILE.

    When the file is missing it is first fetched via download_db_from_hf();
    when it is corrupt, DATA_FILE_BACKUP is tried (and, on success, copied
    back over the main file).  Always returns a dict containing at least a
    'users' key; every user record is normalized through
    initialize_user_filesystem(), and the result is cached under 'app_data'.
    """
    data: Dict[str, Any] = {'users': {}}
    try:
        if not os.path.exists(DATA_FILE):
            logging.warning(f"{DATA_FILE} not found locally. Attempting download.")
            download_db_from_hf()  # try to fetch a copy before giving up

        if not os.path.exists(DATA_FILE):
            logging.warning(f"{DATA_FILE} still not found after download attempt. Initializing empty data.")
            data = {'users': {}}
        else:
            with open(DATA_FILE, 'r', encoding='utf-8') as fh:
                try:
                    data = json.load(fh)
                    if not isinstance(data, dict):
                        logging.error(f"Data file {DATA_FILE} is not a dictionary. Trying backup.")
                        raise ValueError("Data is not a dictionary")
                    data.setdefault('users', {})
                except (json.JSONDecodeError, ValueError) as e:
                    logging.error(f"Error decoding JSON from {DATA_FILE}: {e}. Attempting to load backup {DATA_FILE_BACKUP}")
                    if not os.path.exists(DATA_FILE_BACKUP):
                        logging.warning(f"Backup file {DATA_FILE_BACKUP} not found. Initializing empty data.")
                        data = {'users': {}}
                    else:
                        try:
                            with open(DATA_FILE_BACKUP, 'r', encoding='utf-8') as bak_file:
                                data = json.load(bak_file)
                            if not isinstance(data, dict):
                                logging.error(f"Backup file {DATA_FILE_BACKUP} is also invalid. Initializing empty data.")
                                data = {'users': {}}
                            else:
                                logging.info(f"Successfully loaded data from backup {DATA_FILE_BACKUP}")
                                # Best-effort: copy the good backup back over
                                # the corrupt main file.
                                try:
                                    with open(DATA_FILE, 'w', encoding='utf-8') as main_file:
                                        json.dump(data, main_file, ensure_ascii=False, indent=4)
                                    logging.info(f"Restored {DATA_FILE} from backup.")
                                except Exception as write_err:
                                    logging.error(f"Failed to restore {DATA_FILE} from backup: {write_err}")
                        except Exception as bak_e:
                            logging.error(f"Error reading backup file {DATA_FILE_BACKUP}: {bak_e}. Initializing empty data.")
                            data = {'users': {}}
    except Exception as e:
        logging.error(f"Unexpected error loading data: {e}. Returning empty data.")
        data = {'users': {}}

    # Normalize every user record so downstream code can rely on a valid tree.
    users = data.setdefault('users', {})
    if isinstance(users, dict):
        for user_id, user_data in users.items():
            if isinstance(user_data, dict):
                initialize_user_filesystem(user_data)
            else:
                logging.warning(f"Invalid user data structure for user {user_id}. Skipping initialization.")
        logging.info("Data loaded and filesystems checked/initialized.")
    else:
        logging.error("User data structure is not a dictionary. Resetting users.")
        data['users'] = {}

    cache.set('app_data', data)
    logging.info(f"Loaded data into cache. User count: {len(data.get('users', {}))}")
    return data
-_save_lock = threading.Lock()
def save_data(data: Dict[str, Any]):
    """Persist ``data`` to DATA_FILE, keeping the previous version as a backup.

    Validates the top-level structure and each user's filesystem root before
    touching disk.  The JSON is first written to ``DATA_FILE + '.tmp'`` and
    then promoted with os.replace(), so a crash mid-write can never leave a
    half-written DATA_FILE (the previous revision removed this atomicity and
    wrote the live file directly).  On success the cache is refreshed and an
    upload to Hugging Face is initiated; on failure the backup is restored.
    """
    if not isinstance(data, dict) or 'users' not in data:
        logging.error("Attempted to save invalid data structure. Aborting save.")
        return  # Prevent saving malformed data

    # Validate filesystem integrity before saving (basic check).
    for user_id, user_data in data.get('users', {}).items():
        if not isinstance(user_data, dict) or not isinstance(user_data.get('filesystem'), dict) \
                or user_data['filesystem'].get('id') != 'root':
            logging.error(f"Filesystem integrity check failed for user {user_id}. Aborting save.")
            return

    temp_data_file = DATA_FILE + ".tmp"
    try:
        # Write the full payload to a temp file first; DATA_FILE stays intact
        # until the dump has completed successfully.
        with open(temp_data_file, 'w', encoding='utf-8') as file:
            json.dump(data, file, ensure_ascii=False, indent=4)

        # Keep the current revision as a backup, then atomically promote
        # the freshly written temp file.
        if os.path.exists(DATA_FILE):
            try:
                os.replace(DATA_FILE, DATA_FILE_BACKUP)
                logging.info(f"Created backup {DATA_FILE_BACKUP}")
            except OSError as e:
                logging.warning(f"Could not create backup file {DATA_FILE_BACKUP}: {e}. Proceeding with caution.")
        os.replace(temp_data_file, DATA_FILE)

        cache.set('app_data', data)  # Update cache immediately
        logging.info("Data saved locally successfully.")
        upload_db_to_hf()  # Initiate upload after successful local save
    except Exception as e:
        logging.error(f"Error saving data locally: {e}")
        # If promotion failed after the backup rename, DATA_FILE is missing:
        # bring the previous revision back.
        if not os.path.exists(DATA_FILE) and os.path.exists(DATA_FILE_BACKUP):
            try:
                os.replace(DATA_FILE_BACKUP, DATA_FILE)
                logging.info(f"Restored {DATA_FILE} from backup due to save failure.")
                load_data()  # refresh cache from the restored file
            except OSError as restore_e:
                logging.error(f"CRITICAL: Failed to save data AND failed to restore backup: {restore_e}")
        # Never leave a stale temp file behind.
        if os.path.exists(temp_data_file):
            try:
                os.remove(temp_data_file)
            except OSError as cleanup_e:
                logging.error(f"Error removing temporary save file {temp_data_file}: {cleanup_e}")
-def upload_db_to_hf_async():
+def upload_db_to_hf():
if not HF_TOKEN_WRITE:
logging.warning("HF_TOKEN_WRITE not set, skipping database upload.")
return
if not os.path.exists(DATA_FILE):
- logging.warning(f"Local data file {DATA_FILE} not found for upload.")
- return
-
- threading.Thread(target=upload_db_to_hf_worker, daemon=True).start()
-
-def upload_db_to_hf_worker():
+ logging.error(f"Cannot upload {DATA_FILE} to HF: File does not exist locally.")
+ return
try:
api = HfApi()
api.upload_file(
@@ -272,33 +294,106 @@ def upload_db_to_hf_worker():
repo_type="dataset",
token=HF_TOKEN_WRITE,
commit_message=f"Backup MiniApp {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}",
+ run_as_future=True # Schedule async
+ )
+ logging.info("Database upload to Hugging Face scheduled.")
+ except Exception as e:
+ logging.error(f"Error scheduling database upload to HF: {e}")
+
def download_db_from_hf():
    """Fetch DATA_FILE from the Hugging Face dataset repo into the CWD.

    The current local DB (if any) is renamed to a timestamped ``.local_bak``
    first; if the downloaded file fails validation, that backup is restored.
    When no read token is configured, or every fallback fails, an empty
    ``{'users': {}}`` DB is written so callers always find a file.
    """
    if not HF_TOKEN_READ:
        logging.warning("HF_TOKEN_READ not set, skipping database download.")
        if not os.path.exists(DATA_FILE):
            logging.info(f"Local file {DATA_FILE} missing and no read token. Creating empty DB.")
            with open(DATA_FILE, 'w', encoding='utf-8') as f:
                json.dump({'users': {}}, f)
        return

    # Initialize explicitly instead of probing `'backup_path' in locals()`
    # later — that idiom is fragile and breaks under any refactor.
    backup_path = None
    try:
        # Move the current DB aside so a bad download can be rolled back.
        if os.path.exists(DATA_FILE):
            candidate = f"{DATA_FILE}.{int(time.time())}.local_bak"
            try:
                os.rename(DATA_FILE, candidate)
                backup_path = candidate
                logging.info(f"Backed up local DB to {candidate} before downloading.")
            except OSError as e:
                logging.warning(f"Could not backup local DB before download: {e}")

        # NOTE(review): force_filename and local_dir_use_symlinks are
        # deprecated (and removed in recent huggingface_hub releases) —
        # confirm the pinned library version still accepts them.
        downloaded_path = hf_hub_download(
            repo_id=REPO_ID,
            filename=DATA_FILE,
            repo_type="dataset",
            token=HF_TOKEN_READ,
            local_dir=".",
            local_dir_use_symlinks=False,  # Ensure actual file is created
            force_filename=DATA_FILE,      # Ensure it overwrites/creates the correct name
            etag_timeout=10
        )
        logging.info(f"Database downloaded from Hugging Face to {downloaded_path}")

        # Basic validation of the downloaded file; roll back on any failure.
        try:
            with open(downloaded_path, 'r', encoding='utf-8') as f:
                content = json.load(f)
            if not isinstance(content, dict) or 'users' not in content:
                logging.error("Downloaded DB file is invalid. Restoring local backup if exists.")
                raise ValueError("Invalid DB structure downloaded")
            # TODO(review): consider deleting backup_path here on success —
            # the timestamped backups currently accumulate on disk.
        except Exception as validate_e:  # JSONDecodeError, ValueError, I/O errors
            logging.error(f"Validation of downloaded DB failed: {validate_e}")
            if backup_path and os.path.exists(backup_path):
                try:
                    os.replace(backup_path, DATA_FILE)
                    logging.info("Restored local DB from backup due to invalid download.")
                except OSError as restore_e:
                    logging.error(f"Failed to restore local DB backup: {restore_e}")
                    # Restore failed: don't leave the bad download in place.
                    if os.path.exists(DATA_FILE):
                        os.remove(DATA_FILE)
                    with open(DATA_FILE, 'w', encoding='utf-8') as f:
                        json.dump({'users': {}}, f)
            else:
                # No backup to fall back on: replace the invalid download
                # with a fresh empty DB (previously the bad file was kept).
                if os.path.exists(DATA_FILE):
                    os.remove(DATA_FILE)
                with open(DATA_FILE, 'w', encoding='utf-8') as f:
                    json.dump({'users': {}}, f)

    except hf_utils.RepositoryNotFoundError:
        logging.error(f"Repository {REPO_ID} not found on HF.")
        if not os.path.exists(DATA_FILE):
            with open(DATA_FILE, 'w', encoding='utf-8') as f:
                json.dump({'users': {}}, f)
    except hf_utils.EntryNotFoundError:
        logging.warning(f"{DATA_FILE} not found in repo {REPO_ID}. Using local version or creating empty.")
        if not os.path.exists(DATA_FILE):
            with open(DATA_FILE, 'w', encoding='utf-8') as f:
                json.dump({'users': {}}, f)
    except requests.exceptions.RequestException as e:
        logging.error(f"Network error downloading DB from HF: {e}. Using local version if available.")
        # Don't create empty if local exists and network fails.
        if not os.path.exists(DATA_FILE):
            with open(DATA_FILE, 'w', encoding='utf-8') as f:
                json.dump({'users': {}}, f)
    except Exception as e:
        logging.error(f"Unexpected error downloading database: {e}")
        if not os.path.exists(DATA_FILE):
            with open(DATA_FILE, 'w', encoding='utf-8') as f:
                json.dump({'users': {}}, f)
+# --- File Type Helper ---
def get_file_type(filename: Optional[str]) -> str:
    """Classify *filename* into a coarse type bucket by its extension.

    Returns one of: 'video', 'image', 'pdf', 'text', 'audio', 'archive',
    'document_word', 'document_excel', 'document_ppt', or 'other' for
    missing/extension-less names and unknown extensions.  Matching is
    case-insensitive.
    """
    if not filename or '.' not in filename:
        return 'other'
    ext = filename.lower().rsplit('.', 1)[-1]
    # Single mapping of bucket -> extensions; set membership is O(1) and
    # keeps the whole table readable in one place.
    type_map = {
        'video': {'mp4', 'mov', 'avi', 'webm', 'mkv', 'wmv', 'flv'},
        'image': {'jpg', 'jpeg', 'png', 'gif', 'bmp', 'webp', 'svg', 'ico'},
        'pdf': {'pdf'},
        'text': {'txt', 'log', 'md', 'json', 'xml', 'html', 'css', 'js',
                 'py', 'java', 'c', 'cpp', 'h', 'hpp', 'sh', 'bat'},
        'audio': {'mp3', 'wav', 'ogg', 'aac', 'flac', 'm4a'},
        'archive': {'zip', 'rar', '7z', 'tar', 'gz', 'bz2'},
        'document_word': {'doc', 'docx', 'rtf'},
        'document_excel': {'xls', 'xlsx'},
        'document_ppt': {'ppt', 'pptx'},
    }
    for file_type, extensions in type_map.items():
        if ext in extensions:
            return file_type
    return 'other'
-
-def check_telegram_authorization(auth_data: str, bot_token: str) -> Optional[Dict[str, Any]]:
- if not auth_data or not bot_token or bot_token == 'YOUR_BOT_TOKEN':
+# --- Telegram Validation ---
+def check_telegram_authorization(auth_data_str: str, bot_token: str) -> Optional[Dict[str, Any]]:
+ if not auth_data_str or not bot_token or bot_token == 'YOUR_BOT_TOKEN':
logging.warning("Validation skipped: Missing auth_data or valid BOT_TOKEN.")
+ # In development/debug mode, maybe return a mock user?
+ # if app.debug:
+ # return {"id": "12345", "first_name": "Debug", "username": "debug_user"}
return None
+
try:
- parsed_data = dict(parse_qsl(unquote(auth_data)))
+ parsed_data = dict(parse_qsl(unquote(auth_data_str)))
if "hash" not in parsed_data:
logging.error("Hash not found in auth data")
return None
@@ -308,14 +403,17 @@ def check_telegram_authorization(auth_data: str, bot_token: str) -> Optional[Dic
current_ts = int(time.time())
if abs(current_ts - auth_date_ts) > AUTH_DATA_LIFETIME:
- logging.warning(f"Auth data expired (Auth: {auth_date_ts}, Now: {current_ts}, Diff: {current_ts - auth_date_ts})")
+ logging.warning(f"Auth data expired (Auth date: {auth_date_ts}, Now: {current_ts}, Diff: {current_ts - auth_date_ts} > {AUTH_DATA_LIFETIME})")
return None
- data_check_string = "\n".join(sorted([f"{k}={v}" for k, v in parsed_data.items()]))
+ data_check_list = sorted([f"{k}={v}" for k, v in parsed_data.items()])
+ data_check_string = "\n".join(data_check_list)
+
secret_key = hmac.new("WebAppData".encode(), bot_token.encode(), hashlib.sha256).digest()
- calculated_hash = hmac.new(secret_key, data_check_string.encode(), hashlib.sha256).hexdigest()
+ calculated_hash_bytes = hmac.new(secret_key, data_check_string.encode(), hashlib.sha256).digest()
+ calculated_hash_hex = calculated_hash_bytes.hex()
- if calculated_hash == telegram_hash:
+ if hmac.compare_digest(calculated_hash_hex, telegram_hash):
user_data_str = parsed_data.get('user')
if user_data_str:
try:
@@ -325,53 +423,52 @@ def check_telegram_authorization(auth_data: str, bot_token: str) -> Optional[Dic
return None
user_info['id'] = str(user_info['id']) # Ensure ID is string
return user_info
- except json.JSONDecodeError:
- logging.error("Failed to decode user JSON from auth data")
+ except json.JSONDecodeError as e:
+ logging.error(f"Failed to decode user JSON from auth data: {e} - Data: {user_data_str}")
return None
else:
logging.warning("No 'user' field in validated auth data")
return None
else:
- logging.warning("Hash mismatch during validation")
+ logging.warning(f"Hash mismatch during validation. Received: {telegram_hash}, Calculated: {calculated_hash_hex}")
return None
except Exception as e:
- logging.error(f"Exception during validation: {e}")
+ logging.exception(f"Exception during Telegram validation: {e}")
return None
+# --- HTML, CSS, JS Template ---
HTML_TEMPLATE = """
-
+
Zeus Cloud
-
+
Загрузка...
-
-
-
Zeus Cloud
-
+
+
Zeus Cloud
-
-
-
-
-
Действия
-
-
-
-
-
-
-
-
-
-
-
-
-
-
Содержимое
-
-
-
-
-
+
+
+
+
+
+
+
+
+
-
-
+
+
+
+
Содержимое
+
+
+
+
-
-
-
+ ×
@@ -752,65 +634,56 @@ HTML_TEMPLATE = """
const userInfoHeaderEl = document.getElementById('user-info-header');
const flashContainerEl = document.getElementById('flash-container');
const breadcrumbsContainerEl = document.getElementById('breadcrumbs-container');
- const fileListUl = document.getElementById('file-list-ul');
- const fileListContainer = document.getElementById('file-list-container');
+ const itemGridContainerEl = document.getElementById('item-grid-container');
const currentFolderTitleEl = document.getElementById('current-folder-title');
- const uploadForm = document.getElementById('upload-form');
+ const uploadForm = document.getElementById('upload-form'); // Still needed for FormData
const fileInput = document.getElementById('file-input');
- const fileInputLabel = document.getElementById('file-input-label');
- const uploadBtn = document.getElementById('upload-btn');
+ const uploadLabelBtn = document.getElementById('upload-label-btn');
const progressContainer = document.getElementById('progress-container');
const progressBar = document.getElementById('progress-bar');
- const progressText = document.getElementById('progress-text');
+ const progressText = document.getElementById('progress-text'); // Kept in HTML, maybe use later
const newFolderInput = document.getElementById('new-folder-name');
const createFolderBtn = document.getElementById('create-folder-btn');
- const headerTitleEl = document.querySelector('.header-title');
let currentFolderId = 'root';
let validatedInitData = null;
let currentUser = null;
let currentItems = [];
+ let isUploading = false;
+ // --- API Communication ---
async function apiCall(endpoint, method = 'POST', body = {}) {
if (!validatedInitData) {
- showError("Ошибка: Данные авторизации отсутствуют. Попробуйте перезапустить приложение.");
+ showError("Ошибка: Данные авторизации отсутствуют. Попробуйте перезапустить.");
throw new Error("Not authenticated");
}
body.initData = validatedInitData;
- const controller = new AbortController();
- const timeoutId = setTimeout(() => controller.abort(), 30000); // 30 second timeout
-
try {
const response = await fetch(endpoint, {
method: method,
headers: { 'Content-Type': 'application/json' },
- body: JSON.stringify(body),
- signal: controller.signal
+ body: JSON.stringify(body)
});
- clearTimeout(timeoutId);
if (!response.ok) {
let errorMsg = `Ошибка сервера (${response.status})`;
try {
const errData = await response.json();
errorMsg = errData.message || errorMsg;
- } catch (e) { /* Ignore */ }
+ } catch (e) { /* Ignore if error body is not JSON */ }
throw new Error(errorMsg);
}
return await response.json();
} catch (error) {
- clearTimeout(timeoutId);
- console.error(`API call to ${endpoint} failed:`, error);
- if (error.name === 'AbortError') {
- showFlash('Ошибка: Запрос занял слишком много времени.', 'error');
- } else {
- showFlash(`Ошибка сети или сервера: ${error.message}`, 'error');
- }
- throw error;
+ console.error(`API call to ${endpoint} failed:`, error);
+ showFlash(`Ошибка: ${error.message}`, 'error');
+ throw error;
}
}
- function showLoadingScreen() {
+ // --- UI Rendering ---
+ function showLoadingScreen(message = 'Загрузка...') {
+ loadingEl.textContent = message;
loadingEl.style.display = 'flex';
errorViewEl.style.display = 'none';
appContentEl.style.display = 'none';
@@ -822,6 +695,8 @@ HTML_TEMPLATE = """
errorViewEl.style.display = 'flex';
appContentEl.style.display = 'none';
if (tg.HapticFeedback) tg.HapticFeedback.notificationOccurred('error');
+ // Disable main button if error occurs
+ tg.MainButton.hide();
}
function showAppContent() {
@@ -830,39 +705,33 @@ HTML_TEMPLATE = """
appContentEl.style.display = 'flex';
}
- function showFlash(message, type = 'success', duration = 4000) {
+ function showFlash(message, type = 'success') {
const flashDiv = document.createElement('div');
flashDiv.className = `flash ${type}`;
flashDiv.textContent = message;
+ flashContainerEl.innerHTML = ''; // Clear previous messages
flashContainerEl.appendChild(flashDiv);
- // Trigger reflow to enable transition
- flashDiv.offsetHeight;
-
- flashDiv.classList.add('show');
+ // Use Telegram's popup for important messages?
+ // tg.showAlert(message);
+ const timeout = type === 'error' ? 8000 : 5000;
setTimeout(() => {
- flashDiv.classList.remove('show');
- // Remove element after transition ends
- flashDiv.addEventListener('transitionend', () => {
- if (flashDiv.parentNode === flashContainerEl) {
- flashContainerEl.removeChild(flashDiv);
- }
- }, { once: true });
- }, duration);
+ if (flashDiv.parentNode === flashContainerEl) {
+ flashDiv.style.opacity = '0';
+ setTimeout(() => {
+ if (flashDiv.parentNode === flashContainerEl) {
+ flashContainerEl.removeChild(flashDiv);
+ }
+ }, 300); // Transition duration
+ }
+ }, timeout);
if (tg.HapticFeedback) tg.HapticFeedback.notificationOccurred(type);
}
function renderBreadcrumbs(breadcrumbs) {
breadcrumbsContainerEl.innerHTML = '';
- if (!breadcrumbs || breadcrumbs.length === 0) {
- breadcrumbsContainerEl.innerHTML = 'Root';
- headerTitleEl.textContent = 'Zeus Cloud';
- currentFolderTitleEl.textContent = `Содержимое: Root`;
- return;
- }
-
breadcrumbs.forEach((crumb, index) => {
if (index > 0) {
const separator = document.createElement('span');
@@ -874,8 +743,7 @@ HTML_TEMPLATE = """
span.className = 'current-folder';
span.textContent = crumb.name;
breadcrumbsContainerEl.appendChild(span);
- headerTitleEl.textContent = crumb.name; // Update header title
- currentFolderTitleEl.textContent = `Содержимое: ${crumb.name}`;
+ currentFolderTitleEl.textContent = `${crumb.name}`; // Simpler title
} else {
const link = document.createElement('a');
link.href = '#';
@@ -884,179 +752,203 @@ HTML_TEMPLATE = """
breadcrumbsContainerEl.appendChild(link);
}
});
- // Ensure the last breadcrumb is visible if overflowing
- breadcrumbsContainerEl.scrollLeft = breadcrumbsContainerEl.scrollWidth;
+ // Scroll to the end of breadcrumbs if they overflow
+ breadcrumbsContainerEl.scrollLeft = breadcrumbsContainerEl.scrollWidth;
+
+ // Update BackButton visibility
+ if (breadcrumbs.length > 1) {
+ tg.BackButton.show();
+ } else {
+ tg.BackButton.hide();
+ }
}
function getItemIcon(item) {
- if (item.type === 'folder') return '📁';
-
- switch(item.file_type) {
- case 'image': return '🖼️';
- case 'video': return '🎬';
- case 'pdf': return '📄';
- case 'text': return '📝';
- case 'audio': return '🎵';
- case 'archive': return '📦';
- case 'document': return '📃';
- case 'spreadsheet': return '📊';
- case 'presentation': return '🖥️';
- default: return '❓';
- }
+ if (item.type === 'folder') return '📁';
+ switch (item.file_type) {
+ case 'image': return ``; // Preview or fallback icon
+ case 'video': return '🎬';
+ case 'audio': return '🎵';
+ case 'pdf': return '📄'; // Specific PDF icon
+ case 'text': return '📝';
+ case 'archive': return '📦';
+ case 'document_word':
+ case 'document_excel':
+ case 'document_ppt':
+ return '📊'; // Generic doc icon
+ default: return '❓'; // Unknown
+ }
}
+
function getItemIconClass(item) {
if (item.type === 'folder') return 'folder';
return item.file_type || 'other';
}
function renderItems(items) {
- fileListUl.innerHTML = '';
+ itemGridContainerEl.innerHTML = ''; // Clear previous items
if (!items || items.length === 0) {
- fileListContainer.style.display = 'none';
- const emptyState = document.createElement('div');
- emptyState.className = 'empty-state';
- emptyState.innerHTML = '
Эта папка пуста.
';
- // Insert after the file-list container or its title
- const titleElement = document.getElementById('current-folder-title');
- titleElement.parentNode.insertBefore(emptyState, titleElement.nextSibling);
- // Remove previous empty state if exists
- const existingEmptyState = fileListContainer.parentNode.querySelector('.empty-state');
- if(existingEmptyState && existingEmptyState !== emptyState) {
- existingEmptyState.remove();
- }
+ itemGridContainerEl.innerHTML = '
`;
+ function handleItemClick(item) {
+ if (item.type === 'folder') {
+ loadFolderContent(item.id);
+ } else if (item.type === 'file') {
+ // Decide default action: preview or download? Preview seems better.
+ const previewable = ['image', 'video', 'pdf', 'text'].includes(item.file_type);
+ if (previewable) {
+ openModalForItem(item);
+ } else {
+ // Offer download directly for non-previewable
+ const dlUrl = `/download/${item.id}`;
+ // Maybe show confirm before download?
+ // tg.showConfirm(`Скачать файл "${item.original_filename || item.id}"?`, (confirmed) => {
+ // if(confirmed) window.open(dlUrl, '_blank');
+ // });
+ // Or just download:
+ window.open(dlUrl, '_blank');
}
+ }
+ }
- li.setAttribute('role', 'button');
- li.setAttribute('tabindex', '0');
- // Add main click action if defined
- if (mainAction) {
- // Prevent action buttons from triggering the main li click
- li.addEventListener('click', (e) => {
- if (e.target.closest('.item-actions')) {
- return; // Click was inside actions, do nothing on li
- }
- // Execute the main action (defined in innerHTML)
- eval(mainAction.substring(8, mainAction.length-1));
- });
+ function openModalForItem(item) {
+ if (item.file_type === 'text') {
+ openModal(`/get_text_content/${item.id}`, 'text', item.id);
+ } else {
+ // For image, video, pdf use the download URL which serves the content
+ openModal(`/download/${item.id}`, item.file_type, item.id);
+ }
+ }
+
+ function showItemContextMenu(item, x, y) {
+ // Telegram doesn't have a native context menu API.
+ // We can implement a custom one, but it might feel non-native.
+ // A simpler approach for Mini Apps is often to use tg.showPopup or tg.showAlert
+ // with action buttons, triggered by a long press or a dedicated action button.
+ // For simplicity here, let's use tg.showConfirm for delete actions.
+
+ const buttons = [];
+ if (item.type === 'folder') {
+ buttons.push({ id: 'open', type: 'default', text: 'Открыть' });
+ buttons.push({ id: 'delete', type: 'destructive', text: 'Удалить папку' });
+ } else { // File
+ const previewable = ['image', 'video', 'pdf', 'text'].includes(item.file_type);
+ if (previewable) {
+ buttons.push({ id: 'preview', type: 'default', text: 'Просмотр' });
}
+ buttons.push({ id: 'download', type: 'default', text: 'Скачать' });
+ buttons.push({ id: 'delete', type: 'destructive', text: 'Удалить файл' });
+ }
+ buttons.push({ id: 'cancel', type: 'cancel' }); // Standard cancel
+
+ tg.showPopup({
+ title: item.name || item.original_filename,
+ message: `Выберите действие:`,
+ buttons: buttons
+ }, (buttonId) => {
+ if (!buttonId || buttonId === 'cancel') return;
+
+ if (buttonId === 'open') {
+ loadFolderContent(item.id);
+ } else if (buttonId === 'preview') {
+ openModalForItem(item);
+ } else if (buttonId === 'download') {
+ window.open(`/download/${item.id}`, '_blank');
+ } else if (buttonId === 'delete') {
+ if (item.type === 'folder') {
+ deleteFolder(item.id, item.name || 'папку');
+ } else {
+ deleteFile(item.id, item.original_filename || 'файл');
+ }
+ }
+ });
+ }
- fileListUl.appendChild(li);
- });
- }
+ // --- Modal Logic ---
async function openModal(srcOrUrl, type, itemId) {
const modal = document.getElementById('mediaModal');
const modalContent = document.getElementById('modalContent');
- modalContent.innerHTML = '
Загрузка...
'; // Use loading indicator style
+ modalContent.innerHTML = '
Загрузка...
';
modal.style.display = 'flex';
- tg.BackButton.show();
- tg.BackButton.onClick(closeModalManual);
+ if (tg.HapticFeedback) tg.HapticFeedback.impactOccurred('light');
try {
if (type === 'pdf') {
- // Use iframe for PDF. Google Docs viewer is often blocked in TG webview.
- // Rely on browser's native PDF support within iframe.
+ // PDF handling in iframe can be tricky, especially mobile
+ // Option 1: Google Docs Viewer (might have CORS issues or need proxy)
+ // modalContent.innerHTML = ``;
+ // Option 2: Link to open externally
+ // modalContent.innerHTML = `
PDF файлы лучше открывать в отдельном приложении.
Открыть PDF`;
+ // Option 3: Basic iframe (might work for some PDFs/browsers)
modalContent.innerHTML = ``;
+
} else if (type === 'image') {
- const img = new Image();
- img.onload = () => modalContent.innerHTML = ''; modalContent.appendChild(img);
- img.onerror = () => { throw new Error('Не удалось загрузить изображение.') };
- img.src = srcOrUrl;
- img.alt="Просмотр изображения";
+ modalContent.innerHTML = ``;
} else if (type === 'video') {
- modalContent.innerHTML = ``;
+ modalContent.innerHTML = ``;
} else if (type === 'text') {
const response = await fetch(srcOrUrl);
- if (!response.ok) throw new Error(`Ошибка загрузки текста: ${response.statusText}`);
+ if (!response.ok) throw new Error(`Ошибка загрузки текста: ${response.statusText || response.status}`);
const text = await response.text();
- const escapedText = text.replace(/&/g, "&amp;").replace(/</g, "&lt;").replace(/>/g, "&gt;");
+ // Basic escaping for HTML safety
+ const escapedText = text.replace(/&/g, "&amp;").replace(/</g, "&lt;").replace(/>/g, "&gt;");
modalContent.innerHTML = `
${escapedText}
`;
} else {
modalContent.innerHTML = '
Предпросмотр для этого типа файла не поддерживается.
Не удалось загрузить содержимое для предпросмотра. ${error.message}
`;
- if (tg.HapticFeedback) tg.HapticFeedback.notificationOccurred('error');
+ modalContent.innerHTML = `
Не удалось загрузить содержимое для предпросмотра. ${error.message}
`;
+ if (tg.HapticFeedback) tg.HapticFeedback.notificationOccurred('error');
}
}
function closeModal(event) {
const modal = document.getElementById('mediaModal');
- // Close only if backdrop is clicked, not content inside
- if (event.target === modal) {
+ if (event.target === modal) { // Click outside content
closeModalManual();
}
}
@@ -1065,48 +957,36 @@ HTML_TEMPLATE = """
const modal = document.getElementById('mediaModal');
modal.style.display = 'none';
const video = modal.querySelector('video');
- if (video) { video.pause(); video.src = ''; }
+ if (video) { try { video.pause(); video.src = ''; } catch(e){} }
const iframe = modal.querySelector('iframe');
- if (iframe) iframe.src = 'about:blank';
+ if (iframe) try { iframe.src = 'about:blank'; } catch(e){}
document.getElementById('modalContent').innerHTML = ''; // Clear content
- tg.BackButton.hide();
- tg.BackButton.offClick(closeModalManual);
+ if (tg.HapticFeedback) tg.HapticFeedback.impactOccurred('light');
}
+ // --- Folder Operations ---
async function loadFolderContent(folderId) {
currentFolderId = folderId;
console.log(`Loading folder: ${folderId}`);
- // Optionally show a loading indicator specific to the list
- fileListUl.innerHTML = '
Загрузка содержимого...
';
+ showLoadingScreen(`Загрузка папки...`);
try {
const data = await apiCall('/get_dashboard_data', 'POST', { folder_id: folderId });
if (data.status === 'ok') {
currentItems = data.items || [];
renderBreadcrumbs(data.breadcrumbs || [{'id': 'root', 'name': 'Root'}]);
- renderItems(currentItems.sort((a, b) => (a.type !== 'folder') - (b.type !== 'folder') || (a.name || a.original_filename || '').localeCompare(b.name || b.original_filename || '')));
- // Handle back button visibility
- if (folderId !== 'root' && data.breadcrumbs && data.breadcrumbs.length > 1) {
- tg.BackButton.show();
- // Use closure to capture the correct parent ID for the back button
- const parentId = data.breadcrumbs[data.breadcrumbs.length - 2].id;
- const backButtonHandler = () => {
- loadFolderContent(parentId);
- tg.BackButton.offClick(backButtonHandler); // Remove this specific listener
- };
- tg.BackButton.onClick(backButtonHandler);
- } else {
- tg.BackButton.hide();
- tg.BackButton.offClick(); // Remove any previous listener
- }
-
+ renderItems(currentItems);
+ showAppContent(); // Ensure app is visible after loading
} else {
- showFlash(data.message || 'Не удалось загрузить содержимое папки.', 'error');
- fileListUl.innerHTML = '
Ошибка загрузки
';
- tg.BackButton.hide(); // Hide on error too
+ // API call already shows flash on error, but we might need to show error screen
+ showError(data.message || 'Не удалось загрузить содержимое папки.');
}
} catch (error) {
- fileListUl.innerHTML = '
Ошибка загрузки
';
- tg.BackButton.hide();
+ showError(`Не удалось загрузить содержимое папки: ${error.message}`);
+ } finally {
+ // Hide loading screen if it wasn't hidden by success/error path
+ if (loadingEl.style.display !== 'none') {
+ loadingEl.style.display = 'none';
+ }
}
}
@@ -1117,19 +997,16 @@ HTML_TEMPLATE = """
newFolderInput.focus();
return;
}
- // Allow most characters, disallow only slashes
- if (folderName.includes('/') || folderName.includes('\\')) {
- showFlash('Имя папки не должно содержать слэши (/ или \\).', 'error');
- newFolderInput.focus();
+ // Basic validation: avoid problematic chars like / \ : * ? " < > |
+ if (/[\\/:*?"<>|]/.test(folderName)) {
+ showFlash('Имя папки содержит недопустимые символы.', 'error');
+ newFolderInput.focus();
return;
}
- if (folderName === '.' || folderName === '..') {
- showFlash('Недопустимое имя папки.', 'error');
- return;
- }
createFolderBtn.disabled = true;
- createFolderBtn.textContent = '...';
+ createFolderBtn.textContent = 'Создание...';
+ if(tg.HapticFeedback) tg.HapticFeedback.impactOccurred('light');
try {
const data = await apiCall('/create_folder', 'POST', {
@@ -1137,67 +1014,85 @@ HTML_TEMPLATE = """
folder_name: folderName
});
if (data.status === 'ok') {
- showFlash(`Папка "${folderName}" создана.`);
+ showFlash(`Папка "${folderName}" создана.`, 'success');
newFolderInput.value = '';
- loadFolderContent(currentFolderId);
+ loadFolderContent(currentFolderId); // Refresh content
} else {
showFlash(data.message || 'Не удалось создать папку.', 'error');
}
} catch (error) {
- /* Handled by apiCall */
+ // Error handled by apiCall
} finally {
createFolderBtn.disabled = false;
createFolderBtn.textContent = 'Создать';
}
}
- async function deleteItem(isFolder, itemId, itemName) {
- const itemType = isFolder ? 'папку' : 'файл';
- const endpoint = isFolder ? `/delete_folder/${itemId}` : `/delete_file/${itemId}`;
- const confirmMessage = `Вы уверены, что хотите удалить ${itemType} "${itemName}"? ${isFolder ? 'Папка должна быть пустой.' : ''}`;
+ async function deleteFolder(folderId, folderName) {
+ tg.showConfirm(`Вы уверены, что хотите удалить папку "${folderName}"? Убедитесь, что папка пуста.`, async (confirmed) => {
+ if (confirmed) {
+ showLoadingScreen('Удаление папки...');
+ if(tg.HapticFeedback) tg.HapticFeedback.impactOccurred('medium');
+ try {
+ const data = await apiCall(`/delete_folder/${folderId}`, 'POST', { current_folder_id: currentFolderId }); // Send current folder for context if needed
+ if (data.status === 'ok') {
+ showFlash(`Папка "${folderName}" удалена.`, 'success');
+ loadFolderContent(currentFolderId); // Refresh
+ } else {
+ showFlash(data.message || 'Не удалось удалить папку.', 'error');
+ showAppContent(); // Hide loading if error occurred
+ }
+ } catch (error) {
+ showAppContent(); // Hide loading if error occurred
+ }
+ }
+ });
+ }
- tg.showConfirm(confirmMessage, async (confirmed) => {
+ async function deleteFile(fileId, fileName) {
+ tg.showConfirm(`Вы уверены, что хотите удалить файл "${fileName}"?`, async (confirmed) => {
if (confirmed) {
+ showLoadingScreen('Удаление файла...');
+ if(tg.HapticFeedback) tg.HapticFeedback.impactOccurred('medium');
try {
- const data = await apiCall(endpoint, 'POST', { current_folder_id: currentFolderId });
- if (data.status === 'ok') {
- showFlash(`${isFolder ? 'Папка' : 'Файл'} "${itemName}" удален${isFolder ? 'а' : '.'}`);
- loadFolderContent(currentFolderId);
- } else {
- showFlash(data.message || `Не удалось удалить ${itemType}.`, 'error');
- }
- } catch (error) {
- /* Handled by apiCall */
- }
+ const data = await apiCall(`/delete_file/${fileId}`, 'POST', { current_folder_id: currentFolderId });
+ if (data.status === 'ok') {
+ showFlash(`Файл "${fileName}" удален.`, 'success');
+ loadFolderContent(currentFolderId); // Refresh
+ } else {
+ showFlash(data.message || 'Не удалось удалить файл.', 'error');
+ showAppContent(); // Hide loading if error occurred
+ }
+ } catch (error) {
+ showAppContent(); // Hide loading if error occurred
+ }
}
});
}
- function deleteFolder(folderId, folderName) {
- deleteItem(true, folderId, folderName);
+ // --- File Upload ---
+ function triggerFileUpload() {
+ fileInput.click();
}
- function deleteFile(fileId, fileName) {
- deleteItem(false, fileId, fileName);
- }
-
- function handleFileUpload(event) {
- event.preventDefault();
+ function handleFileSelection() {
const files = fileInput.files;
if (files.length === 0) {
- showFlash('Выберите файлы для загрузки.', 'error');
+ return; // No files selected
+ }
+ if (isUploading) {
+ showFlash('Дождитесь завершения текущей загрузки.', 'error');
return;
}
+ isUploading = true;
progressContainer.style.display = 'block';
progressBar.style.width = '0%';
- // progressText.textContent = '0%'; // Text removed for cleaner look
- uploadBtn.disabled = true;
- uploadBtn.textContent = '...';
- document.getElementById('file-input-label').textContent = files.length === 1 ? files[0].name : `${files.length} файлов выбрано`;
+ uploadLabelBtn.disabled = true;
+ uploadLabelBtn.textContent = 'Загрузка...';
+ if(tg.HapticFeedback) tg.HapticFeedback.impactOccurred('light');
-
- const formData = new FormData();
+ const formData = new FormData(); // Use the hidden form
for (let i = 0; i < files.length; i++) {
formData.append('files', files[i]);
}
@@ -1210,117 +1105,149 @@ HTML_TEMPLATE = """
if (event.lengthComputable) {
const percentComplete = Math.round((event.loaded / event.total) * 100);
progressBar.style.width = percentComplete + '%';
- // progressText.textContent = percentComplete + '%';
}
});
xhr.addEventListener('load', function() {
- uploadBtn.disabled = false;
- uploadBtn.textContent = 'Загрузить';
+ isUploading = false;
+ uploadLabelBtn.disabled = false;
+ uploadLabelBtn.textContent = 'Загрузить';
progressContainer.style.display = 'none';
- fileInput.value = ''; // Reset file input
- fileInputLabel.textContent = 'Файлы не выбраны';
-
+ fileInput.value = ''; // Clear selection
if (xhr.status >= 200 && xhr.status < 300) {
try {
const data = JSON.parse(xhr.responseText);
if (data.status === 'ok') {
- showFlash(data.message || `${files.length} файл(ов) успешно загружено.`);
- loadFolderContent(currentFolderId);
+ showFlash(data.message || `${files.length} файл(ов) загружено.`, 'success');
+ loadFolderContent(currentFolderId); // Refresh
} else {
showFlash(data.message || 'Ошибка при обработке загрузки на сервере.', 'error');
}
} catch (e) {
showFlash('Некорректный ответ от сервера после загрузки.', 'error');
- console.error("Upload response parse error:", e, xhr.responseText);
}
} else {
- let errorMsg = `Ошибка загрузки (${xhr.status})`;
- try {
- const errData = JSON.parse(xhr.responseText);
- errorMsg = errData.message || errorMsg;
- } catch(e) {}
+ let errorMsg = `Ошибка загрузки: ${xhr.statusText || xhr.status}`;
+ try {
+ const errData = JSON.parse(xhr.responseText);
+ errorMsg = errData.message || errorMsg;
+ } catch (e) {}
showFlash(errorMsg, 'error');
}
});
xhr.addEventListener('error', function() {
+ isUploading = false;
showFlash('Ошибка сети во время загрузки.', 'error');
- uploadBtn.disabled = false;
- uploadBtn.textContent = 'Загрузить';
+ uploadLabelBtn.disabled = false;
+ uploadLabelBtn.textContent = 'Загрузить';
progressContainer.style.display = 'none';
- fileInput.value = '';
- fileInputLabel.textContent = 'Файлы не выбраны';
});
xhr.addEventListener('abort', function() {
+ isUploading = false;
showFlash('Загрузка отменена.', 'error');
- uploadBtn.disabled = false;
- uploadBtn.textContent = 'Загрузить';
+ uploadLabelBtn.disabled = false;
+ uploadLabelBtn.textContent = 'Загрузить';
progressContainer.style.display = 'none';
- fileInput.value = '';
- fileInputLabel.textContent = 'Файлы не выбраны';
});
xhr.open('POST', '/upload', true);
xhr.send(formData);
}
- function setupEventListeners() {
- uploadForm.addEventListener('submit', handleFileUpload);
- createFolderBtn.addEventListener('click', handleCreateFolder);
- fileInput.addEventListener('change', () => {
- const files = fileInput.files;
- if (files.length === 1) {
- fileInputLabel.textContent = files[0].name;
- } else if (files.length > 1) {
- fileInputLabel.textContent = `${files.length} файлов выбрано`;
- } else {
- fileInputLabel.textContent = 'Файлы не выбраны';
- }
- });
- }
+ // --- Initialization ---
function initializeApp() {
tg.ready();
tg.expand();
- tg.setHeaderColor(tg.themeParams.secondary_bg_color || '#ffffff');
- // Apply theme colors dynamically if needed (mostly handled by CSS variables now)
- document.body.style.backgroundColor = tg.themeParams.bg_color || '#efeff4';
+
+ // Apply theme colors
+ document.body.style.backgroundColor = tg.themeParams.bg_color || '#ffffff';
document.body.style.color = tg.themeParams.text_color || '#000000';
+ // Set header color to match secondary background or a specific header color if available
+ tg.setHeaderColor(tg.themeParams.secondary_bg_color || tg.themeParams.header_bg_color || '#f1f1f1');
+
+ // Configure Back Button
+ tg.BackButton.onClick(() => {
+ if (currentFolderId !== 'root') {
+ // Find parent ID from breadcrumbs
+ const breadcrumbs = Array.from(breadcrumbsContainerEl.querySelectorAll('a, span.current-folder'));
+ if (breadcrumbs.length > 1) {
+ // Second to last element's ID (if it's a link)
+ const parentLink = breadcrumbs[breadcrumbs.length - 2];
+ if (parentLink && parentLink.tagName === 'A') {
+ const parentId = parentLink.onclick.toString().match(/'([^']+)'/)[1];
+ if (parentId) {
+ loadFolderContent(parentId);
+ } else {
+ loadFolderContent('root'); // Fallback
+ }
+ } else {
+ loadFolderContent('root'); // Fallback if structure is odd
+ }
+ } else {
+ loadFolderContent('root'); // Should not happen if BackButton is visible, but safe fallback
+ }
+ }
+ if(tg.HapticFeedback) tg.HapticFeedback.impactOccurred('light');
+ });
+
- if (!tg.initData) {
- showError("Ошибка: Не удалось получить данные авторизации Telegram (initData). Убедитесь, что приложение открыто через Telegram и попробуйте перезапустить.");
+ if (!tg.initDataUnsafe || !tg.initDataUnsafe.user) {
+ showError("Не удалось получить данные пользователя Telegram (initDataUnsafe). Попробуйте перезапустить Mini App.");
return;
}
- validatedInitData = tg.initData;
+ // We still need initData for validation server-side
+ if (!tg.initData) {
+ showError("Не удалось получить данные авторизации Telegram (initData). Попробуйте перезапустить Mini App.");
+ return;
+ }
+
+ validatedInitData = tg.initData; // Send this to backend
+
+ // Show loading screen while validating
+ showLoadingScreen('Проверка авторизации...');
fetch('/validate_init_data', {
- method: 'POST',
- headers: { 'Content-Type': 'application/json' },
- body: JSON.stringify({ initData: validatedInitData })
- })
- .then(response => response.json())
- .then(data => {
- if (data.status === 'ok' && data.user) {
- currentUser = data.user;
- const userName = currentUser.username ? `@${currentUser.username}` : `${currentUser.first_name || ''} ${currentUser.last_name || ''}`.trim();
- userInfoHeaderEl.textContent = `Пользователь: ${userName} (ID: ${currentUser.id})`;
- showAppContent();
- setupEventListeners();
- loadFolderContent('root');
- } else {
- throw new Error(data.message || 'Не удалось верифицировать пользователя.');
- }
- })
- .catch(error => {
- console.error("Validation or Initial Load failed:", error);
- showError(`Ошибка инициализации: ${error.message}. Попробуйте перезапустить приложение.`);
- validatedInitData = null;
+ method: 'POST',
+ headers: { 'Content-Type': 'application/json' },
+ body: JSON.stringify({ initData: validatedInitData })
+ })
+ .then(response => response.json())
+ .then(data => {
+ if (data.status === 'ok' && data.user) {
+ currentUser = data.user;
+ let userName = currentUser.first_name || '';
+ if (currentUser.last_name) userName += ` ${currentUser.last_name}`;
+ if (!userName && currentUser.username) userName = currentUser.username;
+ if (!userName) userName = `User ${currentUser.id}`;
+ userInfoHeaderEl.textContent = `Пользователь: ${userName}`;
+ // showAppContent(); // Content shown after first folder load
+ loadFolderContent('root'); // Load initial content
+ } else {
+ throw new Error(data.message || 'Не удалось верифицировать пользователя.');
+ }
+ })
+ .catch(error => {
+ console.error("Validation failed:", error);
+ showError(`Ошибка авторизации: ${error.message}. Попробуйте перезапустить.`);
+ validatedInitData = null; // Invalidate data on failure
+ });
+
+ // Event listeners
+ fileInput.addEventListener('change', handleFileSelection);
+ uploadLabelBtn.addEventListener('click', triggerFileUpload); // Trigger hidden input
+ createFolderBtn.addEventListener('click', handleCreateFolder);
+ newFolderInput.addEventListener('keypress', (e) => {
+ if (e.key === 'Enter') {
+ handleCreateFolder();
+ }
});
}
+ // --- Start the App ---
initializeApp();
@@ -1329,6 +1256,8 @@ HTML_TEMPLATE = """
"""
+# --- Flask Routes ---
+
@app.route('/')
def index():
return Response(HTML_TEMPLATE, mimetype='text/html')
@@ -1337,51 +1266,46 @@ def index():
def validate_init_data():
data = request.get_json()
if not data or 'initData' not in data:
- return jsonify({"status": "error", "message": "Отсутствует initData"}), 400
+ return jsonify({"status": "error", "message": "Отсутствуют данные initData"}), 400
- init_data = data['initData']
- user_info = check_telegram_authorization(init_data, BOT_TOKEN)
+ init_data_str = data['initData']
+ user_info = check_telegram_authorization(init_data_str, BOT_TOKEN)
if user_info and 'id' in user_info:
tg_user_id = str(user_info['id'])
- needs_save = False
- try:
- db_data = load_data()
- except Exception as e:
- logging.critical(f"FATAL: Failed to load initial data: {e}")
- return jsonify({"status": "error", "message": "Критическая ошибка загрузки данных."}), 500
-
+ db_data = load_data() # Load fresh data or from cache
users = db_data.setdefault('users', {})
+ needs_save = False
if tg_user_id not in users or not isinstance(users.get(tg_user_id), dict):
- logging.info(f"New user detected or invalid user data: {tg_user_id}. Initializing.")
+ logging.info(f"New user detected or data invalid: {tg_user_id}. Initializing.")
users[tg_user_id] = {
'user_info': user_info,
- 'created_at': datetime.now().strftime('%Y-%m-%d %H:%M:%S')
+ 'created_at': datetime.now().strftime('%Y-%m-%d %H:%M:%S'),
+ 'filesystem': { "type": "folder", "id": "root", "name": "Root", "children": [] }
}
- initialize_user_filesystem(users[tg_user_id])
needs_save = True
else:
- # Ensure filesystem exists and update user info if changed
- if 'filesystem' not in users[tg_user_id]:
- initialize_user_filesystem(users[tg_user_id])
- needs_save = True
- current_user_info = users[tg_user_id].get('user_info', {})
- if current_user_info.get('username') != user_info.get('username') or \
- current_user_info.get('first_name') != user_info.get('first_name') or \
- current_user_info.get('last_name') != user_info.get('last_name'):
- users[tg_user_id]['user_info'] = user_info
- needs_save = True
+ # Ensure filesystem is initialized and update user info if changed
+ user_data = users[tg_user_id]
+ initialize_user_filesystem(user_data) # Check and initialize if needed
+ # Update user info if different (e.g., username change)
+ if user_data.get('user_info') != user_info:
+ user_data['user_info'] = user_info
+ logging.info(f"Updating user_info for {tg_user_id}")
+ needs_save = True # Save if user info changed or FS was initialized
if needs_save:
- if not save_data(db_data):
- # Log error but proceed if possible, user already exists or created in memory
- logging.error(f"Failed initial save for user {tg_user_id}, proceeding with in-memory data.")
-
+ try:
+ save_data(db_data) # Save the updated structure
+ except Exception as e:
+ logging.exception(f"Failed to save data for user {tg_user_id} during validation/init: {e}")
+ # Don't block validation if save fails, but log error
+ return jsonify({"status": "error", "message": "Ошибка сохранения данных пользователя."}), 500
return jsonify({"status": "ok", "user": user_info})
else:
- logging.warning(f"Validation failed for initData.")
+ logging.warning(f"Validation failed for initData starting with: {init_data_str[:100]}...")
return jsonify({"status": "error", "message": "Недействительные данные авторизации Telegram."}), 403
@@ -1389,54 +1313,71 @@ def validate_init_data():
def get_dashboard_data():
data = request.get_json()
if not data or 'initData' not in data or 'folder_id' not in data:
- return jsonify({"status": "error", "message": "Неполный запрос"}), 400
+ return jsonify({"status": "error", "message": "Неполный запрос (initData, folder_id)"}), 400
user_info = check_telegram_authorization(data['initData'], BOT_TOKEN)
if not user_info or 'id' not in user_info:
- return jsonify({"status": "error", "message": "Не авторизован"}), 403
+ return jsonify({"status": "error", "message": "Ошибка авторизации"}), 403
tg_user_id = str(user_info['id'])
folder_id = data['folder_id']
-
- try:
- db_data = load_data()
- except Exception as e:
- logging.error(f"Failed to load data in get_dashboard_data for user {tg_user_id}: {e}")
- return jsonify({"status": "error", "message": "Ошибка загрузки данных."}), 500
-
+ db_data = load_data() # Use cached or load fresh
user_data = db_data.get('users', {}).get(tg_user_id)
- if not user_data or 'filesystem' not in user_data:
- logging.error(f"User data or filesystem missing for validated user {tg_user_id}")
- # Attempt to re-initialize if missing, maybe a race condition?
- if user_data and 'filesystem' not in user_data:
+ if not user_data or not isinstance(user_data, dict) or 'filesystem' not in user_data or not isinstance(user_data['filesystem'], dict):
+ logging.error(f"User data or filesystem structure missing/invalid for validated user {tg_user_id}")
+ # Attempt to re-initialize and save
+ if isinstance(user_data, dict):
initialize_user_filesystem(user_data)
- if not save_data(db_data): # Try saving the fix
- logging.error(f"Failed to save re-initialized filesystem for user {tg_user_id}")
- # Continue with the initialized data if save failed but initialization worked
+ try:
+ save_data(db_data)
+ logging.info(f"Re-initialized filesystem for user {tg_user_id} on data fetch.")
+ except Exception as e:
+ logging.exception(f"Failed to save re-initialized filesystem for {tg_user_id}: {e}")
+ return jsonify({"status": "error", "message": "Критическая ошибка данных пользователя. Попробуйте перезапустить."}), 500
else:
- return jsonify({"status": "error", "message": "Ошибка данных пользователя"}), 500
+ # If user_data itself is not a dict, something is very wrong
+ db_data['users'][tg_user_id] = {} # Reset user data to empty dict
+ initialize_user_filesystem(db_data['users'][tg_user_id])
+ try:
+ save_data(db_data)
+ logging.warning(f"Reset user data structure for user {tg_user_id}.")
+ except Exception as e:
+ logging.exception(f"Failed to save reset user data for {tg_user_id}: {e}")
+ return jsonify({"status": "error", "message": "Критическая ошибка данных пользователя. Попробуйте перезапустить."}), 500
+
+ # Re-fetch user_data after potential fix
+ user_data = db_data.get('users', {}).get(tg_user_id)
+ if not user_data or 'filesystem' not in user_data: # If still bad, give up
+ return jsonify({"status": "error", "message": "Не удалось восстановить данные пользователя."}), 500
current_folder, _ = find_node_by_id(user_data['filesystem'], folder_id)
- if not current_folder or current_folder.get('type') != 'folder':
- logging.warning(f"Folder {folder_id} not found or invalid for user {tg_user_id}. Defaulting to root.")
+ if not current_folder or not isinstance(current_folder, dict) or current_folder.get('type') != 'folder':
+ logging.warning(f"Requested folder {folder_id} not found or invalid for user {tg_user_id}. Defaulting to root.")
folder_id = 'root'
current_folder, _ = find_node_by_id(user_data['filesystem'], folder_id)
- if not current_folder:
- logging.error(f"CRITICAL: Root folder not found for user {tg_user_id}")
- # Attempt recovery: Re-initialize and save
+ if not current_folder or not isinstance(current_folder, dict) or current_folder.get('id') != 'root':
+ logging.error(f"CRITICAL: Root folder invalid or not found for user {tg_user_id}. Attempting reset.")
initialize_user_filesystem(user_data)
- if save_data(db_data):
- current_folder, _ = find_node_by_id(user_data['filesystem'], folder_id)
- if not current_folder: # Still not found after recovery attempt
- return jsonify({"status": "error", "message": "Критическая ошибка: Корневая папка отсутствует, восстановление не удалось."}), 500
-
+ try:
+ save_data(db_data)
+ current_folder, _ = find_node_by_id(user_data['filesystem'], 'root')
+ if not current_folder: raise ValueError("Root still missing after reset")
+ except Exception as e:
+ logging.exception(f"Failed to reset/find root for user {tg_user_id}: {e}")
+ return jsonify({"status": "error", "message": "Критическая ошибка: Корневая папка отсутствует или повреждена."}), 500
items_in_folder = current_folder.get('children', [])
- # Filter out potentially invalid children (though add_node should prevent this)
- items_in_folder = [item for item in items_in_folder if isinstance(item, dict) and item.get('id')]
+ # Ensure items are dicts before sending
+ valid_items = [item for item in items_in_folder if isinstance(item, dict)]
+ if len(valid_items) != len(items_in_folder):
+ logging.warning(f"Found non-dict items in folder {folder_id} for user {tg_user_id}. Filtering.")
+ # Optionally clean the data permanently here?
+ # current_folder['children'] = valid_items
+ # save_data(db_data)
+
breadcrumbs = get_node_path_list(user_data['filesystem'], folder_id)
@@ -1447,7 +1388,7 @@ def get_dashboard_data():
return jsonify({
"status": "ok",
- "items": items_in_folder,
+ "items": valid_items,
"breadcrumbs": breadcrumbs,
"current_folder": current_folder_info
})
@@ -1455,157 +1396,126 @@ def get_dashboard_data():
@app.route('/upload', methods=['POST'])
def upload_files():
- init_data = request.form.get('initData')
- current_folder_id = request.form.get('current_folder_id', 'root')
+ if 'initData' not in request.form or 'current_folder_id' not in request.form:
+ return jsonify({"status": "error", "message": "Неполный запрос (initData, current_folder_id)"}), 400
+
+ init_data = request.form['initData']
+ current_folder_id = request.form['current_folder_id']
files = request.files.getlist('files')
user_info = check_telegram_authorization(init_data, BOT_TOKEN)
if not user_info or 'id' not in user_info:
- return jsonify({"status": "error", "message": "Не авторизован"}), 403
+ return jsonify({"status": "error", "message": "Ошибка авторизации"}), 403
tg_user_id = str(user_info['id'])
if not HF_TOKEN_WRITE:
- logging.error("HF_TOKEN_WRITE not configured. Upload disabled.")
- return jsonify({'status': 'error', 'message': 'Загрузка невозможна: Ошибка конфигурации сервера.'}), 503
+ logging.error("Upload failed: HF_TOKEN_WRITE is not configured.")
+ return jsonify({'status': 'error', 'message': 'Загрузка временно недоступна (ошибка конфигурации сервера).' }), 500
if not files or all(not f.filename for f in files):
return jsonify({'status': 'error', 'message': 'Файлы для загрузки не выбраны.'}), 400
- try:
- db_data = load_data()
- except Exception as e:
- logging.error(f"Failed to load data during upload for user {tg_user_id}: {e}")
- return jsonify({"status": "error", "message": "Ошибка загрузки данных пользователя."}), 500
+ # Removed file count limit
+ db_data = load_data()
user_data = db_data.get('users', {}).get(tg_user_id)
if not user_data or 'filesystem' not in user_data:
- logging.error(f"User data or filesystem missing for upload user {tg_user_id}")
- return jsonify({"status": "error", "message": "Ошибка данных пользователя"}), 500
+ logging.error(f"Cannot upload: User data or filesystem missing for {tg_user_id}")
+ return jsonify({"status": "error", "message": "Ошибка данных пользователя. Попробуйте перезайти."}), 500
target_folder_node, _ = find_node_by_id(user_data['filesystem'], current_folder_id)
if not target_folder_node or target_folder_node.get('type') != 'folder':
- logging.warning(f"Target folder {current_folder_id} not found for upload by user {tg_user_id}. Defaulting to root.")
- current_folder_id = 'root' # Try to recover by uploading to root
- target_folder_node, _ = find_node_by_id(user_data['filesystem'], current_folder_id)
- if not target_folder_node: # Still can't find root? Major issue.
- logging.error(f"CRITICAL: Cannot find root folder during upload for user {tg_user_id}")
- return jsonify({'status': 'error', 'message': 'Ошибка: Не удается найти корневую папку.'}), 500
+ logging.error(f"Upload target folder {current_folder_id} not found for user {tg_user_id}")
+ return jsonify({'status': 'error', 'message': 'Папка для загрузки не найдена!'}), 404
-
- api = HfApi()
+ api = HfApi(token=HF_TOKEN_WRITE)
uploaded_count = 0
errors = []
needs_save = False
- temp_files_to_clean = []
for file in files:
if file and file.filename:
original_filename = secure_filename(file.filename)
if not original_filename:
- logging.warning(f"Skipping file with invalid name from user {tg_user_id}")
- errors.append("Пропущен файл с недопустимым именем.")
- continue
-
- name_part, ext_part = os.path.splitext(original_filename)
- unique_suffix = uuid.uuid4().hex[:8]
- # Ensure unique_filename doesn't exceed typical path limits, though unlikely
- safe_name_part = name_part[:100] # Limit base name length
- unique_filename = f"{safe_name_part}_{unique_suffix}{ext_part}"
+ logging.warning(f"Skipping file with invalid secure filename derived from: {file.filename}")
+ errors.append(f"Пропущено: Недопустимое имя файла ({file.filename})")
+ continue
+
file_id = uuid.uuid4().hex
+ # Use file_id in HF path for uniqueness, keep original name in metadata only
+ # This avoids issues with weird characters or length limits in HF paths
+ hf_path_filename = f"{file_id}{os.path.splitext(original_filename)[1]}" # e.g., abcdef12.jpg
+ hf_repo_path = f"cloud_files/{tg_user_id}/{current_folder_id}/{hf_path_filename}"
+ # Ensure base dir exists? HF Hub handles this.
- hf_path = f"cloud_files/{tg_user_id}/{current_folder_id}/{unique_filename}"
- temp_path = os.path.join(UPLOAD_FOLDER, f"{file_id}_{unique_filename}")
- temp_files_to_clean.append(temp_path)
- file_added_to_db = False
+ temp_path = None # Use BytesIO instead of temp file if possible
try:
- file.save(temp_path)
+ # Read file into memory (consider chunking for very large files if memory is an issue)
+ file_content = BytesIO(file.read())
+ file_content.seek(0)
+ logging.info(f"Uploading {original_filename} to {hf_repo_path} for user {tg_user_id}")
api.upload_file(
- path_or_fileobj=temp_path, path_in_repo=hf_path,
- repo_id=REPO_ID, repo_type="dataset", token=HF_TOKEN_WRITE,
- commit_message=f"User {tg_user_id} uploaded {original_filename} to {current_folder_id}"
+ path_or_fileobj=file_content,
+ path_in_repo=hf_repo_path,
+ repo_id=REPO_ID,
+ repo_type="dataset",
+ # token implicitly used from HfApi instance
+ commit_message=f"User {tg_user_id} uploaded {original_filename}"
)
- logging.info(f"Successfully uploaded {original_filename} to HF path {hf_path} for user {tg_user_id}")
+ logging.info(f"Successfully uploaded {original_filename} to HF.")
file_info = {
'type': 'file', 'id': file_id,
- 'original_filename': original_filename, 'unique_filename': unique_filename,
- 'path': hf_path, 'file_type': get_file_type(original_filename),
+ 'original_filename': original_filename,
+ 'hf_repo_path': hf_repo_path, # Store the actual HF path
+ 'file_type': get_file_type(original_filename),
'upload_date': datetime.now().strftime('%Y-%m-%d %H:%M:%S')
}
+ # Add node to the *loaded* db_data structure
if add_node(user_data['filesystem'], current_folder_id, file_info):
uploaded_count += 1
- needs_save = True
- file_added_to_db = True
- logging.info(f"Successfully added file metadata {file_id} to DB for user {tg_user_id}")
+ needs_save = True # Mark that the DB needs saving
+ logging.info(f"Added metadata for {original_filename} ({file_id}) to user {tg_user_id}")
else:
- # This case should be rare if target_folder_node was found correctly
- error_msg = f"Критическая ошибка: Не удалось добавить метаданные для {original_filename} в БД."
+ error_msg = f"Критическая ошибка: Не удалось добавить метаданные для {original_filename} после загрузки. Файл может быть недоступен."
errors.append(error_msg)
- logging.error(f"Failed add_node for {file_id} to {current_folder_id} for {tg_user_id} even after finding parent.")
- # Attempt to delete the orphaned HF file
+ logging.error(f"Failed add_node for {file_id} to {current_folder_id} for {tg_user_id} AFTER successful HF upload.")
+ # Attempt to delete the orphaned file from HF
try:
- api.delete_file(path_in_repo=hf_path, repo_id=REPO_ID, repo_type="dataset", token=HF_TOKEN_WRITE)
- logging.warning(f"Deleted orphaned HF file {hf_path} after DB add failure.")
+ logging.warning(f"Attempting to delete orphaned HF file: {hf_repo_path}")
+ api.delete_file(path_in_repo=hf_repo_path, repo_id=REPO_ID, repo_type="dataset")
except Exception as del_err:
- logging.error(f"Failed to delete orphaned HF file {hf_path}: {del_err}")
-
+ logging.error(f"Failed to delete orphaned HF file {hf_repo_path}: {del_err}")
except Exception as e:
- error_msg = f"Ошибка загрузки файла {original_filename}: {e}"
- errors.append(error_msg)
- logging.error(f"Upload or processing error for {original_filename} (user {tg_user_id}): {e}")
- # If DB entry was already added (shouldn't happen with current logic, but defensively)
- # or if HF upload succeeded but DB add failed, try cleaning up HF file
- if not file_added_to_db and 'api.upload_file' in str(e.__traceback__): # Check if error was likely during HF upload
- pass # HF upload failed, no need to delete
- elif file_added_to_db: # DB add succeeded but something else failed? Rollback DB? Complex.
- logging.warning(f"File {file_id} added to DB but error occurred later. State might be inconsistent.")
- else: # HF upload likely succeeded, but DB add failed or error after HF success
- try:
- # Check if file exists on HF before attempting delete
- api.file_exists(repo_id=REPO_ID, repo_type="dataset", filename=hf_path, token=HF_TOKEN_READ or HF_TOKEN_WRITE)
- api.delete_file(path_in_repo=hf_path, repo_id=REPO_ID, repo_type="dataset", token=HF_TOKEN_WRITE)
- logging.warning(f"Attempted cleanup of HF file {hf_path} due to error: {e}")
- except hf_utils.EntryNotFoundError:
- logging.info(f"HF file {hf_path} not found, no cleanup needed.")
- except Exception as del_err:
- logging.error(f"Failed attempt to cleanup HF file {hf_path} after error: {del_err}")
+ logging.exception(f"Upload error for {original_filename} (user {tg_user_id}): {e}")
+ errors.append(f"Ошибка загрузки {original_filename}: {str(e)[:100]}") # Keep error brief
-
- # Cleanup all temporary files
- for temp_file in temp_files_to_clean:
- if os.path.exists(temp_file):
- try:
- os.remove(temp_file)
- except OSError as e:
- logging.error(f"Error removing temp file {temp_file}: {e}")
-
- # Save the database IF any uploads were successfully added
if needs_save:
- if not save_data(db_data):
- logging.error(f"CRITICAL: Failed to save DB after successful uploads for user {tg_user_id}. Data inconsistency likely.")
- errors.append("Критическая ошибка: Не удалось сохранить изменения в базе данных после загрузки.")
- # Don't totally fail the response, report partial success but warn user
- final_status = "error"
- final_message = f"{uploaded_count} файл(ов) загружено, НО ОШИБКА СОХРАНЕНИЯ БАЗЫ ДАННЫХ. Данные могут быть неактуальны."
- if errors:
- final_message += " Другие ошибки: " + "; ".join(errors)
- return jsonify({"status": final_status, "message": final_message }), 500
-
-
- final_status = "ok" if not errors else "partial_error"
- final_message = f"{uploaded_count} из {len(files)} файл(ов) успешно загружено."
- if errors:
- final_status = "error" if uploaded_count == 0 else "partial_error"
- final_message = f"{uploaded_count} из {len(files)} файл(ов) загружено. Ошибки: " + "; ".join(errors)
+ try:
+ save_data(db_data) # Save the entire updated db_data
+ except Exception as e:
+ logging.exception(f"CRITICAL Error saving DB after upload for {tg_user_id}: {e}")
+ errors.append("Критическая ошибка: Не удалось сохранить изменения после загрузки.")
+ # If save fails, the newly added nodes in memory are lost on next request
+
+ if not errors and uploaded_count > 0:
+ final_message = f"{uploaded_count} файл(ов) успешно загружено."
+ status = "ok"
+ elif uploaded_count > 0:
+ final_message = f"{uploaded_count} файл(ов) загружено с ошибками: {'; '.join(errors)}"
+ status = "ok" # Partial success is still ok-ish for frontend
+ else:
+ final_message = f"Не удалось загрузить файлы. Ошибки: {'; '.join(errors)}"
+ status = "error"
return jsonify({
- "status": final_status,
+ "status": status,
"message": final_message
})
@@ -1614,11 +1524,11 @@ def upload_files():
def create_folder():
data = request.get_json()
if not data or 'initData' not in data or 'parent_folder_id' not in data or 'folder_name' not in data:
- return jsonify({"status": "error", "message": "Неполный запрос"}), 400
+ return jsonify({"status": "error", "message": "Неполный запрос (initData, parent_folder_id, folder_name)"}), 400
user_info = check_telegram_authorization(data['initData'], BOT_TOKEN)
if not user_info or 'id' not in user_info:
- return jsonify({"status": "error", "message": "Не авторизован"}), 403
+ return jsonify({"status": "error", "message": "Ошибка авторизации"}), 403
tg_user_id = str(user_info['id'])
parent_folder_id = data['parent_folder_id']
@@ -1626,30 +1536,22 @@ def create_folder():
if not folder_name:
return jsonify({'status': 'error', 'message': 'Имя папки не может быть пустым.'}), 400
- if '/' in folder_name or '\\' in folder_name:
- return jsonify({'status': 'error', 'message': 'Имя папки содержит недопустимые символы (/ или \\).'}), 400
- if folder_name == '.' or folder_name == '..':
- return jsonify({'status': 'error', 'message': 'Недопустимое имя папки.'}), 400
- if len(folder_name) > 100:
- return jsonify({'status': 'error', 'message': 'Имя папки слишком длинное (макс 100 симв).'}), 400
-
-
- try:
- db_data = load_data()
- except Exception as e:
- logging.error(f"Failed to load data during create_folder for user {tg_user_id}: {e}")
- return jsonify({"status": "error", "message": "Ошибка загрузки данных пользователя."}), 500
+ # Validate name (simple check for problematic chars)
+ if '/' in folder_name or '\\' in folder_name or ':' in folder_name:
+ return jsonify({'status': 'error', 'message': 'Имя папки содержит недопустимые символы.'}), 400
+ db_data = load_data()
user_data = db_data.get('users', {}).get(tg_user_id)
if not user_data or 'filesystem' not in user_data:
- logging.error(f"User data or filesystem missing for create_folder user {tg_user_id}")
- return jsonify({"status": "error", "message": "Ошибка данных пользователя"}), 500
+ logging.error(f"Create folder failed: User data or filesystem missing for {tg_user_id}")
+ return jsonify({"status": "error", "message": "Ошибка данных пользователя."}), 500
- # Check if folder with the same name already exists in the parent
+ # Check if folder with same name already exists in parent
parent_node, _ = find_node_by_id(user_data['filesystem'], parent_folder_id)
- if parent_node and parent_node.get('type') == 'folder' and 'children' in parent_node:
- if any(child.get('name') == folder_name and child.get('type') == 'folder' for child in parent_node.get('children', []) if isinstance(child, dict)):
- return jsonify({'status': 'error', 'message': f'Папка с именем "{folder_name}" уже существует здесь.'}), 409 # Conflict
+ if parent_node and isinstance(parent_node.get('children'), list):
+ for child in parent_node['children']:
+ if isinstance(child, dict) and child.get('type') == 'folder' and child.get('name') == folder_name:
+ return jsonify({'status': 'error', 'message': f'Папка с именем "{folder_name}" уже существует здесь.'}), 409 # Conflict
folder_id = uuid.uuid4().hex
@@ -1659,185 +1561,175 @@ def create_folder():
}
if add_node(user_data['filesystem'], parent_folder_id, folder_data):
- if save_data(db_data):
- logging.info(f"Folder '{folder_name}' (id: {folder_id}) created successfully for user {tg_user_id} under {parent_folder_id}.")
+ try:
+ save_data(db_data)
+ logging.info(f"Folder '{folder_name}' ({folder_id}) created for user {tg_user_id} under {parent_folder_id}")
return jsonify({'status': 'ok', 'message': f'Папка "{folder_name}" создана.'})
- else:
- logging.error(f"Create folder DB save error for user {tg_user_id}. Attempting to rollback DB change.")
- # Attempt rollback (remove the added node) - best effort
- remove_node(user_data['filesystem'], folder_id)
- return jsonify({'status': 'error', 'message': 'Ошибка сохранения данных после создания папки.'}), 500
+ except Exception as e:
+ logging.exception(f"Create folder DB save error ({tg_user_id}): {e}")
+ # Attempt to roll back the add_node? Difficult with current structure.
+ return jsonify({'status': 'error', 'message': 'Папка создана, но произошла ошибка сохранения данных.'}), 500
else:
- # This implies parent_folder_id was not found or wasn't a folder
- logging.error(f"Failed to add folder node '{folder_name}' for user {tg_user_id}. Parent folder {parent_folder_id} likely invalid.")
- return jsonify({'status': 'error', 'message': 'Не удалось найти родительскую папку или добавить узел.'}), 400
+ # This could happen if parent_folder_id is suddenly invalid
+ logging.error(f"Failed to add folder node '{folder_name}' to parent {parent_folder_id} for user {tg_user_id}")
+ return jsonify({'status': 'error', 'message': 'Не удалось добавить папку (возможно, родительская папка была удалена?).'}), 400
-@app.route('/download/')
-def download_file_route(file_id):
- try:
- db_data = load_data()
- except Exception as e:
- logging.error(f"Failed to load data for download request file_id {file_id}: {e}")
- return Response("Ошибка сервера при получении данных файла", status=500)
+def find_file_owner_and_node(file_id: str) -> (Optional[str], Optional[Dict[str, Any]]):
+ """ Helper to find a file node and its owner across all users """
+ db_data = load_data()
+ for user_id, user_data in db_data.get('users', {}).items():
+ if isinstance(user_data, dict) and 'filesystem' in user_data:
+ node, _ = find_node_by_id(user_data['filesystem'], file_id)
+ if node and isinstance(node, dict) and node.get('type') == 'file':
+ return user_id, node
+ return None, None
- file_node = None
- owner_user_id = None
- for user_id, user_data in db_data.get('users', {}).items():
- if isinstance(user_data, dict) and 'filesystem' in user_data:
- node, _ = find_node_by_id(user_data['filesystem'], file_id)
- if node and isinstance(node, dict) and node.get('type') == 'file':
- file_node = node
- owner_user_id = user_id
- break
+@app.route('/download/')
+def download_file_route(file_id):
+ owner_user_id, file_node = find_file_owner_and_node(file_id)
- if not file_node or not isinstance(file_node, dict):
- return Response("Файл не найден", status=404)
+ if not file_node:
+ logging.warning(f"Download request for unknown file_id: {file_id}")
+ return Response("Файл не найден", status=404, mimetype='text/plain')
- hf_path = file_node.get('path')
+ hf_repo_path = file_node.get('hf_repo_path')
original_filename = file_node.get('original_filename', f'{file_id}_download')
- if not hf_path:
- logging.error(f"Missing HF path for file ID {file_id} (owner: {owner_user_id})")
- return Response("Ошибка: Путь к файлу не определен", status=500)
+ if not hf_repo_path:
+ logging.error(f"Missing hf_repo_path for file ID {file_id} (owner: {owner_user_id})")
+ return Response("Ошибка сервера: Путь к файлу не найден в метаданных.", status=500, mimetype='text/plain')
- file_url = f"https://huggingface.co/datasets/{REPO_ID}/resolve/main/{hf_path}?download=true"
- logging.info(f"Attempting download for user (implicit) file {file_id} from HF URL: {file_url}")
+ # Construct the direct download URL
+ # Using /resolve/main/ implicitly assumes the 'main' branch. Fine for datasets usually.
+ file_url = f"https://huggingface.co/datasets/{REPO_ID}/resolve/main/{hf_repo_path}?download=true"
try:
headers = {}
if HF_TOKEN_READ:
- headers["authorization"] = f"Bearer {HF_TOKEN_READ}"
+ headers["Authorization"] = f"Bearer {HF_TOKEN_READ}"
+ else:
+ logging.warning(f"Attempting download of {hf_repo_path} without HF_TOKEN_READ. May fail for private repos.")
- # Use GET with stream=True for potentially large files
+ # Stream the download
+ logging.info(f"Streaming download for file {file_id} ({original_filename}) from {file_url}")
response = requests.get(file_url, headers=headers, stream=True, timeout=60) # Increased timeout
- response.raise_for_status()
+ response.raise_for_status() # Raises HTTPError for bad responses (4xx or 5xx)
- # Correct Content-Disposition encoding (RFC 6266)
+ # Prepare response headers
+ resp_headers = {}
+ content_type = response.headers.get('Content-Type', 'application/octet-stream')
+ resp_headers['Content-Type'] = content_type
+
+ # Set Content-Disposition for filename
try:
- # Simple ASCII fallback
- ascii_filename = original_filename.encode('ascii', 'ignore').decode('ascii')
- disposition = f'attachment; filename="{ascii_filename}"'
- # If filename contains non-ASCII, use filename*
- if ascii_filename != original_filename:
- encoded_filename = urlencode({'filename': original_filename}, encoding='utf-8')[9:]
- disposition += f"; filename*=UTF-8''{encoded_filename}"
- except Exception as e:
- logging.warning(f"Error encoding filename '{original_filename}' for Content-Disposition: {e}. Using simplified name.")
- disposition = f'attachment; filename="downloaded_file"'
+ # More robust encoding for filename
+ encoded_filename = urlencode({'filename': original_filename}, encoding='utf-8')[9:]
+ disposition = f"attachment; filename=\"{original_filename.replace('\"', '_')}\"; filename*=UTF-8''{encoded_filename}"
+ resp_headers['Content-Disposition'] = disposition
+ except Exception as enc_e:
+ logging.warning(f"Could not properly encode filename {original_filename} for Content-Disposition: {enc_e}")
+ resp_headers['Content-Disposition'] = f"attachment; filename=\"{file_id}_download\"" # Fallback
- # Get content type from HF response, default to octet-stream
- content_type = response.headers.get('Content-Type', 'application/octet-stream')
- return Response(response.iter_content(chunk_size=65536), # 64KB chunk
- mimetype=content_type,
- headers={"Content-Disposition": disposition})
+ # Return streaming response
+ return Response(response.iter_content(chunk_size=65536), headers=resp_headers) # Larger chunk size
except requests.exceptions.HTTPError as e:
- status_code = e.response.status_code
- logging.error(f"HTTP Error {status_code} downloading file from HF ({hf_path}, owner: {owner_user_id}): {e}")
- message = "Ошибка скачивания файла"
- if status_code == 404:
- message = "Файл не найден на сервере хранения."
- elif status_code == 401 or status_code == 403:
- message = "Ошибка доступа к файлу на сервере хранения."
- return Response(f"{message} (Код: {status_code})", status=status_code)
- except requests.exceptions.Timeout:
- logging.error(f"Timeout downloading file from HF ({hf_path}, owner: {owner_user_id})")
- return Response("Тайм-аут при скачивании файла с сервера хранения.", status=504)
+ status_code = e.response.status_code
+ logging.error(f"HTTP error downloading file {file_id} from HF ({hf_repo_path}, owner: {owner_user_id}): {status_code} - {e}")
+ if status_code == 404:
+ return Response(f"Ошибка: Файл не найден на сервере хранения (404). Возможно, он был удален.", status=404, mimetype='text/plain')
+ elif status_code == 401 or status_code == 403:
+ return Response(f"Ошибка: Доступ запрещен к файлу на сервере хранения ({status_code}).", status=403, mimetype='text/plain')
+ else:
+ return Response(f"Ошибка скачивания файла с сервера хранения ({status_code}).", status=status_code, mimetype='text/plain')
except requests.exceptions.RequestException as e:
- logging.error(f"Network error downloading file from HF ({hf_path}, owner: {owner_user_id}): {e}")
- return Response("Сетевая ошибка при скачивании файла.", status=502) # Bad Gateway
+ logging.error(f"Network error downloading file {file_id} from HF ({hf_repo_path}, owner: {owner_user_id}): {e}")
+ return Response(f"Ошибка сети при скачивании файла.", status=504, mimetype='text/plain') # Gateway Timeout
except Exception as e:
- logging.error(f"Unexpected error during download ({hf_path}, owner: {owner_user_id}): {e}", exc_info=True)
- return Response("Внутренняя ошибка сервера при подготовке файла к скачиванию.", status=500)
+ logging.exception(f"Unexpected error during download ({file_id}, {hf_repo_path}, owner: {owner_user_id}): {e}")
+ return Response("Внутренняя ошибка сервера при обработке запроса на скачивание.", status=500, mimetype='text/plain')
@app.route('/delete_file/', methods=['POST'])
def delete_file_route(file_id):
data = request.get_json()
- if not data or 'initData' not in data or 'current_folder_id' not in data: # current_folder_id might not be needed but good for context
- return jsonify({"status": "error", "message": "Неполный запрос"}), 400
+ if not data or 'initData' not in data: # current_folder_id not strictly needed for delete itself
+ return jsonify({"status": "error", "message": "Неполный запрос (initData)"}), 400
user_info = check_telegram_authorization(data['initData'], BOT_TOKEN)
if not user_info or 'id' not in user_info:
- return jsonify({"status": "error", "message": "Не авторизован"}), 403
+ return jsonify({"status": "error", "message": "Ошибка авторизации"}), 403
tg_user_id = str(user_info['id'])
if not HF_TOKEN_WRITE:
- logging.error("HF_TOKEN_WRITE not configured. Delete disabled.")
- return jsonify({'status': 'error', 'message': 'Удаление невозможно: Ошибка конфигурации сервера.'}), 503
-
- try:
- db_data = load_data()
- except Exception as e:
- logging.error(f"Failed to load data during delete_file for user {tg_user_id}: {e}")
- return jsonify({"status": "error", "message": "Ошибка загрузки данных пользователя."}), 500
+ logging.error("Delete failed: HF_TOKEN_WRITE is not configured.")
+ return jsonify({'status': 'error', 'message': 'Удаление временно недоступно (ошибка конфигурации сервера).' }), 500
+ db_data = load_data()
user_data = db_data.get('users', {}).get(tg_user_id)
if not user_data or 'filesystem' not in user_data:
- logging.error(f"User data or filesystem missing for delete_file user {tg_user_id}")
- return jsonify({"status": "error", "message": "Ошибка данных пользователя"}), 500
+ return jsonify({"status": "error", "message": "Ошибка данных пользователя."}), 500
file_node, parent_node = find_node_by_id(user_data['filesystem'], file_id)
- if not file_node or not isinstance(file_node, dict) or file_node.get('type') != 'file' or not parent_node:
- logging.warning(f"File node {file_id} not found or invalid for deletion by user {tg_user_id}")
- return jsonify({'status': 'error', 'message': 'Файл не найден или не может быть удален.'}), 404
+ if not file_node or file_node.get('type') != 'file':
+ return jsonify({'status': 'error', 'message': 'Файл не найден в ваших данных.'}), 404
+ # Parent node check isn't strictly necessary for deletion itself if we have the file_node
- hf_path = file_node.get('path')
+ hf_repo_path = file_node.get('hf_repo_path')
original_filename = file_node.get('original_filename', 'файл')
- db_removed = False
- hf_deleted_or_missing = False
+ needs_save = False
+ hf_deleted = False
+ error_msg = None
- # 1. Try deleting from Hugging Face Hub
- if hf_path:
+ if hf_repo_path:
try:
- api = HfApi()
+ api = HfApi(token=HF_TOKEN_WRITE)
+ logging.info(f"Attempting to delete HF file: {hf_repo_path} for user {tg_user_id}")
api.delete_file(
- path_in_repo=hf_path, repo_id=REPO_ID, repo_type="dataset", token=HF_TOKEN_WRITE,
- commit_message=f"User {tg_user_id} deleted file {original_filename} (id: {file_id})"
+ path_in_repo=hf_repo_path, repo_id=REPO_ID, repo_type="dataset",
+ commit_message=f"User {tg_user_id} deleted {original_filename}"
)
- logging.info(f"Deleted file {hf_path} from HF Hub for user {tg_user_id}")
- hf_deleted_or_missing = True
+ logging.info(f"Successfully deleted file {hf_repo_path} from HF Hub for user {tg_user_id}")
+ hf_deleted = True
except hf_utils.EntryNotFoundError:
- logging.warning(f"File {hf_path} not found on HF Hub during delete attempt by {tg_user_id}. Assuming deleted or never existed.")
- hf_deleted_or_missing = True # Treat as success for DB removal purposes
+ logging.warning(f"File {hf_repo_path} not found on HF Hub during delete attempt for user {tg_user_id}. Assuming already deleted or path mismatch.")
+ hf_deleted = True # Treat as deleted if not found
except Exception as e:
- logging.error(f"Error deleting file from HF Hub ({hf_path}, user {tg_user_id}): {e}")
- # Proceed to try removing from DB anyway, but report HF error later
+ logging.exception(f"Error deleting file from HF Hub ({hf_repo_path}, user {tg_user_id}): {e}")
+ error_msg = f'Ошибка удаления файла с сервера хранения: {str(e)[:100]}'
+ # Decide whether to proceed with DB removal if HF delete fails
+ # Let's proceed: remove from DB even if HF fails, to avoid user seeing a broken file link
else:
- logging.warning(f"File node {file_id} is missing 'path' attribute for user {tg_user_id}. Skipping HF delete.")
- hf_deleted_or_missing = True # Cannot delete from HF, proceed with DB removal
+ logging.warning(f"No hf_repo_path found for file {file_id} (user {tg_user_id}). Skipping HF delete.")
+ hf_deleted = True # Nothing to delete on HF
+ # Remove from DB structure
+ if remove_node(user_data['filesystem'], file_id):
+ needs_save = True
+ logging.info(f"Removed file node {file_id} from DB for user {tg_user_id}")
+ else:
+ # This is unexpected if find_node_by_id found it earlier
+ logging.error(f"Failed to remove file node {file_id} from DB structure for {tg_user_id} even though it was found.")
+ if not error_msg: # Don't overwrite HF error
+ error_msg = 'Ошибка удаления файла из базы данных.'
- # 2. Try removing from DB if HF delete was successful or skipped/not found
- if hf_deleted_or_missing:
- if remove_node(user_data['filesystem'], file_id):
- logging.info(f"Removed file node {file_id} from DB for user {tg_user_id}")
- db_removed = True
- else:
- # This is unexpected if find_node_by_id worked initially
- logging.error(f"CRITICAL: Failed to remove file node {file_id} from DB structure for {tg_user_id} after successful find.")
- # Don't save data if the structure seems inconsistent
- return jsonify({'status': 'error', 'message': 'Критическая ошибка при обновлении базы данных.'}), 500
-
- # 3. Save DB changes if node was removed
- if db_removed:
- if save_data(db_data):
- return jsonify({'status': 'ok', 'message': f'Файл "{original_filename}" успешно удален.'})
- else:
- logging.error(f"CRITICAL: Delete file DB save error for user {tg_user_id} after successful removal from structure. State inconsistent.")
- # Data was removed in memory, but save failed. Very bad state.
- return jsonify({'status': 'error', 'message': 'Файл удален, но ПРОИЗОШЛА КРИТИЧЕСКАЯ ОШИБКА сохранения базы данных.'}), 500
- elif not hf_deleted_or_missing:
- # HF delete failed, DB not touched
- return jsonify({'status': 'error', 'message': f'Ошибка при удалении файла "{original_filename}" с сервера хранения. База данных не изменена.'}), 500
+ if needs_save:
+ try:
+ save_data(db_data)
+ except Exception as e:
+ logging.exception(f"Delete file DB save error ({tg_user_id}): {e}")
+ error_msg = (error_msg + "; " if error_msg else "") + "Ошибка сохранения изменений в базе данных."
+
+ if not error_msg:
+ return jsonify({'status': 'ok', 'message': f'Файл "{original_filename}" удален.'})
else:
- # HF delete ok/skipped, but DB remove failed (shouldn't happen based on checks)
- return jsonify({'status': 'error', 'message': 'Не удалось удалить файл из базы данных после операции на сервере.'}), 500
+ # Return error but the file might be partially deleted (e.g., from DB but not HF or vice-versa)
+ return jsonify({'status': 'error', 'message': error_msg}), 500
@app.route('/delete_folder/', methods=['POST'])
@@ -1847,246 +1739,210 @@ def delete_folder_route(folder_id):
data = request.get_json()
if not data or 'initData' not in data:
- return jsonify({"status": "error", "message": "Неполный запрос"}), 400
+ return jsonify({"status": "error", "message": "Неполный запрос (initData)"}), 400
user_info = check_telegram_authorization(data['initData'], BOT_TOKEN)
if not user_info or 'id' not in user_info:
- return jsonify({"status": "error", "message": "Не авторизован"}), 403
+ return jsonify({"status": "error", "message": "Ошибка авторизации"}), 403
tg_user_id = str(user_info['id'])
- try:
- db_data = load_data()
- except Exception as e:
- logging.error(f"Failed to load data during delete_folder for user {tg_user_id}: {e}")
- return jsonify({"status": "error", "message": "Ошибка загрузки данных пользователя."}), 500
-
+ db_data = load_data()
user_data = db_data.get('users', {}).get(tg_user_id)
if not user_data or 'filesystem' not in user_data:
- logging.error(f"User data or filesystem missing for delete_folder user {tg_user_id}")
- return jsonify({"status": "error", "message": "Ошибка данных пользователя"}), 500
+ return jsonify({"status": "error", "message": "Ошибка данных пользователя."}), 500
folder_node, parent_node = find_node_by_id(user_data['filesystem'], folder_id)
- if not folder_node or not isinstance(folder_node, dict) or folder_node.get('type') != 'folder' or not parent_node:
- logging.warning(f"Folder node {folder_id} not found or invalid for deletion by user {tg_user_id}")
- return jsonify({'status': 'error', 'message': 'Папка не найдена или не может быть удалена.'}), 404
+ if not folder_node or folder_node.get('type') != 'folder':
+ return jsonify({'status': 'error', 'message': 'Папка не найдена в ваших данных.'}), 404
+ if not parent_node:
+ # Should not happen unless it's root, but we check root earlier
+ logging.error(f"Could not find parent for folder {folder_id} (user {tg_user_id}) during delete.")
+ return jsonify({'status': 'error', 'message': 'Ошибка структуры данных (родитель папки не найден).'}), 500
+
folder_name = folder_node.get('name', 'папка')
- # Check if folder is empty
- if folder_node.get('children'):
+ # Check if folder is empty (server-side)
+ if folder_node.get('children') and len(folder_node.get('children')) > 0:
logging.warning(f"Attempt to delete non-empty folder {folder_id} by user {tg_user_id}")
return jsonify({'status': 'error', 'message': f'Папку "{folder_name}" можно удалить только если она пуста.'}), 400
- # Remove the folder node from the filesystem structure
+ # Note: This does NOT delete files within the folder on HF Hub.
+ # A recursive delete would be much more complex and potentially dangerous.
+ # We rely on the check above.
+
if remove_node(user_data['filesystem'], folder_id):
- logging.info(f"Removed empty folder node {folder_id} ('{folder_name}') from DB for user {tg_user_id}")
- # Save the updated data
- if save_data(db_data):
- return jsonify({'status': 'ok', 'message': f'Папка "{folder_name}" успешно удалена.'})
- else:
- logging.error(f"CRITICAL: Delete folder DB save error for user {tg_user_id} after successful removal. State inconsistent.")
- # Attempt to rollback? Very difficult state. Return error.
- # Re-adding might be complex if parent isn't easily available or structure changed.
- return jsonify({'status': 'error', 'message': 'Папка удалена из структуры, но ПРОИЗОШЛА КРИТИЧЕСКАЯ ОШИБКА сохранения базы данных.'}), 500
+ try:
+ save_data(db_data)
+ logging.info(f"Deleted empty folder '{folder_name}' ({folder_id}) for user {tg_user_id}")
+ return jsonify({'status': 'ok', 'message': f'Папка "{folder_name}" удалена.'})
+ except Exception as e:
+ logging.exception(f"Delete folder save error ({tg_user_id}): {e}")
+ # Attempt to roll back? Hard. Inform user.
+ return jsonify({'status': 'error', 'message': 'Папка удалена, но произошла ошибка сохранения изменений.'}), 500
else:
- # This implies remove_node failed, which is unexpected if find_node worked
- logging.error(f"CRITICAL: Failed to remove empty folder node {folder_id} from DB for {tg_user_id} after successful find.")
- return jsonify({'status': 'error', 'message': 'Не удалось удалить папку из базы данных (внутренняя ошибка).'}), 500
+ logging.error(f"Failed to remove empty folder node {folder_id} from DB for {tg_user_id}")
+ return jsonify({'status': 'error', 'message': 'Не удалось удалить папку из базы данных (внутренняя ошибка).' }), 500
@app.route('/get_text_content/<file_id>')
def get_text_content_route(file_id):
- try:
- db_data = load_data()
- except Exception as e:
- logging.error(f"Failed to load data for text content request file_id {file_id}: {e}")
- return Response("Ошибка сервера при получении данных файла", status=500)
+ owner_user_id, file_node = find_file_owner_and_node(file_id)
- file_node = None
- owner_user_id = None
+ if not file_node or file_node.get('file_type') != 'text':
+ return Response("Текстовый файл не найден", status=404, mimetype='text/plain')
- for user_id, user_data in db_data.get('users', {}).items():
- if isinstance(user_data, dict) and 'filesystem' in user_data:
- node, _ = find_node_by_id(user_data['filesystem'], file_id)
- if node and isinstance(node, dict) and node.get('type') == 'file' and node.get('file_type') == 'text':
- file_node = node
- owner_user_id = user_id
- break
-
- if not file_node:
- return Response("Текстовый файл не найден", status=404)
+ hf_repo_path = file_node.get('hf_repo_path')
+ if not hf_repo_path:
+ return Response("Ошибка сервера: путь к файлу отсутствует", status=500, mimetype='text/plain')
- hf_path = file_node.get('path')
- if not hf_path:
- logging.error(f"Missing HF path for text file ID {file_id} (owner: {owner_user_id})")
- return Response("Ошибка: путь к файлу отсутствует", status=500)
-
- file_url = f"https://huggingface.co/datasets/{REPO_ID}/resolve/main/{hf_path}?download=true"
- logging.info(f"Fetching text content for file {file_id} from {file_url}")
+ file_url = f"https://huggingface.co/datasets/{REPO_ID}/resolve/main/{hf_repo_path}?download=true"
try:
headers = {}
if HF_TOKEN_READ:
- headers["authorization"] = f"Bearer {HF_TOKEN_READ}"
+ headers["Authorization"] = f"Bearer {HF_TOKEN_READ}"
- # Use a reasonable timeout for potentially small text files
- response = requests.get(file_url, headers=headers, timeout=20)
+ logging.info(f"Fetching text content for {file_id} from {file_url}")
+ response = requests.get(file_url, headers=headers, timeout=20) # Shorter timeout for text
response.raise_for_status()
- # Limit preview size to prevent memory issues with huge text files
- max_preview_size = 2 * 1024 * 1024 # 2 MB limit for text preview
- content_length = response.headers.get('Content-Length')
- if content_length and int(content_length) > max_preview_size:
- logging.warning(f"Text file {file_id} ({hf_path}) is too large for preview ({content_length} bytes).")
- return Response(f"Файл слишком большой для предпросмотра (>{max_preview_size // 1024 // 1024}MB).", status=413) # Payload Too Large
+ # Limit preview size to prevent browser freezing on huge logs etc.
+ max_preview_size = 2 * 1024 * 1024 # 2 MB
+ if 'Content-Length' in response.headers and int(response.headers['Content-Length']) > max_preview_size:
+ logging.warning(f"Text file {file_id} too large for preview ({response.headers['Content-Length']} bytes).")
+ return Response(f"Файл слишком большой для предпросмотра (>{max_preview_size // 1024 // 1024}MB).\nСкачайте его для просмотра.", status=413, mimetype='text/plain') # Payload Too Large
- content = response.content
- if len(content) > max_preview_size: # Double check if Content-Length was missing
- logging.warning(f"Text file {file_id} ({hf_path}) content length exceeds preview limit ({len(content)} bytes).")
- return Response(f"Файл слишком большой для предпросмотра (>{max_preview_size // 1024 // 1024}MB).", status=413)
+ # Read content, check size again if Content-Length was missing
+ content_bytes = response.content
+ if len(content_bytes) > max_preview_size:
+ logging.warning(f"Text file {file_id} too large for preview ({len(content_bytes)} bytes), Content-Length was missing or inaccurate.")
+ return Response(f"Файл слишком большой для предпросмотра (>{max_preview_size // 1024 // 1024}MB).\nСкачайте его для просмотра.", status=413, mimetype='text/plain')
+ # Attempt to decode using common encodings
text_content = None
detected_encoding = None
- # Try common encodings
- encodings_to_try = ['utf-8', 'cp1251', 'latin-1', 'utf-16']
- for enc in encodings_to_try:
+ encodings_to_try = ['utf-8', 'cp1251', 'latin-1', 'utf-16'] # Add more if needed
+
+ # Try auto-detection from headers first (less common for raw files)
+ content_type_header = response.headers.get('Content-Type', '')
+ if 'charset=' in content_type_header:
+ charset = content_type_header.split('charset=')[-1].split(';')[0].strip()
try:
- text_content = content.decode(enc)
- detected_encoding = enc
- logging.info(f"Decoded text file {file_id} using {enc}")
- break
- except UnicodeDecodeError:
- continue
- except Exception as decode_err: # Catch broader errors like utf-16 bom issues
- logging.warning(f"Error decoding {file_id} with {enc}: {decode_err}")
- continue
+ text_content = content_bytes.decode(charset)
+ detected_encoding = charset
+ logging.info(f"Decoded text file {file_id} using charset from header: {charset}")
+ except (LookupError, UnicodeDecodeError):
+ logging.warning(f"Failed to decode using charset from header: {charset}")
+ # Try common encodings if header didn't work or wasn't present
+ if text_content is None:
+ for enc in encodings_to_try:
+ try:
+ text_content = content_bytes.decode(enc)
+ detected_encoding = enc
+ logging.info(f"Decoded text file {file_id} using fallback encoding: {enc}")
+ break
+ except UnicodeDecodeError:
+ continue
+ except Exception as dec_e: # Catch other potential errors during decode
+ logging.warning(f"Error decoding {file_id} with {enc}: {dec_e}")
+ continue
if text_content is None:
- logging.error(f"Could not decode text file {file_id} ({hf_path}) with tried encodings.")
- # Try decoding with 'ignore' errors as a last resort
- try:
- text_content = content.decode('utf-8', errors='ignore')
- detected_encoding = 'utf-8 (ignored errors)'
- logging.warning(f"Decoded text file {file_id} using utf-8 with ignored errors.")
- except Exception:
- return Response("Не удалось определить кодировку файла или произошла ошибка декодирования.", status=500)
+ logging.error(f"Could not determine encoding for text file {file_id}")
+ return Response("Не удалось определить кодировку файла для предпросмотра.", status=500, mimetype='text/plain')
- return Response(text_content, mimetype=f'text/plain; charset=utf-8') # Always serve as UTF-8
+ # Return as plain text with UTF-8 charset (browser should handle display)
+ return Response(text_content, mimetype='text/plain; charset=utf-8')
except requests.exceptions.HTTPError as e:
status_code = e.response.status_code
- logging.error(f"HTTP Error {status_code} fetching text content from HF ({hf_path}, owner {owner_user_id}): {e}")
- message = "Ошибка загрузки содержимого"
- if status_code == 404: message = "Файл не найден на сервере."
- return Response(f"{message} ({status_code})", status=status_code)
- except requests.exceptions.Timeout:
- logging.error(f"Timeout fetching text content from HF ({hf_path}, owner {owner_user_id})")
- return Response("Тайм-аут при загрузке содержимого.", status=504)
+ logging.error(f"HTTP error fetching text content {file_id} ({hf_repo_path}): {status_code} - {e}")
+ return Response(f"Ошибка загрузки содержимого файла ({status_code}).", status=status_code, mimetype='text/plain')
except requests.exceptions.RequestException as e:
- logging.error(f"Network error fetching text content from HF ({hf_path}, owner {owner_user_id}): {e}")
- return Response("Сетевая ошибка при загрузке содержимого.", status=502)
+ logging.error(f"Network error fetching text content {file_id} ({hf_repo_path}): {e}")
+ return Response("Ошибка сети при загрузке содержимого.", status=504, mimetype='text/plain')
except Exception as e:
- logging.error(f"Unexpected error fetching text content ({hf_path}, owner {owner_user_id}): {e}", exc_info=True)
- return Response("Внутренняя ошибка сервера при обработке текстового файла.", status=500)
+ logging.exception(f"Unexpected error fetching text content {file_id} ({hf_repo_path}): {e}")
+ return Response("Внутренняя ошибка сервера при получении содержимого.", status=500, mimetype='text/plain')
@app.route('/preview_thumb/<file_id>')
def preview_thumb_route(file_id):
- try:
- db_data = load_data()
- except Exception as e:
- logging.error(f"Failed to load data for preview thumb request file_id {file_id}: {e}")
- return Response("Ошибка сервера", status=500)
+ # This serves the *full* image, relying on browser resizing/caching and the img tag `loading="lazy"`
+ # Generating actual thumbnails server-side would require an image library (Pillow)
+ # and potentially more complex caching/storage.
+ owner_user_id, file_node = find_file_owner_and_node(file_id)
- file_node = None
- owner_user_id = None
+ if not file_node or file_node.get('file_type') != 'image':
+ # Return a placeholder or 404? 404 is cleaner.
+ return Response("Превью не найдено", status=404, mimetype='text/plain')
- for user_id, user_data in db_data.get('users', {}).items():
- if isinstance(user_data, dict) and 'filesystem' in user_data:
- node, _ = find_node_by_id(user_data['filesystem'], file_id)
- if node and isinstance(node, dict) and node.get('type') == 'file' and node.get('file_type') == 'image':
- file_node = node
- owner_user_id = user_id
- break
+ hf_repo_path = file_node.get('hf_repo_path')
+ if not hf_repo_path:
+ return Response("Ошибка сервера: путь к файлу превью отсутствует", status=500, mimetype='text/plain')
- if not file_node: return Response("Изображение не найдено", status=404)
- hf_path = file_node.get('path')
- if not hf_path: return Response("Путь к файлу не найден", status=500)
-
- # Use the non-download link for potential browser caching / direct rendering
- file_url = f"https://huggingface.co/datasets/{REPO_ID}/resolve/main/{hf_path}"
- logging.info(f"Fetching image preview for {file_id} from {file_url}")
+ # Use the direct file URL (not download=true needed for img src)
+ file_url = f"https://huggingface.co/datasets/{REPO_ID}/resolve/main/{hf_repo_path}"
try:
headers = {}
- if HF_TOKEN_READ: headers["authorization"] = f"Bearer {HF_TOKEN_READ}"
- # Use stream=True to avoid loading large images fully into memory server-side
+ if HF_TOKEN_READ:
+ headers["Authorization"] = f"Bearer {HF_TOKEN_READ}"
+
+ logging.info(f"Streaming image preview for {file_id} from {file_url}")
response = requests.get(file_url, headers=headers, stream=True, timeout=30)
response.raise_for_status()
- content_type = response.headers.get('Content-Type', 'image/jpeg') # Default guess
- if not content_type.startswith('image/'):
- logging.warning(f"Unexpected Content-Type '{content_type}' for image preview {file_id} ({hf_path}).")
- # Fallback or return error? Let's try serving it anyway.
+ # Pass through necessary headers
+ resp_headers = {
+ 'Content-Type': response.headers.get('Content-Type', 'image/jpeg'),
+ # Add caching headers? Let browser handle based on HF response maybe
+ # 'Cache-Control': 'public, max-age=3600' # Example: Cache for 1 hour
+ }
+ if 'Content-Length' in response.headers:
+ resp_headers['Content-Length'] = response.headers['Content-Length']
- return Response(response.iter_content(chunk_size=65536), mimetype=content_type)
+ return Response(response.iter_content(chunk_size=65536), headers=resp_headers)
except requests.exceptions.HTTPError as e:
status_code = e.response.status_code
- logging.error(f"HTTP Error {status_code} fetching preview from HF ({hf_path}, owner: {owner_user_id}): {e}")
- message = "Ошибка загрузки превью"
- if status_code == 404: message = "Превью не найдено."
- return Response(f"{message} ({status_code})", status=status_code)
- except requests.exceptions.Timeout:
- logging.error(f"Timeout fetching preview from HF ({hf_path}, owner: {owner_user_id})")
- return Response("Тайм-аут загрузки превью.", status=504)
+ logging.error(f"HTTP error fetching preview {file_id} ({hf_repo_path}): {status_code} - {e}")
+ # Return a placeholder image or text? 404 is simpler.
+ return Response(f"Ошибка загрузки превью ({status_code})", status=status_code, mimetype='text/plain')
except requests.exceptions.RequestException as e:
- logging.error(f"Network error fetching preview from HF ({hf_path}, owner: {owner_user_id}): {e}")
- return Response("Сетевая ошибка загрузки превью.", status=502)
+ logging.error(f"Network error fetching preview {file_id} ({hf_repo_path}): {e}")
+ return Response("Ошибка сети при загрузке превью", status=504, mimetype='text/plain')
except Exception as e:
- logging.error(f"Unexpected error during preview ({hf_path}, owner: {owner_user_id}): {e}", exc_info=True)
- return Response("Внутренняя ошибка сервера при загрузке превью.", status=500)
+ logging.exception(f"Unexpected error during preview fetch {file_id} ({hf_repo_path}): {e}")
+ return Response("Внутренняя ошибка сервера при загрузке превью", status=500, mimetype='text/plain')
+# --- Main Execution ---
if __name__ == '__main__':
- print("*"*60)
- if not BOT_TOKEN or BOT_TOKEN == 'YOUR_BOT_TOKEN':
- logging.critical("CRITICAL: TELEGRAM_BOT_TOKEN is not set properly.")
- print(" CRITICAL: TELEGRAM_BOT_TOKEN is not set or is default.")
- print(" Telegram authentication WILL FAIL. Set the environment variable.")
- else:
- print(f" BOT_TOKEN detected (Length: {len(BOT_TOKEN)}).")
-
+ if not BOT_TOKEN or BOT_TOKEN == 'YOUR_BOT_TOKEN' or ':' not in BOT_TOKEN:
+ logging.critical("\n" + "*"*60 +
+ "\n CRITICAL: TELEGRAM_BOT_TOKEN is not set or is invalid. " +
+ "\n Telegram authentication WILL FAIL. Set the environment variable." +
+ "\n" + "*"*60)
if not HF_TOKEN_WRITE:
- logging.warning("HF_TOKEN_WRITE (write access) is not set. File uploads/deletions will fail.")
- print(" WARNING: HF_TOKEN_WRITE is not set. Uploads/deletes disabled.")
- else:
- print(f" HF_TOKEN_WRITE detected (Length: {len(HF_TOKEN_WRITE)}). Uploads/deletes enabled.")
-
+ logging.warning("HF_TOKEN (write access) is not set. File uploads and deletions will fail.")
if not HF_TOKEN_READ:
- logging.warning("HF_TOKEN_READ is not set. Will use HF_TOKEN_WRITE if available, else downloads/previews might fail for private repos.")
- print(" WARNING: HF_TOKEN_READ is not set. Downloads/previews might fail for private repos.")
- else:
- print(f" HF_TOKEN_READ detected (Length: {len(HF_TOKEN_READ)}).")
- print("*"*60)
+ logging.warning("HF_TOKEN_READ is not set (or same as write token). File downloads/previews might fail if the HF dataset is private.")
+ if app.secret_key == "supersecretkey_mini_app_unique_dev":
+ logging.warning("Using default Flask secret key. Set FLASK_SECRET_KEY env var for production.")
- logging.info("Attempting initial database load/download...")
- try:
- initial_data = load_data()
- user_count = len(initial_data.get('users', {}))
- logging.info(f"Initial data load complete. Found {user_count} user(s).")
- except Exception as e:
- logging.critical(f"FATAL: Could not perform initial data load: {e}", exc_info=True)
- print("\nFATAL ERROR DURING INITIAL DATA LOAD. Check logs. Exiting.")
- exit(1)
+ logging.info("Attempting initial database download/load...")
+ load_data() # Load data into cache on startup
+ logging.info("Initial data load attempt complete.")
- print(f"Starting Flask server on 0.0.0.0:7860...")
- # Use waitress or gunicorn in production instead of app.run(debug=False)
- # For simplicity here, we keep app.run
+ # Use waitress or gunicorn for production instead of Flask dev server
+ logging.info("Starting Flask application server...")
app.run(debug=False, host='0.0.0.0', port=7860)
-
-# END OF FILE
+ # Example using waitress (install waitress first: pip install waitress)
+ # from waitress import serve
+ # serve(app, host='0.0.0.0', port=7860)
\ No newline at end of file