diff --git "a/app.py" "b/app.py"
--- "a/app.py"
+++ "b/app.py"
@@ -2,9 +2,11 @@ import os
import hmac
import hashlib
import json
-from urllib.parse import unquote, parse_qsl
-from flask import Flask, request, jsonify, Response, session, redirect, url_for, flash, render_template_string, send_file
+from urllib.parse import unquote, parse_qsl, urlencode
+from flask import Flask, request, jsonify, Response, send_file
from flask_caching import Cache
+import logging
+import threading
import time
from datetime import datetime
from huggingface_hub import HfApi, hf_hub_download, utils as hf_utils
@@ -12,37 +14,282 @@ from werkzeug.utils import secure_filename
import requests
from io import BytesIO
import uuid
-import threading
-import logging
-
-logging.basicConfig(level=logging.INFO)
+# --- Configuration ---
app = Flask(__name__)
-app.secret_key = os.getenv("FLASK_SECRET_KEY", "supersecretkey_folders_unique_telegram")
-
-BOT_TOKEN = os.environ.get('TELEGRAM_BOT_TOKEN', 'YOUR_BOT_TOKEN')
-HOST = '0.0.0.0'
-PORT = 7860
-AUTH_DATA_LIFETIME = 3600
-DATA_FILE = 'cloudeng_data_telegram.json'
-REPO_ID = "Eluza133/Z1e1u"
+app.secret_key = os.getenv("FLASK_SECRET_KEY", "supersecretkey_mini_app_unique")
+BOT_TOKEN = os.getenv('TELEGRAM_BOT_TOKEN', 'YOUR_BOT_TOKEN') # MUST be set
+DATA_FILE = 'cloudeng_mini_app_data.json'
+REPO_ID = "Eluza133/Z1e1u" # Same HF Repo
HF_TOKEN_WRITE = os.getenv("HF_TOKEN")
HF_TOKEN_READ = os.getenv("HF_TOKEN_READ") or HF_TOKEN_WRITE
-UPLOAD_FOLDER = 'uploads'
+UPLOAD_FOLDER = 'uploads_mini_app'
os.makedirs(UPLOAD_FOLDER, exist_ok=True)
+# --- Caching and Logging ---
cache = Cache(app, config={'CACHE_TYPE': 'simple'})
+logging.basicConfig(level=logging.INFO)
+
+# --- Constants ---
+AUTH_DATA_LIFETIME = 3600 # 1 hour validity for initData
+
+# --- Filesystem Utilities ---
+def find_node_by_id(filesystem, node_id):
+ if not filesystem or not isinstance(filesystem, dict):
+ return None, None
+ if filesystem.get('id') == node_id:
+ return filesystem, None
+
+ queue = [(filesystem, None)]
+ visited = {filesystem.get('id')}
+
+ while queue:
+ current_node, parent = queue.pop(0)
+ if current_node.get('type') == 'folder' and 'children' in current_node:
+ for child in current_node.get('children', []):
+ child_id = child.get('id')
+ if not child_id: continue # Skip nodes without id
+
+ if child_id == node_id:
+ return child, current_node
+ if child_id not in visited and child.get('type') == 'folder':
+ visited.add(child_id)
+ queue.append((child, current_node))
+ return None, None
+
+def add_node(filesystem, parent_id, node_data):
+ parent_node, _ = find_node_by_id(filesystem, parent_id)
+ if parent_node and parent_node.get('type') == 'folder':
+ if 'children' not in parent_node:
+ parent_node['children'] = []
+ # Prevent adding duplicates by id
+ existing_ids = {child.get('id') for child in parent_node['children']}
+ if node_data.get('id') not in existing_ids:
+ parent_node['children'].append(node_data)
+ return True
+ return False
+
+def remove_node(filesystem, node_id):
+ node_to_remove, parent_node = find_node_by_id(filesystem, node_id)
+ if node_to_remove and parent_node and 'children' in parent_node:
+ original_length = len(parent_node['children'])
+ parent_node['children'] = [child for child in parent_node['children'] if child.get('id') != node_id]
+ return len(parent_node['children']) < original_length # Return True if something was removed
+ # Handle root node deletion attempt (should not happen normally)
+ if node_to_remove and node_id == filesystem.get('id'):
+ logging.warning("Attempted to remove root node directly.")
+ return False
+ return False
+
+def get_node_path_list(filesystem, node_id):
+ path_list = []
+ current_id = node_id
+
+ processed_ids = set()
+
+ while current_id and current_id not in processed_ids:
+ processed_ids.add(current_id)
+ node, parent = find_node_by_id(filesystem, current_id)
+ if not node:
+ break
+ path_list.append({
+ 'id': node.get('id'),
+ 'name': node.get('name', node.get('original_filename', 'Unknown'))
+ })
+ if not parent:
+ break
+ parent_id = parent.get('id')
+ if parent_id == current_id: # Prevent infinite loop if parent is self
+ logging.error(f"Filesystem loop detected at node {current_id}")
+ break
+ current_id = parent_id
+
+ # Ensure root is always first if found, otherwise add default root
+ if not any(p['id'] == 'root' for p in path_list):
+ path_list.append({'id': 'root', 'name': 'Root'})
+
+ # Filter out potential duplicates while preserving order, then reverse
+ final_path = []
+ seen_ids = set()
+ for item in reversed(path_list):
+ if item['id'] not in seen_ids:
+ final_path.append(item)
+ seen_ids.add(item['id'])
+
+ return final_path
+
+
+def initialize_user_filesystem(user_data):
+ if 'filesystem' not in user_data or not isinstance(user_data['filesystem'], dict):
+ user_data['filesystem'] = {
+ "type": "folder",
+ "id": "root",
+ "name": "Root",
+ "children": []
+ }
+
+# --- Data Loading/Saving ---
+@cache.memoize(timeout=120) # Cache for 2 minutes
+def load_data():
+ try:
+ download_db_from_hf()
+ with open(DATA_FILE, 'r', encoding='utf-8') as file:
+ data = json.load(file)
+ if not isinstance(data, dict):
+ logging.warning("Data file is not a dict, initializing empty.")
+ return {'users': {}}
+ data.setdefault('users', {})
+ # Ensure all users have a valid filesystem structure
+ for user_id, user_data in data['users'].items():
+ initialize_user_filesystem(user_data)
+ logging.info("Data loaded and filesystems checked/initialized.")
+ return data
+ except FileNotFoundError:
+ logging.warning(f"{DATA_FILE} not found locally. Initializing empty data.")
+ return {'users': {}}
+ except json.JSONDecodeError:
+ logging.error(f"Error decoding JSON from {DATA_FILE}. Returning empty data.")
+ return {'users': {}}
+ except Exception as e:
+ logging.error(f"Error loading data: {e}")
+ return {'users': {}}
+
+def save_data(data):
+ try:
+ with open(DATA_FILE, 'w', encoding='utf-8') as file:
+ json.dump(data, file, ensure_ascii=False, indent=4)
+ # Upload immediately after saving
+ upload_db_to_hf()
+ cache.clear() # Clear cache after saving
+ logging.info("Data saved locally and upload to HF initiated.")
+ except Exception as e:
+ logging.error(f"Error saving data: {e}")
+ # Consider not raising here to potentially allow app to continue
+ # raise
+
+def upload_db_to_hf():
+ if not HF_TOKEN_WRITE:
+ logging.warning("HF_TOKEN_WRITE not set, skipping database upload.")
+ return
+ try:
+ api = HfApi()
+ api.upload_file(
+ path_or_fileobj=DATA_FILE,
+ path_in_repo=DATA_FILE,
+ repo_id=REPO_ID,
+ repo_type="dataset",
+ token=HF_TOKEN_WRITE,
+ commit_message=f"Backup MiniApp {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}",
+ run_as_future=True # Upload in background
+ )
+ logging.info("Database upload to Hugging Face scheduled.")
+ except Exception as e:
+ logging.error(f"Error scheduling database upload: {e}")
+
+def download_db_from_hf():
+ if not HF_TOKEN_READ:
+ logging.warning("HF_TOKEN_READ not set, skipping database download.")
+ if not os.path.exists(DATA_FILE):
+ with open(DATA_FILE, 'w', encoding='utf-8') as f:
+ json.dump({'users': {}}, f)
+ logging.info(f"Created empty local database file: {DATA_FILE}")
+ return
+ try:
+ hf_hub_download(
+ repo_id=REPO_ID,
+ filename=DATA_FILE,
+ repo_type="dataset",
+ token=HF_TOKEN_READ,
+ local_dir=".",
+ local_dir_use_symlinks=False,
+ force_download=True, # Ensure we get the latest
+ etag_timeout=10 # Short timeout for checking freshness
+ )
+ logging.info("Database downloaded from Hugging Face")
+ except hf_utils.RepositoryNotFoundError:
+ logging.error(f"Repository {REPO_ID} not found.")
+ if not os.path.exists(DATA_FILE):
+ with open(DATA_FILE, 'w', encoding='utf-8') as f: json.dump({'users': {}}, f)
+ except hf_utils.EntryNotFoundError:
+ logging.warning(f"{DATA_FILE} not found in repo {REPO_ID}. Using/Creating local.")
+ if not os.path.exists(DATA_FILE):
+ with open(DATA_FILE, 'w', encoding='utf-8') as f: json.dump({'users': {}}, f)
+ except requests.exceptions.ConnectionError as e:
+ logging.error(f"Connection error downloading DB from HF: {e}. Using local version if available.")
+ except Exception as e:
+ logging.error(f"Error downloading database: {e}")
+ if not os.path.exists(DATA_FILE):
+ with open(DATA_FILE, 'w', encoding='utf-8') as f: json.dump({'users': {}}, f)
+
+# --- File Type Helper ---
+def get_file_type(filename):
+ if not filename or '.' not in filename: return 'other'
+ ext = filename.lower().split('.')[-1]
+ if ext in ['mp4', 'mov', 'avi', 'webm', 'mkv']: return 'video'
+ if ext in ['jpg', 'jpeg', 'png', 'gif', 'bmp', 'webp', 'svg']: return 'image'
+ if ext == 'pdf': return 'pdf'
+ if ext == 'txt': return 'text'
+ return 'other'
+
+# --- Telegram Validation ---
+def check_telegram_authorization(auth_data: str, bot_token: str) -> dict | None:
+ if not auth_data or not bot_token or bot_token == 'YOUR_BOT_TOKEN':
+ logging.warning("Validation skipped: Missing auth_data or valid BOT_TOKEN.")
+ return None
+ try:
+        parsed_data = dict(parse_qsl(auth_data))  # parse_qsl percent-decodes; pre-unquote double-decodes and corrupts values containing %26/%3D
+ if "hash" not in parsed_data:
+ logging.error("Hash not found in auth data")
+ return None
+
+ telegram_hash = parsed_data.pop('hash')
+ auth_date_ts = int(parsed_data.get('auth_date', 0))
+ current_ts = int(time.time())
+
+ if abs(current_ts - auth_date_ts) > AUTH_DATA_LIFETIME:
+ logging.warning(f"Auth data expired (Auth: {auth_date_ts}, Now: {current_ts}, Diff: {current_ts - auth_date_ts})")
+ return None
+
+        data_check_string = "\n".join(f"{k}={v}" for k, v in sorted(parsed_data.items()))
+ secret_key = hmac.new("WebAppData".encode(), bot_token.encode(), hashlib.sha256).digest()
+ calculated_hash = hmac.new(secret_key, data_check_string.encode(), hashlib.sha256).hexdigest()
+
+ if calculated_hash == telegram_hash:
+ user_data_str = parsed_data.get('user')
+ if user_data_str:
+ try:
+ user_info = json.loads(user_data_str)
+ if 'id' not in user_info:
+ logging.error("Validated user data missing 'id'")
+ return None
+ return user_info # Success
+ except json.JSONDecodeError:
+ logging.error("Failed to decode user JSON from auth data")
+ return None
+ else:
+ logging.warning("No 'user' field in validated auth data")
+ return None # Require user field
+ else:
+ logging.warning("Hash mismatch during validation")
+ return None
+ except Exception as e:
+ logging.error(f"Exception during validation: {e}")
+ return None
+
+# --- HTML, CSS, JS Template ---
HTML_TEMPLATE = """
- Zeus Cloud TG
+ Zeus Cloud Mini App
+
+ Загрузка и проверка данных Telegram...
+
+
+
Zeus Cloud
+
+
-
Загрузка данных Telegram...
-
-
-
-
- {% with messages = get_flashed_messages(with_categories=true) %}
- {% if messages %}
- {% for category, message in messages %}
-
{{ message }}
- {% endfor %}
- {% endif %}
- {% endwith %}
-
-
-
-
Ваш профиль
-
Загрузка...
-
-
-
+
-
+
+
-
-
-
Содержимое папки: Загрузка...
-
-
Загрузка...
+
Содержимое папки
+
+
-
-
-
-
-
-
-
-"""
-
-
-def check_telegram_authorization(auth_data: str, bot_token: str) -> dict | None:
- if not auth_data:
- logging.error("Auth data is empty.")
- return None
-
- try:
- parsed_data = dict(parse_qsl(unquote(auth_data)))
- if "hash" not in parsed_data:
- logging.error("Hash not found in auth data.")
- return None
-
- telegram_hash = parsed_data.pop('hash')
-
- auth_date_ts = int(parsed_data.get('auth_date', 0))
- current_ts = int(time.time())
- if current_ts - auth_date_ts > AUTH_DATA_LIFETIME:
- logging.warning(f"Auth data expired. Auth time: {auth_date_ts}, Current time: {current_ts}, Diff: {current_ts - auth_date_ts}")
- return None
-
- data_check_string = "\n".join(sorted([f"{k}={v}" for k, v in parsed_data.items()]))
-
- secret_key = hmac.new("WebAppData".encode(), bot_token.encode(), hashlib.sha256).digest()
- calculated_hash = hmac.new(secret_key, data_check_string.encode(), hashlib.sha256).hexdigest()
-
- if calculated_hash == telegram_hash:
- user_data_str = parsed_data.get('user')
- if user_data_str:
- try:
- user_data = json.loads(user_data_str)
- if 'id' not in user_data:
- logging.error("User data is missing 'id'.")
- return None
- return user_data
- except json.JSONDecodeError:
- logging.error("Could not decode user JSON.")
- return None
- else:
- logging.error("No 'user' field found in validated data.")
- return None
- else:
- logging.warning(f"Hash mismatch. Calculated: {calculated_hash}, Received: {telegram_hash}")
- # logging.debug(f"Data check string:\n{data_check_string}") # Uncomment for deep debug
- return None
-
- except Exception as e:
- logging.exception("Error during Telegram authorization check:")
- return None
-
-
-def find_node_by_id(filesystem, node_id):
- if filesystem.get('id') == node_id:
- return filesystem, None
-
- queue = [(filesystem, None)]
- while queue:
- current_node, parent = queue.pop(0)
- if current_node.get('type') == 'folder' and 'children' in current_node:
- for i, child in enumerate(current_node['children']):
- if child.get('id') == node_id:
- return child, current_node
- if child.get('type') == 'folder':
- queue.append((child, current_node))
- return None, None
-
-def add_node(filesystem, parent_id, node_data):
- parent_node, _ = find_node_by_id(filesystem, parent_id)
- if parent_node and parent_node.get('type') == 'folder':
- if 'children' not in parent_node:
- parent_node['children'] = []
- parent_node['children'].append(node_data)
- return True
- return False
-
-def remove_node(filesystem, node_id):
- node_to_remove, parent_node = find_node_by_id(filesystem, node_id)
- if node_to_remove and parent_node and 'children' in parent_node:
- parent_node['children'] = [child for child in parent_node['children'] if child.get('id') != node_id]
- return True
- return False
-
-def get_node_path(filesystem, node_id, path_elements=None):
- if path_elements is None:
- path_elements = []
- node, parent = find_node_by_id(filesystem, node_id)
- if node:
- # Use node's name or original_filename for path string, but ID for internal path construction
- path_elements.append({'id': node.get('id'), 'name': node.get('name', node.get('original_filename', ''))})
- if parent:
- return get_node_path(filesystem, parent['id'], path_elements)
- return list(reversed(path_elements)) # Reverse to get path from root
-
-def get_node_path_string(filesystem, node_id):
- path_list = get_node_path(filesystem, node_id)
- # Filter out the 'root' name for the string representation unless it's just root
- string_parts = [p['name'] for p in path_list if p['id'] != 'root' or len(path_list) == 1]
- return " / ".join(string_parts) or "Root"
-
-
-def initialize_user_filesystem(user_data):
- if 'filesystem' not in user_data:
- user_data['filesystem'] = {
- "type": "folder",
- "id": "root",
- "name": "root",
- "children": []
- }
- # Handle potential old 'files' structure during migration if needed, but assuming fresh start here.
-
-
-@cache.memoize(timeout=300)
-def load_data():
- try:
- download_db_from_hf()
- with open(DATA_FILE, 'r', encoding='utf-8') as file:
- data = json.load(file)
- if not isinstance(data, dict):
- logging.warning("Data is not in dict format, initializing empty database")
- return {'users': {}}
- data.setdefault('users', {})
- # Initialize filesystem for any users loaded without one
- for user_id, user_data in data['users'].items():
- initialize_user_filesystem(user_data)
- logging.info("Data successfully loaded and initialized")
- return data
- except FileNotFoundError:
- logging.warning(f"{DATA_FILE} not found, initializing empty database.")
- return {'users': {}}
- except json.JSONDecodeError:
- logging.error(f"Error decoding JSON from {DATA_FILE}, initializing empty database.")
- return {'users': {}}
- except Exception as e:
- logging.exception("Error loading data:")
- return {'users': {}}
-
-def save_data(data):
- try:
- # It's safer to save to a temporary file first and then replace
- temp_file = DATA_FILE + '.tmp'
- with open(temp_file, 'w', encoding='utf-8') as file:
- json.dump(data, file, ensure_ascii=False, indent=4)
- os.replace(temp_file, DATA_FILE)
+ async function deleteFile(fileId, fileName) {
+ if (!confirm(`Вы уверены, что хотите удалить файл "${fileName}"?`)) {
+ return;
+ }
+ try {
+ const data = await apiCall(`/delete_file/${fileId}`, 'POST', { current_folder_id: currentFolderId });
+ if (data.status === 'ok') {
+ showFlash(`Файл "${fileName}" удален.`);
+ loadFolderContent(currentFolderId); // Refresh
+ } else {
+ showFlash(data.message || 'Не удалось удалить файл.', 'error');
+ }
+ } catch (error) {
+ // Error handled by apiCall
+ }
+ }
- upload_db_to_hf()
- cache.clear()
- logging.info("Data saved and uploaded to HF")
- except Exception as e:
- logging.exception("Error saving data:")
- raise
-def upload_db_to_hf():
- if not HF_TOKEN_WRITE:
- logging.warning("HF_TOKEN_WRITE not set, skipping database upload.")
- return
- try:
- api = HfApi()
- api.upload_file(
- path_or_fileobj=DATA_FILE,
- path_in_repo=DATA_FILE,
- repo_id=REPO_ID,
- repo_type="dataset",
- token=HF_TOKEN_WRITE,
- commit_message=f"Backup {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}"
- )
- logging.info("Database uploaded to Hugging Face")
- except Exception as e:
- logging.exception("Error uploading database:")
+ // --- File Upload ---
+ function handleFileUpload(event) {
+ event.preventDefault();
+ const files = fileInput.files;
+ if (files.length === 0) {
+ showFlash('Выберите файлы для загрузки.', 'error');
+ return;
+ }
+ if (files.length > 20) {
+ showFlash('Максимум 20 файлов за раз!', 'error');
+ return;
+ }
+
+ progressContainer.style.display = 'block';
+ progressBar.style.width = '0%';
+ progressText.textContent = '0%';
+ uploadBtn.disabled = true;
+ uploadBtn.textContent = 'Загрузка...';
+
+ const formData = new FormData();
+ for (let i = 0; i < files.length; i++) {
+ formData.append('files', files[i]);
+ }
+ formData.append('current_folder_id', currentFolderId);
+ formData.append('initData', validatedInitData); // Add initData here
-def download_db_from_hf():
- if not HF_TOKEN_READ and not HF_TOKEN_WRITE:
- logging.warning("No HF_TOKENs set, skipping database download.")
- if not os.path.exists(DATA_FILE):
- with open(DATA_FILE, 'w', encoding='utf-8') as f:
- json.dump({'users': {}}, f)
- logging.info(f"Created empty local database file: {DATA_FILE}")
- return
- try:
- # Use HF_TOKEN_WRITE if READ token is not set, for flexibility
- token_to_use = HF_TOKEN_READ if HF_TOKEN_READ else HF_TOKEN_WRITE
- hf_hub_download(
- repo_id=REPO_ID,
- filename=DATA_FILE,
- repo_type="dataset",
- token=token_to_use,
- local_dir=".",
- local_dir_use_symlinks=False
- )
- logging.info("Database downloaded from Hugging Face")
- except hf_utils.RepositoryNotFoundError:
- logging.error(f"Repository {REPO_ID} not found.")
- if not os.path.exists(DATA_FILE):
- with open(DATA_FILE, 'w', encoding='utf-8') as f:
- json.dump({'users': {}}, f)
- logging.info(f"Created empty local database file: {DATA_FILE}")
- except hf_utils.EntryNotFoundError:
- logging.warning(f"{DATA_FILE} not found in repository {REPO_ID}. Initializing empty database.")
- if not os.path.exists(DATA_FILE):
- with open(DATA_FILE, 'w', encoding='utf-8') as f:
- json.dump({'users': {}}, f)
- logging.info(f"Created empty local database file: {DATA_FILE}")
- except Exception as e:
- logging.exception("Error downloading database:")
- if not os.path.exists(DATA_FILE):
- with open(DATA_FILE, 'w', encoding='utf-8') as f:
- json.dump({'users': {}}, f)
- logging.info(f"Created empty local database file: {DATA_FILE}")
+ const xhr = new XMLHttpRequest();
+ xhr.upload.addEventListener('progress', function(event) {
+ if (event.lengthComputable) {
+ const percentComplete = Math.round((event.loaded / event.total) * 100);
+ progressBar.style.width = percentComplete + '%';
+ progressText.textContent = percentComplete + '%';
+ }
+ });
+
+ xhr.addEventListener('load', function() {
+ uploadBtn.disabled = false;
+ uploadBtn.textContent = 'Загрузить файлы сюда';
+ progressContainer.style.display = 'none';
+ fileInput.value = ''; // Reset file input
+
+ if (xhr.status >= 200 && xhr.status < 300) {
+ try {
+ const data = JSON.parse(xhr.responseText);
+ if (data.status === 'ok') {
+ showFlash(data.message || `${files.length} файл(ов) загружено.`);
+ loadFolderContent(currentFolderId); // Refresh
+ } else {
+ showFlash(data.message || 'Ошибка при обработке загрузки на сервере.', 'error');
+ }
+ } catch (e) {
+ showFlash('Некорректный ответ от сервера после загрузки.', 'error');
+ }
+ } else {
+ showFlash(`Ошибка загрузки: ${xhr.statusText || xhr.status}`, 'error');
+ }
+ });
+
+ xhr.addEventListener('error', function() {
+ showFlash('Ошибка сети во время загрузки.', 'error');
+ uploadBtn.disabled = false;
+ uploadBtn.textContent = 'Загрузить файлы сюда';
+ progressContainer.style.display = 'none';
+ });
+
+ xhr.addEventListener('abort', function() {
+ showFlash('Загрузка отменена.', 'error');
+ uploadBtn.disabled = false;
+ uploadBtn.textContent = 'Загрузить файлы сюда';
+ progressContainer.style.display = 'none';
+ });
+
+ xhr.open('POST', '/upload', true);
+ // Do not set Content-Type, browser will set it correctly for FormData
+ xhr.send(formData);
+ }
-def periodic_backup():
- if not HF_TOKEN_WRITE:
- logging.warning("Periodic backup disabled: HF_TOKEN_WRITE not set.")
- return
- while True:
- time.sleep(1800) # 30 minutes
- try:
- save_data(load_data()) # Ensure we save the latest state
- except Exception as e:
- logging.error(f"Periodic backup failed: {e}")
+ // --- Initialization ---
+ function initializeApp() {
+ tg.ready();
+ tg.expand();
+ // Set background color based on theme
+ document.body.style.backgroundColor = tg.themeParams.bg_color || '#ffffff';
+ tg.setHeaderColor(tg.themeParams.secondary_bg_color || '#f1f1f1');
-def get_file_type(filename):
- filename_lower = filename.lower()
- if filename_lower.endswith(('.mp4', '.mov', '.avi', '.webm', '.mkv')):
- return 'video'
- elif filename_lower.endswith(('.jpg', '.jpeg', '.png', '.gif', '.bmp', '.webp', '.svg')):
- return 'image'
- elif filename_lower.endswith('.pdf'):
- return 'pdf'
- elif filename_lower.endswith('.txt'):
- return 'text'
- return 'other'
+ if (!tg.initData) {
+ showError("Ошибка: Не удалось получить данные авторизации Telegram (initData). Попробуйте перезапустить Mini App.");
+ return;
+ }
+ validatedInitData = tg.initData; // Store raw initData
+
+ // Validate initData with backend
+ fetch('/validate_init_data', {
+ method: 'POST',
+ headers: { 'Content-Type': 'application/json' },
+ body: JSON.stringify({ initData: validatedInitData })
+ })
+ .then(response => response.json())
+ .then(data => {
+ if (data.status === 'ok' && data.user) {
+ currentUser = data.user;
+ userInfoHeaderEl.textContent = `Пользователь: ${currentUser.first_name || ''} ${currentUser.last_name || ''} (@${currentUser.username || currentUser.id})`;
+ showAppContent();
+ loadFolderContent('root'); // Load root folder initially
+ } else {
+ throw new Error(data.message || 'Не удалось верифицировать пользователя.');
+ }
+ })
+ .catch(error => {
+ console.error("Validation failed:", error);
+ showError(`Ошибка авторизации: ${error.message}. Попробуйте перезапустить.`);
+ validatedInitData = null; // Invalidate data on error
+ });
+
+ // Add event listeners
+ uploadForm.addEventListener('submit', handleFileUpload);
+ createFolderBtn.addEventListener('click', handleCreateFolder);
+
+ // Handle back button (optional, can conflict with folder navigation)
+ // tg.BackButton.onClick(() => {
+ // // Implement back navigation logic if needed, e.g., go to parent folder
+ // console.log("Back button clicked");
+ // });
+ // if (currentFolderId !== 'root') {
+ // tg.BackButton.show();
+ // } else {
+ // tg.BackButton.hide();
+ // }
+ }
-def get_user_data(user_id):
- data = load_data()
- user_id_str = str(user_id) # Ensure ID is string key
- return data['users'].get(user_id_str)
-
-def save_user_data(user_id, user_data):
- data = load_data()
- user_id_str = str(user_id)
- data['users'][user_id_str] = user_data
- save_data(data)
-
-def get_current_user():
- user_id = session.get('user_id')
- if user_id:
- return get_user_data(user_id)
- return None
-
-def requires_auth(f):
- from functools import wraps
- @wraps(f)
- def decorated(*args, **kwargs):
- if 'user_id' not in session:
- # For Mini App, return JSON error or specific response
- # For traditional web, redirect to login
- # Since this is a single-file Mini App, we assume JSON API calls
- return jsonify({"status": "error", "message": "Authentication required"}), 401
- return f(*args, **kwargs)
- return decorated
+ // --- Start the App ---
+ initializeApp();
-@app.route('/')
-def index():
- current_folder_id = request.args.get('folder_id', 'root')
- # Pass dummy folder ID to template for initial JS load
- return Response(render_template_string(HTML_TEMPLATE, current_folder_id=current_folder_id, REPO_ID=REPO_ID, HF_TOKEN_READ=HF_TOKEN_READ), mimetype='text/html')
+
+