diff --git "a/app.py" "b/app.py"
--- "a/app.py"
+++ "b/app.py"
@@ -1,1309 +1,1071 @@
-#!/usr/bin/env python3
-# -*- coding: utf-8 -*-
-
-import os
 import flask
-from flask import Flask, request, Response, render_template_string, jsonify, redirect, url_for, send_file
-import hmac
-import hashlib
+from flask import Flask, render_template_string, request, redirect, url_for, session, flash, send_file, jsonify, Response
+from flask_caching import Cache
 import json
-from urllib.parse import unquote, parse_qs, quote
-import time
-from datetime import datetime
+import os
 import logging
 import threading
-from huggingface_hub import HfApi, hf_hub_download, list_repo_files
-from huggingface_hub.utils import RepositoryNotFoundError, EntryNotFoundError
-import mimetypes
-import io
-import math
-
-BOT_TOKEN = os.getenv("BOT_TOKEN", "6750208873:AAE2hvPlJ99dBdhGa_Brre0IIpUdOvXxHt4")
-HOST = '0.0.0.0'
-PORT = 7860
-DATA_FILE = 'data.json'
-
-REPO_ID = os.getenv("HF_REPO_ID", "Eluza133/Z1e1u")
-HF_DATA_FILE_PATH = "data.json"
-HF_UPLOAD_FOLDER = "uploads"
-HF_TOKEN_WRITE = os.getenv("HF_TOKEN_WRITE")
-HF_TOKEN_READ = os.getenv("HF_TOKEN_READ")
-
-MAX_UPLOAD_FILES = 20
-AUTH_TIMEOUT = 86400
+import time
+from datetime import datetime
+from huggingface_hub import HfApi, hf_hub_download, utils as hf_utils
+from werkzeug.utils import secure_filename
+import requests
+from io import BytesIO
+import uuid
+import hashlib
+import hmac
+
+from telegram import Update, InlineKeyboardButton, InlineKeyboardMarkup, WebAppInfo
+from telegram.ext import Application as TelegramApplication, CommandHandler, ContextTypes
 
 app = Flask(__name__)
-logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
-app.secret_key = os.urandom(24)
+app.secret_key = os.getenv("FLASK_SECRET_KEY", "supersecretkey_folders_unique_telegram_mini_app")
 
-_data_lock = threading.RLock()
-metadata_cache = {}
+DATA_FILE = 'cloudeng_data_tg.json'
+REPO_ID = "Eluza133/Z1e1u"
+HF_TOKEN_WRITE = os.getenv("HF_TOKEN")
+HF_TOKEN_READ = os.getenv("HF_TOKEN_READ") or HF_TOKEN_WRITE
+UPLOAD_FOLDER = 'uploads_tg'
+os.makedirs(UPLOAD_FOLDER, exist_ok=True)
 
-def get_hf_api(write=False):
-    token = HF_TOKEN_WRITE if write else HF_TOKEN_READ
-    if not token:
-        logging.warning(f"Hugging Face {'write' if write else 'read'} token not set.")
-        return None
-    return HfApi(token=token)
-
-def download_metadata_from_hf():
-    global metadata_cache
-    api = get_hf_api(write=False)
-    if not api:
-        logging.warning("HF Read token missing. Cannot download metadata.")
-        return False
-    try:
-        logging.info(f"Attempting to download {HF_DATA_FILE_PATH} from {REPO_ID}...")
-        download_path = hf_hub_download(
-            repo_id=REPO_ID,
-            filename=HF_DATA_FILE_PATH,
-            repo_type="dataset",
-            token=api.token,
-            local_dir=".",
-            local_dir_use_symlinks=False,
-            force_download=True,
-            etag_timeout=10
-        )
-        logging.info("Metadata file successfully downloaded from Hugging Face.")
-        with _data_lock:
-            try:
-                with open(download_path, 'r', encoding='utf-8') as f:
-                    metadata_cache = json.load(f)
-                logging.info("Successfully loaded downloaded metadata into cache.")
-            except (FileNotFoundError, json.JSONDecodeError) as e:
-                logging.error(f"Error reading downloaded metadata file: {e}. Resetting cache.")
-                metadata_cache = {}
+BOT_TOKEN = "6750208873:AAE2hvPlJ99dBdhGa_Brre0IIpUdOvXxHt4"
+WEB_APP_URL = os.getenv("WEB_APP_URL", "https://your-flask-app-domain.com/launch_mini_app")  # IMPORTANT: Update this to your ngrok/deployed URL
+ADMIN_USER = os.getenv("ADMIN_USER", "admin_zeus")
+ADMIN_PASSWORD = os.getenv("ADMIN_PASSWORD", "admin_password_zeus")
+
+
+cache = Cache(app, config={'CACHE_TYPE': 'simple'})
+logging.basicConfig(level=logging.INFO)
+logging.getLogger("httpx").setLevel(logging.WARNING)  # Reduce verbosity from httpx (the HTTP client used by python-telegram-bot)
+
+# --- Filesystem Helper Functions ---
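+# Each user's data holds a nested dict tree: folder nodes look like
+# {"type": "folder", "id": ..., "name": ..., "children": [...]}, while file
+# nodes carry the HF repo path and display metadata (see initialize_user_filesystem).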
Resetting cache.") - metadata_cache = {} +BOT_TOKEN = "6750208873:AAE2hvPlJ99dBdhGa_Brre0IIpUdOvXxHt4" +WEB_APP_URL = os.getenv("WEB_APP_URL", "https://your-flask-app-domain.com/launch_mini_app") # IMPORTANT: Update this to your ngrok/deployed URL +ADMIN_USER = os.getenv("ADMIN_USER", "admin_zeus") +ADMIN_PASSWORD = os.getenv("ADMIN_PASSWORD", "admin_password_zeus") + + +cache = Cache(app, config={'CACHE_TYPE': 'simple'}) +logging.basicConfig(level=logging.INFO) +logging.getLogger("httpx").setLevel(logging.WARNING) # Reduce verbosity from httpx used by huggingface_hub + +# --- Filesystem Helper Functions --- +def find_node_by_id(filesystem, node_id): + if not filesystem: return None, None + if filesystem.get('id') == node_id: + return filesystem, None + queue = [(filesystem, None)] + while queue: + current_node, parent = queue.pop(0) + if current_node.get('type') == 'folder' and 'children' in current_node: + for i, child in enumerate(current_node['children']): + if child.get('id') == node_id: + return child, current_node + if child.get('type') == 'folder': + queue.append((child, current_node)) + return None, None + +def add_node(filesystem, parent_id, node_data): + parent_node, _ = find_node_by_id(filesystem, parent_id) + if parent_node and parent_node.get('type') == 'folder': + if 'children' not in parent_node: + parent_node['children'] = [] + parent_node['children'].append(node_data) return True - except EntryNotFoundError: - logging.warning(f"Metadata file '{HF_DATA_FILE_PATH}' not found in repo '{REPO_ID}'. Starting fresh.") - with _data_lock: - metadata_cache = {} + return False + +def remove_node(filesystem, node_id): + node_to_remove, parent_node = find_node_by_id(filesystem, node_id) + if node_to_remove and parent_node and 'children' in parent_node: + parent_node['children'] = [child for child in parent_node['children'] if child.get('id') != node_id] return True - except RepositoryNotFoundError: - logging.error(f"Hugging Face repository '{REPO_ID}' not found. Cannot download metadata.") - except Exception as e: - logging.error(f"Error downloading metadata from Hugging Face: {e}", exc_info=True) return False -def load_local_metadata(): - global metadata_cache - with _data_lock: - if not metadata_cache: - try: - with open(DATA_FILE, 'r', encoding='utf-8') as f: - metadata_cache = json.load(f) - logging.info("Metadata loaded from local JSON.") - except FileNotFoundError: - logging.warning(f"{DATA_FILE} not found locally. Starting with empty data.") - metadata_cache = {} - except json.JSONDecodeError: - logging.error(f"Error decoding {DATA_FILE}. 
Starting with empty data.") - metadata_cache = {} - except Exception as e: - logging.error(f"Unexpected error loading metadata: {e}") - metadata_cache = {} - return metadata_cache - -def save_metadata(data_to_update=None): - global metadata_cache - with _data_lock: - try: - if data_to_update: - metadata_cache.update(data_to_update) - with open(DATA_FILE, 'w', encoding='utf-8') as f: - json.dump(metadata_cache, f, ensure_ascii=False, indent=4) - logging.info(f"Metadata successfully saved locally to {DATA_FILE}.") - upload_metadata_to_hf_async() - return True - except Exception as e: - logging.error(f"Error saving metadata: {e}", exc_info=True) - return False - -def update_user_file_metadata(user_id, file_info_list): - user_id_str = str(user_id) - with _data_lock: - if user_id_str not in metadata_cache: - metadata_cache[user_id_str] = {"user_info": {}, "files": []} - if "files" not in metadata_cache[user_id_str]: - metadata_cache[user_id_str]["files"] = [] - existing_filenames = {f['filename'] for f in metadata_cache[user_id_str]["files"]} - new_files_added = 0 - for file_info in file_info_list: - if file_info['filename'] not in existing_filenames: - metadata_cache[user_id_str]["files"].append(file_info) - existing_filenames.add(file_info['filename']) - new_files_added += 1 - else: - logging.warning(f"File '{file_info['filename']}' already exists for user {user_id}. Skipping add.") - if new_files_added > 0: - logging.info(f"Added {new_files_added} file metadata entries for user {user_id}.") - if not save_metadata(): - return False - else: - logging.info(f"No new file metadata added for user {user_id}.") - return True +def get_node_path_string(filesystem, node_id): + path_list = [] + current_id = node_id + while current_id: + node, parent = find_node_by_id(filesystem, current_id) + if not node: break + if node.get('id') != 'root': + path_list.append(node.get('name', node.get('original_filename', ''))) + if not parent: break + current_id = parent.get('id') if parent else None + return " / ".join(reversed(path_list)) or "Root" -def _upload_metadata_to_hf_task(): - api = get_hf_api(write=True) - if not api: - logging.warning("HF Write token missing. Skipping metadata upload.") - return - if not os.path.exists(DATA_FILE): - logging.warning(f"{DATA_FILE} does not exist locally. Skipping upload.") +def initialize_user_filesystem(user_data_param): # Renamed to avoid conflict + if 'filesystem' not in user_data_param: + user_data_param['filesystem'] = { + "type": "folder", "id": "root", "name": "root", "children": [] + } + if 'files' in user_data_param and isinstance(user_data_param['files'], list): # Migration for old structure + for old_file in user_data_param['files']: + file_id = old_file.get('id', uuid.uuid4().hex) + original_filename = old_file.get('filename', 'unknown_file') + name_part, ext_part = os.path.splitext(original_filename) + unique_suffix = uuid.uuid4().hex[:8] + unique_filename = f"{name_part}_{unique_suffix}{ext_part}" + # session['username'] would not be available here directly if called outside request context + # This part of migration might need username context if hf_path depends on it + # For new users, this block is skipped. 
+@cache.memoize(timeout=300)
+def load_data():
+    try:
+        download_db_from_hf()
+        with open(DATA_FILE, 'r', encoding='utf-8') as file:
+            data = json.load(file)
+        if not isinstance(data, dict):
+            return {'users': {}}
+        data.setdefault('users', {})
+        # Filesystem initialization handled on login/auth if needed
+        return data
+    except Exception as e:
+        logging.error(f"Error loading data: {e}")
+        return {'users': {}}
+
+def save_data(data):
+    try:
+        with open(DATA_FILE, 'w', encoding='utf-8') as file:
+            json.dump(data, file, ensure_ascii=False, indent=4)
+        upload_db_to_hf()
+        cache.clear()
+    except Exception as e:
+        logging.error(f"Error saving data: {e}")
+        raise
+
+def upload_db_to_hf():
+    if not HF_TOKEN_WRITE: return
+    try:
+        api = HfApi()
+        api.upload_file(path_or_fileobj=DATA_FILE, path_in_repo=DATA_FILE, repo_id=REPO_ID, repo_type="dataset", token=HF_TOKEN_WRITE, commit_message=f"TGApp DB Backup {datetime.now()}")
+    except Exception as e:
+        logging.error(f"Error uploading database: {e}")
+
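+# Pulls the JSON DB from the HF dataset repo; when the repo or file is missing
+# (or unreadable), it falls back to creating an empty {'users': {}} DB locally.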
Skipping upload.") - return - file_to_upload = DATA_FILE - logging.info(f"Attempting to upload {file_to_upload} to {REPO_ID}/{HF_DATA_FILE_PATH}...") - api.upload_file( - path_or_fileobj=file_to_upload, - path_in_repo=HF_DATA_FILE_PATH, - repo_id=REPO_ID, - repo_type="dataset", - commit_message=f"Update metadata {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}" - ) - logging.info("Metadata successfully uploaded to Hugging Face.") + hf_hub_download(repo_id=REPO_ID, filename=DATA_FILE, repo_type="dataset", token=HF_TOKEN_READ, local_dir=".", local_dir_use_symlinks=False) + except (hf_utils.RepositoryNotFoundError, hf_utils.EntryNotFoundError): + if not os.path.exists(DATA_FILE): + with open(DATA_FILE, 'w', encoding='utf-8') as f: json.dump({'users': {}}, f) except Exception as e: - logging.error(f"Error uploading metadata to Hugging Face: {e}", exc_info=True) + logging.error(f"Error downloading database: {e}") + if not os.path.exists(DATA_FILE): + with open(DATA_FILE, 'w', encoding='utf-8') as f: json.dump({'users': {}}, f) + +def periodic_backup(): + while True: + time.sleep(1800) + upload_db_to_hf() -def upload_metadata_to_hf_async(): - upload_thread = threading.Thread(target=_upload_metadata_to_hf_task, daemon=True) - upload_thread.start() +def get_file_type(filename): + ext = filename.lower().split('.')[-1] + if ext in ['mp4', 'mov', 'avi', 'webm', 'mkv']: return 'video' + if ext in ['jpg', 'jpeg', 'png', 'gif', 'bmp', 'webp', 'svg']: return 'image' + if ext == 'pdf': return 'pdf' + if ext == 'txt': return 'text' + return 'other' -def verify_telegram_data(init_data_str): +# --- Auth Helpers --- +def is_admin(): + return session.get('admin_logged_in', False) + +def validate_telegram_init_data(init_data_str, bot_token_to_validate): try: - parsed_data = parse_qs(init_data_str) - received_hash = parsed_data.pop('hash', [None])[0] - if not received_hash: - logging.warning("Verification failed: Hash missing from initData.") - return None, False, "Hash missing" - data_check_list = [] - for key, value in sorted(parsed_data.items()): - data_check_list.append(f"{key}={value[0]}") - data_check_string = "\n".join(data_check_list) - secret_key = hmac.new("WebAppData".encode(), BOT_TOKEN.encode(), hashlib.sha256).digest() + params = {} + for item in init_data_str.split('&'): + key, value = item.split('=', 1) + params[key] = value + + hash_received = params.pop('hash') + + data_check_string_parts = [] + for key in sorted(params.keys()): + data_check_string_parts.append(f"{key}={params[key]}") + data_check_string = "\n".join(data_check_string_parts) + + secret_key = hmac.new("WebAppData".encode(), bot_token_to_validate.encode(), hashlib.sha256).digest() calculated_hash = hmac.new(secret_key, data_check_string.encode(), hashlib.sha256).hexdigest() - if calculated_hash != received_hash: - logging.warning(f"Verification failed: Hash mismatch. Calculated: {calculated_hash}, Received: {received_hash}") - return parsed_data, False, "Invalid hash" - auth_date = int(parsed_data.get('auth_date', [0])[0]) - current_time = int(time.time()) - if current_time - auth_date > AUTH_TIMEOUT: - logging.warning(f"Verification failed: initData expired. 
+def periodic_backup():
+    while True:
+        time.sleep(1800)
+        upload_db_to_hf()
 
-def upload_metadata_to_hf_async():
-    upload_thread = threading.Thread(target=_upload_metadata_to_hf_task, daemon=True)
-    upload_thread.start()
 
+def get_file_type(filename):
+    ext = filename.lower().split('.')[-1]
+    if ext in ['mp4', 'mov', 'avi', 'webm', 'mkv']: return 'video'
+    if ext in ['jpg', 'jpeg', 'png', 'gif', 'bmp', 'webp', 'svg']: return 'image'
+    if ext == 'pdf': return 'pdf'
+    if ext == 'txt': return 'text'
+    return 'other'
 
-def verify_telegram_data(init_data_str):
+# --- Auth Helpers ---
+def is_admin():
+    return session.get('admin_logged_in', False)
+
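+# Telegram Mini App initData check, following Telegram's documented scheme:
+#   secret_key      = HMAC_SHA256(key="WebAppData", msg=bot_token)
+#   expected 'hash' = HMAC_SHA256(key=secret_key, msg=data_check_string)
+# where data_check_string is the remaining "key=value" pairs, sorted by key
+# and joined with "\n". Values are hashed in URL-decoded form, hence the
+# unquote() while parsing below.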
+def validate_telegram_init_data(init_data_str, bot_token_to_validate):
     try:
-        parsed_data = parse_qs(init_data_str)
-        received_hash = parsed_data.pop('hash', [None])[0]
-        if not received_hash:
-            logging.warning("Verification failed: Hash missing from initData.")
-            return None, False, "Hash missing"
-        data_check_list = []
-        for key, value in sorted(parsed_data.items()):
-            data_check_list.append(f"{key}={value[0]}")
-        data_check_string = "\n".join(data_check_list)
-        secret_key = hmac.new("WebAppData".encode(), BOT_TOKEN.encode(), hashlib.sha256).digest()
+        params = {}
+        for item in init_data_str.split('&'):
+            key, value = item.split('=', 1)
+            params[key] = requests.utils.unquote(value)  # decode percent-encoding before hashing
+
+        hash_received = params.pop('hash')
+
+        data_check_string_parts = []
+        for key in sorted(params.keys()):
+            data_check_string_parts.append(f"{key}={params[key]}")
+        data_check_string = "\n".join(data_check_string_parts)
+
+        secret_key = hmac.new("WebAppData".encode(), bot_token_to_validate.encode(), hashlib.sha256).digest()
         calculated_hash = hmac.new(secret_key, data_check_string.encode(), hashlib.sha256).hexdigest()
-        if calculated_hash != received_hash:
-            logging.warning(f"Verification failed: Hash mismatch. Calculated: {calculated_hash}, Received: {received_hash}")
-            return parsed_data, False, "Invalid hash"
-        auth_date = int(parsed_data.get('auth_date', [0])[0])
-        current_time = int(time.time())
-        if current_time - auth_date > AUTH_TIMEOUT:
-            logging.warning(f"Verification failed: initData expired. Auth time: {auth_date}, Current time: {current_time}")
-            return parsed_data, False, "Data expired"
-        user_info_dict = None
-        if 'user' in parsed_data:
-            try:
-                user_json_str = unquote(parsed_data['user'][0])
-                user_info_dict = json.loads(user_json_str)
-            except Exception as e:
-                logging.error(f"Could not parse user JSON from initData: {e}")
-        logging.info(f"Telegram data verified successfully for user ID: {user_info_dict.get('id') if user_info_dict else 'Unknown'}")
-        return user_info_dict, True, "Verified"
+
+        if calculated_hash == hash_received:
+            user_data_str = params.get('user')
+            if user_data_str:
+                return json.loads(user_data_str)  # already URL-decoded above
+        return None
     except Exception as e:
-        logging.error(f"Error during Telegram data verification: {e}", exc_info=True)
-        return None, False, "Verification exception"
-
-def authenticate_and_get_user(init_data_str):
-    user_info, is_valid, message = verify_telegram_data(init_data_str)
-    if not is_valid:
-        return None, message
-    user_id = user_info.get('id') if user_info else None
-    if not user_id:
-        logging.warning("Verification successful but user ID is missing in user data.")
-        return None, "User ID missing"
-    user_id_str = str(user_id)
-    with _data_lock:
-        should_save = False
-        if user_id_str not in metadata_cache:
-            metadata_cache[user_id_str] = {
-                "user_info": user_info,
-                "files": []
-            }
-            logging.info(f"New user registered: {user_id}")
-            should_save = True
-        else:
-            if "user_info" not in metadata_cache[user_id_str] or metadata_cache[user_id_str]["user_info"] != user_info:
-                metadata_cache[user_id_str]["user_info"] = user_info
-                should_save = True
-        if should_save:
-            if not save_metadata():
-                logging.error(f"Failed to save metadata after updating/adding user {user_id}")
-    return user_info, "Authenticated"
-
-USER_TEMPLATE = """
-[removed HTML template truncated in this excerpt; only the loading-screen text "Инициализация..." ("Initializing...") survives]