+ {% with messages = get_flashed_messages(with_categories=true) %}
+ {% if messages %}
+ {% for category, message in messages %}
+
{{ message }}
+ {% endfor %}
+ {% endif %}
+ {% endwith %}
+
+
+
+
Ваш профиль
+
Загрузка...
+
+
+
+
+
+
+
+
+
+
0%
+
+
+
Содержимое папки: Загрузка...
+
+
Загрузка...
+
+
+
+
+
+
+ ×
+
+
+
+
+
+
+
+"""
+
+
+def check_telegram_authorization(auth_data: str, bot_token: str) -> dict | None:
+ if not auth_data:
+ logging.error("Auth data is empty.")
+ return None
+
+ try:
+ parsed_data = dict(parse_qsl(unquote(auth_data)))
+ if "hash" not in parsed_data:
+ logging.error("Hash not found in auth data.")
+ return None
+
+ telegram_hash = parsed_data.pop('hash')
+
+ auth_date_ts = int(parsed_data.get('auth_date', 0))
+ current_ts = int(time.time())
+ if current_ts - auth_date_ts > AUTH_DATA_LIFETIME:
+ logging.warning(f"Auth data expired. Auth time: {auth_date_ts}, Current time: {current_ts}, Diff: {current_ts - auth_date_ts}")
+ return None
+
+ data_check_string = "\n".join(sorted([f"{k}={v}" for k, v in parsed_data.items()]))
+
+ secret_key = hmac.new("WebAppData".encode(), bot_token.encode(), hashlib.sha256).digest()
+ calculated_hash = hmac.new(secret_key, data_check_string.encode(), hashlib.sha256).hexdigest()
+
+ if calculated_hash == telegram_hash:
+ user_data_str = parsed_data.get('user')
+ if user_data_str:
+ try:
+ user_data = json.loads(user_data_str)
+ if 'id' not in user_data:
+ logging.error("User data is missing 'id'.")
+ return None
+ return user_data
+ except json.JSONDecodeError:
+ logging.error("Could not decode user JSON.")
+ return None
+ else:
+ logging.error("No 'user' field found in validated data.")
+ return None
+ else:
+ logging.warning(f"Hash mismatch. Calculated: {calculated_hash}, Received: {telegram_hash}")
+ # logging.debug(f"Data check string:\n{data_check_string}") # Uncomment for deep debug
+ return None
+
+ except Exception as e:
+ logging.exception("Error during Telegram authorization check:")
+ return None
+
-# --- Helper Functions ---
def find_node_by_id(filesystem, node_id):
- if not filesystem or not isinstance(filesystem, dict):
- return None, None
if filesystem.get('id') == node_id:
return filesystem, None
+
queue = [(filesystem, None)]
while queue:
current_node, parent = queue.pop(0)
- if current_node.get('type') == 'folder' and 'children' in current_node and isinstance(current_node['children'], list):
- for child in current_node['children']:
- if isinstance(child, dict): # Добавлена проверка типа
- if child.get('id') == node_id:
- return child, current_node
- if child.get('type') == 'folder':
- queue.append((child, current_node))
+ if current_node.get('type') == 'folder' and 'children' in current_node:
+ for i, child in enumerate(current_node['children']):
+ if child.get('id') == node_id:
+ return child, current_node
+ if child.get('type') == 'folder':
+ queue.append((child, current_node))
return None, None
def add_node(filesystem, parent_id, node_data):
parent_node, _ = find_node_by_id(filesystem, parent_id)
if parent_node and parent_node.get('type') == 'folder':
- if 'children' not in parent_node or not isinstance(parent_node['children'], list):
+ if 'children' not in parent_node:
parent_node['children'] = []
parent_node['children'].append(node_data)
return True
@@ -60,1391 +892,1165 @@ def add_node(filesystem, parent_id, node_data):
def remove_node(filesystem, node_id):
node_to_remove, parent_node = find_node_by_id(filesystem, node_id)
- if node_to_remove and parent_node and 'children' in parent_node and isinstance(parent_node['children'], list):
- parent_node['children'] = [child for child in parent_node['children'] if not isinstance(child, dict) or child.get('id') != node_id]
+ if node_to_remove and parent_node and 'children' in parent_node:
+ parent_node['children'] = [child for child in parent_node['children'] if child.get('id') != node_id]
return True
return False
+def get_node_path(filesystem, node_id, path_elements=None):
+ if path_elements is None:
+ path_elements = []
+ node, parent = find_node_by_id(filesystem, node_id)
+ if node:
+ # Use node's name or original_filename for path string, but ID for internal path construction
+ path_elements.append({'id': node.get('id'), 'name': node.get('name', node.get('original_filename', ''))})
+ if parent:
+ return get_node_path(filesystem, parent['id'], path_elements)
+ return list(reversed(path_elements)) # Reverse to get path from root
+
def get_node_path_string(filesystem, node_id):
- path_list = []
- current_id = node_id
- visited = set() # Защита от циклов
-
- while current_id and current_id not in visited:
- visited.add(current_id)
- node, parent = find_node_by_id(filesystem, current_id)
- if not node: break
- if node.get('id') != 'root':
- path_list.append(node.get('name', node.get('original_filename', '')))
- if not parent: break
- current_id = parent.get('id') if parent else None
- return " / ".join(reversed(path_list)) or "Root"
+ path_list = get_node_path(filesystem, node_id)
+ # Filter out the 'root' name for the string representation unless it's just root
+ string_parts = [p['name'] for p in path_list if p['id'] != 'root' or len(path_list) == 1]
+ return " / ".join(string_parts) or "Root"
+
def initialize_user_filesystem(user_data):
- if 'filesystem' not in user_data or not isinstance(user_data.get('filesystem'), dict):
+ if 'filesystem' not in user_data:
user_data['filesystem'] = {
"type": "folder",
"id": "root",
"name": "root",
"children": []
}
+ # Handle a potential legacy 'files' structure during migration if needed; here we assume a fresh start.
-def get_file_type(filename):
- filename_lower = filename.lower()
- if filename_lower.endswith(('.mp4', '.mov', '.avi', '.webm', '.mkv')): return 'video'
- if filename_lower.endswith(('.jpg', '.jpeg', '.png', '.gif', '.bmp', '.webp', '.svg')): return 'image'
- if filename_lower.endswith('.pdf'): return 'pdf'
- if filename_lower.endswith('.txt'): return 'text'
- if filename_lower.endswith(('.doc', '.docx')): return 'doc'
- if filename_lower.endswith(('.xls', '.xlsx')): return 'xls'
- if filename_lower.endswith(('.ppt', '.pptx')): return 'ppt'
- if filename_lower.endswith(('.zip', '.rar', '.7z', '.tar', '.gz')): return 'archive'
- if filename_lower.endswith(('.mp3', '.wav', '.ogg', '.aac', '.flac')): return 'audio'
- return 'other'
-
-# --- Data Persistence ---
-data_lock = threading.Lock()
+@cache.memoize(timeout=300)
def load_data():
- with data_lock:
- try:
- download_db_from_hf()
- if os.path.exists(DATA_FILE) and os.path.getsize(DATA_FILE) > 0:
- with open(DATA_FILE, 'r', encoding='utf-8') as file:
- data = json.load(file)
- if not isinstance(data, dict):
- logging.warning(f"{DATA_FILE} is not a dict, initializing.")
- data = {'users': {}}
- else:
- data = {'users': {}}
-
+ try:
+ download_db_from_hf()
+ with open(DATA_FILE, 'r', encoding='utf-8') as file:
+ data = json.load(file)
+ if not isinstance(data, dict):
+ logging.warning("Data is not in dict format, initializing empty database")
+ return {'users': {}}
data.setdefault('users', {})
- # Ensure all users have initialized filesystem
+ # Initialize filesystem for any users loaded without one
for user_id, user_data in data['users'].items():
- if isinstance(user_data, dict): # Check if user_data is a dict
- initialize_user_filesystem(user_data)
- else:
- logging.warning(f"Invalid data format for user {user_id}, re-initializing.")
- data['users'][user_id] = {} # Initialize as empty dict or default structure
- initialize_user_filesystem(data['users'][user_id])
-
-
- logging.info("Data loaded/initialized")
+ initialize_user_filesystem(user_data)
+ logging.info("Data successfully loaded and initialized")
return data
- except json.JSONDecodeError:
- logging.error(f"Error decoding JSON from {DATA_FILE}. Returning empty data.")
- return {'users': {}}
- except Exception as e:
- logging.error(f"Error loading data: {e}")
- return {'users': {}}
+ except FileNotFoundError:
+ logging.warning(f"{DATA_FILE} not found, initializing empty database.")
+ return {'users': {}}
+ except json.JSONDecodeError:
+ logging.error(f"Error decoding JSON from {DATA_FILE}, initializing empty database.")
+ return {'users': {}}
+ except Exception as e:
+ logging.exception("Error loading data:")
+ return {'users': {}}
def save_data(data):
- with data_lock:
- try:
- # Ensure filesystem structure is valid before saving
- for user_id, user_data in data.get('users', {}).items():
- if isinstance(user_data, dict):
- initialize_user_filesystem(user_data) # Ensures filesystem exists and is a dict
-
- with open(DATA_FILE, 'w', encoding='utf-8') as file:
- json.dump(data, file, ensure_ascii=False, indent=4)
- upload_db_to_hf()
- logging.info("Data saved and uploaded to HF")
- except Exception as e:
- logging.error(f"Error saving data: {e}")
- # Optionally re-raise or handle appropriately
- # raise
+ try:
+ # Safer: write to a temporary file first, then atomically replace the target to avoid a partially written database.
+ temp_file = DATA_FILE + '.tmp'
+ with open(temp_file, 'w', encoding='utf-8') as file:
+ json.dump(data, file, ensure_ascii=False, indent=4)
+ os.replace(temp_file, DATA_FILE)
+
+ upload_db_to_hf()
+ cache.clear()
+ logging.info("Data saved and uploaded to HF")
+ except Exception as e:
+ logging.exception("Error saving data:")
+ raise
def upload_db_to_hf():
if not HF_TOKEN_WRITE:
logging.warning("HF_TOKEN_WRITE not set, skipping database upload.")
return
- if not os.path.exists(DATA_FILE):
- logging.warning(f"{DATA_FILE} not found, skipping upload.")
- return
try:
api = HfApi()
api.upload_file(
- path_or_fileobj=DATA_FILE, path_in_repo=DATA_FILE, repo_id=REPO_ID,
- repo_type="dataset", token=HF_TOKEN_WRITE,
- commit_message=f"Backup TG App {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}"
+ path_or_fileobj=DATA_FILE,
+ path_in_repo=DATA_FILE,
+ repo_id=REPO_ID,
+ repo_type="dataset",
+ token=HF_TOKEN_WRITE,
+ commit_message=f"Backup {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}"
)
logging.info("Database uploaded to Hugging Face")
except Exception as e:
- logging.error(f"Error uploading database: {e}")
+ logging.exception("Error uploading database:")
def download_db_from_hf():
- if not HF_TOKEN_READ:
- logging.warning("HF_TOKEN_READ not set, skipping database download.")
+ if not HF_TOKEN_READ and not HF_TOKEN_WRITE:
+ logging.warning("No HF_TOKENs set, skipping database download.")
if not os.path.exists(DATA_FILE):
- with open(DATA_FILE, 'w', encoding='utf-8') as f: json.dump({'users': {}}, f)
+ with open(DATA_FILE, 'w', encoding='utf-8') as f:
+ json.dump({'users': {}}, f)
+ logging.info(f"Created empty local database file: {DATA_FILE}")
return
try:
+ # Fall back to HF_TOKEN_WRITE when the READ token is not set, for flexibility.
+ token_to_use = HF_TOKEN_READ if HF_TOKEN_READ else HF_TOKEN_WRITE
hf_hub_download(
- repo_id=REPO_ID, filename=DATA_FILE, repo_type="dataset", token=HF_TOKEN_READ,
- local_dir=".", local_dir_use_symlinks=False, etag_timeout=60 # Increased timeout
+ repo_id=REPO_ID,
+ filename=DATA_FILE,
+ repo_type="dataset",
+ token=token_to_use,
+ local_dir=".",
+ local_dir_use_symlinks=False
)
logging.info("Database downloaded from Hugging Face")
+ except hf_utils.RepositoryNotFoundError:
+ logging.error(f"Repository {REPO_ID} not found.")
+ if not os.path.exists(DATA_FILE):
+ with open(DATA_FILE, 'w', encoding='utf-8') as f:
+ json.dump({'users': {}}, f)
+ logging.info(f"Created empty local database file: {DATA_FILE}")
except hf_utils.EntryNotFoundError:
- logging.warning(f"{DATA_FILE} not found in repo. Initializing empty DB locally.")
+ logging.warning(f"{DATA_FILE} not found in repository {REPO_ID}. Initializing empty database.")
if not os.path.exists(DATA_FILE):
- with open(DATA_FILE, 'w', encoding='utf-8') as f: json.dump({'users': {}}, f)
+ with open(DATA_FILE, 'w', encoding='utf-8') as f:
+ json.dump({'users': {}}, f)
+ logging.info(f"Created empty local database file: {DATA_FILE}")
except Exception as e:
- logging.error(f"Error downloading database: {e}")
+ logging.exception("Error downloading database:")
if not os.path.exists(DATA_FILE):
- with open(DATA_FILE, 'w', encoding='utf-8') as f: json.dump({'users': {}}, f)
+ with open(DATA_FILE, 'w', encoding='utf-8') as f:
+ json.dump({'users': {}}, f)
+ logging.info(f"Created empty local database file: {DATA_FILE}")
+
def periodic_backup():
+ if not HF_TOKEN_WRITE:
+ logging.warning("Periodic backup disabled: HF_TOKEN_WRITE not set.")
+ return
while True:
- time.sleep(1800) # Backup every 30 minutes
- logging.info("Starting periodic backup...")
- data = load_data() # Load current data before saving
- save_data(data)
-
-# --- Telegram Validation ---
-def check_telegram_authorization(auth_data: str, bot_token: str) -> dict | None:
- if not auth_data: return None
- try:
- parsed_data = dict(parse_qsl(unquote(auth_data)))
- if "hash" not in parsed_data: return None
-
- telegram_hash = parsed_data.pop('hash')
- auth_date_ts = int(parsed_data.get('auth_date', 0))
- current_ts = int(time.time())
- if current_ts - auth_date_ts > AUTH_DATA_LIFETIME:
- logging.warning(f"Auth data expired: {current_ts - auth_date_ts} seconds old.")
- return None
-
- data_check_string = "\n".join(sorted([f"{k}={v}" for k, v in parsed_data.items()]))
- secret_key = hmac.new("WebAppData".encode(), bot_token.encode(), hashlib.sha256).digest()
- calculated_hash = hmac.new(secret_key, data_check_string.encode(), hashlib.sha256).hexdigest()
-
- if calculated_hash == telegram_hash:
- user_data_str = parsed_data.get('user')
- if user_data_str:
- try:
- return json.loads(user_data_str)
- except json.JSONDecodeError:
- logging.error("Failed to decode user JSON from initData")
- return None
- return {} # Valid hash, but no user field? Return empty dict
- else:
- logging.warning("Hash mismatch during validation.")
- return None
- except Exception as e:
- logging.error(f"Error during Telegram validation: {e}")
- return None
-
-# --- HTML Template ---
-HTML_TEMPLATE = """
-
-
-
-
-
- Zeus Cloud
-
-
-
-
-
-
-
-
Загрузка данных...
-
+@app.route('/validate', methods=['POST'])
+def validate_data():
+ if BOT_TOKEN == 'YOUR_BOT_TOKEN':
+ logging.warning("Using placeholder BOT_TOKEN. Validation will fail.")
+ # Decide whether to allow a bypass for local testing or to return an error.
+ # For security, it is better to return an error in production while the token is still the placeholder.
+ # return jsonify({"status": "error", "message": "Server configuration error: BOT_TOKEN not set"}), 500
+ # Allowing bypass for local testing:
+ # if request.remote_addr in ['127.0.0.1', '::1']:
+ # dummy_user = {"id": 123, "first_name": "Тестовый", "last_name": "Пользователь", "username": "test_user", "language_code": "ru", "is_premium": False}
+ # session['user_id'] = dummy_user['id']
+ # session['user_data'] = dummy_user # Store basic data in session
+ # data = load_data()
+ # if str(dummy_user['id']) not in data['users']:
+ # data['users'][str(dummy_user['id'])] = {**dummy_user, 'created_at': datetime.now().strftime('%Y-%m-%d %H:%M:%S'), 'filesystem': {"type": "folder", "id": "root", "name": "root", "children": []}}
+ # save_data(data)
+ # return jsonify({"status": "ok", "user": dummy_user})
+ # else:
+ return jsonify({"status": "error", "message": "BOT_TOKEN не настроен на сервере"}), 500
-
-
-
-
-
-
-
- Файлы
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
0%
-
-
-
-
-
-
-
Эта папка пуста.
-
-
-
-
+ data = request.get_json()
+ if not data or 'initData' not in data:
+ return jsonify({"status": "error", "message": "Missing initData in request"}), 400
-
-
- ×
-
-
-
+ init_data_string = data['initData']
+ validated_user_data = check_telegram_authorization(init_data_string, BOT_TOKEN)
+
+ if validated_user_data:
+ user_id = validated_user_data['id']
+ session['user_id'] = user_id # Use Telegram user ID as primary session key
+ session['user_data'] = validated_user_data # Store the validated user data
+
+ # Load/create user in database
+ db_data = load_data()
+ user_id_str = str(user_id)
+ if user_id_str not in db_data['users']:
+ db_data['users'][user_id_str] = {
+ **validated_user_data, # Save all Telegram data
+ 'created_at': datetime.now().strftime('%Y-%m-%d %H:%M:%S'),
+ 'filesystem': {"type": "folder", "id": "root", "name": "root", "children": []}
+ }
+ logging.info(f"New user registered: {user_id}")
+ else:
+ # Optionally update user data (username, photo_url etc.) on subsequent logins
+ db_data['users'][user_id_str].update(validated_user_data)
+ logging.info(f"User {user_id} logged in.")
-
-
-
-"""
+ if not HF_TOKEN_WRITE:
+ return jsonify({"status": "error", "message": "Удаление невозможно: токен для записи не настроен."}), 503
-# --- Flask Routes ---
+ try:
+ api = HfApi()
+ api.delete_file(
+ path_in_repo=hf_path,
+ repo_id=REPO_ID,
+ repo_type="dataset",
+ token=HF_TOKEN_WRITE,
+ commit_message=f"User {user_id} deleted file {original_filename} (ID: {file_id})"
+ )
+ logging.info(f"Deleted file {hf_path} from HF Hub for user {user_id}")
-@app.route('/')
-def index():
- """Serves the main Mini App HTML."""
- # Replace placeholder in template - avoids exposing token directly in JS if inspected early
- # Although, initData mechanism is the primary security layer.
- html_content = HTML_TEMPLATE.replace("{{ bot_token_placeholder }}", "YOUR_BOT_TOKEN" if BOT_TOKEN == 'YOUR_BOT_TOKEN' else "HIDDEN")
- return Response(html_content, mimetype='text/html')
-
-@app.route('/validate_telegram', methods=['POST'])
-def validate_telegram_data():
- """Validates initData, creates user if new, and establishes session."""
- if BOT_TOKEN == 'YOUR_BOT_TOKEN':
- logging.warning("Attempting validation with placeholder BOT_TOKEN!")
- # Return error immediately if token isn't set for production
- # return jsonify({"status": "error", "message": "Server configuration error: Bot token not set."}), 500
+ if remove_node(user_data['filesystem'], file_id):
+ try:
+ save_data(db_data)
+ return jsonify({"status": "ok", "message": f'Файл "{original_filename}" успешно удален!'})
+ except Exception as e:
+ logging.exception(f"Error saving data after deleting file from HF for user {user_id}:")
+ return jsonify({"status": "error", "message": "Файл удален с сервера, но произошла ошибка обновления базы данных."}), 500
+ else:
+ logging.error(f"Failed to remove file node {file_id} from DB for user {user_id} after HF deletion.")
+ return jsonify({"status": "error", "message": "Файл удален с сервера, но не найден в базе данных для удаления."}), 500
- data = request.get_json()
- if not data or 'initData' not in data:
- return jsonify({"status": "error", "message": "Missing initData"}), 400
- init_data_string = data['initData']
- validated_user_info = check_telegram_authorization(init_data_string, BOT_TOKEN)
-
- if validated_user_info is not None and 'id' in validated_user_info:
- user_id_str = str(validated_user_info['id'])
- session['user_id'] = user_id_str
- session['user_info'] = validated_user_info # Store full info if needed
-
- # Check if user exists, create if not
- all_data = load_data()
- if user_id_str not in all_data['users']:
- logging.info(f"New user detected: {user_id_str}, Name: {validated_user_info.get('first_name')}")
- all_data['users'][user_id_str] = {
- 'tg_info': validated_user_info,
- 'created_at': datetime.now().strftime('%Y-%m-%d %H:%M:%S'),
- 'filesystem': { "type": "folder", "id": "root", "name": "root", "children": [] }
- }
+ except hf_utils.EntryNotFoundError:
+ logging.warning(f"File {hf_path} not found on HF Hub during delete attempt for user {user_id}. Removing from DB.")
+ if remove_node(user_data['filesystem'], file_id):
try:
- save_data(all_data)
- logging.info(f"New user {user_id_str} saved.")
+ save_data(db_data)
+ return jsonify({"status": "ok", "message": f'Файл "{original_filename}" не найден на сервере, удален из базы.'})
except Exception as e:
- logging.error(f"Failed to save new user {user_id_str}: {e}")
- # Should we proceed or return error? Let's proceed but log.
+ logging.exception(f"Error saving data after deleting metadata (HF not found) for user {user_id}:")
+ return jsonify({"status": "error", "message": "Ошибка сохранения данных после удаления метаданных (файл не найден на сервере)."})
else:
- # Ensure filesystem exists for existing user (data integrity check)
- if 'filesystem' not in all_data['users'][user_id_str] or not isinstance(all_data['users'][user_id_str]['filesystem'], dict):
- logging.warning(f"Filesystem missing or invalid for user {user_id_str}. Reinitializing.")
- initialize_user_filesystem(all_data['users'][user_id_str])
- # Optionally save immediately if structure was corrected
- # try: save_data(all_data)
- # except: logging.error(...)
-
- return jsonify({"status": "ok", "user": validated_user_info})
- else:
- logging.warning("Telegram validation failed or user ID missing.")
- session.clear() # Clear session on failed validation
- return jsonify({"status": "error", "message": "Invalid Telegram data or expired session."}), 403
+ logging.error(f"Failed to remove file node {file_id} (HF not found) from DB for user {user_id}.")
+ return jsonify({"status": "error", "message": "Файл не найден ни на сервере, ни в базе данных."}), 500
-# --- Filesystem API Routes (Require Session) ---
+ except Exception as e:
+ logging.exception(f"Error deleting file {hf_path} for {user_id}:")
+ return jsonify({"status": "error", "message": f'Ошибка удаления файла "{original_filename}": {e}'}), 500
-@app.before_request
-def check_session():
- # Allow access to '/' and '/validate_telegram' without session
- if request.endpoint in ['index', 'validate_telegram_data', 'static']:
- return
- # Allow download and text_content if accessed via specific means (e.g., direct link after initial auth)
- # This is less secure than validating initData on each request, but simpler for downloads.
- # Consider adding more robust checks if needed.
- if request.endpoint in ['download_file_route', 'get_text_content_route']:
- # Maybe check a temporary token here in the future?
- # For now, just rely on the existing session from the main app load.
- pass
-
- if 'user_id' not in session:
- logging.warning(f"Unauthorized access attempt to {request.endpoint}. No user_id in session.")
- return jsonify({"status": "error", "message": "Unauthorized. Please relaunch the app."}), 401
-
-@app.route('/filesystem/', methods=['GET'])
-def get_folder_content(folder_id):
- user_id = session.get('user_id')
- if not user_id: return jsonify({"status": "error", "message": "Unauthorized"}), 401
- data = load_data()
- user_data = data['users'].get(user_id)
+@app.route('/delete_folder/', methods=['POST'])
+@requires_auth
+def delete_folder(folder_id):
+ user_id = session['user_id']
+ db_data = load_data()
+ user_data = db_data['users'].get(str(user_id))
+
if not user_data or 'filesystem' not in user_data:
- return jsonify({"status": "error", "message": "User data not found"}), 404
-
- folder_node, _ = find_node_by_id(user_data['filesystem'], folder_id)
- if not folder_node or folder_node.get('type') != 'folder':
- # Try finding root if requested folder doesn't exist
- folder_node, _ = find_node_by_id(user_data['filesystem'], 'root')
- if not folder_node:
- return jsonify({"status": "error", "message": "Root folder not found"}), 404
- folder_id = 'root' # Reset to root
-
-
- items_in_folder = sorted(folder_node.get('children', []), key=lambda x: (x.get('type', 'file') != 'folder', x.get('name', x.get('original_filename', '')).lower()))
-
- # Build breadcrumbs
- breadcrumbs = []
- temp_id = folder_id
- visited_bc = set()
- while temp_id and temp_id not in visited_bc:
- visited_bc.add(temp_id)
- node, parent = find_node_by_id(user_data['filesystem'], temp_id)
- if not node: break
- breadcrumbs.append({'id': node['id'], 'name': node.get('name', 'Root')})
- if not parent: break
- temp_id = parent.get('id')
- breadcrumbs.reverse()
-
-
- return jsonify({
- "status": "ok",
- "items": items_in_folder,
- "breadcrumbs": breadcrumbs,
- "current_folder_name": folder_node.get('name', 'Root'),
- "current_folder_id": folder_id
- })
-
-
-@app.route('/folder', methods=['POST'])
-def create_folder_route():
- user_id = session.get('user_id')
- if not user_id: return jsonify({"status": "error", "message": "Unauthorized"}), 401
+ logging.error(f"User data or filesystem missing during delete_folder for user ID: {user_id}")
+ session.pop('user_id', None)
+ session.pop('user_data', None)
+ return jsonify({"status": "error", "message": "Internal server error."}), 500
- req_data = request.get_json()
- parent_folder_id = req_data.get('parent_folder_id', 'root')
- folder_name = req_data.get('folder_name', '').strip()
+ if folder_id == 'root':
+ return jsonify({"status": "error", "message": "Нельзя удалить корневую папку!"}), 400
- if not folder_name:
- return jsonify({'status': 'error', 'message': 'Имя папки не может быть пустым!'}), 400
- if not folder_name.replace(' ', '').replace('_', '').replace('-', '').isalnum():
- return jsonify({'status': 'error', 'message': 'Имя папки содержит недопустимые символы.'}), 400
- if len(folder_name) > 50: # Limit length
- return jsonify({'status': 'error', 'message': 'Имя папки слишком длинное.'}), 400
+ request_data = request.get_json()
+ current_view_folder_id = request_data.get('current_view_folder_id', 'root')
- data = load_data()
- user_data = data['users'].get(user_id)
- if not user_data: return jsonify({'status': 'error', 'message': 'User data not found'}), 404
+ folder_node, parent_node = find_node_by_id(user_data['filesystem'], folder_id)
- folder_id = uuid.uuid4().hex
- folder_data = { 'type': 'folder', 'id': folder_id, 'name': folder_name, 'children': [] }
+ if not folder_node or folder_node.get('type') != 'folder' or not parent_node:
+ return jsonify({"status": "error", "message": "Папка не найдена или не может быть удалена."}), 404
- if add_node(user_data['filesystem'], parent_folder_id, folder_data):
+ folder_name = folder_node.get('name', 'папка')
+
+ if folder_node.get('children'):
+ return jsonify({"status": "error", "message": f'Папку "{folder_name}" можно удалить только если она пуста.'}), 400
+
+ if remove_node(user_data['filesystem'], folder_id):
try:
- save_data(data)
- return jsonify({'status': 'success', 'message': f'Папка "{folder_name}" создана.'})
+ save_data(db_data)
+ redirect_to_folder_id = parent_node.get('id', 'root')
+ return jsonify({"status": "ok", "message": f'Пустая папка "{folder_name}" успешно удалена.', "redirect_to_folder_id": redirect_to_folder_id})
except Exception as e:
- logging.error(f"Create folder save error for user {user_id}: {e}")
- # Attempt to remove added node if save failed? Complex.
- return jsonify({'status': 'error', 'message': 'Ошибка сохранения данных при создании папки.'}), 500
+ logging.exception(f"Error saving data after deleting empty folder for user {user_id}:")
+ return jsonify({"status": "error", "message": "Ошибка сохранения данных после удаления папки."}), 500
else:
- return jsonify({'status': 'error', 'message': 'Не удалось найти родительскую папку.'}), 404
+ logging.error(f"Failed to remove folder node {folder_id} from DB for user {user_id}.")
+ return jsonify({"status": "error", "message": "Не удалось удалить папку из базы данных."}), 500
-@app.route('/upload/', methods=['POST'])
-def upload_file_route(folder_id):
- user_id = session.get('user_id')
- user_info = session.get('user_info', {})
- user_identifier = user_info.get('username', user_id) # Use username in path if available, else ID
+@app.route('/get_text_content/')
+@requires_auth
+def get_text_content(file_id):
+ user_id = session['user_id']
+ db_data = load_data()
+ user_data = db_data['users'].get(str(user_id))
- if not user_id: return jsonify({"status": "error", "message": "Unauthorized"}), 401
- if not HF_TOKEN_WRITE:
- return jsonify({'status': 'error', 'message': 'Загрузка невозможна: токен HF для записи не настроен.'}), 503
+ if not user_data or 'filesystem' not in user_data:
+ logging.error(f"User data or filesystem missing during get_text_content for user ID: {user_id}")
+ # Usually we cannot return JSON here; this endpoint is fetched by JS to fill the modal content, so return plain text.
+ return Response("Internal server error: User data corrupted.", status=500)
- files = request.files.getlist('files')
- if not files or all(not f.filename for f in files):
- return jsonify({'status': 'error', 'message': 'Файлы для загрузки не выбраны.'}), 400
- if len(files) > 20:
- return jsonify({'status': 'error', 'message': 'Максимум 20 файлов за раз!'}), 400
+ file_node, _ = find_node_by_id(user_data['filesystem'], file_id)
- data = load_data()
- user_data = data['users'].get(user_id)
- if not user_data: return jsonify({'status': 'error', 'message': 'User data not found'}), 404
+ if not file_node or file_node.get('type') != 'file' or file_node.get('file_type') != 'text':
+ return Response("Текстовый файл не найден или недоступен для предпросмотра.", status=404)
- target_folder_node, _ = find_node_by_id(user_data['filesystem'], folder_id)
- if not target_folder_node or target_folder_node.get('type') != 'folder':
- return jsonify({'status': 'error', 'message': 'Целевая папка для загрузки не найдена!'}), 404
+ hf_path = file_node.get('path')
+ if not hf_path:
+ logging.error(f"Text file {file_id} for user {user_id} has no HF path.")
+ return Response("Ошибка: путь к файлу отсутствует", status=500)
- api = HfApi()
- uploaded_count = 0
- errors = []
- save_needed = False
+ file_url = f"https://huggingface.co/datasets/{REPO_ID}/resolve/main/{hf_path}"
- for file in files:
- if file and file.filename:
- original_filename = secure_filename(file.filename)
- if len(original_filename) > 100: # Limit filename length
- original_filename = original_filename[:97] + '...'
+ try:
+ headers = {}
+ if HF_TOKEN_READ:
+ headers["authorization"] = f"Bearer {HF_TOKEN_READ}"
- name_part, ext_part = os.path.splitext(original_filename)
- unique_suffix = uuid.uuid4().hex[:6]
- unique_filename = f"{name_part}_{unique_suffix}{ext_part}"
- file_id = uuid.uuid4().hex
+ response = requests.get(file_url, headers=headers)
+ response.raise_for_status()
- # Use user_id in the HF path for uniqueness
- hf_path = f"cloud_files/{user_id}/{folder_id}/{unique_filename}"
- temp_path = os.path.join(UPLOAD_FOLDER, f"{file_id}_{unique_filename}")
+ if len(response.content) > 1 * 1024 * 1024: # Limit text file size for preview
+ return Response("Файл слишком большой для предпросмотра.", status=413)
+ try:
+ text_content = response.content.decode('utf-8')
+ except UnicodeDecodeError:
try:
- file.save(temp_path)
- api.upload_file(
- path_or_fileobj=temp_path, path_in_repo=hf_path, repo_id=REPO_ID,
- repo_type="dataset", token=HF_TOKEN_WRITE,
- commit_message=f"TG User {user_identifier} ({user_id}) uploaded {original_filename} to folder {folder_id}"
- )
- file_info = {
- 'type': 'file', 'id': file_id, 'original_filename': original_filename,
- 'unique_filename': unique_filename, 'path': hf_path,
- 'file_type': get_file_type(original_filename),
- 'upload_date': datetime.now().strftime('%Y-%m-%d %H:%M:%S')
- }
- if add_node(user_data['filesystem'], folder_id, file_info):
- uploaded_count += 1
- save_needed = True
- else:
- errors.append(f"Ошибка добавления метаданных для {original_filename}.")
- logging.error(f"Failed to add node metadata for file {file_id} to folder {folder_id} for user {user_id}")
- # Attempt to delete orphaned file from HF
- try: api.delete_file(path_in_repo=hf_path, repo_id=REPO_ID, repo_type="dataset", token=HF_TOKEN_WRITE)
- except Exception as del_err: logging.error(f"Failed to delete orphaned file {hf_path}: {del_err}")
- except Exception as e:
- logging.error(f"Error uploading file {original_filename} for {user_id}: {e}")
- errors.append(f"Ошибка загрузки {original_filename}: {str(e)[:100]}") # Limit error msg length
- finally:
- if os.path.exists(temp_path):
- try: os.remove(temp_path)
- except Exception as rm_err: logging.error(f"Error removing temp file {temp_path}: {rm_err}")
-
- response_message = ""
- if uploaded_count > 0:
- response_message += f'{uploaded_count} файл(ов) успешно загружено. '
- if save_needed:
- try:
- save_data(data)
- except Exception as e:
- response_message += 'Ошибка сохранения метаданных. '
- logging.error(f"Error saving data after upload for {user_id}: {e}")
- if errors:
- response_message += "Ошибки: " + "; ".join(errors)
+ text_content = response.content.decode('latin-1')
+ except Exception:
+ logging.error(f"Could not decode text file {hf_path} for user {user_id}.")
+ return Response("Не удалось определить кодировку файла.", status=500)
- status = 'success' if uploaded_count > 0 else 'error'
- if uploaded_count > 0 and errors: status = 'partial_success' # Indicate partial success
+ return Response(text_content, mimetype='text/plain')
- return jsonify({'status': status, 'message': response_message.strip()})
+ except requests.exceptions.RequestException as e:
+ logging.exception(f"Error fetching text content from HF ({hf_path}) for user {user_id}:")
+ return Response(f"Ошибка загрузки содержимого: {e}", status=502)
+ except Exception as e:
+ logging.exception(f"Unexpected error fetching text content ({hf_path}) for user {user_id}:")
+ return Response("Внутренняя ошибка сервера", status=500)
+
+
@app.route('/logout')
def logout():
    """Drop the user's session keys.

    Telegram Mini Apps are normally closed by the client rather than
    logged out of explicitly, so this endpoint exists for completeness;
    closing the Mini App itself is handled by JS on the client side.
    """
    for key in ('user_id', 'user_data'):
        session.pop(key, None)
    return "Logged out. Please close the Mini App."
+
+
# --- ADMIN PANEL (using separate login for simplicity in this single file) ---
# Admin credentials come from the environment; the fallbacks below are
# insecure development placeholders and must be overridden in production.
ADMIN_USERNAME = os.getenv("ADMIN_USERNAME", "admin")
ADMIN_PASSWORD = os.getenv("ADMIN_PASSWORD", "adminpass") # CHANGE ME!
+
@app.route('/admhosto_login', methods=['GET', 'POST'])
def admhosto_login():
    """Render the admin login form and authenticate admin credentials.

    On successful POST sets ``session['is_admin']`` and redirects to the
    admin panel; on failure flashes an error and re-renders the form.
    """
    if request.method == 'POST':
        username = request.form.get('username')
        password = request.form.get('password')
        if username == ADMIN_USERNAME and password == ADMIN_PASSWORD:
            session['is_admin'] = True
            flash('Админ вход успешен.')
            return redirect(url_for('admin_panel'))
        else:
            flash('Неверный логин или пароль администратора.', 'error')
    # NOTE(review): the original inline HTML template was destroyed when this
    # file was mangled; the markup below is a minimal functional
    # reconstruction of the login form — confirm against the original layout.
    html = '''
<!DOCTYPE html>
<html lang="ru">
<head><meta charset="utf-8"><title>Админ Логин</title></head>
<body>
<h1>Админ Логин</h1>
{% with messages = get_flashed_messages() %}{% if messages %}{% for message in messages %}<p>{{ message }}</p>{% endfor %}{% endif %}{% endwith %}
<form method="post">
  <input type="text" name="username" placeholder="Логин" required>
  <input type="password" name="password" placeholder="Пароль" required>
  <button type="submit">Войти</button>
</form>
</body>
</html>
'''
    return render_template_string(html)
+ # Add truncate filter for Jinja2
+ def truncate_filter(s, length=25, killwords=False, end='...'):
+ if len(s) <= length:
+ return s
+ if killwords:
+ return s[:length] + end
+ words = s.split()
+ result = []
+ l = 0
+ for word in words:
+ if l + len(word) + len(result) > length:
+ break
+ result.append(word)
+ l += len(word)
+ return ' '.join(result) + end if result else s[:length] + end
+
+ app.jinja_env.filters['truncate'] = truncate_filter
+
+
+ return render_template_string(html, user_id=user_id, user_data=user_data, files=all_files, REPO_ID=REPO_ID)
+
+
@app.route('/admhosto/delete_user/<int:user_id>', methods=['POST'])
def admin_delete_user(user_id):
    """Admin action: delete a user and all of their data.

    Best-effort deletes the user's ``cloud_files/<id>`` folder from the HF
    Hub dataset, then removes the user's record from the local database
    even if the Hub deletion failed. Requires an authenticated admin
    session and a configured write token.

    Args:
        user_id: numeric Telegram user id bound from the URL.
    """
    if not is_admin():
        flash('Доступ запрещен.', 'error')
        return redirect(url_for('admhosto_login'))
    if not HF_TOKEN_WRITE:
        flash('Удаление невозможно: токен для записи не настроен.', 'error')
        return redirect(url_for('admin_panel'))

    db_data = load_data()
    user_id_str = str(user_id)
    if user_id_str not in db_data['users']:
        flash(f'Пользователь ID {user_id} не найден!', 'error')
        return redirect(url_for('admin_panel'))

    logging.warning(f"ADMIN ACTION: Attempting to delete user ID {user_id} and all their data.")
    try:
        api = HfApi()
        user_folder_path_on_hf = f"cloud_files/{user_id_str}"
        logging.info(f"Attempting to delete HF Hub folder: {user_folder_path_on_hf} for user {user_id}")
        api.delete_folder(
            folder_path=user_folder_path_on_hf,
            repo_id=REPO_ID,
            repo_type="dataset",
            token=HF_TOKEN_WRITE,
            commit_message=f"ADMIN ACTION: Deleted all files/folders for user ID {user_id}"
        )
        logging.info(f"Successfully initiated deletion of folder {user_folder_path_on_hf} on HF Hub (or it was missing).")
    except Exception as e:
        logging.exception(f"Error during HF Hub folder deletion for user ID {user_id}:")
        # Do NOT return here: attempt DB cleanup even if the Hub deletion failed,
        # otherwise the user record would linger and block re-registration cleanup.
        flash(f'Ошибка при удалении файлов пользователя ID {user_id} с сервера: {e}. Проверьте HF Hub. Пользователь будет удален из базы.', 'error')

    try:
        del db_data['users'][user_id_str]
        save_data(db_data)
        flash(f'Пользователь ID {user_id} успешно удален из базы данных!')
        logging.info(f"ADMIN ACTION: Successfully deleted user ID {user_id} from database.")
    except Exception as e:
        logging.exception(f"Error saving data after deleting user ID {user_id}:")
        flash(f'Ошибка при удалении пользователя ID {user_id} из базы данных: {e}', 'error')
    return redirect(url_for('admin_panel'))
-@app.route('/file/', methods=['DELETE'])
-def delete_file_route(file_id):
- user_id = session.get('user_id')
- user_info = session.get('user_info', {})
- user_identifier = user_info.get('username', user_id)
- if not user_id: return jsonify({"status": "error", "message": "Unauthorized"}), 401
+
@app.route('/admhosto/delete_file/<int:user_id>/<file_id>', methods=['POST'])
def admin_delete_file(user_id, file_id):
    """Admin action: delete a single file belonging to a user.

    Deletes the file from the HF Hub dataset (when it has a stored path),
    then removes its metadata node from the user's filesystem tree and
    persists the database. Each outcome (no path, deleted, missing on Hub,
    Hub error) is reported to the admin via ``flash``.

    Args:
        user_id: numeric Telegram user id bound from the URL.
        file_id: id of the file node inside the user's filesystem tree.
    """
    if not is_admin():
        flash('Доступ запрещен.', 'error')
        return redirect(url_for('admhosto_login'))
    if not HF_TOKEN_WRITE:
        flash('Удаление невозможно: токен для записи не настроен.', 'error')
        return redirect(url_for('admin_user_files', user_id=user_id))

    db_data = load_data()
    user_id_str = str(user_id)
    user_data = db_data.get('users', {}).get(user_id_str)
    if not user_data:
        flash(f'Пользователь ID {user_id} не найден.', 'error')
        return redirect(url_for('admin_panel'))

    file_node, parent_node = find_node_by_id(user_data.get('filesystem', {}), file_id)
    if not file_node or file_node.get('type') != 'file' or not parent_node:
        flash('Файл не найден в структуре пользователя.', 'error')
        return redirect(url_for('admin_user_files', user_id=user_id))

    hf_path = file_node.get('path')
    original_filename = file_node.get('original_filename', 'файл')

    if not hf_path:
        # No stored Hub path: the file only exists as metadata, so just
        # remove the node from the DB without touching the Hub.
        logging.warning(f"ADMIN ACTION: File {original_filename} (ID: {file_id}) for user {user_id} has no HF path. Removing from DB only.")
        if remove_node(user_data['filesystem'], file_id):
            try:
                save_data(db_data)
                flash(f'Метаданные файла "{original_filename}" удалены (путь отсутствовал).')
            except Exception as e:
                logging.exception(f"Admin delete file metadata save error (no path) for user {user_id}:")
                flash('Ошибка сохранения данных после удаления метаданных (путь отсутствовал).', 'error')
        else:
            logging.error(f"Admin failed to remove file node {file_id} (no path) from DB for user {user_id}.")
            flash('Ошибка удаления метаданных файла из базы данных.', 'error')
        return redirect(url_for('admin_user_files', user_id=user_id))

    try:
        api = HfApi()
        api.delete_file(
            path_in_repo=hf_path,
            repo_id=REPO_ID,
            repo_type="dataset",
            token=HF_TOKEN_WRITE,
            commit_message=f"ADMIN ACTION: Deleted file {original_filename} (ID: {file_id}) for user ID {user_id}"
        )
        logging.info(f"ADMIN ACTION: Deleted file {hf_path} from HF Hub for user ID {user_id}")
        if remove_node(user_data['filesystem'], file_id):
            try:
                save_data(db_data)
                flash(f'Файл "{original_filename}" успешно удален!')
            except Exception as e:
                logging.exception(f"Admin delete file DB update error for user {user_id}:")
                flash('Файл удален с сервера, но произошла ошибка обновления базы данных.', 'error')
        else:
            logging.error(f"Admin failed to remove file node {file_id} from DB for user {user_id} after HF deletion.")
            flash('Файл удален с сервера, но не найден в базе данных для удаления.', 'error')
    except hf_utils.EntryNotFoundError:
        # File already gone on the Hub — still clean up the metadata so the
        # admin view stays consistent with reality.
        logging.warning(f"ADMIN ACTION: File {hf_path} not found on HF Hub during delete for user {user_id}. Removing from DB.")
        if remove_node(user_data['filesystem'], file_id):
            try:
                save_data(db_data)
                flash(f'Файл "{original_filename}" не найден на сервере, удален из базы.')
            except Exception as e:
                logging.exception(f"Admin delete file metadata save error (HF not found) for user {user_id}:")
                flash('Ошибка сохранения данных после удаления метаданных (файл не найден на сервере).', 'error')
        else:
            logging.error(f"Admin failed to remove file node {file_id} (HF not found) from DB for user {user_id}.")
            flash('Файл не найден ни на сервере, ни в базе данных.', 'error')
    except Exception as e:
        logging.exception(f"ADMIN ACTION: Error deleting file {hf_path} for user {user_id}:")
        flash(f'Ошибка удаления файла "{original_filename}": {e}', 'error')
    return redirect(url_for('admin_user_files', user_id=user_id))
-@app.route('/text_content/')
-def get_text_content_route(file_id):
- user_id = session.get('user_id')
- if not user_id: return Response("Unauthorized", status=401)
- if not HF_TOKEN_READ: return Response("Server configuration error: Read token missing", status=503)
@app.route('/admhosto/logout')
def admhosto_logout():
    """End the admin session and send the admin back to the login page."""
    session.pop('is_admin', None)
    flash('Вы успешно вышли из админ-панели.')
    return redirect(url_for('admhosto_login'))
- data = load_data()
- user_data = data['users'].get(user_id)
- if not user_data: return Response("User data not found", status=404)
- file_node, _ = find_node_by_id(user_data['filesystem'], file_id)
- if not file_node or file_node.get('type') != 'file' or file_node.get('file_type') != 'text':
- return Response("Text file not found", status=404)
if __name__ == '__main__':
    # --- Startup configuration diagnostics ---
    # Surface misconfiguration loudly before the server begins serving,
    # since each missing secret disables a distinct feature set.
    if not BOT_TOKEN or BOT_TOKEN == 'YOUR_BOT_TOKEN':
        logging.error("\n*** ERROR: TELEGRAM_BOT_TOKEN is not set or is the default placeholder. Telegram validation will FAIL. ***\n")
    if not HF_TOKEN_WRITE:
        logging.warning("\n*** WARNING: HF_TOKEN (write access) is not set. File uploads, deletions, and backups will FAIL. ***\n")
    if not HF_TOKEN_READ:
        logging.warning("\n*** WARNING: HF_TOKEN_READ is not set. Falling back to HF_TOKEN. File downloads/previews might fail for private repos if HF_TOKEN is also not set. ***\n")
    if ADMIN_PASSWORD == 'adminpass':
        logging.warning("\n*** WARNING: Using default ADMIN_PASSWORD. CHANGE IT! ***\n")

    logging.info("Performing initial database operations before starting.")
    # Ensure the local DB file exists so download/upload code never sees a
    # missing path on first run.
    if not os.path.exists(DATA_FILE):
        with open(DATA_FILE, 'w', encoding='utf-8') as f:
            json.dump({'users': {}}, f)
        logging.info(f"Created empty local database file: {DATA_FILE}")
    # Always try to pull the latest DB snapshot from the Hub on startup.
    download_db_from_hf()

    # Periodic backup needs a write token; a placeholder bot token also
    # disables it, since the app cannot serve real users in that state.
    if HF_TOKEN_WRITE and BOT_TOKEN != 'YOUR_BOT_TOKEN':
        logging.info("Starting periodic backup thread.")
        threading.Thread(target=periodic_backup, daemon=True).start()
    else:
        logging.warning("Periodic backup disabled.")

    app.run(host=HOST, port=PORT, debug=False)  # debug=False for production