+
+
-
-
-"""
-
-
-@app.route('/')
-def index():
- return Response(HTML_TEMPLATE, mimetype='text/html')
-
-@app.route('/validate_init_data', methods=['POST'])
-def validate_init_data():
- data = request.get_json()
- if not data or 'initData' not in data:
- return jsonify({"status": "error", "message": "Missing initData"}), 400
-
- init_data = data['initData']
- user_info = check_telegram_authorization(init_data, BOT_TOKEN)
-
- if user_info and 'id' in user_info:
- tg_user_id = str(user_info['id'])
- db_data = load_data()
- users = db_data.setdefault('users', {})
- save_needed = False
-
- user_entry = users.get(tg_user_id)
-
- if not user_entry or not isinstance(user_entry, dict):
- logging.info(f"New user detected or invalid entry: {tg_user_id}. Initializing.")
- users[tg_user_id] = {
- 'user_info': user_info,
- 'created_at': datetime.now().isoformat() # Use ISO format
+ } catch (e) {
+ console.error("Failed to parse upload response:", e, xhr.responseText);
+ flashContainer.innerHTML = `
Неожиданный ответ от сервера.
`;
+ tg.showAlert('Неожиданный ответ от сервера после загрузки.');
}
- initialize_user_filesystem(users[tg_user_id])
- save_needed = True
- else:
- # Check if filesystem needs initialization or repair
- if 'filesystem' not in user_entry or not isinstance(user_entry.get('filesystem'), dict):
- logging.warning(f"Filesystem missing or invalid for user {tg_user_id}. Re-initializing.")
- initialize_user_filesystem(user_entry)
- save_needed = True
-
- # Optionally update user info if changed (e.g., username)
- if user_entry.get('user_info', {}).get('username') != user_info.get('username'):
- user_entry['user_info'] = user_info # Update stored info
- save_needed = True
-
- if save_needed:
- if not save_data(db_data):
- logging.error(f"Failed to save data for user {tg_user_id} during validation.")
- # Avoid returning 500 if possible, user might still be usable with loaded data
- # return jsonify({"status": "error", "message": "Error saving user data."}), 500
- pass # Logged the error, proceed with current (possibly unsaved) state
-
- return jsonify({"status": "ok", "user": user_info})
- else:
- logging.warning(f"Validation failed for initData prefix: {init_data[:100]}...")
- return jsonify({"status": "error", "message": "Invalid authorization data."}), 403
-
-
-@app.route('/get_dashboard_data', methods=['POST'])
-def get_dashboard_data():
- data = request.get_json()
- if not data or 'initData' not in data or 'folder_id' not in data:
- return jsonify({"status": "error", "message": "Incomplete request"}), 400
-
- user_info = check_telegram_authorization(data['initData'], BOT_TOKEN)
- if not user_info or 'id' not in user_info:
- return jsonify({"status": "error", "message": "Unauthorized"}), 403
-
- tg_user_id = str(user_info['id'])
- folder_id = data['folder_id']
- db_data = load_data()
- user_data = db_data.get('users', {}).get(tg_user_id)
-
- if not user_data or 'filesystem' not in user_data or not isinstance(user_data['filesystem'], dict):
- logging.error(f"User data or filesystem missing/invalid for validated user {tg_user_id}")
- # Attempt recovery if filesystem is bad but user_data exists
- if isinstance(user_data, dict):
- logging.warning(f"Attempting to re-initialize filesystem for user {tg_user_id}")
- initialize_user_filesystem(user_data)
- if not save_data(db_data):
- logging.error(f"Failed to save re-initialized filesystem for user {tg_user_id}")
- # Continue with the newly initialized filesystem if save failed but init worked
- else:
- return jsonify({"status": "error", "message": "User data error"}), 500
-
-
- current_folder, _ = find_node_by_id(user_data['filesystem'], folder_id)
-
- if not current_folder or current_folder.get('type') != 'folder':
- logging.warning(f"Folder {folder_id} not found or invalid for user {tg_user_id}. Defaulting to root.")
- folder_id = 'root'
- current_folder, _ = find_node_by_id(user_data['filesystem'], folder_id)
- if not current_folder:
- logging.critical(f"CRITICAL: Root folder cannot be found for user {tg_user_id} even after check.")
- # Attempt recovery again
- initialize_user_filesystem(user_data)
- if not save_data(db_data):
- logging.error(f"Failed to save re-initialized filesystem after root recovery attempt for {tg_user_id}")
-
- current_folder, _ = find_node_by_id(user_data['filesystem'], 'root')
- if not current_folder: # Still failing
- return jsonify({"status": "error", "message": "Critical error: Root folder missing."}), 500
-
- items_in_folder = current_folder.get('children', [])
- if not isinstance(items_in_folder, list):
- logging.warning(f"Invalid 'children' in folder {folder_id} for user {tg_user_id}. Resetting to empty list.")
- items_in_folder = []
- current_folder['children'] = []
- # Consider saving data here if you want to persist this fix immediately
- # save_data(db_data)
-
- breadcrumbs = get_node_path_list(user_data['filesystem'], folder_id)
-
- current_folder_info = {
- 'id': current_folder.get('id'),
- 'name': current_folder.get('name', 'Root')
+ // Clear file input after upload
+ fileInput.value = '';
+ });
+
+ xhr.addEventListener('error', function() {
+ handleUploadEnd('Произошла ошибка во время загрузки.');
+ });
+
+ xhr.addEventListener('abort', function() {
+ handleUploadEnd('Загрузка отменена.');
+ });
+
+ function handleUploadEnd(message) {
+ uploadBtn.disabled = false;
+ uploadBtn.textContent = 'Загрузить файлы сюда';
+ progressContainer.style.display = 'none';
+ tg.MainButton.hideProgress();
+ tg.MainButton.setText('Загрузить файлы сюда');
+ tg.MainButton.enable();
+ tg.showAlert(message);
+ flashContainer.innerHTML = `
${message}
`;
+ fileInput.value = '';
+ }
+
+ xhr.open('POST', form.action, true);
+ // Add headers if needed, e.g., CSRF token if implemented
+ xhr.send(formData);
+ });
+
+
+'''
+
+ template_context = {
+ 'telegram_user': telegram_user,
+ 'items': items_in_folder,
+ 'current_folder_id': current_folder_id,
+ 'current_folder': current_folder,
+ 'breadcrumbs': breadcrumbs,
+ 'repo_id': REPO_ID,
+ 'HF_TOKEN_READ': HF_TOKEN_READ,
+ 'hf_file_url': lambda path, download=False: f"https://huggingface.co/datasets/{REPO_ID}/resolve/main/{path}{'?download=true' if download else ''}",
+ 'os': os,
+ 'is_admin': user_id in ADMIN_TELEGRAM_IDS
}
-
- return jsonify({
- "status": "ok",
- "items": items_in_folder,
- "breadcrumbs": breadcrumbs,
- "current_folder": current_folder_info
- })
-
-
-@app.route('/upload', methods=['POST'])
-def upload_files():
- init_data = request.form.get('initData')
- current_folder_id = request.form.get('current_folder_id', 'root')
- files = request.files.getlist('files')
-
- user_info = check_telegram_authorization(init_data, BOT_TOKEN)
- if not user_info or 'id' not in user_info:
- return jsonify({"status": "error", "message": "Unauthorized"}), 403
-
- tg_user_id = str(user_info['id'])
-
- if not HF_TOKEN_WRITE:
- return jsonify({'status': 'error', 'message': 'Upload configuration error.'}), 500
-
- if not files or all(not f.filename for f in files):
- return jsonify({'status': 'error', 'message': 'No files selected for upload.'}), 400
-
- if len(files) > 20:
- return jsonify({'status': 'error', 'message': 'Maximum 20 files per upload.'}), 400
-
- db_data = load_data()
- user_data = db_data.get('users', {}).get(tg_user_id)
-
- if not user_data or 'filesystem' not in user_data or not isinstance(user_data['filesystem'], dict):
- logging.error(f"Upload error: User data or filesystem missing/invalid for {tg_user_id}")
- return jsonify({"status": "error", "message": "User data error during upload."}), 500
-
- target_folder_node, _ = find_node_by_id(user_data['filesystem'], current_folder_id)
- if not target_folder_node or target_folder_node.get('type') != 'folder':
- logging.error(f"Upload error: Target folder {current_folder_id} not found for user {tg_user_id}")
- return jsonify({'status': 'error', 'message': 'Target folder not found!'}), 404
-
- api = HfApi()
- uploaded_count = 0
- errors = []
- nodes_added = [] # Keep track of nodes added in this request
-
- for file in files:
- if file and file.filename:
- original_filename = secure_filename(file.filename)
- if not original_filename:
- logging.warning(f"Skipping file with potentially insecure name: {file.filename}")
- errors.append(f"Skipped file with invalid name: {file.filename}")
- continue
-
- name_part, ext_part = os.path.splitext(original_filename)
- unique_suffix = uuid.uuid4().hex[:8]
- # Ensure filename doesn't become excessively long
- max_len = 100
- safe_name_part = name_part[:max_len]
- unique_filename = f"{safe_name_part}_{unique_suffix}{ext_part}"
- file_id = uuid.uuid4().hex
-
- # Define path relative to user/folder for organization
- hf_path = f"cloud_files/{tg_user_id}/{file_id[:2]}/{file_id}_{unique_filename}" # Add subfolder based on ID start
- temp_path = os.path.join(UPLOAD_FOLDER, f"{file_id}_{unique_filename}")
-
- file_info = {
- 'type': 'file', 'id': file_id,
- 'original_filename': original_filename,
- 'unique_filename': unique_filename, # Store the unique name used on HF
- 'path': hf_path,
- 'file_type': get_file_type(original_filename),
- 'upload_date': datetime.now().isoformat() # Use ISO format
- }
-
- try:
- file.save(temp_path)
- logging.info(f"Attempting HF upload to: {hf_path}")
- api.upload_file(
- path_or_fileobj=temp_path, path_in_repo=hf_path,
- repo_id=REPO_ID, repo_type="dataset", token=HF_TOKEN_WRITE,
- commit_message=f"User {tg_user_id} uploaded {original_filename}"
- )
- logging.info(f"HF upload successful for {original_filename} ({file_id})")
-
- # Add node to filesystem structure *after* successful HF upload
- if add_node(user_data['filesystem'], current_folder_id, file_info):
- uploaded_count += 1
- nodes_added.append(file_info) # Track success
- else:
- # This case is critical - file is on HF, but not in DB structure
- error_msg = f"Failed to add metadata for {original_filename} after upload."
- errors.append(error_msg)
- logging.error(f"{error_msg} User: {tg_user_id}, FileID: {file_id}, TargetFolder: {current_folder_id}")
- # Attempt to delete the orphaned HF file
- try:
- logging.warning(f"Attempting cleanup of orphaned HF file: {hf_path}")
- api.delete_file(path_in_repo=hf_path, repo_id=REPO_ID, repo_type="dataset", token=HF_TOKEN_WRITE)
- logging.info(f"Orphaned file {hf_path} deleted from HF.")
- except Exception as del_err:
- logging.error(f"CRITICAL: Failed to delete orphaned HF file {hf_path}: {del_err}")
-
- except Exception as e:
- logging.error(f"Upload error for {original_filename} (User: {tg_user_id}, FileID: {file_id}): {e}", exc_info=True)
- errors.append(f"Error uploading {original_filename}")
- # Ensure node wasn't partially added if error occurred during add_node or before
- if file_info in nodes_added: nodes_added.remove(file_info)
- finally:
- # Clean up local temporary file
- if os.path.exists(temp_path):
- try: os.remove(temp_path)
- except OSError as e_rm: logging.warning(f"Error removing temp file {temp_path}: {e_rm}")
-
- # Save data only if at least one file was successfully uploaded AND added to structure
- if uploaded_count > 0 and nodes_added:
- logging.info(f"Saving DB for user {tg_user_id} after {uploaded_count} successful uploads.")
- if not save_data(db_data):
- # If save fails, we have inconsistency: files on HF, maybe some nodes added in memory, but not persisted.
- logging.error(f"CRITICAL: Failed to save DB after successful uploads for user {tg_user_id}.")
- errors.append("Critical error saving file metadata after upload.")
- # Attempt to revert the in-memory additions? Very complex. Logging is key here.
- # Rollback: Remove nodes that were added in this request from the in-memory structure
- for node_info in nodes_added:
- remove_node(user_data['filesystem'], node_info['id'])
- uploaded_count = 0 # Reflect that the save failed
- # Do NOT try to delete the HF files here, could lead to data loss if DB save fails intermittently
-
- final_message = f"{uploaded_count} file(s) uploaded."
- if errors:
- final_message += f" Errors occurred with {len(errors)} file(s)."
- # Consider logging the specific errors to the user if appropriate
- # final_message += " Details: " + "; ".join(errors)
-
- return jsonify({
- "status": "ok" if uploaded_count > 0 else "error", # Status based on successful *persisted* uploads
- "message": final_message
- })
+ return render_template_string(html, **template_context)
@app.route('/create_folder', methods=['POST'])
+@login_required
def create_folder():
- data = request.get_json()
- if not data or 'initData' not in data or 'parent_folder_id' not in data or 'folder_name' not in data:
- return jsonify({"status": "error", "message": "Incomplete request"}), 400
+ telegram_user = session['telegram_user']
+ user_id = telegram_user['id']
+ user_id_str = str(user_id)
- user_info = check_telegram_authorization(data['initData'], BOT_TOKEN)
- if not user_info or 'id' not in user_info:
- return jsonify({"status": "error", "message": "Unauthorized"}), 403
+ data = load_data()
+ user_data = data['users'].get(user_id_str)
+ if not user_data:
+ flash('Пользователь не найден!', 'error')
+ return redirect(url_for('dashboard')) # Redirect to root dashboard
- tg_user_id = str(user_info['id'])
- parent_folder_id = data['parent_folder_id']
- folder_name = data['folder_name'].strip()
+ parent_folder_id = request.form.get('parent_folder_id', 'root')
+ folder_name = request.form.get('folder_name', '').strip()
if not folder_name:
- return jsonify({'status': 'error', 'message': 'Folder name cannot be empty.'}), 400
- if len(folder_name) > 100:
- return jsonify({'status': 'error', 'message': 'Folder name is too long.'}), 400
- # Basic validation for problematic characters
- if /[<>:"/\\|?*]/.test(folder_name):
- return jsonify({'status': 'error', 'message': 'Folder name contains invalid characters.'}), 400
-
-
- db_data = load_data()
- user_data = db_data.get('users', {}).get(tg_user_id)
-
- if not user_data or 'filesystem' not in user_data or not isinstance(user_data['filesystem'], dict):
- logging.error(f"Create folder error: User data or filesystem missing/invalid for {tg_user_id}")
- return jsonify({"status": "error", "message": "User data error."}), 500
+ flash('Имя папки не может быть пустым!', 'error')
+ return redirect(url_for('dashboard', folder_id=parent_folder_id))
+ # Allow more characters, sanitize later if needed
+ # if not folder_name.isalnum() and '_' not in folder_name and ' ' not in folder_name:
+ # flash('Имя папки может содержать буквы, цифры, пробелы и подчеркивания.', 'error')
+ # return redirect(url_for('dashboard', folder_id=parent_folder_id))
+ folder_name = secure_filename(folder_name.replace(' ', '_')) # Basic sanitization
- # Check if folder with the same name already exists in the parent
- parent_node, _ = find_node_by_id(user_data['filesystem'], parent_folder_id)
- if parent_node and 'children' in parent_node and isinstance(parent_node['children'], list):
- for child in parent_node['children']:
- if isinstance(child, dict) and child.get('type') == 'folder' and child.get('name') == folder_name:
- return jsonify({'status': 'error', 'message': f'A folder named "{folder_name}" already exists here.'}), 409 # 409 Conflict
+ if not folder_name: # If secure_filename removes everything
+ flash('Недопустимое имя папки.', 'error')
+ return redirect(url_for('dashboard', folder_id=parent_folder_id))
folder_id = uuid.uuid4().hex
folder_data = {
- 'type': 'folder', 'id': folder_id,
- 'name': folder_name, 'children': []
+ 'type': 'folder',
+ 'id': folder_id,
+ 'name': folder_name,
+ 'children': []
}
if add_node(user_data['filesystem'], parent_folder_id, folder_data):
- if save_data(db_data):
- return jsonify({'status': 'ok', 'message': f'Folder "{folder_name}" created.'})
- else:
- logging.error(f"Create folder save error ({tg_user_id}) after adding node {folder_id}.")
- # Attempt to rollback the in-memory addition
- remove_node(user_data['filesystem'], folder_id)
- return jsonify({'status': 'error', 'message': 'Error saving data after creating folder.'}), 500
+ try:
+ save_data(data)
+ flash(f'Папка "{folder_name}" успешно создана.')
+ except Exception as e:
+ flash('Ошибка сохранения данных при создании папки.', 'error')
+ logging.error(f"Create folder save error for user {user_id}: {e}")
+ # Attempt to remove the added node if save failed? Complex.
else:
- # This implies parent folder wasn't found or wasn't a folder type
- logging.error(f"Create folder error: Failed add_node. User: {tg_user_id}, Parent: {parent_folder_id}")
- return jsonify({'status': 'error', 'message': 'Could not find parent folder to add new folder.'}), 400
+ flash('Не удалось найти родительскую папку для создания новой.', 'error')
+
+ return redirect(url_for('dashboard', folder_id=parent_folder_id))
@app.route('/download/<file_id>')
-def download_file_route(file_id):
- # Note: This route has NO BUILT-IN AUTHENTICATION.
- # It relies on the obscurity of file_id and HF path.
- # For sensitive data, proper auth (e.g., checking initData passed as query param,
- # or session-based auth) would be needed here, which complicates direct linking/previewing.
- db_data = load_data() # Use cached data if possible
- file_node = None
- owner_user_id = None
+@login_required # Require login even for download link access initially
+def download_file(file_id):
+ current_user_id = session['telegram_user']['id']
+ is_current_user_admin = current_user_id in ADMIN_TELEGRAM_IDS
- # Find the file node across all users
- for user_id_scan, user_data_scan in db_data.get('users', {}).items():
- if 'filesystem' in user_data_scan and isinstance(user_data_scan['filesystem'], dict):
- node, _ = find_node_by_id(user_data_scan['filesystem'], file_id)
- if node and isinstance(node, dict) and node.get('type') == 'file':
- file_node = node
- owner_user_id = user_id_scan
- break
+ data = load_data()
+ file_node = None
+ owner_user_id_str = None
+
+ # 1. Check if the file belongs to the current user
+ current_user_data = data['users'].get(str(current_user_id))
+ if current_user_data:
+ file_node, _ = find_node_by_id(current_user_data.get('filesystem', {}), file_id)
+ if file_node and file_node.get('type') == 'file':
+ owner_user_id_str = str(current_user_id)
+
+ # 2. If not found for current user AND current user is admin, search all users
+ if not file_node and is_current_user_admin:
+ logging.info(f"Admin {current_user_id} searching for file ID {file_id} across all users.")
+ for uid_str, udata in data.get('users', {}).items():
+ node, _ = find_node_by_id(udata.get('filesystem', {}), file_id)
+ if node and node.get('type') == 'file':
+ file_node = node
+ owner_user_id_str = uid_str
+ logging.info(f"Admin {current_user_id} found file ID {file_id} belonging to user {owner_user_id_str}")
+ break
if not file_node:
- logging.warning(f"Download request for unknown file_id: {file_id}")
- return Response("File not found", status=404, mimetype='text/plain')
+ flash('Файл не найден!', 'error')
+ # Redirect back to user's dashboard or admin panel depending on who requested
+ if is_current_user_admin and request.referrer and 'admhosto' in request.referrer:
+ return redirect(request.referrer)
+ return redirect(url_for('dashboard'))
+
hf_path = file_node.get('path')
- original_filename = file_node.get('original_filename', f'{file_id}_download')
+ original_filename = file_node.get('original_filename', 'downloaded_file')
if not hf_path:
- logging.error(f"Download error: Missing HF path for file ID {file_id} (Owner: {owner_user_id})")
- return Response("Error: File path configuration missing", status=500, mimetype='text/plain')
+ flash('Ошибка: Путь к файлу не найден в метаданных.', 'error')
+ if is_current_user_admin and request.referrer and 'admhosto' in request.referrer:
+ return redirect(request.referrer)
+ return redirect(url_for('dashboard'))
- # Construct the direct download URL
- # Using /info/refs might be faster for checking existence before redirecting, but resolve/main is simpler
file_url = f"https://huggingface.co/datasets/{REPO_ID}/resolve/main/{hf_path}?download=true"
- logging.info(f"Attempting to serve file via redirect/proxy from: {file_url}")
try:
headers = {}
if HF_TOKEN_READ:
headers["authorization"] = f"Bearer {HF_TOKEN_READ}"
- # Use requests to stream the file from HF
- # Timeout set for initial connection and read chunks
- response = requests.get(file_url, headers=headers, stream=True, timeout=(10, 30)) # (connect_timeout, read_timeout)
- response.raise_for_status() # Check for 4xx/5xx errors from HF
-
- # Prepare Flask response headers
- resp_headers = {}
- content_type = response.headers.get('Content-Type', 'application/octet-stream')
- resp_headers['Content-Type'] = content_type
-
- # Create a safe filename for Content-Disposition
- # Simple approach: replace potentially problematic chars
- safe_filename = "".join(c if c.isalnum() or c in ['.', '-', '_'] else '_' for c in original_filename)
- # Encode for header value (URL encoding for filename*=UTF-8'')
- encoded_filename = urlencode({'filename': original_filename}, encoding='utf-8')[9:]
- resp_headers['Content-Disposition'] = f"attachment; filename=\"{safe_filename}\"; filename*=UTF-8''{encoded_filename}"
-
- # Add Content-Length if provided by HF
- if 'Content-Length' in response.headers:
- resp_headers['Content-Length'] = response.headers['Content-Length']
+ response = requests.get(file_url, headers=headers, stream=True, timeout=60) # Add timeout
+ response.raise_for_status()
- # Stream the response body
- return Response(response.iter_content(chunk_size=8192), status=response.status_code, headers=resp_headers)
+ # Stream download if needed for large files, but send_file handles BytesIO well too
+ file_content = BytesIO(response.content)
+ return send_file(
+ file_content,
+ as_attachment=True,
+ download_name=original_filename,
+ mimetype='application/octet-stream'
+ )
except requests.exceptions.Timeout:
- logging.error(f"Timeout downloading file from HF: {hf_path}")
- return Response("Error: Timed out connecting to file storage", status=504, mimetype='text/plain') # 504 Gateway Timeout
+ logging.error(f"Timeout downloading file from HF ({hf_path})")
+ flash(f'Ошибка скачивания файла {original_filename}: Тайм-аут соединения с сервером.', 'error')
except requests.exceptions.RequestException as e:
- status_code = e.response.status_code if e.response is not None else 502 # 502 Bad Gateway if no response
- logging.error(f"Error downloading file from HF ({hf_path}, Owner: {owner_user_id}): {e} (Status: {status_code})")
- # Don't expose detailed error message to client
- return Response(f"Error retrieving file ({status_code})", status=status_code, mimetype='text/plain')
+ logging.error(f"Error downloading file from HF ({hf_path}): {e}")
+ flash(f'Ошибка скачивания файла {original_filename}! ({e})', 'error')
except Exception as e:
- logging.error(f"Unexpected error during download proxy ({hf_path}, Owner: {owner_user_id}): {e}", exc_info=True)
- return Response("Internal server error during file download", status=500, mimetype='text/plain')
-
+ logging.error(f"Unexpected error during download ({hf_path}): {e}")
+ flash('Произошла непредвиденная ошибка при скачивании файла.', 'error')
-@app.route('/delete_file/', methods=['POST'])
-def delete_file_route(file_id):
- data = request.get_json()
- if not data or 'initData' not in data: # current_folder_id might not be strictly necessary
- return jsonify({"status": "error", "message": "Incomplete request"}), 400
-
- user_info = check_telegram_authorization(data['initData'], BOT_TOKEN)
- if not user_info or 'id' not in user_info:
- return jsonify({"status": "error", "message": "Unauthorized"}), 403
+ # Fallback redirect
+ if is_current_user_admin and request.referrer and 'admhosto' in request.referrer:
+ return redirect(request.referrer)
+ return redirect(url_for('dashboard'))
- tg_user_id = str(user_info['id'])
- if not HF_TOKEN_WRITE:
- return jsonify({'status': 'error', 'message': 'Deletion configuration error.'}), 500
-
- db_data = load_data()
- user_data = db_data.get('users', {}).get(tg_user_id)
-
- if not user_data or 'filesystem' not in user_data or not isinstance(user_data['filesystem'], dict):
- logging.error(f"Delete file error: User data or filesystem missing/invalid for {tg_user_id}")
- # Don't reveal file existence, just say user data error
- return jsonify({"status": "error", "message": "User data error."}), 500
+@app.route('/delete_file/<file_id>', methods=['POST'])
+@login_required
+def delete_file(file_id):
+ telegram_user = session['telegram_user']
+ user_id = telegram_user['id']
+ user_id_str = str(user_id)
+
+ data = load_data()
+ user_data = data['users'].get(user_id_str)
+ if not user_data:
+ flash('Пользователь не найден!', 'error')
+ session.pop('telegram_user', None)
+ return redirect(url_for('index')) # Force re-auth
file_node, parent_node = find_node_by_id(user_data['filesystem'], file_id)
+ # Determine the folder to redirect back to
+ current_view_folder_id = request.form.get('current_view_folder_id')
+ if not current_view_folder_id and parent_node:
+ current_view_folder_id = parent_node.get('id', 'root')
+ elif not current_view_folder_id:
+ current_view_folder_id = 'root'
+
- if not file_node or file_node.get('type') != 'file' or not parent_node:
- # File not found *for this user*. Do not confirm non-existence.
- logging.warning(f"Delete request for non-existent/invalid file ID {file_id} by user {tg_user_id}")
- return jsonify({'status': 'error', 'message': 'File not found.'}), 404
+ if not file_node or file_node.get('type') != 'file': # Parent check removed, root files possible
+ flash('Файл не найден или не может быть удален.', 'error')
+ return redirect(url_for('dashboard', folder_id=current_view_folder_id))
hf_path = file_node.get('path')
- original_filename = file_node.get('original_filename', 'file')
- db_removed = False
- hf_deleted = False
- save_error = False
+ original_filename = file_node.get('original_filename', 'файл')
- # 1. Attempt to delete from Hugging Face Hub
- if hf_path:
+ # Attempt to remove from DB first
+ if remove_node(user_data['filesystem'], file_id):
try:
- api = HfApi()
- logging.info(f"Attempting HF delete for: {hf_path} by user {tg_user_id}")
- api.delete_file(
- path_in_repo=hf_path, repo_id=REPO_ID, repo_type="dataset", token=HF_TOKEN_WRITE,
- commit_message=f"User {tg_user_id} deleted {original_filename}"
- )
- hf_deleted = True
- logging.info(f"Successfully deleted file {hf_path} from HF Hub for user {tg_user_id}")
- except hf_utils.EntryNotFoundError:
- logging.warning(f"File {hf_path} already deleted or never existed on HF Hub for delete attempt by {tg_user_id}.")
- hf_deleted = True # Treat as success for the purpose of DB removal
+ save_data(data)
+ logging.info(f"Removed file {file_id} ({original_filename}) from DB for user {user_id}.")
+ # Now attempt to delete from HF
+ if hf_path and HF_TOKEN_WRITE:
+ try:
+ api = HfApi()
+ api.delete_file(
+ path_in_repo=hf_path,
+ repo_id=REPO_ID,
+ repo_type="dataset",
+ token=HF_TOKEN_WRITE,
+ commit_message=f"User {user_id} deleted file {original_filename} (ID: {file_id})"
+ )
+ logging.info(f"Deleted file {hf_path} from HF Hub for user {user_id}")
+ flash(f'Файл {original_filename} успешно удален!')
+ except hf_utils.EntryNotFoundError:
+ logging.warning(f"File {hf_path} not found on HF Hub during delete for user {user_id}, but removed from DB.")
+ flash(f'Файл {original_filename} удален из базы (не найден на сервере).')
+ except Exception as e:
+ logging.error(f"Error deleting file {hf_path} from HF Hub for user {user_id} (DB entry removed): {e}")
+ flash(f'Файл {original_filename} удален из базы, но ошибка при удалении с сервера: {e}', 'error')
+ elif not hf_path:
+ flash(f'Файл {original_filename} удален из базы (путь не найден).')
+ elif not HF_TOKEN_WRITE:
+ flash(f'Файл {original_filename} удален из базы (удаление с сервера невозможно - токен отсутствует).', 'warning')
+
except Exception as e:
- logging.error(f"Error deleting file from HF Hub ({hf_path}, User: {tg_user_id}): {e}")
- # Do not stop here; still try to remove from DB if HF delete fails,
- # but report the overall operation as potentially failed.
- # A background cleanup job might be needed for such inconsistencies.
+ # This is bad - removed from structure in memory, but failed to save
+ logging.critical(f"CRITICAL: Failed to save DB after removing file {file_id} for user {user_id}. Data inconsistency possible! Error: {e}")
+ flash('Критическая ошибка: не удалось сохранить базу данных после удаления файла. Перезагрузите данные.', 'error')
+ # Force cache clear and maybe reload?
+ cache.clear()
+ # Don't attempt HF delete if DB save failed
else:
- logging.warning(f"File node {file_id} for user {tg_user_id} has no HF path. Skipping HF deletion.")
- hf_deleted = True # No path means nothing to delete on HF
-
- # 2. Attempt to remove from DB structure *if HF deletion was successful or skipped*
- if hf_deleted:
- if remove_node(user_data['filesystem'], file_id):
- db_removed = True
- logging.info(f"Removed file node {file_id} from DB for user {tg_user_id}")
- # 3. Attempt to save the updated DB structure
- if not save_data(db_data):
- logging.error(f"CRITICAL: Delete file DB save error for user {tg_user_id} after removing node {file_id}.")
- save_error = True
- # Attempt to rollback the in-memory removal? Very risky. Better to log.
- # Re-adding the node might fail if parent was modified etc.
- # add_node(user_data['filesystem'], parent_node['id'], file_node) # Risky rollback attempt
+ flash('Не удалось найти файл в структуре для удаления.', 'error')
- else:
- # This shouldn't happen if find_node_by_id found it initially
- logging.error(f"Failed to remove file node {file_id} from DB structure for {tg_user_id} after it was found.")
-
- # Determine final status
- if db_removed and not save_error:
- return jsonify({'status': 'ok', 'message': f'File "{original_filename}" deleted.'})
- elif hf_deleted and db_removed and save_error:
- return jsonify({'status': 'error', 'message': f'File deleted from storage, but failed to update database.'}), 500
- elif hf_deleted and not db_removed:
- return jsonify({'status': 'error', 'message': f'File deleted from storage, but failed to remove from database structure.'}), 500
- else: # hf_deleted is False (meaning HF delete failed)
- return jsonify({'status': 'error', 'message': f'Failed to delete file from storage.'}), 500
+ return redirect(url_for('dashboard', folder_id=current_view_folder_id))
@app.route('/delete_folder/<folder_id>', methods=['POST'])
@login_required
def delete_folder(folder_id):
    """Delete an *empty* folder from the logged-in user's virtual filesystem.

    Folders are purely DB-side objects (no Hugging Face storage), so deletion
    only mutates the JSON structure and saves it. The root folder and
    non-empty folders are refused. On any outcome the user is redirected back
    to the dashboard, preferring the deleted folder's parent as the target.
    """
    telegram_user = session['telegram_user']
    user_id = telegram_user['id']
    user_id_str = str(user_id)

    if folder_id == 'root':
        flash('Нельзя удалить корневую папку!', 'error')
        return redirect(url_for('dashboard'))

    data = load_data()
    user_data = data['users'].get(user_id_str)
    if not user_data:
        # Session references a user missing from the DB — force re-login.
        flash('Пользователь не найден!', 'error')
        session.pop('telegram_user', None)
        return redirect(url_for('index'))

    folder_node, parent_node = find_node_by_id(user_data['filesystem'], folder_id)
    # The folder the user was viewing when they clicked delete (form field).
    current_view_folder_id = request.form.get('current_view_folder_id')
    redirect_to_folder_id = 'root'  # Default redirect target

    if parent_node:
        redirect_to_folder_id = parent_node.get('id', 'root')
    elif current_view_folder_id:
        # Fallback: return to wherever the user initiated the delete from.
        redirect_to_folder_id = current_view_folder_id

    if not folder_node or folder_node.get('type') != 'folder':
        flash('Папка не найдена или не может быть удалена.', 'error')
        return redirect(url_for('dashboard', folder_id=redirect_to_folder_id))

    folder_name = folder_node.get('name', 'папка')

    if folder_node.get('children'):
        # Only empty folders may be deleted; stay in the current view.
        flash(f'Папку "{folder_name}" можно удалить только если она пуста.', 'error')
        return redirect(url_for('dashboard', folder_id=current_view_folder_id or folder_id))

    # Proceed with deletion
    if remove_node(user_data['filesystem'], folder_id):
        try:
            save_data(data)
            flash(f'Пустая папка "{folder_name}" успешно удалена.')
        except Exception as e:
            # Folder was removed in memory but not persisted — clear the cache
            # so the next request reloads a consistent state from disk.
            flash('Ошибка сохранения данных после удаления папки.', 'error')
            logging.error(f"Delete empty folder save error for user {user_id}: {e}")
            cache.clear()
    else:
        # find_node_by_id succeeded but remove_node failed — internal inconsistency.
        flash('Не удалось удалить папку из базы данных (не найдена?).', 'error')

    return redirect(url_for('dashboard', folder_id=redirect_to_folder_id))
@app.route('/get_text_content/<file_id>')
@login_required
def get_text_content(file_id):
    """Serve the plain-text content of a 'text'-type file for in-browser preview.

    Lookup order: the logged-in user's own filesystem first; admins may
    additionally preview any user's text files. The raw bytes are fetched from
    the Hugging Face dataset repo, capped at 1 MB (larger files are truncated
    with a warning suffix), then decoded by trying utf-8, cp1251 and latin-1
    in that order.
    """
    current_user_id = session['telegram_user']['id']
    is_current_user_admin = current_user_id in ADMIN_TELEGRAM_IDS

    data = load_data()
    file_node = None
    owner_user_id_str = None

    # 1. Check current user's files
    current_user_data = data['users'].get(str(current_user_id))
    if current_user_data:
        node, _ = find_node_by_id(current_user_data.get('filesystem', {}), file_id)
        if node and node.get('type') == 'file' and node.get('file_type') == 'text':
            file_node = node
            owner_user_id_str = str(current_user_id)

    # 2. If admin and not found, scan all other users' filesystems.
    if not file_node and is_current_user_admin:
        for uid_str, udata in data.get('users', {}).items():
            node, _ = find_node_by_id(udata.get('filesystem', {}), file_id)
            if node and node.get('type') == 'file' and node.get('file_type') == 'text':
                file_node = node
                owner_user_id_str = uid_str
                break

    if not file_node:
        return Response("Текстовый файл не найден или доступ запрещен", status=404)

    hf_path = file_node.get('path')
    if not hf_path:
        return Response("Ошибка: путь к файлу отсутствует в метаданных", status=500)

    file_url = f"https://huggingface.co/datasets/{REPO_ID}/resolve/main/{hf_path}?download=true"
    try:
        headers = {}
        if HF_TOKEN_READ:
            headers["authorization"] = f"Bearer {HF_TOKEN_READ}"
        response = requests.get(file_url, headers=headers, timeout=15)  # Timeout for text files
        response.raise_for_status()

        # Limit preview size; oversized files are truncated, not rejected.
        max_preview_size = 1 * 1024 * 1024  # 1MB
        if len(response.content) > max_preview_size:
            text_content_bytes = response.content[:max_preview_size]
            warning_message = "\n\n[Файл слишком большой, показана только первая 1MB]"
            # errors='ignore' cannot raise, so no fallback branch is needed.
            text_content = text_content_bytes.decode('utf-8', errors='ignore') + warning_message
            return Response(text_content, mimetype='text/plain')

        # Try decoding. cp1251 must come BEFORE latin-1: latin-1 accepts every
        # byte sequence and would otherwise shadow the Cyrillic codec entirely.
        text_content = None
        for encoding in ('utf-8', 'cp1251', 'latin-1'):
            try:
                text_content = response.content.decode(encoding)
                break
            except UnicodeDecodeError:
                continue
        if text_content is None:
            return Response("Не удалось определить кодировку файла.", status=500)

        return Response(text_content, mimetype='text/plain; charset=utf-8')  # Specify charset
    except requests.exceptions.Timeout:
        logging.warning(f"Timeout fetching text content from HF ({hf_path})")
        return Response("Тайм-аут при загрузке содержимого файла.", status=504)
    except requests.exceptions.RequestException as e:
        logging.error(f"Error fetching text content from HF ({hf_path}): {e}")
        return Response(f"Ошибка загрузки содержимого: {e}", status=502)
    except Exception as e:
        logging.error(f"Unexpected error fetching text content ({hf_path}): {e}")
        return Response("Внутренняя ошибка сервера", status=500)
# NOTE(review): the region below is an unresolved diff/merge artifact, kept
# byte-for-byte. The '-' prefixed lines are the removed unauthenticated
# preview_thumb_route proxy; the '+' prefixed lines are the new admin_panel
# route. The admin HTML template appears truncated by markup stripping, and
# the trailing render_template_string call references user_info/all_files,
# which presumably belong to a separate admin_user_files view not visible
# here — this span needs manual merge resolution before the file can run.
-@app.route('/preview_thumb/')
-def preview_thumb_route(file_id):
- # NO AUTHENTICATION
- db_data = load_data()
- file_node = None
- owner_user_id = None
+# --- Admin Routes ---
- for user_id_scan, user_data_scan in db_data.get('users', {}).items():
- if 'filesystem' in user_data_scan and isinstance(user_data_scan['filesystem'], dict):
- node, _ = find_node_by_id(user_data_scan['filesystem'], file_id)
- if node and isinstance(node, dict) and node.get('type') == 'file' and node.get('file_type') == 'image':
- file_node = node
- owner_user_id = user_id_scan
- break
+@app.route('/admhosto')
+@admin_required
+def admin_panel():
+ data = load_data()
+ users = data.get('users', {})
- if not file_node: return Response("Image not found", status=404, mimetype='text/plain')
- hf_path = file_node.get('path')
- if not hf_path: return Response("Error: File path missing", status=500, mimetype='text/plain')
+ user_details = []
+ for user_id_str, udata in users.items():
+ file_count = 0
+ folder_count = 0
+ q = [(udata.get('filesystem', {}))] # Start with root object
+ visited_ids = set()
- # Use the /resolve/main path for direct file access
- file_url = f"https://huggingface.co/datasets/{REPO_ID}/resolve/main/{hf_path}"
- logging.info(f"Attempting to serve image preview via proxy from: {file_url}")
+ while q:
+ current_node = q.pop(0)
+ if not current_node or not isinstance(current_node, dict) or current_node.get('id') in visited_ids:
+ continue
+ visited_ids.add(current_node.get('id'))
+
+ if current_node.get('type') == 'file':
+ file_count += 1
+ elif current_node.get('type') == 'folder':
+ if current_node.get('id') != 'root': # Don't count root itself
+ folder_count += 1
+ if 'children' in current_node and isinstance(current_node['children'], list):
+ for child in current_node['children']:
+ if isinstance(child, dict):
+ q.append(child)
+
+
+ user_details.append({
+ 'telegram_id': int(user_id_str),
+ 'username': udata.get('username', 'N/A'),
+ 'first_name': udata.get('first_name', ''),
+ 'last_name': udata.get('last_name', ''),
+ 'photo_url': udata.get('photo_url'),
+ 'created_at': udata.get('created_at', 'N/A'),
+ 'file_count': file_count,
+ 'folder_count': folder_count
+ })
- try:
- headers = {}
- if HF_TOKEN_READ: headers["authorization"] = f"Bearer {HF_TOKEN_READ}"
- response = requests.get(file_url, headers=headers, stream=True, timeout=20)
- response.raise_for_status()
+ user_details.sort(key=lambda x: x.get('created_at', ''), reverse=True)
+
+ html = '''
+
+Админ-панель
+
+
+
+
+
+Админ-панель
+Назад в мой кабинет
+{% with messages = get_flashed_messages(with_categories=true) %}{% if messages %}{% for category, message in messages %}
+ Скачать
+ {% set previewable = file.file_type in ['image', 'video', 'pdf', 'text'] %}
+ {% if previewable %}
+
+ {% endif %}
+
+
+
+{% else %}
+У пользователя нет файлов.
+{% endfor %}
+
+
+
+
+ ×
+
+
+
+
+
+'''
# NOTE(review): the call below uses user_info/all_files, not the user_details
# built above — looks like the template/render pair of a different admin view
# was merged in here; confirm against the original file before resolving.
+ return render_template_string(html,
+ user_info=user_info,
+ files=all_files,
+ repo_id=REPO_ID,
+ hf_file_url=lambda path, download=False: f"https://huggingface.co/datasets/{REPO_ID}/resolve/main/{path}{'?download=true' if download else ''}",
+ HF_TOKEN_READ=HF_TOKEN_READ)
- resp_headers['Content-Type'] = content_type
- if 'Content-Length' in response.headers:
- resp_headers['Content-Length'] = response.headers['Content-Length']
- # Add cache headers? Maybe Cache-Control: public, max-age=3600 ?
- return Response(response.iter_content(chunk_size=8192), status=response.status_code, headers=resp_headers)
@app.route('/admhosto/delete_user/<int:user_id>', methods=['POST'])
@admin_required
def admin_delete_user(user_id):
    """Admin action: delete a user and all their data.

    Order of operations: first remove the user's files from the Hugging Face
    dataset repo (list the tree, delete files individually, then the folder);
    only if that succeeds (or there was nothing to delete) is the user removed
    from the local database. On any HF failure the DB entry is kept so the
    operation can be retried. Always redirects back to the admin panel.
    """
    admin_user_id = session['telegram_user']['id']
    user_id_str = str(user_id)

    if not HF_TOKEN_WRITE:
        flash('Удаление невозможно: токен для записи Hugging Face не настроен.', 'error')
        return redirect(url_for('admin_panel'))

    data = load_data()
    if user_id_str not in data['users']:
        flash('Пользователь не найден!', 'error')
        return redirect(url_for('admin_panel'))

    user_data_to_delete = data['users'][user_id_str]
    username_for_log = user_data_to_delete.get('username', user_id_str)
    logging.warning(f"ADMIN ACTION by {admin_user_id}: Attempting to delete user {username_for_log} (ID: {user_id_str}) and all their data.")

    # --- Attempt to delete from Hugging Face first ---
    hf_delete_successful = False
    try:
        api = HfApi()
        # Per-user storage prefix inside the dataset repo.
        user_folder_path_on_hf = f"cloud_files/{user_id_str}"

        logging.info(f"Attempting to delete HF Hub folder: {user_folder_path_on_hf} for user {user_id_str}")
        # delete_folder generally expects an empty folder, so robust cleanup is
        # list-then-delete-files, then remove the (now empty) folder itself.
        objects_to_delete = api.list_repo_tree(repo_id=REPO_ID, repo_type="dataset", path_in_repo=user_folder_path_on_hf, token=HF_TOKEN_READ, recursive=True)
        paths_to_delete = [obj.path for obj in objects_to_delete]

        if paths_to_delete:
            logging.info(f"Found {len(paths_to_delete)} items in {user_folder_path_on_hf} to delete.")
            # Delete files first; trailing-slash paths are treated as folders.
            for path in paths_to_delete:
                if not path.endswith('/'):
                    try:
                        api.delete_file(path_in_repo=path, repo_id=REPO_ID, repo_type="dataset", token=HF_TOKEN_WRITE)
                        logging.info(f"Admin deleted HF file: {path}")
                    except hf_utils.EntryNotFoundError:
                        logging.warning(f"File {path} not found during bulk delete, skipping.")
                    except Exception as file_del_e:
                        logging.error(f"Error deleting file {path} during user cleanup: {file_del_e}")
            # Try deleting the folder itself after the files are gone.
            try:
                api.delete_folder(folder_path=user_folder_path_on_hf, repo_id=REPO_ID, repo_type="dataset", token=HF_TOKEN_WRITE,
                                  commit_message=f"ADMIN ACTION by {admin_user_id}: Deleted folder for user {user_id_str}")
                logging.info(f"Successfully deleted folder {user_folder_path_on_hf} on HF Hub.")
                hf_delete_successful = True
            except hf_utils.HfHubHTTPError as e:
                if e.response.status_code == 404 or "is not empty" in str(e):
                    # Folder already gone or implicitly removed with its files.
                    logging.warning(f"Folder {user_folder_path_on_hf} possibly already gone or non-empty after file deletion attempt. Assuming HF cleanup done.")
                    hf_delete_successful = True
                else:
                    raise e  # Re-raise other HF errors
            except Exception as folder_del_e:
                logging.error(f"Error deleting folder {user_folder_path_on_hf} after file deletion: {folder_del_e}")
                # Leave hf_delete_successful False: DB deletion is aborted below.
        else:
            logging.info(f"No objects found in HF path {user_folder_path_on_hf}. Assuming HF cleanup not needed or already done.")
            hf_delete_successful = True  # Nothing to delete counts as success.

    except hf_utils.HfHubHTTPError as e:
        if e.response.status_code == 404:
            # Initial listing 404s when the user never uploaded anything.
            logging.warning(f"User folder {user_folder_path_on_hf} not found on HF Hub for user {user_id_str}. Skipping HF deletion.")
            hf_delete_successful = True
        else:
            logging.error(f"Error during HF cleanup for {user_id_str}: {e}")
            flash(f'Ошибка при удалении файлов пользователя {username_for_log} с сервера: {e}. Пользователь НЕ удален из базы.', 'error')
            return redirect(url_for('admin_panel'))
    except Exception as e:
        logging.error(f"Unexpected error during HF Hub data deletion for {user_id_str}: {e}")
        flash(f'Неожиданная ошибка при удалении файлов {username_for_log} с сервера: {e}. Пользователь НЕ удален из базы.', 'error')
        return redirect(url_for('admin_panel'))

    # --- Proceed with DB deletion only if HF deletion succeeded or was skipped ---
    if hf_delete_successful:
        try:
            del data['users'][user_id_str]
            save_data(data)
            flash(f'Пользователь {username_for_log} (ID: {user_id_str}) и его данные успешно удалены из базы данных!')
            logging.info(f"ADMIN ACTION by {admin_user_id}: Successfully deleted user {user_id_str} from database.")
        except Exception as e:
            # HF files are gone but the DB save failed — clear cache so the next
            # request reloads a consistent snapshot from disk.
            logging.error(f"CRITICAL: Error saving data after deleting user {user_id_str} from dict. DB MIGHT BE INCONSISTENT. HF data likely deleted. Error: {e}")
            flash(f'Данные пользователя {username_for_log} удалены с сервера, но произошла КРИТИЧЕСКАЯ ОШИБКА при удалении пользователя из базы данных: {e}', 'error')
            cache.clear()
    else:
        flash(f'Удаление пользователя {username_for_log} из базы отменено из-за ошибки при удалении файлов с сервера.', 'error')

    return redirect(url_for('admin_panel'))
+
+
@app.route('/admhosto/delete_file/<int:user_id>/<file_id>', methods=['POST'])
@admin_required
def admin_delete_file(user_id, file_id):
    """Admin action: delete a single file belonging to any user.

    Removes the node from the user's filesystem structure and saves the DB
    first, then deletes the underlying object from the Hugging Face repo.
    HF failures after a successful DB save are reported but not rolled back.
    Redirects to the admin per-user file listing.
    """
    admin_user_id = session['telegram_user']['id']
    user_id_str = str(user_id)

    if not HF_TOKEN_WRITE:
        flash('Удаление невозможно: токен для записи Hugging Face не настроен.', 'error')
        return redirect(url_for('admin_user_files', user_id=user_id))

    data = load_data()
    user_data = data.get('users', {}).get(user_id_str)
    if not user_data:
        flash(f'Пользователь {user_id_str} не найден.', 'error')
        return redirect(url_for('admin_panel'))

    file_node, parent_node = find_node_by_id(user_data.get('filesystem', {}), file_id)

    if not file_node or file_node.get('type') != 'file':
        flash('Файл не найден в структуре пользователя.', 'error')
        return redirect(url_for('admin_user_files', user_id=user_id))

    hf_path = file_node.get('path')
    original_filename = file_node.get('original_filename', 'файл')
    username_for_log = user_data.get('username', user_id_str)

    # Remove from the DB first, then from HF storage.
    if remove_node(user_data['filesystem'], file_id):
        try:
            save_data(data)
            logging.info(f"ADMIN ACTION by {admin_user_id}: Removed file {file_id} ({original_filename}) from DB for user {username_for_log} ({user_id_str}).")

            if hf_path:
                try:
                    api = HfApi()
                    api.delete_file(
                        path_in_repo=hf_path,
                        repo_id=REPO_ID,
                        repo_type="dataset",
                        token=HF_TOKEN_WRITE,
                        commit_message=f"ADMIN ACTION by {admin_user_id}: Deleted file {original_filename} (ID: {file_id}) for user {user_id_str}"
                    )
                    logging.info(f"ADMIN ACTION by {admin_user_id}: Deleted file {hf_path} from HF Hub for user {user_id_str}")
                    flash(f'Файл {original_filename} успешно удален (админ)!')
                except hf_utils.EntryNotFoundError:
                    # DB entry already removed; missing HF object is harmless.
                    logging.warning(f"ADMIN ACTION: File {hf_path} not found on HF Hub during delete for user {user_id_str}, but removed from DB.")
                    flash(f'Файл {original_filename} удален из базы (не найден на сервере) (админ).')
                except Exception as e:
                    logging.error(f"ADMIN ACTION: Error deleting file {hf_path} from HF Hub for user {user_id_str} (DB entry removed): {e}")
                    flash(f'Файл {original_filename} удален из базы, но ошибка при удалении с сервера: {e} (админ)', 'error')
            else:
                flash(f'Файл {original_filename} удален из базы (путь не найден) (админ).')

        except Exception as e:
            # Node removed in memory but save failed — clear cache to force a
            # reload of the on-disk state on the next request.
            logging.critical(f"CRITICAL ADMIN ACTION: Failed to save DB after removing file {file_id} for user {user_id_str}. Data inconsistency possible! Error: {e}")
            flash('Критическая ошибка: не удалось сохранить базу данных после удаления файла (админ).', 'error')
            cache.clear()
    else:
        flash('Не удалось найти файл в структуре для удаления (админ).', 'error')

    return redirect(url_for('admin_user_files', user_id=user_id))
# --- Main Execution ---

if __name__ == '__main__':
    # Keep Telegram login sessions alive for a month.
    app.permanent_session_lifetime = timedelta(days=30)

    # Startup sanity checks: the bot token is mandatory, everything else degrades.
    if not TELEGRAM_BOT_TOKEN:
        logging.critical("FATAL: TELEGRAM_BOT_TOKEN environment variable is not set. Application cannot verify users.")
        exit(1)
    if not ADMIN_TELEGRAM_IDS:
        logging.warning("ADMIN_TELEGRAM_IDS environment variable is not set or empty. Admin panel will not be accessible.")
    else:
        logging.info(f"Admin users configured: {ADMIN_TELEGRAM_IDS}")

    if not HF_TOKEN_WRITE:
        logging.warning("HF_TOKEN (write access) is not set. File uploads, deletions, and backups will fail.")
    if not HF_TOKEN_READ:
        logging.warning("HF_TOKEN_READ is not set. Falling back to HF_TOKEN. File downloads/previews might fail for private repos if HF_TOKEN is also not set.")

    if HF_TOKEN_WRITE:
        # Full mode: sync the DB down, then run the periodic backup thread.
        logging.info("Performing initial database download before starting background backup.")
        download_db_from_hf()  # Download before starting backup thread
        backup_thread = threading.Thread(target=periodic_backup, daemon=True)
        backup_thread.start()
        logging.info("Periodic backup thread started.")
    elif HF_TOKEN_READ:
        # Read-only mode: sync once, no backups.
        logging.info("Write token not found. Performing initial database download (read-only mode). Backups disabled.")
        download_db_from_hf()
    else:
        # Local-only mode: ensure a local DB file exists.
        logging.critical("Neither HF_TOKEN nor HF_TOKEN_READ is set. Hugging Face operations disabled. Loading/creating local DB only.")
        if not os.path.exists(DATA_FILE):
            with open(DATA_FILE, 'w', encoding='utf-8') as f:
                json.dump({'users': {}}, f)
            logging.info(f"Created empty local database file: {DATA_FILE}")
        else:
            logging.info(f"Using existing local database file: {DATA_FILE}")

    # NOTE: use a production WSGI server (waitress/gunicorn) for deployment.
    app.run(debug=False, host='0.0.0.0', port=7860)

# --- END OF FILE app.py ---