#!/usr/bin/env python3 # -*- coding: utf-8 -*- import os from flask import Flask, request, Response, render_template_string, jsonify, redirect, url_for import hmac import hashlib import json from urllib.parse import unquote, parse_qs, quote import time from datetime import datetime import logging import threading from huggingface_hub import HfApi, hf_hub_download from huggingface_hub.utils import RepositoryNotFoundError, HfHubHTTPError # --- Configuration --- BOT_TOKEN = os.getenv("BOT_TOKEN", "7566834146:AAGiG4MaTZZvvbTVsqEJVG5SYK5hUlc_Ewo") # Use environment variable or default HOST = '0.0.0.0' PORT = 7860 DATA_FILE = 'data.json' # File to store visited user data # Hugging Face Hub Configuration REPO_ID = "flpolprojects/teledata" HF_TOKEN = os.getenv("HF_TOKEN") # Write token HF_TOKEN_READ = os.getenv("HF_TOKEN_READ", HF_TOKEN) # Read token (defaults to write token if not set) BACKUP_INTERVAL = 900 # Seconds (15 minutes) app = Flask(__name__) app.secret_key = os.urandom(24) # Needed for flash messages or sessions if used later # Logging Setup logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s') # --- Hugging Face Hub Functions --- def download_db_from_hf(): if not HF_TOKEN_READ: logging.warning("HF_TOKEN_READ not set. Skipping download from Hugging Face Hub.") return False try: logging.info(f"Attempting to download {DATA_FILE} from {REPO_ID}...") hf_hub_download( repo_id=REPO_ID, filename=DATA_FILE, repo_type="dataset", token=HF_TOKEN_READ, local_dir=".", local_dir_use_symlinks=False, force_download=True, # Ensure we get the latest version resume_download=False ) logging.info(f"{DATA_FILE} successfully downloaded from Hugging Face Hub.") return True except RepositoryNotFoundError: logging.warning(f"Repository {REPO_ID} not found on Hugging Face Hub. Will use/create local file.") return False except HfHubHTTPError as e: if e.response.status_code == 404: logging.warning(f"{DATA_FILE} not found in repository {REPO_ID}. 
Will use/create local file.") else: logging.error(f"HTTP error downloading {DATA_FILE} from Hugging Face Hub: {e}") return False except Exception as e: logging.error(f"Error downloading {DATA_FILE} from Hugging Face Hub: {e}") return False def upload_db_to_hf(): if not HF_TOKEN: logging.warning("HF_TOKEN not set. Skipping upload to Hugging Face Hub.") return False if not os.path.exists(DATA_FILE): logging.warning(f"{DATA_FILE} not found locally. Skipping upload.") return False try: api = HfApi() logging.info(f"Attempting to upload {DATA_FILE} to {REPO_ID}...") api.upload_file( path_or_fileobj=DATA_FILE, path_in_repo=DATA_FILE, repo_id=REPO_ID, repo_type="dataset", token=HF_TOKEN, commit_message=f"Automated user data backup {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}" ) logging.info(f"{DATA_FILE} successfully uploaded to Hugging Face Hub.") return True except Exception as e: logging.error(f"Error uploading {DATA_FILE} to Hugging Face Hub: {e}") return False def periodic_backup(): logging.info(f"Starting periodic backup thread. Interval: {BACKUP_INTERVAL} seconds.") while True: time.sleep(BACKUP_INTERVAL) logging.info("Initiating scheduled backup...") upload_db_to_hf() # --- Data Handling --- def load_users(): # Attempt download first download_db_from_hf() if not os.path.exists(DATA_FILE): logging.warning(f"{DATA_FILE} not found. Initializing empty user data.") return {} try: with open(DATA_FILE, 'r', encoding='utf-8') as f: users_data = json.load(f) if not isinstance(users_data, dict): logging.warning(f"{DATA_FILE} does not contain a valid JSON dictionary. Resetting.") return {} logging.info(f"Loaded {len(users_data)} user records from {DATA_FILE}.") return users_data except json.JSONDecodeError: logging.error(f"Error decoding JSON from {DATA_FILE}. 
Returning empty data.") # Consider backing up the corrupted file here return {} except Exception as e: logging.error(f"Error loading user data from {DATA_FILE}: {e}") return {} def save_users(users_data): try: with open(DATA_FILE, 'w', encoding='utf-8') as f: json.dump(users_data, f, ensure_ascii=False, indent=4) logging.info(f"Saved {len(users_data)} user records to {DATA_FILE}.") # Attempt upload after saving locally upload_db_to_hf() except Exception as e: logging.error(f"Error saving user data to {DATA_FILE}: {e}") # Load initial data on startup visited_users = load_users() # --- Telegram Verification --- def verify_telegram_data(init_data_str): try: parsed_data = parse_qs(init_data_str) received_hash = parsed_data.pop('hash', [None])[0] if not received_hash: logging.warning("Verification failed: No hash found in initData.") return None, False data_check_list = [] for key, value in sorted(parsed_data.items()): # Ensure values are strings before appending data_check_list.append(f"{key}={value[0]}") data_check_string = "\n".join(data_check_list) secret_key = hmac.new("WebAppData".encode(), BOT_TOKEN.encode(), hashlib.sha256).digest() calculated_hash = hmac.new(secret_key, data_check_string.encode(), hashlib.sha256).hexdigest() if calculated_hash == received_hash: auth_date = int(parsed_data.get('auth_date', [0])[0]) current_time = int(time.time()) # Allow slightly older data, adjust timeout as needed (e.g., 3600 for 1 hour) if current_time - auth_date > 86400: # 24 hours tolerance logging.warning(f"Telegram InitData is older than 24 hours (Auth Date: {auth_date}, Current: {current_time}).") # logging.info("Telegram data verified successfully.") return parsed_data, True else: logging.warning(f"Data verification failed. Calculated: {calculated_hash}, Received: {received_hash}") return parsed_data, False except Exception as e: logging.error(f"Error verifying Telegram data: {e}") return None, False # --- Templates --- TEMPLATE = """
Morshen Group
Мы — международный IT холдинг, объединяющий передовые технологические компании для создания прорывных решений мирового уровня в сферах AI, квантовых вычислений и разработки ПО.
Обсудить ваш проект
В состав холдинга входят специализированные компании, каждая из которых является экспертом в своей области передовых технологий.
Morshen Alpha
Флагман холдинга. Занимаемся R&D в области AI и квантовых технологий, разрабатываем передовые бизнес-решения, формирующие будущее индустрии.
Holmgard Studio
Студия разработки полного цикла. Создаем высокотехнологичные веб-сайты, мобильные приложения и кастомное ПО для бизнеса любого масштаба, используя современные стеки и методологии.
Наши решения и команды работают в ключевых регионах Центральной Азии:
Пока нет данных о посетителях.
{% endif %}