diff --git "a/app.py" "b/app.py"
deleted file mode 100644
--- "a/app.py"
+++ /dev/null
@@ -1,2838 +0,0 @@
-
-import os
-from flask import Flask, request, Response, render_template_string, jsonify, redirect, url_for
-import hmac
-import hashlib
-import json
-from urllib.parse import unquote, parse_qs, quote
-import time
-from datetime import datetime
-import logging
-import threading
-import random
-import pytz
-import uuid
-
-from huggingface_hub import HfApi, hf_hub_download
-from huggingface_hub.utils import RepositoryNotFoundError
-
-BOT_TOKEN = os.getenv("BOT_TOKEN", "7835463659:AAGNePbelZIAOeaglyQi1qulOqnjs4BGQn4")
-HOST = '0.0.0.0'
-PORT = 7860
-DATA_FILE = 'data.json'
-
-REPO_ID = "flpolprojects/examplebonus"
-HF_DATA_FILE_PATH = "data.json"
-HF_TOKEN_WRITE = os.getenv("HF_TOKEN_WRITE")
-HF_TOKEN_READ = os.getenv("HF_TOKEN_READ")
-
-BISHKEK_TZ = pytz.timezone('Asia/Bishkek')
-
-app = Flask(__name__)
-logging.basicConfig(level=logging.INFO)
-app.secret_key = os.urandom(24)
-
-_data_lock = threading.Lock()
-visitor_data_cache = {}
-
-def generate_unique_id(all_data):
- while True:
- # Generate a 5-digit numeric ID
- new_id = str(random.randint(10000, 99999))
-
- # Check if this ID exists as a client ID or a partner code
- is_duplicate = False
- for user_id, user_data in all_data.items():
- if user_id == "organization_details": continue
- if user_id == new_id or user_data.get('partner_code') == new_id:
- is_duplicate = True
- break
-
- if not is_duplicate:
- return new_id
-
-def download_data_from_hf():
- global visitor_data_cache
- if not HF_TOKEN_READ:
- logging.warning("HF_TOKEN_READ not set. Skipping Hugging Face download.")
- return False
- try:
- logging.info(f"Attempting to download {HF_DATA_FILE_PATH} from {REPO_ID}...")
- hf_hub_download(
- repo_id=REPO_ID,
- filename=HF_DATA_FILE_PATH,
- repo_type="dataset",
- token=HF_TOKEN_READ,
- local_dir=".",
- local_dir_use_symlinks=False,
- force_download=True,
- etag_timeout=10
- )
- logging.info("Data file successfully downloaded from Hugging Face.")
- with _data_lock:
- try:
- with open(DATA_FILE, 'r', encoding='utf-8') as f:
- visitor_data_cache = json.load(f)
- logging.info("Successfully loaded downloaded data into cache.")
- except (FileNotFoundError, json.JSONDecodeError) as e:
- logging.error(f"Error reading downloaded data file: {e}. Starting with empty cache.")
- visitor_data_cache = {}
-
- # Ensure organization_details and initial user fields exist after loading
- if "organization_details" not in visitor_data_cache:
- visitor_data_cache["organization_details"] = {}
- if 'referral_percentage' not in visitor_data_cache["organization_details"]:
- visitor_data_cache["organization_details"]['referral_percentage'] = 2.0 # Default 2%
-
- # Ensure all user entries have required fields (partner_code, referred_by, referred_users, invoices)
- # This is a migration step for existing data
- users_to_update = {}
- for user_id, user_data in visitor_data_cache.items():
- if user_id == "organization_details": continue
- updated_user_data = user_data.copy()
-
- if 'partner_code' not in updated_user_data or not updated_user_data['partner_code']:
- updated_user_data['partner_code'] = generate_unique_id(visitor_data_cache) # Generate unique code
- if 'referred_by' not in updated_user_data:
- updated_user_data['referred_by'] = None
- if 'referred_users' not in updated_user_data or not isinstance(updated_user_data['referred_users'], list):
- updated_user_data['referred_users'] = []
- if 'invoices' not in updated_user_data or not isinstance(updated_user_data['invoices'], list):
- updated_user_data['invoices'] = []
-
- if updated_user_data != user_data:
- users_to_update[user_id] = updated_user_data
-
- for user_id, user_data in users_to_update.items():
- visitor_data_cache[user_id] = user_data
- logging.info(f"Migrated user data for ID {user_id}")
-
- if users_to_update:
- # Rebuild referred_users lists based on current referred_by fields
- temp_referred_users = {}
- for user_id, user_data in visitor_data_cache.items():
- if user_id == "organization_details": continue
- user_data['referred_users'] = [] # Clear old list
- referred_by_code = user_data.get('referred_by')
- if referred_by_code:
- temp_referred_users.setdefault(referred_by_code, []).append(user_id)
-
- # Update the actual referred_users lists
- for referrer_code, referred_ids in temp_referred_users.items():
- found_referrer = False
- for user_id, user_data in visitor_data_cache.items():
- if user_id == "organization_details": continue
- if user_data.get('partner_code') == referrer_code:
- user_data['referred_users'] = referred_ids
- found_referrer = True
- break
- if not found_referrer:
- logging.warning(f"Referrer with code {referrer_code} not found for referred users {referred_ids}. Clearing referred_by.")
- # Clear referred_by for these users if referrer doesn't exist
- for referred_id in referred_ids:
- if referred_id in visitor_data_cache and referred_id != "organization_details":
- visitor_data_cache[referred_id]['referred_by'] = None
-
-
- logging.info("Rebuilt referred_users lists.")
- # Save the potentially updated cache after migration and rebuild
- save_visitor_data(visitor_data_cache)
-
-
- return True
- except RepositoryNotFoundError:
- logging.error(f"Hugging Face repository '{REPO_ID}' not found. Cannot download data.")
- except Exception as e:
- logging.error(f"Error downloading data from Hugging Face: {e}")
- return False
-
-def load_visitor_data():
- global visitor_data_cache
- with _data_lock:
- if not visitor_data_cache:
- try:
- with open(DATA_FILE, 'r', encoding='utf-8') as f:
- visitor_data_cache = json.load(f)
- logging.info("Visitor data loaded from local JSON.")
- except FileNotFoundError:
- logging.warning(f"{DATA_FILE} not found locally. Starting with empty data.")
- visitor_data_cache = {"organization_details": {}}
- except json.JSONDecodeError:
- logging.error(f"Error decoding {DATA_FILE}. Starting with empty data.")
- visitor_data_cache = {"organization_details": {}}
- except Exception as e:
- logging.error(f"Unexpected error loading visitor data: {e}")
- visitor_data_cache = {"organization_details": {}}
-
- # Ensure organization_details key exists
- if "organization_details" not in visitor_data_cache:
- visitor_data_cache["organization_details"] = {}
- if 'referral_percentage' not in visitor_data_cache["organization_details"]:
- visitor_data_cache["organization_details"]['referral_percentage'] = 2.0 # Default
-
- # Ensure all user entries have required fields on load if not already present
- users_to_update = {}
- for user_id, user_data in visitor_data_cache.items():
- if user_id == "organization_details": continue
- updated_user_data = user_data.copy() # Copy to detect changes
-
- if 'partner_code' not in updated_user_data or not updated_user_data['partner_code']:
- # Generating a unique code here on load might be problematic if called frequently without saving.
- # Better to ensure it's generated on user creation. If missing, leave as None or generate and mark for save.
- # Let's ensure generation only happens on creation (verify or add_client).
- # If it's missing on load, maybe just log a warning or assign None. Let's assign None for now.
- # The initial download/load function handles generation for missing codes on migration.
- updated_user_data['partner_code'] = updated_user_data.get('partner_code') # Keep existing or None
-
- if 'referred_by' not in updated_user_data:
- updated_user_data['referred_by'] = None
- if 'referred_users' not in updated_user_data or not isinstance(updated_user_data['referred_users'], list):
- updated_user_data['referred_users'] = []
- if 'invoices' not in updated_user_data or not isinstance(updated_user_data['invoices'], list):
- updated_user_data['invoices'] = []
- if 'debt_history' not in updated_user_data or not isinstance(updated_user_data['debt_history'], list):
- updated_user_data['debt_history'] = []
- if 'history' not in updated_user_data or not isinstance(updated_user_data['history'], list):
- updated_user_data['history'] = []
-
- if updated_user_data != user_data:
- users_to_update[user_id] = updated_user_data # Mark for potential update
-
- if users_to_update:
- logging.warning(f"Missing keys in {len(users_to_update)} user entries. Updating cache but not saving automatically.")
- for user_id, updated_data in users_to_update.items():
- visitor_data_cache[user_id] = updated_data
-
- # Rebuild referred_users lists on every load just to be safe
- temp_referred_users = {}
- for user_id, user_data in visitor_data_cache.items():
- if user_id == "organization_details": continue
- user_data['referred_users'] = [] # Clear old list
- referred_by_code = user_data.get('referred_by')
- if referred_by_code:
- temp_referred_users.setdefault(referred_by_code, []).append(user_id)
-
- # Update the actual referred_users lists
- for referrer_code, referred_ids in temp_referred_users.items():
- found_referrer = False
- for user_id, user_data in visitor_data_cache.items():
- if user_id == "organization_details": continue
- if user_data.get('partner_code') == referrer_code:
- user_data['referred_users'] = referred_ids
- found_referrer = True
- break
- if not found_referrer:
- logging.warning(f"Referrer with code {referrer_code} not found during referred_users rebuild.")
- # Don't clear referred_by here, it's safer to keep the link even if referrer was deleted manually
-
- return visitor_data_cache
-
-def save_visitor_data(data_to_save):
- with _data_lock:
- try:
- # Ensure visitor_data_cache is the source of truth before saving
- # In calling code, modifications should happen directly on visitor_data_cache
- # This function now simply dumps the current state of visitor_data_cache
- with open(DATA_FILE, 'w', encoding='utf-8') as f:
- json.dump(visitor_data_cache, f, ensure_ascii=False, indent=4)
- logging.info(f"Visitor data successfully saved to {DATA_FILE}.")
- upload_data_to_hf_async()
- except Exception as e:
- logging.error(f"Error saving visitor data: {e}")
-
-def upload_data_to_hf():
- if not HF_TOKEN_WRITE:
- logging.warning("HF_TOKEN_WRITE not set. Skipping Hugging Face upload.")
- return
- if not os.path.exists(DATA_FILE):
- logging.warning(f"{DATA_FILE} does not exist. Skipping upload.")
- return
-
- try:
- api = HfApi()
- with _data_lock:
- # Check file size *within* the lock just before reading
- file_content_exists = os.path.getsize(DATA_FILE) > 0
- if not file_content_exists:
- logging.warning(f"{DATA_FILE} is empty. Skipping upload.")
- return
-
- logging.info(f"Attempting to upload {DATA_FILE} to {REPO_ID}/{HF_DATA_FILE_PATH}...")
- api.upload_file(
- path_or_fileobj=DATA_FILE,
- path_in_repo=HF_DATA_FILE_PATH,
- repo_id=REPO_ID,
- repo_type="dataset",
- token=HF_TOKEN_WRITE,
- commit_message=f"Update bonus data {datetime.now(BISHKEK_TZ).strftime('%Y-%m-%d %H:%M:%S')}"
- )
- logging.info("Bonus data successfully uploaded to Hugging Face.")
- except Exception as e:
- logging.error(f"Error uploading data to Hugging Face: {e}")
-
-def upload_data_to_hf_async():
- upload_thread = threading.Thread(target=upload_data_to_hf, daemon=True)
- upload_thread.start()
-
-def periodic_backup():
- if not HF_TOKEN_WRITE:
- logging.info("Periodic backup disabled: HF_TOKEN_WRITE not set.")
- return
- while True:
- time.sleep(3600)
- logging.info("Initiating periodic backup...")
- upload_data_to_hf()
-
-def verify_telegram_data(init_data_str):
- try:
- parsed_data = parse_qs(init_data_str)
- received_hash = parsed_data.pop('hash', [None])[0]
-
- if not received_hash:
- return None, False
-
- data_check_list = []
- for key, value in sorted(parsed_data.items()):
- data_check_list.append(f"{key}={value[0]}")
- data_check_string = "\n".join(data_check_list)
-
- secret_key = hmac.new("WebAppData".encode(), BOT_TOKEN.encode(), hashlib.sha256).digest()
- calculated_hash = hmac.new(secret_key, data_check_string.encode(), hashlib.sha256).hexdigest()
-
- if calculated_hash == received_hash:
- auth_date = int(parsed_data.get('auth_date', [0])[0])
- current_time = int(time.time())
- if current_time - auth_date > 86400:
- logging.warning(f"Telegram InitData is older than 24 hours (Auth Date: {auth_date}, Current: {current_time}).")
- return parsed_data, True
- else:
- logging.warning(f"Data verification failed. Calculated: {calculated_hash}, Received: {received_hash}")
- return parsed_data, False
- except Exception as e:
- logging.error(f"Error verifying Telegram data: {e}")
- return None, False
-
-TEMPLATE = """
-
-
-