| |
| """ |
| OpenClaw HF Spaces Persistence β Full Directory Sync |
| ===================================================== |
| |
| Simplified persistence: upload/download the entire ~/.openclaw directory |
| as-is to/from a Hugging Face Dataset repo. |
| |
| - Startup: snapshot_download β ~/.openclaw |
| - Periodic: upload_folder β dataset openclaw_data/ |
| - Shutdown: final upload_folder β dataset openclaw_data/ |
| """ |
|
|
import json
import os
import re
import shutil
import signal
import ssl
import subprocess
import sys
import tempfile
import threading
import time
import traceback
import urllib.error
import urllib.request
from datetime import datetime
from pathlib import Path

# Must be set before huggingface_hub reads them.
os.environ.setdefault("HF_HUB_DOWNLOAD_TIMEOUT", "300")
os.environ.setdefault("HF_HUB_UPLOAD_TIMEOUT", "600")

from huggingface_hub import HfApi, snapshot_download
|
|
| |
|
|
| class TeeLogger: |
| """Duplicate output to stream and file.""" |
| def __init__(self, filename, stream): |
| self.stream = stream |
| self.file = open(filename, "a", encoding="utf-8") |
| def write(self, message): |
| self.stream.write(message) |
| self.file.write(message) |
| self.flush() |
| def flush(self): |
| self.stream.flush() |
| self.file.flush() |
| def fileno(self): |
| return self.stream.fileno() |
|
|
| |
|
|
| HF_TOKEN = os.environ.get("HF_TOKEN") |
| OPENCLAW_HOME = Path.home() / ".openclaw" |
| APP_DIR = Path("/app/openclaw") |
|
|
| |
| DATASET_PATH = ".openclaw" |
|
|
| |
| OPENAI_API_KEY = os.environ.get("OPENAI_API_KEY", "") |
| OPENAI_BASE_URL = os.environ.get("OPENAI_BASE_URL", "https://api.openai.com/v1").rstrip("/") |
|
|
| |
| OPENROUTER_API_KEY = os.environ.get("OPENROUTER_API_KEY", "") |
|
|
| |
| XAI_API_KEY = os.environ.get("XAI_API_KEY", "") |
| XAI_BASE_URL = os.environ.get("XAI_BASE_URL", "https://api.x.ai/v1") |
|
|
| |
| |
| GROQ_API_KEY = os.environ.get("GROQ_API_KEY", "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") |
| GROQ_BASE_URL = os.environ.get("GROQ_BASE_URL", "https://api.groq.com/openai/v1") |
|
|
| |
| GATEWAY_TOKEN = os.environ.get("GATEWAY_TOKEN", "huggingclaw") |
|
|
| |
| |
| OPENCLAW_DEFAULT_MODEL = os.environ.get("OPENCLAW_DEFAULT_MODEL") or ( |
| "groq/llama-3.3-70b-versatile" if GROQ_API_KEY else |
| "xai/grok-beta" if XAI_API_KEY else |
| "openai/gpt-5-nano" if OPENAI_API_KEY else "openrouter/openai/gpt-oss-20b:free" |
| ) |
| SPACE_HOST = os.environ.get("SPACE_HOST", "") |
| SPACE_ID = os.environ.get("SPACE_ID", "") |
|
|
| SYNC_INTERVAL = int(os.environ.get("SYNC_INTERVAL", "60")) |
| AUTO_CREATE_DATASET = os.environ.get("AUTO_CREATE_DATASET", "false").lower() in ("true", "1", "yes") |
|
|
| |
| |
| |
| HF_REPO_ID = os.environ.get("OPENCLAW_DATASET_REPO", "") |
| if not HF_REPO_ID and SPACE_ID: |
| |
| HF_REPO_ID = f"{SPACE_ID}-data" |
| print(f"[SYNC] OPENCLAW_DATASET_REPO not set β auto-derived from SPACE_ID: {HF_REPO_ID}") |
| elif not HF_REPO_ID and HF_TOKEN: |
| |
| try: |
| _api = HfApi(token=HF_TOKEN) |
| _username = _api.whoami()["name"] |
| HF_REPO_ID = f"{_username}/HuggingClaw-data" |
| print(f"[SYNC] OPENCLAW_DATASET_REPO not set β auto-derived from HF_TOKEN: {HF_REPO_ID}") |
| del _api, _username |
| except Exception as e: |
| print(f"[SYNC] WARNING: Could not derive username from HF_TOKEN: {e}") |
| HF_REPO_ID = "" |
|
|
| |
| log_dir = OPENCLAW_HOME / "workspace" |
| log_dir.mkdir(parents=True, exist_ok=True) |
| sys.stdout = TeeLogger(log_dir / "sync.log", sys.stdout) |
| sys.stderr = sys.stdout |
|
|
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
| |
|
|
| |
| TELEGRAM_API_BASE = os.environ.get("TELEGRAM_API_BASE", "") |
|
|
| TELEGRAM_API_BASES = [ |
| "https://api.telegram.org", |
| "https://telegram-api.mykdigi.com", |
| "https://telegram-api-proxy-anonymous.pages.dev/api", |
| ] |
|
|
|
|
| def probe_telegram_api(timeout: int = 8) -> str: |
| """Probe Telegram API endpoints and return the first reachable one. |
| |
| First checks if official api.telegram.org is reachable (HTTP level). |
| If not, tries mirrors. No bot token required β just tests connectivity. |
| Returns the working base URL (without trailing slash), or "" if all fail. |
| """ |
| ctx = ssl.create_default_context() |
| for base in TELEGRAM_API_BASES: |
| url = base.rstrip("/") + "/" |
| try: |
| req = urllib.request.Request(url, method="GET") |
| resp = urllib.request.urlopen(req, timeout=timeout, context=ctx) |
| print(f"[TELEGRAM] β Reachable: {base} (HTTP {resp.status})") |
| return base.rstrip("/") |
| except urllib.error.HTTPError as e: |
| |
| print(f"[TELEGRAM] β Reachable: {base} (HTTP {e.code})") |
| return base.rstrip("/") |
| except Exception as e: |
| reason = str(e)[:80] |
| print(f"[TELEGRAM] β Unreachable: {base} ({reason})") |
| continue |
|
|
| print("[TELEGRAM] WARNING: All API endpoints unreachable!") |
| return "" |
|
|
|
|
| |
|
|
| class OpenClawFullSync: |
| """Upload/download the entire ~/.openclaw directory to HF Dataset.""" |
|
|
| def __init__(self): |
| self.enabled = False |
| self.dataset_exists = False |
| self.api = None |
|
|
| if not HF_TOKEN: |
| print("[SYNC] WARNING: HF_TOKEN not set. Persistence disabled.") |
| return |
| if not HF_REPO_ID: |
| print("[SYNC] WARNING: Could not determine dataset repo (no SPACE_ID or OPENCLAW_DATASET_REPO).") |
| print("[SYNC] Persistence disabled.") |
| return |
|
|
| self.enabled = True |
| self.api = HfApi(token=HF_TOKEN) |
| self.dataset_exists = self._ensure_repo_exists() |
|
|
| |
|
|
| def _ensure_repo_exists(self): |
| """Check if dataset repo exists; auto-create only when AUTO_CREATE_DATASET=true AND HF_TOKEN is set.""" |
| try: |
| self.api.repo_info(repo_id=HF_REPO_ID, repo_type="dataset") |
| print(f"[SYNC] Dataset repo found: {HF_REPO_ID}") |
| return True |
| except Exception: |
| if not AUTO_CREATE_DATASET: |
| print(f"[SYNC] Dataset repo NOT found: {HF_REPO_ID}") |
| print(f"[SYNC] Set AUTO_CREATE_DATASET=true to auto-create.") |
| print(f"[SYNC] Persistence disabled (app will still run normally).") |
| return False |
| print(f"[SYNC] Dataset repo NOT found: {HF_REPO_ID} β creating...") |
| try: |
| self.api.create_repo( |
| repo_id=HF_REPO_ID, |
| repo_type="dataset", |
| private=True, |
| ) |
| print(f"[SYNC] β Dataset repo created: {HF_REPO_ID}") |
| return True |
| except Exception as e: |
| print(f"[SYNC] β Failed to create dataset repo: {e}") |
| return False |
|
|
| |
|
|
| def load_from_repo(self): |
| """Download from dataset β ~/.openclaw""" |
| if not self.enabled: |
| print("[SYNC] Persistence disabled - skipping restore") |
| self._ensure_default_config() |
| self._patch_config() |
| return |
|
|
| if not self.dataset_exists: |
| print(f"[SYNC] Dataset {HF_REPO_ID} does not exist - starting fresh") |
| self._ensure_default_config() |
| self._patch_config() |
| return |
|
|
| print(f"[SYNC] βΆ Restoring ~/.openclaw from dataset {HF_REPO_ID} ...") |
| OPENCLAW_HOME.mkdir(parents=True, exist_ok=True) |
|
|
| try: |
| files = self.api.list_repo_files(repo_id=HF_REPO_ID, repo_type="dataset") |
| openclaw_files = [f for f in files if f.startswith(f"{DATASET_PATH}/")] |
| if not openclaw_files: |
| print(f"[SYNC] No {DATASET_PATH}/ folder in dataset. Starting fresh.") |
| self._ensure_default_config() |
| self._patch_config() |
| return |
|
|
| print(f"[SYNC] Found {len(openclaw_files)} files under {DATASET_PATH}/ in dataset") |
|
|
| with tempfile.TemporaryDirectory() as tmpdir: |
| snapshot_download( |
| repo_id=HF_REPO_ID, |
| repo_type="dataset", |
| allow_patterns=f"{DATASET_PATH}/**", |
| local_dir=tmpdir, |
| token=HF_TOKEN, |
| ) |
| downloaded_root = Path(tmpdir) / DATASET_PATH |
| if downloaded_root.exists(): |
| for item in downloaded_root.rglob("*"): |
| if item.is_file(): |
| rel = item.relative_to(downloaded_root) |
| dest = OPENCLAW_HOME / rel |
| dest.parent.mkdir(parents=True, exist_ok=True) |
| shutil.copy2(str(item), str(dest)) |
| print("[SYNC] β Restore completed.") |
| else: |
| print("[SYNC] Downloaded snapshot but dir not found. Starting fresh.") |
|
|
| except Exception as e: |
| print(f"[SYNC] β Restore failed: {e}") |
| traceback.print_exc() |
|
|
| |
| self._patch_config() |
| self._debug_list_files() |
|
|
| |
|
|
| def save_to_repo(self): |
| """Upload entire ~/.openclaw directory β dataset (all files, no filtering)""" |
| if not self.enabled: |
| return |
| if not OPENCLAW_HOME.exists(): |
| print("[SYNC] ~/.openclaw does not exist, nothing to save.") |
| return |
|
|
| |
| if not self._ensure_repo_exists(): |
| print(f"[SYNC] Dataset {HF_REPO_ID} unavailable - skipping save") |
| return |
|
|
| print(f"[SYNC] βΆ Uploading ~/.openclaw β dataset {HF_REPO_ID}/{DATASET_PATH}/ ...") |
|
|
| try: |
| |
| total_size = 0 |
| file_count = 0 |
| for root, dirs, fls in os.walk(OPENCLAW_HOME): |
| for fn in fls: |
| fp = os.path.join(root, fn) |
| sz = os.path.getsize(fp) |
| total_size += sz |
| file_count += 1 |
| rel = os.path.relpath(fp, OPENCLAW_HOME) |
| print(f"[SYNC] uploading: {rel} ({sz} bytes)") |
| print(f"[SYNC] Uploading: {file_count} files, {total_size} bytes total") |
|
|
| if file_count == 0: |
| print("[SYNC] Nothing to upload.") |
| return |
|
|
| |
| self.api.upload_folder( |
| folder_path=str(OPENCLAW_HOME), |
| path_in_repo=DATASET_PATH, |
| repo_id=HF_REPO_ID, |
| repo_type="dataset", |
| token=HF_TOKEN, |
| commit_message=f"Sync .openclaw β {datetime.now().isoformat()}", |
| ignore_patterns=[ |
| "*.log", |
| "*.lock", |
| "*.tmp", |
| "*.pid", |
| "__pycache__", |
| ], |
| ) |
| print(f"[SYNC] β Upload completed at {datetime.now().isoformat()}") |
|
|
| |
| try: |
| files = self.api.list_repo_files(repo_id=HF_REPO_ID, repo_type="dataset") |
| oc_files = [f for f in files if f.startswith(f"{DATASET_PATH}/")] |
| print(f"[SYNC] Dataset now has {len(oc_files)} files under {DATASET_PATH}/") |
| for f in oc_files[:30]: |
| print(f"[SYNC] {f}") |
| if len(oc_files) > 30: |
| print(f"[SYNC] ... and {len(oc_files) - 30} more") |
| except Exception: |
| pass |
|
|
| except Exception as e: |
| print(f"[SYNC] β Upload failed: {e}") |
| traceback.print_exc() |
|
|
| |
|
|
| def _ensure_default_config(self): |
| config_path = OPENCLAW_HOME / "openclaw.json" |
| if config_path.exists(): |
| return |
| default_src = Path(__file__).parent / "openclaw.json.default" |
| if default_src.exists(): |
| shutil.copy2(str(default_src), str(config_path)) |
| |
| try: |
| with open(config_path, "r") as f: |
| cfg = json.load(f) |
| |
| if "gateway" in cfg: |
| cfg["gateway"]["auth"] = {"token": GATEWAY_TOKEN} |
| if OPENAI_API_KEY and "models" in cfg and "providers" in cfg["models"] and "openai" in cfg["models"]["providers"]: |
| cfg["models"]["providers"]["openai"]["apiKey"] = OPENAI_API_KEY |
| if OPENAI_BASE_URL: |
| cfg["models"]["providers"]["openai"]["baseUrl"] = OPENAI_BASE_URL |
| elif "models" in cfg and "providers" in cfg["models"]: |
| if not OPENAI_API_KEY: |
| cfg["models"]["providers"].pop("openai", None) |
| if OPENROUTER_API_KEY: |
| if "models" in cfg and "providers" in cfg["models"] and "openrouter" in cfg["models"]["providers"]: |
| cfg["models"]["providers"]["openrouter"]["apiKey"] = OPENROUTER_API_KEY |
| else: |
| if "models" in cfg and "providers" in cfg["models"]: |
| cfg["models"]["providers"].pop("openrouter", None) |
| print("[SYNC] No OPENROUTER_API_KEY β removed openrouter provider from config") |
| with open(config_path, "w") as f: |
| json.dump(cfg, f, indent=2) |
| except Exception as e: |
| print(f"[SYNC] Warning: failed to patch default config: {e}") |
| print("[SYNC] Created openclaw.json from default template") |
| else: |
| with open(config_path, "w") as f: |
| json.dump({ |
| "gateway": { |
| "mode": "local", "bind": "lan", "port": 7860, |
| "trustedProxies": ["0.0.0.0/0"], |
| "controlUi": { |
| "allowInsecureAuth": True, |
| "allowedOrigins": [ |
| "https://huggingface.co" |
| ] |
| } |
| }, |
| "session": {"scope": "global"}, |
| "models": {"mode": "merge", "providers": {}}, |
| "agents": {"defaults": {"workspace": "~/.openclaw/workspace"}} |
| }, f) |
| print("[SYNC] Created minimal openclaw.json") |
|
|
| def _patch_config(self): |
| """Ensure critical settings after restore.""" |
| config_path = OPENCLAW_HOME / "openclaw.json" |
| if not config_path.exists(): |
| self._ensure_default_config() |
| return |
|
|
| print("[SYNC] Patching configuration...") |
| try: |
| with open(config_path, "r") as f: |
| data = json.load(f) |
| print("[SYNC] Config parsed OK.") |
| except (json.JSONDecodeError, Exception) as e: |
| |
| print(f"[SYNC] Config JSON is corrupt: {e}") |
| backup = config_path.with_suffix(f".corrupt_{int(time.time())}") |
| try: |
| shutil.copy2(config_path, backup) |
| print(f"[SYNC] Backed up corrupt config to {backup.name}") |
| except Exception: |
| pass |
| data = {} |
| print("[SYNC] Starting from clean config.") |
|
|
| try: |
| |
| if "plugins" in data and isinstance(data.get("plugins"), dict): |
| locs = data["plugins"].get("locations", []) |
| if isinstance(locs, list) and "/dev/null" in locs: |
| data["plugins"]["locations"] = [l for l in locs if l != "/dev/null"] |
|
|
| |
| |
| allowed_origins = [ |
| "https://huggingface.co", |
| "https://*.hf.space", |
| ] |
| if SPACE_HOST: |
| allowed_origins.append(f"https://{SPACE_HOST}") |
| print(f"[SYNC] SPACE_HOST detected: {SPACE_HOST}") |
| data["gateway"] = { |
| "mode": "local", |
| "bind": "lan", |
| "port": 7860, |
| "auth": {"token": GATEWAY_TOKEN}, |
| "trustedProxies": ["0.0.0.0/0"], |
| "controlUi": { |
| "allowInsecureAuth": True, |
| "dangerouslyDisableDeviceAuth": True, |
| "allowedOrigins": allowed_origins |
| } |
| } |
| print(f"[SYNC] Set gateway config (auth=token, origins={len(allowed_origins)})") |
|
|
| |
| data.setdefault("agents", {}).setdefault("defaults", {}).setdefault("model", {}) |
| data.setdefault("session", {})["scope"] = "global" |
|
|
| |
| data.setdefault("models", {}).setdefault("providers", {}) |
| if OPENAI_API_KEY: |
| data["models"]["providers"]["openai"] = { |
| "baseUrl": OPENAI_BASE_URL, |
| "apiKey": OPENAI_API_KEY, |
| "api": "openai-completions", |
| } |
| print(f"[SYNC] Set OpenAI-compatible provider (baseUrl={OPENAI_BASE_URL})") |
| |
| if OPENROUTER_API_KEY: |
| data["models"]["providers"]["openrouter"] = { |
| "baseUrl": "https://openrouter.ai/api/v1", |
| "apiKey": OPENROUTER_API_KEY, |
| "api": "openai-completions", |
| "models": [ |
| {"id": "openai/gpt-oss-20b:free", "name": "GPT-OSS-20B (Free)"}, |
| {"id": "deepseek/deepseek-chat:free", "name": "DeepSeek V3 (Free)"} |
| ] |
| } |
| print("[SYNC] Set OpenRouter provider") |
| |
| |
| if XAI_API_KEY: |
| data["models"]["providers"]["xai"] = { |
| "baseUrl": XAI_BASE_URL, |
| "apiKey": XAI_API_KEY, |
| "api": "openai-completions", |
| "models": [ |
| {"id": "grok-beta", "name": "xAI Grok Beta"}, |
| {"id": "grok-vision-beta", "name": "xAI Grok Vision Beta"}, |
| {"id": "grok-2", "name": "xAI Grok-2"}, |
| {"id": "grok-2-latest", "name": "xAI Grok-2 Latest"} |
| ] |
| } |
| print(f"[SYNC] Set xAI Grok provider ({XAI_BASE_URL})") |
| |
| |
| if GROQ_API_KEY: |
| data["models"]["providers"]["groq"] = { |
| "baseUrl": GROQ_BASE_URL, |
| "apiKey": GROQ_API_KEY, |
| "api": "openai-completions", |
| "models": [ |
| {"id": "llama-3.3-70b-versatile", "name": "Llama 3.3 70B (Default)"}, |
| {"id": "llama-3.1-8b-instant", "name": "Llama 3.1 8B (Fast)"}, |
| {"id": "meta-llama/llama-4-maverick-17b-128e-instruct", "name": "Llama 4 Maverick 17B"}, |
| {"id": "meta-llama/llama-4-scout-17b-16e-instruct", "name": "Llama 4 Scout 17B"}, |
| {"id": "qwen/qwen3-32b", "name": "Qwen3 32B"}, |
| {"id": "moonshotai/kimi-k2-instruct", "name": "Kimi K2 Instruct"}, |
| {"id": "openai/gpt-oss-20b", "name": "GPT-OSS 20B"}, |
| {"id": "allam-2-7b", "name": "Allam-2 7B"} |
| ] |
| } |
| print(f"[SYNC] Set Groq provider ({GROQ_BASE_URL}) - 8 models available") |
| |
| |
| data["agents"]["defaults"]["model"]["primary"] = "groq/llama-3.3-70b-versatile" |
| print("[SYNC] Llama 3.3 70B set as default model") |
| |
| if not OPENAI_API_KEY and not OPENROUTER_API_KEY and not XAI_API_KEY and not GROQ_API_KEY: |
| print("[SYNC] WARNING: No API key set (OPENAI/OPENROUTER/GROQ/XAI), LLM features will not work") |
| data["models"]["providers"].pop("gemini", None) |
| data["agents"]["defaults"]["model"]["primary"] = OPENCLAW_DEFAULT_MODEL |
|
|
| |
| data.setdefault("plugins", {}).setdefault("entries", {}) |
| data["plugins"]["allow"] = ["telegram", "whatsapp"] |
| if "telegram" not in data["plugins"]["entries"]: |
| data["plugins"]["entries"]["telegram"] = {"enabled": True} |
| elif isinstance(data["plugins"]["entries"]["telegram"], dict): |
| data["plugins"]["entries"]["telegram"]["enabled"] = True |
|
|
| |
| |
| tg_ch = data.setdefault("channels", {}).setdefault("telegram", {}) |
| tg_ch["dmPolicy"] = "open" |
| tg_ch["allowFrom"] = ["*"] |
| tg_ch["configWrites"] = True |
| print("[SYNC] Set channels.telegram: dmPolicy=open, allowFrom=[*], configWrites=true") |
|
|
| |
| |
| |
|
|
| with open(config_path, "w") as f: |
| json.dump(data, f, indent=2) |
| print("[SYNC] Config patched and saved.") |
|
|
| |
| with open(config_path, "r") as f: |
| verify_data = json.load(f) |
| gw = verify_data.get("gateway", {}) |
| providers = list(verify_data.get("models", {}).get("providers", {}).keys()) |
| primary = verify_data.get("agents", {}).get("defaults", {}).get("model", {}).get("primary") |
| print(f"[SYNC] VERIFY: gateway.port={gw.get('port')}, providers={providers}, primary={primary}") |
|
|
| except Exception as e: |
| print(f"[SYNC] Failed to patch config: {e}") |
| traceback.print_exc() |
|
|
| def _debug_list_files(self): |
| print(f"[SYNC] Local ~/.openclaw tree:") |
| try: |
| count = 0 |
| for root, dirs, files in os.walk(OPENCLAW_HOME): |
| dirs[:] = [d for d in dirs if d not in {".cache", "node_modules", "__pycache__"}] |
| for name in sorted(files): |
| rel = os.path.relpath(os.path.join(root, name), OPENCLAW_HOME) |
| print(f"[SYNC] {rel}") |
| count += 1 |
| if count > 50: |
| print("[SYNC] ... (truncated)") |
| return |
| except Exception as e: |
| print(f"[SYNC] listing failed: {e}") |
|
|
| |
|
|
| def background_sync_loop(self, stop_event): |
| print(f"[SYNC] Background sync started (interval={SYNC_INTERVAL}s)") |
| while not stop_event.is_set(): |
| if stop_event.wait(timeout=SYNC_INTERVAL): |
| break |
| print(f"[SYNC] ββ Periodic sync triggered at {datetime.now().isoformat()} ββ") |
| self.save_to_repo() |
|
|
| |
|
|
| def run_openclaw(self): |
| log_file = OPENCLAW_HOME / "workspace" / "startup.log" |
| log_file.parent.mkdir(parents=True, exist_ok=True) |
|
|
| |
| if not Path(APP_DIR).exists(): |
| print(f"[SYNC] ERROR: App directory does not exist: {APP_DIR}") |
| return None |
|
|
| |
| entry_js = Path(APP_DIR) / "dist" / "entry.js" |
| if not entry_js.exists(): |
| print(f"[SYNC] ERROR: dist/entry.js not found in {APP_DIR}") |
| return None |
|
|
| |
| print(f"[SYNC] Launching: node dist/entry.js gateway") |
| print(f"[SYNC] Working directory: {APP_DIR}") |
| print(f"[SYNC] Entry point exists: {entry_js}") |
| print(f"[SYNC] Log file: {log_file}") |
|
|
| |
| log_fh = open(log_file, "a") |
|
|
| |
| env = os.environ.copy() |
| if OPENAI_API_KEY: |
| env["OPENAI_API_KEY"] = OPENAI_API_KEY |
| env["OPENAI_BASE_URL"] = OPENAI_BASE_URL |
| if OPENROUTER_API_KEY: |
| env["OPENROUTER_API_KEY"] = OPENROUTER_API_KEY |
| if not OPENAI_API_KEY and not OPENROUTER_API_KEY: |
| print(f"[SYNC] WARNING: No OPENAI_API_KEY or OPENROUTER_API_KEY set, LLM features may not work") |
|
|
| |
| |
| |
| if TELEGRAM_API_BASE: |
| tg_root = TELEGRAM_API_BASE.rstrip("/") |
| print(f"[TELEGRAM] Using user-specified API base: {tg_root}") |
| else: |
| print("[TELEGRAM] Probing Telegram API endpoints...") |
| tg_root = probe_telegram_api() |
|
|
| if tg_root and tg_root != "https://api.telegram.org": |
| env["TELEGRAM_API_ROOT"] = tg_root |
| print(f"[TELEGRAM] Set TELEGRAM_API_ROOT={tg_root}") |
| print(f"[TELEGRAM] telegram-proxy.cjs will redirect fetch() calls") |
| elif tg_root: |
| print("[TELEGRAM] Official API reachable β no proxy needed") |
| else: |
| print("[TELEGRAM] No reachable endpoint found β Telegram will not work") |
| try: |
| |
| |
| process = subprocess.Popen( |
| ["node", "dist/entry.js", "gateway"], |
| cwd=str(APP_DIR), |
| stdout=subprocess.PIPE, |
| stderr=subprocess.STDOUT, |
| text=True, |
| bufsize=1, |
| env=env |
| ) |
|
|
| |
| def copy_output(): |
| try: |
| for line in process.stdout: |
| log_fh.write(line) |
| log_fh.flush() |
| print(line, end='') |
| except Exception as e: |
| print(f"[SYNC] Output copy error: {e}") |
| finally: |
| log_fh.close() |
|
|
| thread = threading.Thread(target=copy_output, daemon=True) |
| thread.start() |
|
|
| print(f"[SYNC] Process started with PID: {process.pid}") |
| return process |
|
|
| except Exception as e: |
| log_fh.close() |
| print(f"[SYNC] ERROR: Failed to start process: {e}") |
| traceback.print_exc() |
| return None |
|
|
| |
|
|
| def main(): |
| try: |
| t_main_start = time.time() |
|
|
| t0 = time.time() |
| sync = OpenClawFullSync() |
| print(f"[TIMER] sync_hf init: {time.time() - t0:.1f}s") |
|
|
| |
| t0 = time.time() |
| sync.load_from_repo() |
| print(f"[TIMER] load_from_repo (restore): {time.time() - t0:.1f}s") |
|
|
| |
| stop_event = threading.Event() |
| t = threading.Thread(target=sync.background_sync_loop, args=(stop_event,), daemon=True) |
| t.start() |
|
|
| |
| t0 = time.time() |
| process = sync.run_openclaw() |
| print(f"[TIMER] run_openclaw launch: {time.time() - t0:.1f}s") |
| print(f"[TIMER] Total startup (init β app launched): {time.time() - t_main_start:.1f}s") |
|
|
| |
| def handle_signal(sig, frame): |
| print(f"\n[SYNC] Signal {sig} received. Shutting down...") |
| stop_event.set() |
| |
| t.join(timeout=10) |
| if process: |
| process.terminate() |
| try: |
| process.wait(timeout=5) |
| except subprocess.TimeoutExpired: |
| process.kill() |
| print("[SYNC] Final sync...") |
| sync.save_to_repo() |
| sys.exit(0) |
|
|
| signal.signal(signal.SIGINT, handle_signal) |
| signal.signal(signal.SIGTERM, handle_signal) |
|
|
| |
| if process is None: |
| print("[SYNC] ERROR: Failed to start OpenClaw process. Exiting.") |
| stop_event.set() |
| t.join(timeout=5) |
| sys.exit(1) |
|
|
| exit_code = process.wait() |
| print(f"[SYNC] OpenClaw exited with code {exit_code}") |
| stop_event.set() |
| t.join(timeout=10) |
| print("[SYNC] Final sync...") |
| sync.save_to_repo() |
| sys.exit(exit_code) |
|
|
| except Exception as e: |
| print(f"[SYNC] FATAL ERROR in main: {e}") |
| traceback.print_exc() |
| sys.exit(1) |
|
|
|
|
| if __name__ == "__main__": |
| main() |
|
|