Spaces:
Sleeping
Sleeping
/**
 * One-time setup for OpenClaw on Hugging Face Spaces.
 * Runs at container startup; writes or merges openclaw.json from env (Secrets/Variables):
 *
 * Model priority (highest wins):
 *   1. GLM-4.7-Flash via Z.ai (ZHIPU_API_KEY set) — fast cloud, free, 203K ctx
 *   2. Custom Ollama HF Space (OPENCLAW_OLLAMA_BASE_URL set) — local CPU, slow
 *   3. HF Inference fallback (OPENCLAW_HF_DEFAULT_MODEL) — DeepSeek-R1 default
 *
 * Z.ai / GLM provider:
 *   - providers["z-ai"] registered with GLM model catalog
 *   - agents.defaults.model.primary = "z-ai/glm-4.7-flash" when ZHIPU_API_KEY is set
 *   - Configure via: ZHIPU_API_KEY
 *
 * Custom Ollama provider (hf-ollama-qwen3-vl) — used when GLM key not set:
 *   - providers["hf-ollama-qwen3-vl"] registered with baseUrl + apiKey
 *   - Configure via:
 *       OPENCLAW_OLLAMA_BASE_URL (default: https://ubix-Clawd.hf.space/v1)
 *       OPENCLAW_OLLAMA_API_KEY (default: "ollama")
 *
 * OpenRouter free model catalog (15 models registered; a subset wired in as fallbacks):
 *   - providers.openrouter registered under models.providers with full model list
 *   - agents.defaults.model.fallbacks populated with free-tier models
 *   - Switch model in chat with: /model openrouter/<provider>/<model-id>
 *   - Only written when OPENROUTER_API_KEY is present
 *
 * Gateway auth:
 *   - gateway.auth: OPENCLAW_GATEWAY_TOKEN (token) or OPENCLAW_GATEWAY_PASSWORD (password);
 *     token wins if both are set
 *   - gateway.controlUi.dangerouslyDisableDeviceAuth when auth is set (no device pairing in Spaces)
 *
 * Networking:
 *   - gateway.trustedProxies from OPENCLAW_GATEWAY_TRUSTED_PROXIES, or default HF proxy IPs
 *   - gateway.controlUi.allowedOrigins from OPENCLAW_CONTROL_UI_ALLOWED_ORIGINS (comma-separated)
 *
 * HF_TOKEN is read by the gateway at runtime; this script only writes the above into config.
 */
| import fs from "node:fs"; | |
| import path from "node:path"; | |
// ── Paths ──
// State lives under $OPENCLAW_HOME, falling back to $HOME, then to the
// HF Spaces container default of /home/user.
const home = process.env.OPENCLAW_HOME || process.env.HOME || "/home/user";
const stateDir = path.join(home, ".openclaw");
const configPath = path.join(home, ".openclaw", "openclaw.json");
// ── Gateway token (supports file-mounted secrets for platforms that mount secrets as files) ──
/**
 * Resolve the gateway auth token.
 *
 * Precedence:
 *   1. OPENCLAW_GATEWAY_TOKEN      — the token value itself, trimmed
 *   2. OPENCLAW_GATEWAY_TOKEN_FILE — path to a file containing the token
 *
 * @returns {string} the trimmed token, or "" when no token is configured
 *                   or the token file is missing/unreadable.
 */
function readGatewayToken() {
  const fromEnv = process.env.OPENCLAW_GATEWAY_TOKEN?.trim();
  if (fromEnv) return fromEnv;
  const filePath = process.env.OPENCLAW_GATEWAY_TOKEN_FILE?.trim();
  if (!filePath) return "";
  try {
    // Read directly and let a missing file fall into the catch: avoids the
    // existsSync/readFileSync check-then-use race (TOCTOU) of the old code.
    return fs.readFileSync(filePath, "utf-8").trim();
  } catch {
    // Missing or unreadable token file means "no token" (best-effort by design).
    return "";
  }
}
// ── Env reads ──
// Z.ai / GLM API key — when present, GLM-4.7-Flash becomes the primary model.
const zhipuApiKey = process.env.ZHIPU_API_KEY?.trim();
// Custom Ollama/OpenAI-compat provider hosted on an HF Space.
// Set OPENCLAW_OLLAMA_BASE_URL="" to disable it explicitly; leaving the
// variable unset keeps the built-in default Space URL.
const ollamaBaseUrl =
  (process.env.OPENCLAW_OLLAMA_BASE_URL ?? "https://ubix-Clawd.hf.space/v1").trim();
const ollamaApiKey = process.env.OPENCLAW_OLLAMA_API_KEY?.trim() || "ollama";
const ollamaEnabled = ollamaBaseUrl !== "";
// HF Inference model used only when both GLM and Ollama are unavailable.
const hfFallbackModel =
  process.env.OPENCLAW_HF_DEFAULT_MODEL?.trim() ||
  "huggingface/deepseek-ai/DeepSeek-R1";
// Pick the primary model: GLM (fast, free cloud) beats Ollama beats HF fallback.
function resolvePrimaryModel() {
  if (zhipuApiKey) return "z-ai/glm-4.7-flash";
  if (ollamaEnabled) return "hf-ollama-qwen3-vl/voytas26/openclaw-qwen3vl-8b-opt";
  return hfFallbackModel;
}
const defaultModel = resolvePrimaryModel();
// ── Gateway credentials & OpenRouter key ──
// Token comes from env or a file-mounted secret; it takes precedence over the
// password when both are configured (see the auth section below).
const gatewayToken = readGatewayToken();
// Optional password-based gateway auth; only used when no token is present.
const gatewayPassword = process.env.OPENCLAW_GATEWAY_PASSWORD?.trim();
// When set, the OpenRouter provider and its free-model fallbacks are registered.
const openrouterKey = process.env.OPENROUTER_API_KEY?.trim();
// ── Trusted proxies ──
// Default HF Space proxy IPs so the Control UI works without extra config.
// Override with OPENCLAW_GATEWAY_TRUSTED_PROXIES (comma-separated) if you see
// "Proxy headers detected from untrusted address" in the logs.
const DEFAULT_HF_TRUSTED_PROXY_IPS = [
  "10.16.4.123",
  "10.16.34.155",
  "10.20.1.9",
  "10.20.1.222",
  "10.20.26.157",
  "10.20.31.87",
];
// Split a comma-separated value into trimmed, non-empty entries.
function parseCsvList(value) {
  if (!value) return [];
  return value
    .split(",")
    .map((entry) => entry.trim())
    .filter((entry) => entry.length > 0);
}
// Any non-empty env value replaces the defaults — even if it parses to an
// empty list (e.g. ","), matching the original override semantics.
const trustedProxiesRaw = process.env.OPENCLAW_GATEWAY_TRUSTED_PROXIES?.trim();
const trustedProxies = trustedProxiesRaw
  ? parseCsvList(trustedProxiesRaw)
  : DEFAULT_HF_TRUSTED_PROXY_IPS;
// ── Allowed origins ──
const allowedOriginsRaw = process.env.OPENCLAW_CONTROL_UI_ALLOWED_ORIGINS?.trim();
const allowedOrigins = parseCsvList(allowedOriginsRaw);
// ── Load existing config (merge, don't overwrite) ──
// Start from whatever is already on disk so restarts preserve manual edits.
// Reading directly (instead of existsSync-then-read) avoids a check-then-use
// race; a missing or unparsable file just means we start from an empty object.
let config = {};
try {
  config = JSON.parse(fs.readFileSync(configPath, "utf-8"));
} catch {
  // ENOENT on first boot, or corrupt JSON — keep config empty and rebuild below.
}
// ── 1. Register providers & set default model ──
// Providers must live under models.providers (not root-level providers).
// Make sure every nested container exists before anything writes into it.
config.models = config.models || {};
config.models.providers = config.models.providers || {};
if (!config.models.mode) config.models.mode = "merge";
config.agents = config.agents || {};
config.agents.defaults = config.agents.defaults || {};
config.agents.defaults.model = config.agents.defaults.model || {};
// 1a. Z.ai / GLM provider — primary when key is set.
// Fix: the user-facing model names contained mojibake ("β") where an em dash
// belongs; they are repaired here since they surface in model pickers/logs.
if (zhipuApiKey) {
  config.models.providers["z-ai"] = {
    // NOTE(review): this is the bigmodel.cn (mainland Zhipu) endpoint — confirm
    // it matches keys issued by Z.ai vs. Zhipu; the two services differ.
    baseUrl: "https://open.bigmodel.cn/api/paas/v4",
    apiKey: zhipuApiKey,
    api: "openai-completions",
    models: [
      // ── Free models ──
      {
        id: "glm-4.7-flash",
        name: "GLM-4.7-Flash (free) — fast, 203K ctx, tool calls",
        reasoning: true,
        input: ["text"],
        contextWindow: 203000,
        maxTokens: 16384,
        cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
      },
      {
        id: "glm-4.5-air",
        name: "GLM-4.5-Air (free) — MoE, thinking + tools",
        reasoning: true,
        input: ["text"],
        contextWindow: 131072,
        maxTokens: 16384,
        cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
      },
      {
        id: "glm-4v-flash",
        name: "GLM-4V-Flash (free) — vision + tools",
        reasoning: false,
        input: ["text", "image"],
        contextWindow: 8192,
        maxTokens: 4096,
        cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
      },
      // ── Paid models (low cost) ──
      // NOTE(review): cost figures look like USD per token — confirm the unit
      // the gateway expects before trusting billing estimates.
      {
        id: "glm-4.7",
        name: "GLM-4.7 (paid) — full model, strongest reasoning",
        reasoning: true,
        input: ["text"],
        contextWindow: 203000,
        maxTokens: 16384,
        cost: { input: 0.000003, output: 0.000003, cacheRead: 0, cacheWrite: 0 },
      },
      {
        id: "glm-4.5",
        name: "GLM-4.5 (paid) — MoE flagship, long context",
        reasoning: true,
        input: ["text"],
        contextWindow: 131072,
        maxTokens: 16384,
        cost: { input: 0.000002, output: 0.000002, cacheRead: 0, cacheWrite: 0 },
      },
    ],
  };
  console.log("[openclaw-hf-setup] z-ai provider registered -> primary: glm-4.7-flash");
} else {
  console.warn(
    "[openclaw-hf-setup] ZHIPU_API_KEY not set. GLM models unavailable. " +
      "Add ZHIPU_API_KEY from https://open.bigmodel.cn to use GLM-4.7-Flash for free."
  );
}
// 1b. Custom Ollama/OpenAI-compat provider (hf-ollama-qwen3-vl)
if (ollamaEnabled) {
  config.models.providers["hf-ollama-qwen3-vl"] = {
    baseUrl: ollamaBaseUrl,
    apiKey: ollamaApiKey,
    api: "openai-completions",
    models: [
      {
        id: "voytas26/openclaw-qwen3vl-8b-opt",
        name: "qwen3vl (HF Space - CPU)",
        reasoning: true,
        input: ["text"],
        contextWindow: 128000,
        maxTokens: 32000,
        cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
      },
    ],
  };
  console.log(
    `[openclaw-hf-setup] models.providers["hf-ollama-qwen3-vl"] registered -> ${ollamaBaseUrl}`
  );
} else {
  // Fix: the old message always claimed we fell back to the HF model, which
  // was wrong when ZHIPU_API_KEY made GLM the primary — report defaultModel.
  console.log(
    "[openclaw-hf-setup] OPENCLAW_OLLAMA_BASE_URL is empty — Ollama provider disabled, " +
      `primary model: ${defaultModel}`
  );
}
// 1c. Set the primary default model (was mislabelled "1b", duplicating the
// section above). Resolution order: GLM > Ollama > HF fallback.
config.agents.defaults.model.primary = defaultModel;
// ── 2. OpenRouter provider + free model catalog as fallbacks ──
// Registers the 15-model catalog and an ordered fallback chain; skipped
// entirely when no OPENROUTER_API_KEY is present (models would be unusable).
// Fixes in this revision:
//   - fallbacks contained mistral-small twice (general + vision) — deduped.
//   - mojibake ("β") in user-facing model names repaired to em dashes.
if (openrouterKey) {
  config.models.providers.openrouter = {
    apiKey: openrouterKey,
    api: "openai-completions",
    baseUrl: "https://openrouter.ai/api/v1",
    models: [
      // ── Web Search ──
      {
        id: "perplexity/sonar",
        name: "Perplexity Sonar (web search - paid)",
        reasoning: false,
        input: ["text"],
        contextWindow: 127072,
        maxTokens: 8192,
        cost: { input: 0.000001, output: 0.000001, cacheRead: 0, cacheWrite: 0 },
      },
      // ── General Purpose (FREE) ──
      {
        id: "meta-llama/llama-3.3-70b-instruct:free",
        name: "Llama 3.3 70B (free) — best general purpose",
        reasoning: false,
        input: ["text"],
        contextWindow: 128000,
        maxTokens: 8192,
        cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
      },
      {
        id: "qwen/qwen3-next-80b-a3b-instruct:free",
        name: "Qwen3 Next 80B (free) — strong general, 262K ctx",
        reasoning: false,
        input: ["text"],
        contextWindow: 262144,
        maxTokens: 16384,
        cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
      },
      {
        id: "mistralai/mistral-small-3.1-24b-instruct:free",
        name: "Mistral Small 3.1 24B (free) — vision + tools",
        reasoning: false,
        input: ["text", "image"],
        contextWindow: 128000,
        maxTokens: 8192,
        cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
      },
      {
        id: "google/gemma-3-27b-it:free",
        name: "Gemma 3 27B (free) — vision + tools",
        reasoning: false,
        input: ["text", "image"],
        contextWindow: 131072,
        maxTokens: 8192,
        cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
      },
      // ── Coding (FREE) ──
      {
        id: "qwen/qwen3-coder:free",
        name: "Qwen3 Coder 480B (free) — best free coding, 262K ctx",
        reasoning: false,
        input: ["text"],
        contextWindow: 262144,
        maxTokens: 16384,
        cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
      },
      {
        id: "openai/gpt-oss-120b:free",
        name: "OpenAI OSS 120B (free) — strong coding + tools",
        reasoning: false,
        input: ["text"],
        contextWindow: 131072,
        maxTokens: 16384,
        cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
      },
      // ── Reasoning (FREE) ──
      // NOTE(review): labelled "(free)" but the slug has no ":free" suffix —
      // confirm against the OpenRouter catalog that this is the free variant.
      {
        id: "qwen/qwen3-235b-a22b-thinking-2507",
        name: "Qwen3 235B Thinking (free) — best free reasoning",
        reasoning: true,
        input: ["text"],
        contextWindow: 131072,
        maxTokens: 16384,
        cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
      },
      {
        id: "arcee-ai/trinity-large-preview:free",
        name: "Arcee Trinity Large (free) — reasoning + tools",
        reasoning: true,
        input: ["text"],
        contextWindow: 131072,
        maxTokens: 8192,
        cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
      },
      {
        id: "stepfun/step-3.5-flash:free",
        name: "StepFun Step 3.5 Flash (free) — fast reasoning, 256K ctx",
        reasoning: true,
        input: ["text"],
        contextWindow: 256000,
        maxTokens: 16384,
        cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
      },
      // ── Vision + Multimodal (FREE) ──
      // NOTE(review): no ":free" suffix here either — verify the slug.
      {
        id: "qwen/qwen3-vl-30b-a3b-thinking",
        name: "Qwen3 VL 30B Thinking (free) — vision + reasoning",
        reasoning: true,
        input: ["text", "image"],
        contextWindow: 131072,
        maxTokens: 8192,
        cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
      },
      {
        id: "nvidia/nemotron-nano-12b-v2-vl:free",
        name: "NVIDIA Nemotron 12B VL (free) — vision + tools",
        reasoning: false,
        input: ["text", "image"],
        contextWindow: 128000,
        maxTokens: 8192,
        cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
      },
      // ── Fast / Lightweight (FREE) ──
      {
        id: "nvidia/nemotron-3-nano-30b-a3b:free",
        name: "NVIDIA Nemotron Nano 30B (free) — fast MoE, agentic",
        reasoning: false,
        input: ["text"],
        contextWindow: 256000,
        maxTokens: 16384,
        cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
      },
      {
        id: "z-ai/glm-4.5-air:free",
        name: "GLM-4.5 Air (free) — MoE, thinking + tools",
        reasoning: true,
        input: ["text"],
        contextWindow: 131072,
        maxTokens: 8192,
        cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
      },
      // ── Auto Router (FREE random) ──
      {
        id: "openrouter/free",
        name: "OpenRouter Free Router — random free model",
        reasoning: false,
        input: ["text", "image"],
        contextWindow: 200000,
        maxTokens: 8192,
        cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
      },
    ],
  };
  // Ordered fallback list — switch with /model in chat. Deduped via Set
  // (first occurrence wins) so no model is retried twice in the chain;
  // the previous list repeated mistral-small under "general" and "vision".
  config.agents.defaults.model.fallbacks = [
    ...new Set([
      // GLM (Z.ai free) — first when the key is set
      ...(zhipuApiKey ? ["z-ai/glm-4.5-air", "z-ai/glm-4v-flash"] : []),
      // Web search
      "openrouter/perplexity/sonar",
      // General purpose
      "openrouter/meta-llama/llama-3.3-70b-instruct:free",
      "openrouter/qwen/qwen3-next-80b-a3b-instruct:free",
      "openrouter/mistralai/mistral-small-3.1-24b-instruct:free",
      // Coding
      "openrouter/qwen/qwen3-coder:free",
      "openrouter/openai/gpt-oss-120b:free",
      // Reasoning
      "openrouter/qwen/qwen3-235b-a22b-thinking-2507",
      "openrouter/stepfun/step-3.5-flash:free",
      // Vision
      "openrouter/qwen/qwen3-vl-30b-a3b-thinking",
      // Auto
      "openrouter/openrouter/free",
    ]),
  ];
  console.log(
    `[openclaw-hf-setup] OpenRouter configured -> ${config.models.providers.openrouter.models.length} models registered, ` +
      `${config.agents.defaults.model.fallbacks.length} fallbacks set`
  );
} else {
  console.warn(
    "[openclaw-hf-setup] OPENROUTER_API_KEY not set. " +
      "OpenRouter free models will not be available. " +
      "Add OPENROUTER_API_KEY in Space Secrets to enable them."
  );
}
// ── 3. Gateway auth ──
// Token auth wins when both OPENCLAW_GATEWAY_TOKEN and
// OPENCLAW_GATEWAY_PASSWORD are configured.
const useTokenAuth = Boolean(gatewayToken);
const usePasswordAuth = Boolean(gatewayPassword) && !useTokenAuth;
if (useTokenAuth || usePasswordAuth) {
  if (!config.gateway) config.gateway = {};
  if (!config.gateway.auth) config.gateway.auth = {};
  if (useTokenAuth) {
    config.gateway.auth.mode = "token";
    config.gateway.auth.token = gatewayToken;
  } else {
    config.gateway.auth.mode = "password";
    config.gateway.auth.password = gatewayPassword;
  }
  // Disable device pairing — Spaces have no CLI to approve pairing requests.
  // Control UI will accept token/password only.
  if (!config.gateway.controlUi) config.gateway.controlUi = {};
  config.gateway.controlUi.dangerouslyDisableDeviceAuth = true;
}
// ── 4. Trusted proxies ──
// Always written: either the env-provided list or the HF Space defaults.
config.gateway = config.gateway || {};
config.gateway.trustedProxies = trustedProxies;
// ── 5. Allowed origins ──
// Only written when explicitly configured, so an absent env var never
// clobbers an existing allowedOrigins entry in the config file.
if (allowedOrigins.length > 0) {
  config.gateway.controlUi = config.gateway.controlUi || {};
  config.gateway.controlUi.allowedOrigins = allowedOrigins;
}
// ── Write config ──
fs.mkdirSync(stateDir, { recursive: true });
fs.writeFileSync(configPath, JSON.stringify(config, null, 2), "utf-8");
// ── Startup summary ──
// One grep-friendly key=value line describing what this run configured.
const authKind = useTokenAuth ? "token" : usePasswordAuth ? "password" : "none";
const summary = {
  token_present: useTokenAuth ? "1" : "0",
  password_present: usePasswordAuth ? "1" : "0",
  auth: authKind,
  glm_configured: zhipuApiKey ? "1" : "0",
  ollama_provider: ollamaEnabled ? "1" : "0",
  default_model: defaultModel,
  openrouter_configured: openrouterKey ? "1" : "0",
  openrouter_models: openrouterKey ? (config.models?.providers?.openrouter?.models?.length ?? 0) : 0,
  trustedProxies: trustedProxies.length,
  allowedOrigins: allowedOrigins.length,
};
const parts = Object.entries(summary).map(([key, value]) => `${key}=${value}`);
console.log(`[openclaw-hf-setup] ${parts.join(" ")} -> ${configPath}`);
if (authKind === "none") {
  console.warn(
    "[openclaw-hf-setup] No auth set. " +
      "Add OPENCLAW_GATEWAY_TOKEN or OPENCLAW_GATEWAY_PASSWORD in Space Secrets, then restart."
  );
}