File size: 6,448 Bytes
29d6958
 
 
 
 
 
 
 
 
 
 
0bfb89f
29d6958
 
 
4987deb
0bfb89f
abf5aa0
0bfb89f
abf5aa0
0bfb89f
21a7032
 
 
 
29d6958
0bfb89f
 
 
abf5aa0
 
 
 
29d6958
0bfb89f
abf5aa0
0bfb89f
abf5aa0
29d6958
4987deb
0bfb89f
abf5aa0
 
 
 
0bfb89f
abf5aa0
0bfb89f
 
 
 
abf5aa0
0bfb89f
 
abf5aa0
0bfb89f
b19cd4a
0bfb89f
 
 
 
 
 
b19cd4a
0bfb89f
 
b19cd4a
0bfb89f
 
 
 
 
 
 
 
 
 
 
 
 
abf5aa0
 
b19cd4a
0bfb89f
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
abf5aa0
0bfb89f
 
b19cd4a
0bfb89f
 
b19cd4a
0bfb89f
 
b19cd4a
0bfb89f
21a7032
29d6958
 
 
 
 
5365372
 
 
 
51ec4bc
bcbf1ad
 
 
 
29d6958
 
 
0bfb89f
29d6958
 
 
 
 
 
 
 
 
0bfb89f
29d6958
 
 
0bfb89f
29d6958
 
 
0bfb89f
 
29d6958
 
eec7304
 
29d6958
51ec4bc
 
29d6958
78e2c48
 
 
 
 
 
5365372
 
 
 
 
 
 
 
 
29d6958
0bfb89f
 
 
 
29d6958
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
# ════════════════════════════════════════════════════════════════
# 🦞 HuggingClaw — OpenClaw Gateway for HuggingFace Spaces
# ════════════════════════════════════════════════════════════════
# Copy this file to .env and fill in your values.
# For local development: cp .env.example .env && nano .env

# ── REQUIRED: Core Configuration ──
# [REQUIRED] LLM provider API key
# - Anthropic: sk-ant-v0-...
# - OpenAI: sk-...
# - Google: AIzaSy...
# - OpenRouter: sk-or-v1-... (300+ models via single key)
LLM_API_KEY=your_api_key_here

# [REQUIRED] LLM model to use (format: provider/model-name)
# Auto-detects provider from prefix — any provider is supported!
# Provider IDs from OpenClaw docs: docs.openclaw.ai/concepts/model-providers
#
# ── Core Providers ──
#
# Anthropic (ANTHROPIC_API_KEY):
#   - anthropic/claude-opus-4-6
#   - anthropic/claude-sonnet-4-6
#   - anthropic/claude-sonnet-4-5
#   - anthropic/claude-haiku-4-5
#
# OpenAI (OPENAI_API_KEY):
#   - openai/gpt-5.4-pro
#   - openai/gpt-5.4
#   - openai/gpt-5.4-mini
#   - openai/gpt-5.4-nano
#   - openai/gpt-4.1
#   - openai/gpt-4.1-mini
#
# Google Gemini (GEMINI_API_KEY):
#   - google/gemini-3.1-pro-preview
#   - google/gemini-3-flash-preview
#   - google/gemini-2.5-pro
#   - google/gemini-2.5-flash
#
# DeepSeek (DEEPSEEK_API_KEY):
#   - deepseek/deepseek-v3.2
#   - deepseek/deepseek-r1-0528
#   - deepseek/deepseek-r1
#
# ── OpenCode Providers ──
#
# OpenCode Zen — tested & verified models (OPENCODE_API_KEY):
#   - opencode/claude-opus-4-6
#   - opencode/gpt-5.4
#   Get key from: https://opencode.ai/auth
#
# OpenCode Go — low-cost open models (OPENCODE_API_KEY):
#   - opencode-go/kimi-k2.5
#
# ── Gateway/Router Providers ──
#
# OpenRouter — 300+ models via single API key (OPENROUTER_API_KEY):
#   - openrouter/anthropic/claude-sonnet-4-6
#   - openrouter/openai/gpt-5.4
#   - openrouter/deepseek/deepseek-v3.2
#   - openrouter/meta-llama/llama-3.3-70b-instruct:free
#   Get key from: https://openrouter.ai
#
# Kilo Gateway (KILOCODE_API_KEY):
#   - kilocode/anthropic/claude-opus-4.6
#
# ── Chinese/Asian Providers ──
#
# Z.ai / GLM (ZAI_API_KEY) — OpenClaw normalizes z-ai/z.ai → zai:
#   - zai/glm-5
#   - zai/glm-5-turbo
#   - zai/glm-4.7
#   - zai/glm-4.7-flash
#
# Moonshot / Kimi (MOONSHOT_API_KEY):
#   - moonshot/kimi-k2.5
#   - moonshot/kimi-k2-thinking
#
# MiniMax (MINIMAX_API_KEY):
#   - minimax/minimax-m2.7
#   - minimax/minimax-m2.5
#
# Xiaomi / MiMo (XIAOMI_API_KEY):
#   - xiaomi/mimo-v2-pro
#   - xiaomi/mimo-v2-omni
#
# Volcengine / Doubao (VOLCANO_ENGINE_API_KEY):
#   - volcengine/doubao-seed-1-8-251228
#   - volcengine/kimi-k2-5-260127
#
# BytePlus — international (BYTEPLUS_API_KEY):
#   - byteplus/seed-1-8-251228
#
# ── Western Providers ──
#
# Mistral (MISTRAL_API_KEY):
#   - mistral/mistral-large-latest
#   - mistral/mistral-small-2603
#   - mistral/devstral-medium
#
# xAI / Grok (XAI_API_KEY):
#   - xai/grok-4.20-beta
#   - xai/grok-4
#
# NVIDIA (NVIDIA_API_KEY):
#   - nvidia/nemotron-3-super-120b-a12b
#
# Groq (GROQ_API_KEY):
#   - groq/mixtral-8x7b-32768
#
# Cohere (COHERE_API_KEY):
#   - cohere/command-a
#
# Together (TOGETHER_API_KEY):
#   - together/meta-llama/llama-3.3-70b-instruct
#
# Cerebras (CEREBRAS_API_KEY):
#   - cerebras/zai-glm-4.7
#
# HuggingFace Inference (HUGGINGFACE_HUB_TOKEN):
#   - huggingface/deepseek-ai/DeepSeek-R1
#
# Or any other OpenClaw-supported provider (format: provider/model-name)
LLM_MODEL=anthropic/claude-sonnet-4-5

# [REQUIRED] Gateway authentication token
# Generate: openssl rand -hex 32
GATEWAY_TOKEN=your_gateway_token_here

# (Optional) Password auth — simpler alternative to token for casual users
# If set, users can log in with this password instead of the token
# OPENCLAW_PASSWORD=your_password_here

# ── OPTIONAL: Chat Integrations ──
# Enable WhatsApp pairing flow
# Set to true only if you want WhatsApp enabled
WHATSAPP_ENABLED=false

# Get bot token from: https://t.me/BotFather
TELEGRAM_BOT_TOKEN=your_bot_token_here

# Single user ID (from https://t.me/userinfobot)
TELEGRAM_USER_ID=123456789

# Multiple user IDs (comma-separated for team access)
# TELEGRAM_USER_IDS=123456789,987654321,555555555

# ── OPTIONAL: Workspace Backup to HF Dataset ──
HF_USERNAME=your_hf_username
HF_TOKEN=hf_your_token_here

# Backup dataset name (auto-created if missing)
# Default: huggingclaw-backup
BACKUP_DATASET_NAME=huggingclaw-backup

# Git commit identity for workspace syncs
WORKSPACE_GIT_USER=openclaw@example.com
WORKSPACE_GIT_NAME=OpenClaw Bot

# ── OPTIONAL: Background Services ──
# Keep-alive ping interval (seconds). Default: 300. Set 0 to disable.
KEEP_ALIVE_INTERVAL=300

# Workspace auto-sync interval (seconds). Default: 180.
SYNC_INTERVAL=180

# Webhooks: Standard POST notifications for lifecycle events
# WEBHOOK_URL=https://your-webhook-endpoint.com/log

# Optional: external keep-alive via UptimeRobot
# Use the Main API key from UptimeRobot -> Integrations.
# Do not use the Read-only API key or a Monitor-specific API key.
# Run setup-uptimerobot.sh once from your own terminal to create the monitor.
# UPTIMEROBOT_API_KEY=ur_your_api_key_here

# Trusted proxies (comma-separated IPs)
# Fixes "Proxy headers detected from untrusted address" behind reverse proxies
# Only set if you see pairing/auth errors. Find IPs in Space logs (remote=x.x.x.x)
# TRUSTED_PROXIES=10.20.31.87,10.20.26.157

# Allowed origins for Control UI (comma-separated URLs)
# Locks down the web UI to only these origins
# ALLOWED_ORIGINS=https://your-space.hf.space

# ════════════════════════════════════════════════════════════════
# QUICK START: Only 3 secrets required!
#   1. LLM_API_KEY    → From your LLM provider
#   2. LLM_MODEL      → Pick a model above
#   3. GATEWAY_TOKEN  → Run: openssl rand -hex 32
# ════════════════════════════════════════════════════════════════