# syntax=docker/dockerfile:1
# Hugging Face Space image: Caddy reverse proxy + supervisord managing the
# OpenClaw gateway and a family of sibling gateways (nanoclaw/nanobot/...).
FROM python:3.13-slim

WORKDIR /app

# Use bash with pipefail for all shell-form RUNs below: the NodeSource
# `curl | bash` pipeline would otherwise succeed even if the download failed
# (hadolint DL4006). bash ships with Debian-based python:*-slim images.
SHELL ["/bin/bash", "-o", "pipefail", "-c"]

# Runtime system packages, Node.js 22 (NodeSource), and the OpenClaw CLI.
# Install, smoke-test, and clean up in ONE layer so the apt/npm caches are
# actually removed from the image, not just shadowed by a later layer.
RUN apt-get update && apt-get install -y --no-install-recommends \
        bash \
        ca-certificates \
        caddy \
        curl \
        ffmpeg \
        git \
        supervisor \
    && curl -fsSL https://deb.nodesource.com/setup_22.x | bash - \
    && apt-get install -y --no-install-recommends nodejs \
    && npm install -g openclaw \
    && npm cache clean --force \
    # Fail the BUILD (not the container at runtime) if the toolchain is broken.
    && command -v openclaw \
    && openclaw --help >/dev/null 2>&1 \
    && node --version \
    && rm -rf /var/lib/apt/lists/*

# Python dependencies via uv + pyproject.toml. The manifest is copied before
# the full source so the dependency layer stays cached across app-code edits.
# NOTE(review): `pip install uv` is unpinned — consider `uv==<x.y.z>` for
# reproducible builds.
RUN pip install --no-cache-dir uv
COPY pyproject.toml /app/pyproject.toml
RUN uv sync --no-dev

COPY . /app

# --- Core service ports and paths -------------------------------------------
ENV PORT=7860 \
    OPENCLAW_PORT=18789 \
    OPENCLAW_BIN=openclaw \
    VAULT_PATH=/app/vault \
    OPENCLAW_HOME=/app/.openclaw \
    OPENCLAW_STATE_DIR=/app/.openclaw/state

# --- OpenClaw gateway behaviour ----------------------------------------------
# supervisord (not OpenClaw itself) owns the gateway process, hence
# AUTO_START_GATEWAY=0 + EXTERNAL_GATEWAY_MANAGED=1.
ENV AUTO_START_GATEWAY=0 \
    EXTERNAL_GATEWAY_MANAGED=1 \
    OPENCLAW_STANDARD_UI_PUBLIC_URL=/openclaw/ \
    OPENCLAW_GATEWAY_LOG_PATH=/tmp/openclaw-gateway.log \
    OPENCLAW_GATEWAY_ERR_LOG_PATH=/tmp/openclaw-gateway.err.log \
    OPENCLAW_DISABLE_BONJOUR=1 \
    OPENCLAW_DISABLE_MDNS=1 \
    OPENCLAW_ONBOARD_NONINTERACTIVE=0 \
    OPENCLAW_GATEWAY_BIND=lan \
    OPENCLAW_BOOTSTRAP_ONBOARD=1 \
    OPENCLAW_CONTROL_UI_BASE_PATH=/openclaw \
    OPENCLAW_ALLOWED_ORIGINS=https://researchengineering-agi-assistant.hf.space,http://127.0.0.1:7860,http://localhost:7860 \
    OPENCLAW_TRUSTED_PROXIES=127.0.0.1,::1 \
    OPENCLAW_CONTROL_UI_ALLOW_INSECURE_AUTH=1 \
    OPENCLAW_CONTROL_UI_DISABLE_DEVICE_AUTH=1

# --- Custom (OpenAI-compatible) model provider -------------------------------
# API key intentionally empty: the upstream Space needs none, and baking a real
# key into ENV would leak it into the image (set it at runtime instead).
ENV OPENCLAW_CUSTOM_BASE_URL=https://researchengineering-agi.hf.space/v1 \
    OPENCLAW_CUSTOM_MODEL_ID=deepseek-chat \
    OPENCLAW_CUSTOM_PROVIDER_ID=researchengineering-agi-hf-space \
    OPENCLAW_CUSTOM_COMPATIBILITY=openai \
    OPENCLAW_CUSTOM_API_KEY= \
    OPENCLAW_CUSTOM_API_KEY_OPTIONAL=1 \
    OPENCLAW_CUSTOM_API_KEY_PLACEHOLDER=no-key

# --- Streamlit UI auth (credentials supplied at runtime, never baked in) -----
ENV STREAMLIT_AUTH_ENABLED=1 \
    STREAMLIT_AUTH_USERNAME= \
    STREAMLIT_AUTH_PASSWORD= \
    LLAMA_SERVER_CTX_SIZE=8192

# --- Sibling gateways managed by supervisord ---------------------------------
# Values with spaces MUST be quoted in key=value ENV form or the build fails.
ENV NANOCLAW_ENABLED=1 \
    NANOCLAW_PORT=18889 \
    NANOCLAW_CMD=nanoclaw \
    NANOCLAW_ARGS= \
    NANOCLAW_BASE_PATH=/nanoclaw \
    NANOCLAW_CONFIG_PATH=/app/nanoclaw.json \
    NANOCLAW_LOG_PATH=/tmp/nanoclaw.log \
    NANOCLAW_ERR_LOG_PATH=/tmp/nanoclaw.err.log

ENV NANOBOT_ENABLED=1 \
    NANOBOT_PORT=18790 \
    NANOBOT_CMD=nanobot \
    NANOBOT_ARGS="gateway --port 18790" \
    NANOBOT_BASE_PATH=/nanobot \
    NANOBOT_CONFIG_PATH=/app/nanobot.json \
    NANOBOT_LOG_PATH=/tmp/nanobot.log \
    NANOBOT_ERR_LOG_PATH=/tmp/nanobot.err.log

ENV PICOCLAW_ENABLED=1 \
    PICOCLAW_PORT=18792 \
    PICOCLAW_CMD=picoclaw \
    PICOCLAW_ARGS="gateway --port 18792" \
    PICOCLAW_BASE_PATH=/picoclaw \
    PICOCLAW_CONFIG_PATH=/app/picoclaw.json \
    PICOCLAW_LOG_PATH=/tmp/picoclaw.log \
    PICOCLAW_ERR_LOG_PATH=/tmp/picoclaw.err.log

ENV IRONCLAW_ENABLED=1 \
    IRONCLAW_CMD=ironclaw \
    IRONCLAW_ARGS= \
    IRONCLAW_LOG_PATH=/tmp/ironclaw.log \
    IRONCLAW_ERR_LOG_PATH=/tmp/ironclaw.err.log

ENV ZEROCLAW_ENABLED=1 \
    ZEROCLAW_PORT=42617 \
    ZEROCLAW_CMD=zeroclaw \
    ZEROCLAW_ARGS="gateway --port 42617" \
    ZEROCLAW_BASE_PATH=/zeroclaw \
    ZEROCLAW_CONFIG_PATH=/app/zeroclaw.json \
    ZEROCLAW_LOG_PATH=/tmp/zeroclaw.log \
    ZEROCLAW_ERR_LOG_PATH=/tmp/zeroclaw.err.log

ENV NULLCLAW_ENABLED=1 \
    NULLCLAW_PORT=3000 \
    NULLCLAW_CMD=nullclaw \
    NULLCLAW_ARGS="gateway --port 3000" \
    NULLCLAW_BASE_PATH=/nullclaw \
    NULLCLAW_CONFIG_PATH=/app/nullclaw.json \
    NULLCLAW_LOG_PATH=/tmp/nullclaw.log \
    NULLCLAW_ERR_LOG_PATH=/tmp/nullclaw.err.log

# Writable dirs the gateways expect (WORKDIR already created /app itself).
RUN mkdir -p /app/vault /app/.openclaw/state

# Documentation only — the single public port proxied by Caddy.
EXPOSE 7860

# NOTE(review): the container runs as root. Only port 7860 (>1024) is bound,
# so a non-root USER (uid 1000, as Hugging Face Spaces recommend) should work —
# confirm supervisord.conf does not require root before adding one.

# Note: This Space does not run llama.cpp directly.
# Use the following command in your dedicated LLM Space Dockerfile:
# CMD ["llama-server", "--host", "0.0.0.0", "--port", "8080", "--ctx-size", "8192", "--model", "/models/your-model.gguf"]
CMD ["supervisord", "-c", "/app/supervisord.conf"]