# Local-dev convenience. HF Spaces ignores this file and consumes only the
# Dockerfile. Run `docker compose up --build` to reproduce the production
# container locally on http://localhost:7860.
services:
  website:
    build:
      context: .
      dockerfile: Dockerfile
    image: bani57-website:local
    ports:
      # Quoted so YAML 1.1 parsers don't misread digits+colon as a
      # sexagesimal integer.
      - "7860:7860"
    # Larger /dev/shm than Docker's 64 MB default — torch's share_memory()
    # falls back to it for tensor storage. The registry monkey-patches the
    # COINs share_memory call to a no-op anyway, but bumping shm_size keeps
    # any other PyTorch shared-memory code path from blowing up locally.
    shm_size: "2gb"
    environment:
      # Quoted so the consumer receives the literal string, not a YAML bool.
      DJANGO_DEBUG: "False"
      # Required. Generate locally with:
      #   python -c "import secrets; print(secrets.token_urlsafe(50))"
      # then put it in .env at the repo root (see .env.example).
      DJANGO_SECRET_KEY: ${DJANGO_SECRET_KEY:?DJANGO_SECRET_KEY must be set in .env or your shell}
      DJANGO_ALLOWED_HOSTS: ${DJANGO_ALLOWED_HOSTS:-localhost,127.0.0.1}
      CORS_ALLOWED_ORIGINS: ${CORS_ALLOWED_ORIGINS:-http://localhost:7860}
      HF_CHECKPOINTS_REPO: ${HF_CHECKPOINTS_REPO:-Bani57/checkpoints}
      # Only needed for a private checkpoint repo. An empty value is safe —
      # entrypoint.sh unsets HF_TOKEN when empty so huggingface_hub doesn't
      # build a malformed 'Bearer ' header.
      HF_TOKEN: ${HF_TOKEN:-}
      # CHECKPOINTS_ROOT defaults to RESEARCH_ROOT (/app/research) in the
      # image — snapshot_download writes weights alongside the bundled
      # configs and Loader caches so every research-code path resolves
      # against a single tree. No named volume because mixing
      # image-baked files with a fresh volume mount hides the bundled
      # caches and configs. ~5.4 GB is re-pulled on each `compose up`,
      # which takes ~3 min with hf_transfer enabled.
      TORCH_DEVICE: ${TORCH_DEVICE:-cpu}