k-l-lambda Claude committed on
Commit
c4a0a71
·
1 Parent(s): 8c8e029

fix(docker): download LFS files at startup instead of COPY

Browse files

Docker COPY doesn't resolve Git LFS pointers. Instead, use a startup
script to download ONNX files from HuggingFace at container start.
This ensures ONNX model files are properly available for VS AI mode.

Co-Authored-By: Claude <noreply@anthropic.com>

Files changed (1) hide show
  1. Dockerfile +46 -18
Dockerfile CHANGED
@@ -2,28 +2,29 @@ FROM node:20-slim
2
 
3
  ENV DEBIAN_FRONTEND=noninteractive
4
 
5
- # Build timestamp: 2026-01-17T16:30
6
 
7
  WORKDIR /app
8
 
9
- # Install git and git-lfs for handling large files
10
- RUN apt-get update && apt-get install -y git git-lfs ca-certificates && git lfs install && rm -rf /var/lib/apt/lists/*
11
-
12
- # Clone the repository with LFS files - this properly resolves LFS pointers
13
- ARG REPO_URL=https://huggingface.co/spaces/k-l-lambda/trigo
14
- RUN git clone --depth 1 ${REPO_URL} /tmp/repo && \
15
- cd /tmp/repo && \
16
- git lfs pull && \
17
- cp -r trigo-web/backend/src/ /app/backend/src/ && \
18
- cp -r trigo-web/inc/ /app/inc/ && \
19
- cp -r trigo-web/app/dist/ /app/app/dist/ && \
20
- cp trigo-web/backend/package.json /app/package.json && \
21
- cp trigo-web/backend/.env /app/backend/.env && \
22
- rm -rf /tmp/repo
23
-
24
- # Install ALL deps (including dev for tsx)
25
  RUN npm install
26
 
 
 
 
 
 
 
 
 
 
 
 
 
27
  # Create a Docker-specific entry point that sets correct paths
28
  RUN echo 'import express from "express"; \
29
  import { createServer } from "http"; \
@@ -44,10 +45,37 @@ app.get("*", (req, res, next) => { if (req.path.startsWith("/health") || req.pat
44
  io.on("connection", (socket) => { console.log("Client connected:", socket.id); setupSocketHandlers(io, socket, gameManager); socket.on("disconnect", () => console.log("Client disconnected:", socket.id)); }); \
45
  httpServer.listen(PORT, "0.0.0.0", () => console.log("Server running on port " + PORT));' > docker-entry.ts
46
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
47
  ENV PORT=7860
48
  ENV HOST=0.0.0.0
49
  ENV NODE_ENV=production
50
 
51
  EXPOSE 7860
52
 
53
- CMD ["npx", "tsx", "docker-entry.ts"]
 
2
 
3
  ENV DEBIAN_FRONTEND=noninteractive
4
 
5
+ # Build timestamp: 2026-01-17T16:45
6
 
7
  WORKDIR /app
8
 
9
+ # Install curl for downloading LFS files at startup
10
+ RUN apt-get update && apt-get install -y curl ca-certificates && rm -rf /var/lib/apt/lists/*
11
+
12
+ # Copy backend package.json and install ALL deps (including dev for tsx)
13
+ COPY trigo-web/backend/package.json ./package.json
 
 
 
 
 
 
 
 
 
 
 
14
  RUN npm install
15
 
16
+ # Copy backend source
17
+ COPY trigo-web/backend/src/ ./backend/src/
18
+
19
+ # Copy inc folder
20
+ COPY trigo-web/inc/ ./inc/
21
+
22
+ # Copy frontend dist (ONNX files will be LFS pointers, we'll download them at startup)
23
+ COPY trigo-web/app/dist/ ./app/dist/
24
+
25
+ # Copy env files (only .env, .env.local is for local development only)
26
+ COPY trigo-web/backend/.env ./backend/.env
27
+
28
  # Create a Docker-specific entry point that sets correct paths
29
  RUN echo 'import express from "express"; \
30
  import { createServer } from "http"; \
 
45
  io.on("connection", (socket) => { console.log("Client connected:", socket.id); setupSocketHandlers(io, socket, gameManager); socket.on("disconnect", () => console.log("Client disconnected:", socket.id)); }); \
46
  httpServer.listen(PORT, "0.0.0.0", () => console.log("Server running on port " + PORT));' > docker-entry.ts
47
 
48
+ # Create startup script that downloads LFS files before starting server
49
+ RUN cat <<'STARTUP' > start.sh
50
+ #!/bin/bash
51
+ set -e
52
+
53
+ HF_BASE="https://huggingface.co/spaces/k-l-lambda/trigo/resolve/main/trigo-web/app/dist"
54
+
55
+ # List of ONNX files to download (relative to app/dist)
56
+ ONNX_FILES=(
57
+ "onnx/20251230-trigo-value-llama-l6-h64-it2_251221-value0.01-pretrain/LlamaCausalLM_ep0036_evaluation.onnx"
58
+ "onnx/20251230-trigo-value-llama-l6-h64-it2_251221-value0.01-pretrain/LlamaCausalLM_ep0036_tree.onnx"
59
+ )
60
+
61
+ echo "Downloading ONNX model files..."
62
+ for file in "${ONNX_FILES[@]}"; do
63
+ dir=$(dirname "app/dist/$file")
64
+ mkdir -p "$dir"
65
+ echo " Downloading $file..."
66
+ curl -sL "$HF_BASE/$file" -o "app/dist/$file"
67
+ done
68
+ echo "ONNX files downloaded successfully."
69
+
70
+ # Start the server
71
+ exec npx tsx docker-entry.ts
72
+ STARTUP
73
+ RUN chmod +x start.sh
74
+
75
  ENV PORT=7860
76
  ENV HOST=0.0.0.0
77
  ENV NODE_ENV=production
78
 
79
  EXPOSE 7860
80
 
81
+ CMD ["./start.sh"]