lanny xu committed on
Commit
5472704
·
1 Parent(s): dad583a

Fix Dockerfile permissions and background pull

Browse files
Files changed (1) hide show
  1. Dockerfile +16 -7
Dockerfile CHANGED
@@ -24,24 +24,32 @@ RUN pip install --no-cache-dir -r requirements.txt
24
  COPY . .
25
 
26
  # 创建启动脚本
27
- # 1. 启动 Ollama 服务后台运行
28
- # 2. 下载需要的模型 (这里用 tinyllama 以便快速演示,你可以改为 mistral 或 llama3)
29
- # 3. 启动 FastAPI 应用 (Hugging Face Spaces 要求监听 7860 端口)
 
 
30
  RUN echo '#!/bin/bash\n\
 
 
31
  echo "🔴 Starting Ollama..."\n\
32
  ollama serve &\n\
 
33
  echo "⏳ Waiting for Ollama to start..."\n\
34
  sleep 5\n\
35
- echo "⬇️ Pulling model..."\n\
36
- ollama pull tinyllama\n\
 
 
37
  echo "🟢 Starting FastAPI Server..."\n\
38
  uvicorn server:app --host 0.0.0.0 --port 7860\n\
39
  ' > start.sh && chmod +x start.sh
40
 
41
  # 创建非 root 用户 (Hugging Face 安全要求)
42
  RUN useradd -m -u 1000 user
43
- # 给用户 Ollama 目录的权限
44
- RUN mkdir -p /.ollama && chmod 777 /.ollama
 
45
  RUN mkdir -p /app && chown -R user:user /app
46
 
47
  # 切换用户
@@ -50,6 +58,7 @@ USER user
50
  # 设置环境变量
51
  ENV HOME=/home/user
52
  ENV PATH=$HOME/.local/bin:$PATH
 
53
 
54
  # 暴露端口 (Hugging Face 默认端口)
55
  EXPOSE 7860
 
24
  COPY . .
25
 
26
  # 创建启动脚本
27
+ # 优化策略:
28
+ # 1. 设置 OLLAMA_MODELS 环境变量到用户目录
29
+ # 2. 启动 Ollama
30
+ # 3. 后台拉取模型 (不阻塞服务器启动)
31
+ # 4. 启动 FastAPI (尽快监听端口以通过健康检查)
32
  RUN echo '#!/bin/bash\n\
33
+ export OLLAMA_MODELS=/home/user/.ollama/models\n\
34
+ \n\
35
  echo "🔴 Starting Ollama..."\n\
36
  ollama serve &\n\
37
+ \n\
38
  echo "⏳ Waiting for Ollama to start..."\n\
39
  sleep 5\n\
40
+ \n\
41
+ echo "⬇️ Pulling model in background..."\n\
42
+ ollama pull tinyllama &\n\
43
+ \n\
44
  echo "🟢 Starting FastAPI Server..."\n\
45
  uvicorn server:app --host 0.0.0.0 --port 7860\n\
46
  ' > start.sh && chmod +x start.sh
47
 
48
  # 创建非 root 用户 (Hugging Face 安全要求)
49
  RUN useradd -m -u 1000 user
50
+
51
+ # 确保目录存在并赋予权限
52
+ RUN mkdir -p /home/user/.ollama/models && chown -R user:user /home/user/.ollama
53
  RUN mkdir -p /app && chown -R user:user /app
54
 
55
  # 切换用户
 
58
  # 设置环境变量
59
  ENV HOME=/home/user
60
  ENV PATH=$HOME/.local/bin:$PATH
61
+ ENV OLLAMA_MODELS=$HOME/.ollama/models
62
 
63
  # 暴露端口 (Hugging Face 默认端口)
64
  EXPOSE 7860