# Hugging Face Space metadata (scraped page chrome, commented out so the
# Dockerfile parses):
#   Space: lxui / apaw — Dockerfile (Build error)
#   Last change: "Update Dockerfile", commit 92e04d5 (verified)
# Base image: the official llama.cpp server image.
# NOTE(review): the `:server` tag is mutable — pin a specific version or digest
# for reproducible builds.
FROM ghcr.io/ggml-org/llama.cpp:server
LABEL maintainer="your-name <your-email@example.com>"
LABEL description="CoPaw + llama-server with Caddy proxy (no code modification)"
# Clear the base image's ENTRYPOINT so it does not interfere with the CMD below.
ENTRYPOINT []
# Runtime configuration shared by the services launched in CMD:
#   PORT=7860              — public port (served by Caddy; matches EXPOSE)
#   LLAMA_SERVER_PORT=8080 — internal llama-server port (bound to loopback)
#   COPAW_PORT=7861        — internal CoPaw port (bound to loopback)
#   MODEL_PATH             — GGUF model file downloaded at build time
ENV PYTHONUNBUFFERED=1 \
PORT=7860 \
COPAW_WORKING_DIR=/app/working \
COPAW_SECRETS_DIR=/app/working.secret \
COPAW_ACCEPT_SECURITY_NOTICE=yes \
LLAMA_SERVER_PORT=8080 \
COPAW_PORT=7861 \
MODEL_PATH=/models/Qwen3.5-4B-Q4_K_M.gguf
# Install Python and base tooling. `--no-install-recommends` plus removing the
# apt lists in the same layer keeps the image small; the package list is
# sorted alphabetically for easy diffing.
RUN apt-get update && apt-get install -y --no-install-recommends \
        ca-certificates \
        curl \
        jq \
        python3 \
        python3-pip \
        python3-venv \
    && rm -rf /var/lib/apt/lists/*
# Install CoPaw and its Python dependencies.
# FIX: the original ran a third `pip3 install 'websockets<14.0'` without
# --no-cache-dir (leaving pip's download cache in the layer) and after uvicorn
# had already been installed; installing everything in one resolver pass with
# the websockets constraint avoids both problems. The `<14.0` pin works around
# an incompatibility with newer websockets releases.
# NOTE(review): copaw/uvicorn/fastapi are unpinned — pin exact versions for a
# reproducible build. On PEP 668 ("externally managed environment") base
# images, system-wide pip installs may need a venv or
# `--break-system-packages` — verify against this base image.
RUN pip3 install --no-cache-dir --upgrade pip && \
    pip3 install --no-cache-dir copaw uvicorn fastapi 'websockets<14.0'
# Create CoPaw's working/secret directories and the model directory.
RUN mkdir -p ${COPAW_WORKING_DIR} ${COPAW_SECRETS_DIR} /models
# Download the model weights at build time.
# FIX: `--fail` makes curl exit non-zero on an HTTP error instead of silently
# saving the error page as the .gguf file (which would only surface later as a
# model-load failure at runtime). `-L` follows the redirect Hugging Face uses
# for file downloads. The target path is taken from ${MODEL_PATH} so the
# filename has a single source of truth (must match the ENV above).
# NOTE(review): baking a multi-GB model into an image layer makes the image
# very large — consider a volume or a startup-time download for production.
RUN curl --fail -L -o ${MODEL_PATH} \
    https://huggingface.co/lmstudio-community/Qwen3.5-4B-GGUF/resolve/main/Qwen3.5-4B-Q4_K_M.gguf
# Initialize CoPaw's configuration non-interactively (the piped "yes" answers
# the confirmation prompt; --defaults accepts default settings).
RUN echo "yes" | copaw init --defaults
# Rewrite the config so CoPaw sends model requests to /llama (a relative path
# that the Caddy proxy forwards to the local llama-server). jq cannot edit in
# place, hence the write-to-temp-then-move pattern for each edit.
# NOTE(review): the first edit writes under `.models` (plural) while the second
# sets `.model.default` (singular) — verify against CoPaw's config schema that
# both keys are intentional and not a typo.
RUN CONFIG_PATH="${HOME}/.copaw/config.json" && \
jq '.models.local_llama = {"provider":"openai","base_url":"/llama","api_key":"none","model_name":"qwen3.5-4b"}' ${CONFIG_PATH} > ${CONFIG_PATH}.tmp && \
mv ${CONFIG_PATH}.tmp ${CONFIG_PATH} && \
jq '.model.default = "local_llama"' ${CONFIG_PATH} > ${CONFIG_PATH}.tmp && \
mv ${CONFIG_PATH}.tmp ${CONFIG_PATH}
# Earlier approach kept for reference: download the Caddy binary directly
# (no gpg needed).
#RUN curl -L -o /usr/bin/caddy https://github.com/caddyserver/caddy/releases/download/v2.9.1/caddy_2.9.1_linux_amd64 && \
#    chmod +x /usr/bin/caddy
# Install Caddy from the official Cloudsmith apt repository (needs gnupg to
# dearmor the signing key).
# FIX: gnupg was previously installed via a separate `apt-get update` layer
# whose /var/lib/apt/lists was never cleaned in that layer (a later `rm -rf`
# cannot shrink an earlier layer — hadolint DL3009); it is now installed and
# cleaned up in this single layer, with --no-install-recommends added.
RUN apt-get update && apt-get install -y --no-install-recommends gnupg && \
    curl -1sLf 'https://dl.cloudsmith.io/public/caddy/stable/gpg.key' | gpg --dearmor -o /usr/share/keyrings/caddy-stable-archive-keyring.gpg && \
    curl -1sLf 'https://dl.cloudsmith.io/public/caddy/stable/debian.deb.txt' | tee /etc/apt/sources.list.d/caddy-stable.list && \
    apt-get update && apt-get install -y --no-install-recommends caddy && \
    rm -rf /var/lib/apt/lists/*
# Copy the Caddy reverse-proxy configuration.
COPY Caddyfile /etc/caddy/Caddyfile
# Documentation only (does not publish the port): Caddy serves the public
# endpoint on 7860 — presumably configured in the Caddyfile; verify it matches.
EXPOSE 7860
# Start all services: llama-server and CoPaw in the background, Caddy in the
# foreground.
# FIX: exec-form CMD with an explicit `exec` makes Caddy replace the shell and
# run as PID 1, so it receives SIGTERM from `docker stop` (the original
# shell-form CMD left `sh` as PID 1, which does not forward signals). The
# internal ports now come from the ENV vars declared above instead of being
# hard-coded, so they stay in sync.
# NOTE(review): crashes of the background llama-server/CoPaw processes are not
# supervised — consider a real init/supervisor for production.
CMD ["/bin/sh", "-c", "\
llama-server --model ${MODEL_PATH} --host 127.0.0.1 --port ${LLAMA_SERVER_PORT} --ctx-size 4096 --n-gpu-layers 0 > /tmp/llama.log 2>&1 & \
copaw app --host 127.0.0.1 --port ${COPAW_PORT} & \
exec caddy run --config /etc/caddy/Caddyfile --adapter caddyfile"]