File size: 1,329 Bytes
395d302
 
 
 
 
 
 
 
 
 
 
 
 
 
a4ad4e0
4055e66
 
06a598f
4055e66
 
 
 
395d302
201460e
40f5b34
4055e66
 
 
b6ccdfc
4055e66
 
b6ccdfc
395d302
 
4055e66
 
 
395d302
a4ad4e0
395d302
 
4055e66
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
# Slim Debian-based Python, explicitly tagged for reproducibility.
# (Pin by digest for fully deterministic production builds.)
FROM python:3.11-slim

WORKDIR /app

# OS-level build deps (build-essential for packages that compile wheels from
# source; git/curl for VCS requirements and downloads).
# --no-install-recommends plus apt-list cleanup in the SAME layer keeps the
# layer small; packages sorted alphabetically for diffability.
RUN apt-get update && apt-get install -y --no-install-recommends \
    build-essential \
    curl \
    git \
    && rm -rf /var/lib/apt/lists/*

# Copy only the dependency manifest first: this layer stays cached until
# requirements.txt itself changes, so source edits don't re-run pip install.
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt

# Application source last, so the dependency layers above stay cached.
COPY main.py .

# Dedicated non-root runtime account with a stable numeric UID, so the
# chown below (and runtime policies like runAsNonRoot) can reference 1000.
RUN useradd --create-home --uid 1000 user

# Pre-create the LanceDB data directory and give the whole /data tree to
# UID/GID 1000 so the non-root user can write to it at runtime.
RUN mkdir --parents /data/lancedb && chown --recursive 1000:1000 /data

# Pre-download the embedding model + ONNX weights into the image so cold
# starts don't need a network hit.
#
# HF_HOME pins the Hugging Face cache to a fixed, world-known path. Without
# it this root-run RUN step would cache under /root/.cache/huggingface,
# which the non-root runtime user (USER user, HOME=/home/user) can never
# read — the pre-cache would be dead weight and the model would be
# re-downloaded at runtime anyway. ENV persists, so runtime reads the same
# path.
#
# NOTE(review): the HF repo also ships onnx/model_fp16.onnx; this fetches
# the fp32 file (onnx/model.onnx) — confirm which variant main.py loads.
# The trailing "|| echo" makes a network failure during build non-fatal;
# the model is then fetched lazily at runtime instead.
ENV HF_HOME=/opt/hf-cache
RUN python -c "\
import os; os.environ['HF_HUB_VERBOSITY'] = 'error'; \
from sentence_transformers import SentenceTransformer; \
SentenceTransformer( \
    'jinaai/jina-embeddings-v2-base-code', \
    backend='onnx', \
    model_kwargs={'file_name': 'onnx/model.onnx', 'provider': 'CPUExecutionProvider'}, \
    trust_remote_code=True, \
); \
print('ONNX fp32 model cached.')" || echo "Pre-cache skipped — will download at runtime."

# Hand the cache to the runtime UID (the download above ran as root).
# mkdir -p covers the case where the pre-cache was skipped entirely.
RUN mkdir -p /opt/hf-cache && chown -R 1000:1000 /opt/hf-cache

# Drop privileges: everything from here on (including the CMD process)
# runs as UID 1000.
USER user
# HOME/PATH point at the non-root user's tree so user-level pip installs
# are runnable. OMP_NUM_THREADS=2 limits OpenMP worker threads —
# presumably to keep CPU inference from oversubscribing a small
# container; confirm against the deployment's CPU allocation.
ENV HOME=/home/user \
    PATH=/home/user/.local/bin:$PATH \
    OMP_NUM_THREADS=2

# Persist /data (LanceDB) across container restarts. Declared AFTER the
# directory was created and chown'ed above, so its contents survive.
VOLUME ["/data"]
# Documentation only — the port must still be published at `docker run`.
EXPOSE 7860

# Exec-form CMD: uvicorn is PID 1 and receives SIGTERM directly from
# `docker stop`. --timeout-keep-alive 120 holds idle keep-alive
# connections for 2 minutes — NOTE(review): likely tuned for a fronting
# proxy; verify against the proxy's idle timeout.
CMD ["uvicorn", "main:app", \
     "--host", "0.0.0.0", "--port", "7860", \
     "--timeout-keep-alive", "120"]