# syntax=docker/dockerfile:1
FROM python:3.10-slim

# Non-root runtime user. UID 1000 is the Hugging Face Spaces convention
# (port 7860 below suggests this image targets a Space).
RUN useradd -m -u 1000 user
WORKDIR /app

# OS runtime dependency: libgomp1 (OpenMP) is required by the CPU torch wheels.
# update + install in one layer (avoids stale-cache bug); --no-install-recommends
# and the apt list cleanup keep the layer minimal.
RUN apt-get update && apt-get install -y --no-install-recommends \
        libgomp1 \
    && rm -rf /var/lib/apt/lists/*

# Install the (large) CPU-only torch wheel in its own layer, before
# requirements.txt, so it stays cached when other dependencies change.
RUN pip install --no-cache-dir \
        torch==2.3.1+cpu \
        --index-url https://download.pytorch.org/whl/cpu

# Python dependencies — the manifest is copied separately from the app source
# so this layer is reused when only application code changes.
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt

# Pre-download model weights at build time so container start-up needs no
# network access. The cache dir is created and handed to the non-root user
# FIRST, and the download runs as that user: this replaces a post-download
# `RUN chown -R` which would duplicate the multi-gigabyte cache into a
# second image layer.
ENV HF_HOME=/app/hf_cache
RUN mkdir -p /app/hf_cache && chown user:user /app/hf_cache

USER user
RUN python -c "\
from huggingface_hub import snapshot_download; \
snapshot_download('BAAI/bge-m3', ignore_patterns=['*.msgpack','*.h5','flax_model*','tf_model*','rust_model*']); \
snapshot_download('BAAI/bge-reranker-v2-m3', ignore_patterns=['*.msgpack','*.h5','flax_model*','tf_model*','rust_model*']); \
print('Models downloaded successfully')"

# Application source. COPY always executes with root privileges regardless of
# the active USER; --chown gives the files to the runtime user in one step
# (no follow-up chown layer needed).
COPY --chown=user:user . .

# Documentation only — the port must still be published at run time.
EXPOSE 7860

# Exec form: uvicorn runs as PID 1 and receives SIGTERM directly on
# `docker stop` for a clean shutdown.
CMD ["uvicorn", "app:app", "--host", "0.0.0.0", "--port", "7860", "--workers", "1"]