# Docker_ml / docker / dockerfiles / Dockerfile.onnx.trt
# Provenance: uploaded by Sankie005 ("Upload 434 files", commit c446951)
# Base image with TensorRT + ONNX Runtime preinstalled by Roboflow.
# NOTE(review): ':latest' is not reproducible — pin to a specific tag or
# digest once the intended base version is confirmed (hadolint DL3007).
FROM roboflow/roboflow-inference-server-trt-base:latest
# All build steps and the runtime process operate out of /app.
WORKDIR /app
# OS-level dependencies: ffmpeg + X11/OpenCV shared libs for video/image
# decoding, and uvicorn as an apt package (NOTE(review): uvicorn is usually
# installed via pip — confirm the Debian package is the intended one).
# --no-install-recommends keeps the image lean (hadolint DL3015); the apt
# list cache is removed in the same layer so it never persists in the image.
RUN apt-get update -y && apt-get install -y --no-install-recommends \
    ffmpeg \
    libopencv-dev \
    libsm6 \
    libxext6 \
    uvicorn \
    && rm -rf /var/lib/apt/lists/*
# Copy only the dependency manifests first so the pip layer below stays
# cached as long as the requirements files themselves are unchanged.
COPY requirements/_requirements.txt \
     requirements/requirements.clip.txt \
     requirements/requirements.doctr.txt \
     requirements/requirements.gaze.txt \
     requirements/requirements.http.txt \
     requirements/requirements.sam.txt \
     requirements/requirements.waf.txt \
     ./
# Install all Python dependency sets in one layer. --no-cache-dir (hadolint
# DL3042) prevents pip from writing a wheel cache into the layer at all,
# which makes the trailing `rm -rf ~/.cache/pip` unnecessary.
RUN pip install --no-cache-dir --upgrade pip && pip install --no-cache-dir \
    -r _requirements.txt \
    -r requirements.sam.txt \
    -r requirements.clip.txt \
    -r requirements.http.txt \
    -r requirements.waf.txt \
    -r requirements.gaze.txt \
    -r requirements.doctr.txt \
    --upgrade
# Newer libstdc++ from conda — presumably needed because wheels built against
# a newer GCC fail against the base image's libstdc++ (TODO confirm).
# -y is required: without it conda prompts for confirmation, which stalls or
# aborts a non-interactive docker build.
# NOTE(review): package is unpinned — pin a version for reproducibility.
RUN conda install -y -c anaconda libstdcxx-ng
# Application code is copied last so source changes never invalidate the
# dependency layers above. (The redundant `WORKDIR /app/` was dropped —
# the working directory is already /app from the top of the file.)
COPY inference inference
# HTTP entrypoint module for the TensorRT server (imported as trt_http:app).
COPY docker/config/trt_http.py trt_http.py
# Runtime configuration, grouped by concern (fewer layers, same values).
# ONNX Runtime is forced onto the TensorRT execution provider, with the
# engine cache enabled so compiled TRT engines are reused across runs.
ENV ONNXRUNTIME_EXECUTION_PROVIDERS=TensorrtExecutionProvider \
    REQUIRED_ONNX_PROVIDERS=TensorrtExecutionProvider \
    ORT_TENSORRT_ENGINE_CACHE_ENABLE=1
# Platform/feature flags for the inference server.
ENV VERSION_CHECK_MODE=continuous \
    CORE_MODEL_SAM_ENABLED=False \
    PROJECT=roboflow-platform
# uvicorn server settings, consumed by the ENTRYPOINT below.
ENV NUM_WORKERS=1 \
    HOST=0.0.0.0 \
    PORT=9001
# Exec-form ENTRYPOINT so the server (not /bin/sh) is PID 1 and receives
# SIGTERM from `docker stop` for clean shutdown. A shell is still needed to
# expand $NUM_WORKERS/$HOST/$PORT, so we go through `sh -c` and `exec` the
# final process to replace the shell.
ENTRYPOINT ["/bin/sh", "-c", "exec uvicorn trt_http:app --workers \"$NUM_WORKERS\" --host \"$HOST\" --port \"$PORT\""]