llm-api / Dockerfile
winstxnhdw's picture
build: opt into semantic convention
ddff553
raw
history blame contribute delete
210 Bytes
# Extend the upstream llm-api image. NOTE(review): the mutable `main` tag makes
# this build track the latest upstream push — intentional for an auto-updating
# deployment, but pin a digest if reproducible builds are ever required.
FROM ghcr.io/winstxnhdw/llm-api:main

# Runtime configuration (grouped into one instruction — identical resulting
# image environment, one fewer metadata layer):
#   SERVER_PORT                      — presumably the port the API serves on;
#                                      7860 is the Hugging Face Spaces default
#   OMP_NUM_THREADS                  — cap OpenMP threads at 2 for a small
#                                      shared-CPU host
#   CT2_USE_EXPERIMENTAL_PACKED_GEMM — opt into CTranslate2's experimental
#                                      packed-GEMM kernels
#   CT2_FORCE_CPU_ISA                — force AVX-512 code paths; assumes the
#                                      host CPU supports AVX-512 — confirm
#   OTEL_SEMCONV_STABILITY_OPT_IN    — opt into the stable OpenTelemetry HTTP
#                                      semantic conventions
ENV SERVER_PORT=7860 \
    OMP_NUM_THREADS=2 \
    CT2_USE_EXPERIMENTAL_PACKED_GEMM=1 \
    CT2_FORCE_CPU_ISA=AVX512 \
    OTEL_SEMCONV_STABILITY_OPT_IN=http

# Documentation only — EXPOSE does not publish the port; the runtime does.
EXPOSE $SERVER_PORT