# syntax=docker/dockerfile:1

# llama.cpp server image with Qwen3-8B (Q4_K_M GGUF) baked in.
# NOTE(review): the :full tag is mutable — pin a versioned tag or a
# digest (…@sha256:…) for reproducible builds.
FROM ghcr.io/ggml-org/llama.cpp:full

# Fetch the model in a single layer: use apt-get (stable CLI; hadolint
# DL3027), skip recommended packages, and remove the apt lists in the
# same layer so the cache never persists in the image (DL3009/DL3015).
# NOTE(review): consider verifying the download with a checksum, or
# using BuildKit `ADD --checksum=sha256:…` instead of wget.
RUN apt-get update \
    && apt-get install -y --no-install-recommends \
        ca-certificates \
        wget \
    && wget -q "https://huggingface.co/Qwen/Qwen3-8B-GGUF/resolve/main/Qwen3-8B-Q4_K_M.gguf" \
        -O /Qwen3-8B-Q4_K_M.gguf \
    && rm -rf /var/lib/apt/lists/*

# Documentation only (does not publish the port): the server listens on
# 7860, matching --port below.
EXPOSE 7860

# NOTE(review): no USER directive — the container runs as whatever user
# the base image defaults to (likely root); confirm and add a non-root
# user if the base image supports it.

# Default arguments for the base image's entrypoint: run the HTTP server
# with the bundled model on 0.0.0.0:7860, 32k context, unlimited
# generation (--n-predict -1), 2 CPU threads, and Jinja chat templates.
# Exec (JSON-array) form so args can be overridden at `docker run`.
CMD ["--server", \
     "-m", "/Qwen3-8B-Q4_K_M.gguf", \
     "--port", "7860", \
     "--host", "0.0.0.0", \
     "--ctx-size", "32768", \
     "--n-predict", "-1", \
     "--threads", "2", \
     "--jinja"]