# Image that serves a quantized SmolLM2-360M-Instruct model via llama.cpp's
# built-in HTTP server, listening on 0.0.0.0:7860.
FROM ghcr.io/ggml-org/llama.cpp:full

# Install wget to fetch the model file. Use apt-get (stable CLI for scripts),
# skip recommended packages, and clean the apt lists to keep the layer small.
RUN apt-get update \
    && apt-get install -y --no-install-recommends wget \
    && rm -rf /var/lib/apt/lists/*

# Download the Q8_0 GGUF weights. NOTE(review): the original saved this
# SmolLM2 model under a misleading "/gemma-3-1b-it-Q8_0.gguf" name; it is now
# stored under a filename matching what it actually is.
RUN wget "https://huggingface.co/HuggingFaceTB/SmolLM2-360M-Instruct-GGUF/resolve/main/smollm2-360m-instruct-q8_0.gguf" \
    -O /smollm2-360m-instruct-q8_0.gguf

# Launch the server. The "--server" argument is presumably dispatched by the
# base image's ENTRYPOINT to the llama-server binary — confirm against the
# ghcr.io/ggml-org/llama.cpp:full image definition.
# Fixed: the original array ended with a trailing comma ("0",]), which is not
# valid JSON, so Docker silently fell back to shell form (/bin/sh -c ...) and
# the arguments never reached the entrypoint as intended.
CMD ["--server", "-m", "/smollm2-360m-instruct-q8_0.gguf", "--port", "7860", "--host", "0.0.0.0", "-n", "1024", "--threads", "0"]