# NOTE(review): the lines here were scrape residue from a Hugging Face Spaces
# page header ("Spaces: Sleeping"); converted to a comment so the file parses.
# syntax=docker/dockerfile:1
# Serves a quantized Mistral-7B-Instruct GGUF model over HTTP via the
# llama-cpp-python built-in OpenAI-compatible server on port 8000.
FROM python:3.9-alpine

# build-base already provides gcc, g++, make and musl-dev on Alpine;
# cmake + git are needed to compile llama-cpp-python from source.
RUN apk add --no-cache \
      build-base \
      cmake \
      git \
      wget

# Absolute WORKDIR (created automatically) instead of a bare `mkdir`
# against the image root.
WORKDIR /app

# --no-cache-dir keeps pip's download cache out of the layer (DL3042).
# NOTE(review): versions are unpinned upstream — pin them (pkg==x.y.z)
# once a known-good combination is confirmed, for reproducible builds.
RUN pip install --no-cache-dir \
      fastapi \
      llama-cpp-python \
      pydantic_settings \
      sse_starlette \
      starlette_context \
      uvicorn

# Fetch the quantized model weights into the image. -q keeps build logs
# readable; the file lands at /app/models/... which the CMD below expects.
RUN mkdir models \
    && wget -q "https://huggingface.co/TheBloke/Mistral-7B-Instruct-v0.2-GGUF/resolve/main/mistral-7b-instruct-v0.2.Q4_0.gguf" \
         -O models/mistral-7b-instruct-v0.2.Q4_0.gguf

# Run the server as a non-root user; files created above by root remain
# world-readable, so no (layer-doubling) chown of the 4 GB model is needed.
RUN adduser -S -D app
USER app

# Documentation only — publish with `docker run -p 8000:8000 …`.
EXPOSE 8000

# Exec form: the server is PID 1 and receives SIGTERM from `docker stop`.
# Model path is relative to WORKDIR /app.
CMD ["python", "-m", "llama_cpp.server", "--model", "models/mistral-7b-instruct-v0.2.Q4_0.gguf", "--host", "0.0.0.0"]