Spaces:
Sleeping
Sleeping
Ved Gupta
committed on
Commit
·
6c59364
1
Parent(s):
18957bd
Update Dockerfile to download new AI models
Browse files
- Dockerfile +2 -15
- models/luna-ai-llama2.tmpl +0 -2
Dockerfile
CHANGED
|
@@ -1,12 +1,6 @@
|
|
| 1 |
FROM quay.io/go-skynet/local-ai:v2.7.0-ffmpeg-core
|
| 2 |
|
| 3 |
-
|
| 4 |
-
|
| 5 |
-
# RUN wget -q "https://huggingface.co/TheBloke/Mistral-7B-Instruct-v0.2-GGUF/resolve/main/mistral-7b-instruct-v0.2.Q4_0.gguf" -O models/mistral-7b-instruct-v0.2.Q4_0.gguf
|
| 6 |
-
# RUN wget -q "https://huggingface.co/TheBloke/Luna-AI-Llama2-Uncensored-GGUF/resolve/main/luna-ai-llama2-uncensored.Q4_0.gguf" -O models/luna-ai-llama2
|
| 7 |
-
# COPY models/* models/
|
| 8 |
-
RUN ls -a
|
| 9 |
-
RUN pwd
|
| 10 |
|
| 11 |
RUN useradd -m -u 1000 user
|
| 12 |
USER user
|
|
@@ -15,13 +9,6 @@ ENV HOME=/home/user \
|
|
| 15 |
|
| 16 |
RUN cp -R . $HOME
|
| 17 |
WORKDIR $HOME
|
| 18 |
-
# COPY --chown=user . $HOME
|
| 19 |
-
|
| 20 |
-
RUN ls -a
|
| 21 |
-
RUN pwd
|
| 22 |
|
| 23 |
EXPOSE 8080
|
| 24 |
-
|
| 25 |
-
|
| 26 |
-
# CMD ["--models-path", "./models", "--context-size", "700", "--threads", "4"]
|
| 27 |
-
CMD ["phi-2"]
|
|
|
|
| 1 |
FROM quay.io/go-skynet/local-ai:v2.7.0-ffmpeg-core
|
| 2 |
|
| 3 |
+
RUN wget -q "https://huggingface.co/TheBloke/Mistral-7B-Instruct-v0.2-GGUF/resolve/main/mistral-7b-instruct-v0.2.Q4_0.gguf" -O models/mistral-7b-instruct-v0.2.Q4_0.gguf
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 4 |
|
| 5 |
RUN useradd -m -u 1000 user
|
| 6 |
USER user
|
|
|
|
| 9 |
|
| 10 |
RUN cp -R . $HOME
|
| 11 |
WORKDIR $HOME
|
|
|
|
|
|
|
|
|
|
|
|
|
| 12 |
|
| 13 |
EXPOSE 8080
|
| 14 |
+
CMD ["--models-path", "./models"]
|
|
|
|
|
|
|
|
|
models/luna-ai-llama2.tmpl
DELETED
|
@@ -1,2 +0,0 @@
|
|
| 1 |
-
{{.Input}}
|
| 2 |
-
### Response:
|
|
|
|
|
|
|
|
|