л модель
Browse files- dockerfile +9 -0
- models.yaml +9 -0
dockerfile
ADDED
|
@@ -0,0 +1,9 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Image that bundles a quantized DeepSeek-Coder model into a LocalAI runtime.
# NOTE(review): ':latest' is not reproducible (hadolint DL3007) — pin a
# specific release tag (e.g. quay.io/go-skynet/local-ai:v2.x.y) or a digest
# before using this in production.
FROM quay.io/go-skynet/local-ai:latest

# Runtime configuration read by LocalAI: where model files live and which
# model to serve by default.
ENV MODELS_PATH=/models
ENV MODEL=deepseek-coder-1.3b

# Copy the quantized GGUF model into the image under the name referenced by
# models.yaml. COPY creates /models (and any missing parents) automatically,
# so no separate 'RUN mkdir -p /models' layer is needed.
COPY models/deepseek-coder-1.3b.Q4_K_M.gguf /models/deepseek-coder-1.3b.gguf
|
models.yaml
ADDED
|
@@ -0,0 +1,9 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# LocalAI model definition for the bundled DeepSeek-Coder 1.3B GGUF model.
# The 'path' value must match the destination used in the Dockerfile COPY.
# NOTE(review): recent LocalAI releases usually reference the model file via
# 'parameters: model: <file>' rather than a top-level 'path', and place
# 'context_size' / 'gpu_layers' at the top level of the entry — verify this
# schema against the LocalAI model-configuration documentation for the
# image version in use.
- name: deepseek-coder-1.3b
  backend: llama-cpp
  path: /models/deepseek-coder-1.3b.gguf
  parameters:
    context_size: 4096
    gpu_layers: 20 # optimal value when a GPU is available
    # Sampling defaults for generation.
    temperature: 0.7
    top_p: 0.9
    repeat_penalty: 1.1
|