CyberCoder225 committed on
Commit
970de14
·
verified ·
1 Parent(s): 51c91db

Update Dockerfile

Browse files
Files changed (1) hide show
  1. Dockerfile +11 -17
Dockerfile CHANGED
@@ -18,32 +18,26 @@ RUN pip install --no-cache-dir \
18
  # Set the working directory
19
  WORKDIR /app
20
 
21
- # Download models BEFORE copying the rest of the code (to speed up rebuilds)
22
- # Model 1: Small (360M)
 
23
  RUN python3 -c "from huggingface_hub import hf_hub_download; hf_hub_download(repo_id='CyberCoder225/maira-model', filename='SmolLM2-360M-Instruct.Q4_K_M.gguf', local_dir='.')"
24
 
25
- # Model 2: Medium (1B)
26
- RUN python3 -c "from huggingface_hub import hf_hub_download; hf_hub_download(repo_id='bartowski/Llama-3.2-1B-Instruct-GGUF', filename='Llama-3.2-1B-Instruct-Q4_K_M.gguf', local_dir='.')"
27
 
28
- # ... (Keep your existing RUN pip and apt-get lines)
 
29
 
30
- # 1. Original Small
31
- RUN python3 -c "from huggingface_hub import hf_hub_download; hf_hub_download(repo_id='CyberCoder225/maira-model', filename='SmolLM2-360M-Instruct.Q4_K_M.gguf', local_dir='.')"
32
- # 2. Existing Medium (Llama 3.2)
33
- RUN python3 -c "from huggingface_hub import hf_hub_download; hf_hub_download(repo_id='bartowski/Llama-3.2-1B-Instruct-GGUF', filename='Llama-3.2-1B-Instruct-Q4_K_M.gguf', local_dir='.')"
34
 
35
- # --- NEW FEMALE PERSONA MODELS ---
36
- # 3. Qwen 2.5 1.5B (Very smart/balanced)
37
- RUN python3 -c "from huggingface_hub import hf_hub_download; hf_hub_download(repo_id='Qwen/Qwen2.5-1.5B-Instruct-GGUF', filename='qwen2.5-1.5b-instruct-q4_k_m.gguf', local_dir='.')"
38
- # 4. Danube 3 (Fast and chatty)
39
- RUN python3 -c "from huggingface_hub import hf_hub_download; hf_hub_download(repo_id='h2oai/h2o-danube3-500m-chat-GGUF', filename='h2o-danube3-500m-chat-v1-q4_k_m.gguf', local_dir='.')"
40
- # 5. StableLM 2 (Elegant/Creative)
41
  RUN python3 -c "from huggingface_hub import hf_hub_download; hf_hub_download(repo_id='stabilityai/stablelm-2-zephyr-1_6b-gguf', filename='stablelm-2-zephyr-1_6b-Q4_K_M.gguf', local_dir='.')"
42
 
43
- # ... (rest of Dockerfile)
44
  # Copy your app.py and brain.py files
45
  COPY . .
46
 
47
  # Start the engine
48
  EXPOSE 7860
49
- CMD ["python", "app.py"]
 
18
  # Set the working directory
19
  WORKDIR /app
20
 
21
+ # --- NEURAL CORE DOWNLOADS ---
22
+ # We download all 5 models here. This is the "Heavy Lifting" part.
23
+ # 1. Maira Lite (360M)
24
  RUN python3 -c "from huggingface_hub import hf_hub_download; hf_hub_download(repo_id='CyberCoder225/maira-model', filename='SmolLM2-360M-Instruct.Q4_K_M.gguf', local_dir='.')"
25
 
26
+ # 2. Maira Prime (1B)
27
+ RUN python3 -c "from huggingface_hub import hf_hub_download(repo_id='bartowski/Llama-3.2-1B-Instruct-GGUF', filename='Llama-3.2-1B-Instruct-Q4_K_M.gguf', local_dir='.')"
28
 
29
+ # 3. Maira Logic (1.5B)
30
+ RUN python3 -c "from huggingface_hub import hf_hub_download; hf_hub_download(repo_id='Qwen/Qwen2.5-1.5B-Instruct-GGUF', filename='qwen2.5-1.5b-instruct-q4_k_m.gguf', local_dir='.')"
31
 
32
+ # 4. Maira Chat (500M) - FIXED FILENAME CASE
33
+ RUN python3 -c "from huggingface_hub import hf_hub_download; hf_hub_download(repo_id='h2oai/h2o-danube3-500m-chat-GGUF', filename='h2o-danube3-500m-chat-Q4_K_M.gguf', local_dir='.')"
 
 
34
 
35
+ # 5. Maira Art (1.6B)
 
 
 
 
 
36
  RUN python3 -c "from huggingface_hub import hf_hub_download; hf_hub_download(repo_id='stabilityai/stablelm-2-zephyr-1_6b-gguf', filename='stablelm-2-zephyr-1_6b-Q4_K_M.gguf', local_dir='.')"
37
 
 
38
  # Copy your app.py and brain.py files
39
  COPY . .
40
 
41
  # Start the engine
42
  EXPOSE 7860
43
+ CMD ["python3", "app.py"]