#!/bin/bash
# Entrypoint: ensure the Nur GGUF model is on disk, then launch the API server.

# Fail fast: abort on command errors, unset variables, and pipeline failures.
set -euo pipefail

# Destination path where the model weights must live before the server starts.
readonly MODEL_PATH="/app/models/nur.gguf"
# Download the model on first run; subsequent runs reuse the cached copy.
if [ -f "$MODEL_PATH" ]; then
  echo "✅ Nur Brain is already in storage."
else
  echo "⏳ Brain missing. Downloading Qwen2.5-1.5B-Instruct..."

  mkdir -p /app/models

  # Fetch the GGUF weights and rename them to the path the server expects.
  # A quoted heredoc avoids shell expansion inside the Python code. Failure
  # is reported here and caught again by the existence check below, so a
  # non-zero status must not abort the script under `set -e`.
  if ! python3 - <<'PYEOF'
from huggingface_hub import hf_hub_download
import os
import sys
try:
    # This is the official and stable repository for Qwen2.5 GGUF.
    # NOTE: local_dir_use_symlinks is deprecated/ignored in modern
    # huggingface_hub; a real copy into local_dir is already the default.
    path = hf_hub_download(
        repo_id='Qwen/Qwen2.5-1.5B-Instruct-GGUF',
        filename='qwen2.5-1.5b-instruct-q4_k_m.gguf',
        local_dir='/app/models',
    )
    # Rename to the custom filename the server expects.
    os.rename(path, '/app/models/nur.gguf')
    print('✅ Qwen Download complete!')
except Exception as e:
    print(f'❌ Download failed: {e}')
    sys.exit(1)
PYEOF
  then
    echo "❌ Download step exited with an error." >&2
  fi
fi
|
|
| |
| if [ ! -f "$MODEL_PATH" ]; then |
| echo "β CRITICAL: Model file still missing." |
| exit 1 |
| fi |
|
|
| echo "π Starting Isam's Nur Brain on Port 7860..." |
|
|
| |
| exec uvicorn app.main:app --host 0.0.0.0 --port 7860 |