llm-api / start.sh
mrmadblack's picture
Update start.sh
a37be2a verified
raw
history blame contribute delete
652 Bytes
#!/bin/bash
# start.sh — fetch quantized GGUF models (if not already present) and
# launch the API server. Safe to re-run: completed downloads are skipped,
# failed/partial downloads are cleaned up and retried next run.
set -euo pipefail

readonly MODEL_DIR=models
mkdir -p -- "$MODEL_DIR"

#######################################
# Download a model file idempotently.
# Arguments: $1 - destination path, $2 - source URL
# Outputs:   progress messages to stdout, errors to stderr
# Returns:   0 if file present/downloaded, 1 on download failure
#######################################
download_model() {
  local dest=$1 url=$2
  if [[ -f "$dest" ]]; then
    echo "Already present: $dest"
    return 0
  fi
  echo "Downloading $dest ..."
  # Fetch to a temp name first: a partial/failed download must never leave
  # a file at $dest, or the existence check above would skip the retry.
  local tmp="${dest}.part"
  # -f: fail on HTTP errors so a 404 HTML page is not saved as a model
  # -L: follow the Hugging Face CDN redirect
  if curl -fL -o "$tmp" "$url"; then
    mv -- "$tmp" "$dest"
  else
    rm -f -- "$tmp"
    echo "ERROR: failed to download $url" >&2
    return 1
  fi
}

echo "Downloading models..."
# TinyLlama
download_model "$MODEL_DIR/tinyllama.gguf" \
  https://huggingface.co/TheBloke/TinyLlama-1.1B-Chat-v1.0-GGUF/resolve/main/tinyllama-1.1b-chat.Q4_K_M.gguf
# Qwen 1.5B
download_model "$MODEL_DIR/qwen1.5b.gguf" \
  https://huggingface.co/Qwen/Qwen1.5-1.8B-Chat-GGUF/resolve/main/qwen1_5-1_8b-chat-q4_k_m.gguf
# Gemma 2B
download_model "$MODEL_DIR/gemma2b.gguf" \
  https://huggingface.co/google/gemma-2b-it-GGUF/resolve/main/gemma-2b-it.Q4_K_M.gguf

echo "Starting API..."
# exec so the server replaces this shell (PID 1 in containers) and
# receives termination signals directly.
exec python3 server.py