File size: 3,888 Bytes
0fa4cc9 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 |
#!/bin/bash
# Minimal LLM Setup - Everything in one script!
#
# Bootstraps a local Ollama + Streamlit stack with Docker Compose:
# checks prerequisites, falls back to CPU mode when no GPU is present,
# builds the services and waits until they answer on their ports.
#
# -u added so typos in variable names fail loudly; pipefail deliberately
# omitted because the best-effort `docker logs | tail` pipeline below
# must not abort the script when the container is missing.
set -eu

# ANSI color codes for status output (NC resets to the terminal default).
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m' # No Color

echo -e "${BLUE}==> Minimal LLM Setup${NC}"
echo "===================="
# --- Prerequisite checks ---------------------------------------------------

# Docker itself must be installed.
if ! command -v docker &> /dev/null; then
    # Diagnostics go to stderr so they survive stdout redirection.
    echo -e "${RED}[ERROR] Docker not found. Please install Docker first.${NC}" >&2
    exit 1
fi

# The Compose v2 plugin ("docker compose", not the legacy docker-compose binary).
if ! docker compose version &> /dev/null; then
    echo -e "${RED}[ERROR] Docker Compose not found. Please install Docker Compose.${NC}" >&2
    exit 1
fi
# --- GPU detection (optional) ----------------------------------------------
# Probe for the NVIDIA container runtime by running nvidia-smi in a CUDA image.
# NOTE(review): the previous tag "11.8-base-ubuntu22.04" is not published on
# Docker Hub; nvidia/cuda tags carry the full patch version ("11.8.0-...").
if docker run --rm --gpus all nvidia/cuda:11.8.0-base-ubuntu22.04 nvidia-smi &>/dev/null; then
    echo -e "${GREEN}[OK] NVIDIA Docker detected${NC}"
    GPU_AVAILABLE=true
else
    echo -e "${YELLOW}[WARN] No GPU detected, running on CPU${NC}"
    GPU_AVAILABLE=false
fi
# --- Project scaffolding ---------------------------------------------------
echo -e "${BLUE}==> Creating project structure...${NC}"
mkdir -p app

# Create a placeholder Streamlit app only if none exists yet (never clobber).
if [ ! -f "app/main.py" ]; then
    echo -e "${BLUE}==> Creating Streamlit app...${NC}"
    # The real main.py content would be copied here in a full setup.
    echo "# Streamlit app created. Copy the main.py content here."
fi
# --- CPU fallback ----------------------------------------------------------
# Comment out the GPU reservation stanza in docker-compose.yml so Compose
# does not demand an nvidia device on CPU-only hosts.
if [ "$GPU_AVAILABLE" = false ]; then
    echo -e "${YELLOW}[CONFIG] Configuring for CPU mode...${NC}"
    if [ -f docker-compose.yml ]; then
        # Single sed pass with all substitutions instead of six rewrites of
        # the same file. NOTE(review): 'sed -i' with no suffix is GNU-only;
        # BSD/macOS sed needs "-i ''".
        sed -i \
            -e 's/deploy:/# deploy:/g' \
            -e 's/resources:/# resources:/g' \
            -e 's/reservations:/# reservations:/g' \
            -e 's/devices:/# devices:/g' \
            -e 's/- driver: nvidia/# - driver: nvidia/g' \
            -e 's/count: 1/# count: 1/g' \
            -e 's/capabilities: \[gpu\]/# capabilities: [gpu]/g' \
            docker-compose.yml || true
    else
        # Previously a blanket '|| true' silently hid this condition.
        echo -e "${YELLOW}[WARN] docker-compose.yml not found, skipping CPU tweak${NC}" >&2
    fi
fi
# --- Build & start ---------------------------------------------------------
echo -e "${BLUE}==> Building and starting services...${NC}"
docker compose up --build -d

# wait_for NAME URL
#   Polls URL with curl (2 s interval, up to 30 tries) until it answers.
#   Prints progress dots; reports [OK] on success, a timeout note otherwise.
#   Always returns 0 so a slow service does not abort the script under set -e
#   (matches the original loops, which also continued silently).
wait_for() {
    local name=$1 url=$2 i
    echo -n "Waiting for $name"
    for i in {1..30}; do
        if curl -s "$url" > /dev/null 2>&1; then
            echo -e "${GREEN} [OK]${NC}"
            return 0
        fi
        echo -n "."
        sleep 2
    done
    # The original loops ended silently on timeout; surface it instead.
    echo -e "${YELLOW} timed out${NC}" >&2
    return 0
}

echo -e "${BLUE}[WAIT] Waiting for services to start...${NC}"
wait_for "Ollama"    "http://localhost:11434/api/tags"
wait_for "Streamlit" "http://localhost:8501/_stcore/health"
# --- Status report ---------------------------------------------------------
echo -e "${BLUE}==> Checking model download...${NC}"
# Show the tail of the one-shot model-setup container's log (best effort;
# under plain set -e the pipeline's status is tail's, so this never aborts).
docker logs model-setup | tail -5

echo
echo -e "${GREEN}[DONE] Setup completed!${NC}"
echo "==================="
echo
echo -e "${BLUE}Access points:${NC}"
echo "  - Streamlit UI: http://localhost:8501"
echo "  - Ollama API: http://localhost:11434"
echo
echo -e "${BLUE}Useful commands:${NC}"
echo "  - Check logs: docker compose logs -f"
echo "  - Stop services: docker compose down"
echo "  - Restart: docker compose restart"
echo "  - Shell access: docker exec -it ollama-engine bash"
echo
echo -e "${BLUE}Test API:${NC}"
echo '  curl -X POST http://localhost:11434/api/generate \'
echo '    -H "Content-Type: application/json" \'
echo '    -d '"'"'{"model": "mistral:7b-instruct", "prompt": "Hello!"}'\'
# --- Optional: open the UI in a browser ------------------------------------
# Pick whichever opener this platform has (xdg-open on Linux, open on macOS),
# then run a single prompt/confirm branch instead of two copy-pasted ones.
opener=""
if command -v xdg-open &> /dev/null; then
    opener="xdg-open"
elif command -v open &> /dev/null; then
    opener="open"
fi

if [ -n "$opener" ]; then
    echo
    read -p "Open browser automatically? (y/N): " -n 1 -r
    echo
    if [[ $REPLY =~ ^[Yy]$ ]]; then
        "$opener" http://localhost:8501
    fi
fi
# Final sign-off (mojibake emoji replaced with plain text).
echo
echo -e "${GREEN}Happy chatting!${NC}"