#!/bin/bash
# Container entrypoint: start the Ollama server, wait for it to accept
# connections, pull the required models, then exec the Flask app.
#
# Required env:
#   PORT - TCP port Flask listens on (script aborts if unset/empty).
set -euo pipefail

# Start Ollama in the background; the wait loop below gates on readiness.
ollama serve &

# Block until Ollama's API port (11434) accepts TCP connections.
while ! nc -z localhost 11434; do
  echo "⏳ Waiting for Ollama to start..."
  sleep 1
done

# Embedding model is required — under `set -e` a failed pull aborts the script.
ollama pull nomic-embed-text:latest
# LLM pull is best-effort (|| true): intentionally tolerated so the app can
# still start if this large download fails.
ollama pull llama3.1:8b-instruct-q4_0 || true

# exec replaces this shell with Flask so it runs as PID 1 and receives
# container signals directly. Abort early with a clear message if PORT is
# unset rather than passing an empty --port= to Flask.
exec flask --app src/app run --host=0.0.0.0 --port="${PORT:?PORT must be set}"