# Environment configuration (last updated by webxos, commit 6b920ce)
# Inference backend: "ollama" (default) or "transformers"
INFERENCE_BACKEND=ollama
# Ollama settings
OLLAMA_URL=http://localhost:11434
OLLAMA_MODEL=qwen2.5:0.5b
# Transformers.js model (used when INFERENCE_BACKEND=transformers)
# NOTE(review): Transformers.js expects a Hugging Face repo id
# (e.g. "onnx-community/Qwen2.5-0.5B-Instruct"), not an Ollama tag like
# "qwen2.5:0.5b" — verify this value against the loading code.
TRANSFORMERS_MODEL=qwen2.5:0.5b