# Medical-Triage — .env.example
# Copy this file to .env and fill in the empty values before running.
# (Last change: "Enforce OpenAI-only LLM config and harden inference proxy calls", commit 7f5228c)
# Runtime
TRIAGE_DEFAULT_TASK=task2
TRIAGE_EVAL_EPISODES=30
TRIAGE_TRAIN_EPISODES=200
TRIAGE_SEED=42
# LLM settings — required only when using LLMAgent; leave blank otherwise
TRIAGE_LLM_MODEL=gpt-4.1-mini
OPENAI_API_KEY=
TRIAGE_LLM_BASE_URL=
# Submission / inference
API_BASE_URL=https://api.openai.com/v1
MODEL_NAME=gpt-4.1-mini
HF_TOKEN=
LOCAL_IMAGE_NAME=
# Docker Hub deployment
DOCKERHUB_USERNAME=
DOCKERHUB_TOKEN=