# Source: rakuten/docker/docker-compose.yml
# Commit eb5ec73 (Demosthene-OR): "Configure LFS for images and update code"
version: "3.9"

# Shared configuration merged into every Airflow service (webserver,
# scheduler, worker, init, flower) via the `*airflow-common` alias below.
x-airflow-common: &airflow-common
  image: ${AIRFLOW_IMAGE_NAME:-apache/airflow:2.9.1}
  environment: &airflow-common-env
    AIRFLOW__CORE__EXECUTOR: CeleryExecutor
    # NOTE(review): since Airflow 2.3 this option lives in the [database]
    # section (AIRFLOW__DATABASE__SQL_ALCHEMY_CONN); the [core] spelling is
    # deprecated but still honoured by 2.9.x — confirm before upgrading.
    AIRFLOW__CORE__SQL_ALCHEMY_CONN: postgresql+psycopg2://airflow:airflow@postgres/airflow
    AIRFLOW__CELERY__RESULT_BACKEND: db+postgresql://airflow:airflow@postgres/airflow
    AIRFLOW__CELERY__BROKER_URL: redis://:@redis:6379/0
    AIRFLOW__CORE__FERNET_KEY: ""
    AIRFLOW__CORE__DAGS_ARE_PAUSED_AT_CREATION: "true"
    AIRFLOW__CORE__LOAD_EXAMPLES: "false"
    # NOTE(review): Airflow >= 2.3 prefers AIRFLOW__API__AUTH_BACKENDS
    # (plural, comma-separated); the singular form is deprecated — verify.
    AIRFLOW__API__AUTH_BACKEND: "airflow.api.auth.backend.basic_auth"
    AIRFLOW__SMTP__SMTP_HOST: "smtp.gmail.com"
    AIRFLOW__SMTP__SMTP_PORT: "587"  # quoted so the env value stays a string
    AIRFLOW__SMTP__SMTP_USER: "olivier.airflow@gmail.com"
    # SECURITY: these credentials were hard-coded in version control. They
    # are now overridable from the host environment; the old values remain
    # as defaults only for backward compatibility — rotate them and remove
    # the fallbacks.
    AIRFLOW__SMTP__SMTP_PASSWORD: ${AIRFLOW_SMTP_PASSWORD:-rmgpejmrntnhbhuw}
    AIRFLOW__SMTP__SMTP_MAIL_FROM: "olivier.airflow@gmail.com"
    AIRFLOW__WEBSERVER__SECRET_KEY: ${AIRFLOW_WEBSERVER_SECRET_KEY:-idZKcKQdXAnRhJcJnjFMwQ==}
    _PIP_ADDITIONAL_REQUIREMENTS: ${_PIP_ADDITIONAL_REQUIREMENTS:- apache-airflow-providers-docker pymongo} # extra libraries
  volumes:
    - ./../airflow/dags:/opt/airflow/dags
    - ./../airflow/logs:/opt/airflow/logs
    - ./../airflow/plugins:/opt/airflow/plugins
    - ./../models:/app/models:z
    - ./../data:/app/data:z
  user: "${AIRFLOW_UID:-50000}:${AIRFLOW_GID:-50000}"
  depends_on:
    redis:
      condition: service_healthy
    postgres:
      condition: service_healthy

services:
  # Application user database (see init.sql for schema bootstrap).
  mysql:
    # NOTE(review): `latest` is not reproducible — consider pinning a major
    # version (e.g. mysql:8.4) once compatibility is confirmed.
    image: mysql:latest
    container_name: users_db
    restart: always
    environment:
      MYSQL_ROOT_PASSWORD: ${MYSQL_ROOT_PWD}
      MYSQL_DATABASE: rakuten_db
    ports:
      - "3306:3306"
    volumes:
      - ./../data/mysql-data:/var/lib/mysql:z
      - ./init.sql:/docker-entrypoint-initdb.d/init.sql
    command: --innodb-flush-log-at-trx-commit=2

  # Web UI for inspecting the MySQL database.
  adminer:
    image: adminer
    container_name: admin_db
    restart: always
    ports:
      - "8080:8080"

  api_predict:
    build:
      context: ./../src
      dockerfile: Dockerfile.predict
    container_name: api_predict
    restart: always
    ports:
      - "8000:8000"
    volumes:
      - ./../models:/app/models:z
      - ./../data:/app/data:z
      - ./../tests:/app/tests:z

  api_oauth:
    build:
      context: ./../src/fastapi_oauth
      dockerfile: Dockerfile.oauth
    container_name: api_oauth
    restart: always
    ports:
      - "8001:8001"

  api_train:
    build:
      context: ./../src
      dockerfile: Dockerfile.train
    container_name: api_train
    restart: always
    ports:
      - "8002:8002"
    volumes:
      - ./../models:/app/models:z
      - ./../data:/app/data:z
      - ./../logs:/app/logs:z

  api_flows:
    build:
      context: ./../src/flows
      dockerfile: Dockerfile.flows
    container_name: api_flows
    restart: always
    ports:
      - "8003:8003"
    volumes:
      - ./../models:/app/models:z
      - ./../data:/app/data:z

  streamlit:
    build:
      context: ./../streamlit_app
      dockerfile: Dockerfile.streamlit
    container_name: streamlit
    restart: always
    ports:
      - "8501:8501"
    volumes:
      - ./../models:/app/models:z
      - ./../data:/app/data:z

  tensorboard:
    build:
      context: ./../tensorboard
      dockerfile: Dockerfile.tensorboard
    container_name: tensorboard
    restart: always
    ports:
      - "6006:6006"
    volumes:
      - ./../logs:/app/logs:z

  # Airflow metadata database.
  postgres:
    image: postgres:13
    environment:
      POSTGRES_USER: airflow
      POSTGRES_PASSWORD: airflow
      POSTGRES_DB: airflow
    volumes:
      - postgres-db-volume:/var/lib/postgresql/data
    healthcheck:
      test: ["CMD", "pg_isready", "-U", "airflow"]
      interval: 5s
      retries: 5
    restart: always

  # Celery broker for the Airflow workers.
  redis:
    image: redis:latest
    ports:
      - "6379:6379"
    healthcheck:
      test: ["CMD", "redis-cli", "ping"]
      interval: 5s
      timeout: 30s
      retries: 50
    restart: always

  airflow-webserver:
    <<: *airflow-common
    command: webserver
    ports:
      - "8081:8080"
    healthcheck:
      # FIX: the healthcheck runs inside the container, where the webserver
      # listens on 8080; 8081 is only the host-side mapping, so the old
      # check against 8081 could never succeed.
      test: ["CMD", "curl", "--fail", "http://localhost:8080/health"]
      interval: 10s
      timeout: 10s
      retries: 5
    restart: always

  airflow-scheduler:
    <<: *airflow-common
    command: scheduler
    healthcheck:
      test:
        [
          "CMD-SHELL",
          'airflow jobs check --job-type SchedulerJob --hostname "$${HOSTNAME}"',
        ]
      interval: 10s
      timeout: 10s
      retries: 5
    restart: always

  airflow-worker:
    <<: *airflow-common
    command: celery worker
    healthcheck:
      test:
        - "CMD-SHELL"
        - 'celery --app airflow.executors.celery_executor.app inspect ping -d "celery@$${HOSTNAME}"'
      interval: 10s
      timeout: 10s
      retries: 5
    restart: always

  # One-shot container: upgrades the DB and creates the admin user.
  airflow-init:
    <<: *airflow-common
    command: version
    environment:
      <<: *airflow-common-env
      _AIRFLOW_DB_UPGRADE: "true"
      _AIRFLOW_WWW_USER_CREATE: "true"
      _AIRFLOW_WWW_USER_USERNAME: ${_AIRFLOW_WWW_USER_USERNAME:-airflow}
      _AIRFLOW_WWW_USER_PASSWORD: ${_AIRFLOW_WWW_USER_PASSWORD:-airflow}
    user: "${AIRFLOW_UID:-50000}:${AIRFLOW_GID:-0}"

  # Celery monitoring UI.
  flower:
    <<: *airflow-common
    command: celery flower
    ports:
      - "5555:5555"
    healthcheck:
      test: ["CMD", "curl", "--fail", "http://localhost:5555/"]
      interval: 10s
      timeout: 10s
      retries: 5
    restart: always

volumes:
  postgres-db-volume: