# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#

# Basic Airflow cluster configuration for CeleryExecutor with Redis and PostgreSQL.
#
# WARNING: This configuration is for local development. Do not use it in a production deployment.
#
# This configuration supports basic configuration using environment variables or an .env file.
# The following variables are supported:
#
# AIRFLOW_IMAGE_NAME           - Docker image name used to run Airflow.
#                                Default: apache/airflow:2.9.3
# AIRFLOW_UID                  - User ID in Airflow containers.
#                                Default: 50000
# AIRFLOW_PROJ_DIR             - Base path from which dags, logs, config and plugins are mounted.
#                                Default: .
# Those configurations are useful mostly for standalone testing/running Airflow in test/try-out mode.
#
# _AIRFLOW_WWW_USER_USERNAME   - Username for the administrator account (if requested).
#                                Default: airflow
# _AIRFLOW_WWW_USER_PASSWORD   - Password for the administrator account (if requested).
#                                Default: airflow
# _PIP_ADDITIONAL_REQUIREMENTS - Additional PIP requirements to add when starting all containers.
#                                Use this option ONLY for quick checks. Installing requirements at container
#                                startup is done EVERY TIME the service is started.
#                                A better way is to build a custom image or extend the official image
#                                as described in https://airflow.apache.org/docs/docker-stack/build.html.
#                                Default: ''
#
# Feel free to modify this file to suit your needs.
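#
# A minimal example .env to place next to this file (illustrative values; anything
# you omit falls back to the defaults above). Note that this file builds a local
# image from Dockerfile.airflow, so AIRFLOW_IMAGE_NAME is not actually used here:
#
#   AIRFLOW_UID=50000
#   AIRFLOW_PROJ_DIR=.
#   _AIRFLOW_WWW_USER_USERNAME=airflow
#   _AIRFLOW_WWW_USER_PASSWORD=airflow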
---
networks:  # <--- add or modify this section
  airflow_default:  # default network for Airflow-internal traffic (keep it if it already exists)
    driver: bridge
  db_network:  # must match the network name the MySQL container is attached to
    external: true  # assumes this network was already created by another docker-compose file or command
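# Because db_network is external, it must exist before "docker compose up", e.g.
# created by the MySQL stack's own compose file or manually:
#   docker network create db_network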

x-airflow-common: &airflow-common
  build:
    context: .
    dockerfile: Dockerfile.airflow
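  # Dockerfile.airflow is not shown here. A minimal sketch, assuming it only extends
  # the official image with extra Python packages (adjust to your actual file):
  #   FROM apache/airflow:2.9.3
  #   COPY requirements.txt /requirements.txt
  #   RUN pip install --no-cache-dir -r /requirements.txt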
  networks:  # <--- networks used by every Airflow service
    - airflow_default  # traffic between the Airflow services themselves
    - db_network       # access to the MySQL DB
  environment: &airflow-common-env
    AIRFLOW__CORE__EXECUTOR: CeleryExecutor
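    # Compose variable interpolation cannot run shell commands, so no Fernet key is
    # generated automatically here. Generate one once and set AIRFLOW__CORE__FERNET_KEY in .env:
    #   python -c "from cryptography.fernet import Fernet; print(Fernet.generate_key().decode())"
    # With an empty key, connection passwords are stored unencrypted in the metadata DB.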
    AIRFLOW__CORE__FERNET_KEY: ${AIRFLOW__CORE__FERNET_KEY:-}
    AIRFLOW__CORE__DAGS_ARE_PAUSED_AT_CREATION: "true"
    AIRFLOW__CORE__LOAD_EXAMPLES: "false"
    AIRFLOW__CORE__DEFAULT_TIMEZONE: "Asia/Seoul"  # KST as the baseline for Airflow-internal handling

    # Database Settings
    AIRFLOW__DATABASE__SQL_ALCHEMY_CONN: postgresql+psycopg2://airflow:airflow@postgres/airflow
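    # The external MySQL reached over db_network can be handed to DAGs as an Airflow
    # connection via an AIRFLOW_CONN_* variable; the URI below is a hypothetical
    # placeholder, adjust host/credentials/db to your MySQL container:
    # AIRFLOW_CONN_MYSQL_DEFAULT: mysql://user:password@mysql-container:3306/mydb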

    # Celery Settings
    AIRFLOW__CELERY__RESULT_BACKEND: db+postgresql://airflow:airflow@postgres/airflow
    AIRFLOW__CELERY__BROKER_URL: redis://:@redis:6379/0

    # Webserver Settings
    AIRFLOW__WEBSERVER__DEFAULT_UI_TIMEZONE: "Asia/Seoul"
    AIRFLOW__API__AUTH_BACKENDS: "airflow.api.auth.backend.basic_auth,airflow.api.auth.backend.session"

    # Scheduler Settings
    AIRFLOW__SCHEDULER__ENABLE_HEALTH_CHECK: "true"

    # Logging Settings
    AIRFLOW__LOGGING__LOG_FORMAT: "[%(asctime)s] {%(filename)s:%(lineno)d} %(levelname)s - %(message)s"
    # AIRFLOW__LOGGING__DEFAULT_DATE_FORMAT: '%Y-%m-%d %H:%M:%S%z'  # consider setting the date format explicitly if needed

    # PIP Requirements (handled by Dockerfile build)
    _PIP_ADDITIONAL_REQUIREMENTS: ${_PIP_ADDITIONAL_REQUIREMENTS:-}

    # Container OS Timezone
    TZ: "Asia/Seoul"  # Docker container OS timezone (KST)
  volumes:
    - ${AIRFLOW_PROJ_DIR:-.}/dags:/opt/airflow/dags
    - ${AIRFLOW_PROJ_DIR:-.}/logs:/opt/airflow/logs
    - ${AIRFLOW_PROJ_DIR:-.}/config:/opt/airflow/config
    - ${AIRFLOW_PROJ_DIR:-.}/plugins:/opt/airflow/plugins
  user: "${AIRFLOW_UID:-50000}:0"
  depends_on: &airflow-common-depends-on
    redis:
      condition: service_healthy
    postgres:
      condition: service_healthy

services:
  postgres:
    image: postgres:13
    environment:
      POSTGRES_USER: airflow
      POSTGRES_PASSWORD: airflow
      POSTGRES_DB: airflow
      TZ: "Asia/Seoul"    # DB container OS timezone
      PGTZ: "Asia/Seoul"  # also sets the PostgreSQL server timezone
    volumes:
      - postgres-db-volume:/var/lib/postgresql/data
    networks:
      - airflow_default
    healthcheck:
      test: ["CMD", "pg_isready", "-U", "airflow"]
      interval: 10s
      retries: 5
      start_period: 5s
    ports:
      - "5432:5432"
    restart: always
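    # 5432 is published to the host only for external clients/inspection; remap it
    # (e.g. "15432:5432") or drop the ports block if the host already runs PostgreSQL.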

  redis:
    # Redis is limited to 7.2-bookworm due to licensing change
    # https://redis.io/blog/redis-adopts-dual-source-available-licensing/
    image: redis:7.2-bookworm
    expose:
      - 6379
    networks:
      - airflow_default
    healthcheck:
      test: ["CMD", "redis-cli", "ping"]
      interval: 10s
      timeout: 30s
      retries: 50
      start_period: 30s
    restart: always

  airflow-webserver:
    <<: *airflow-common
    command: webserver
    ports:
      - "8080:8080"
    healthcheck:
      test: ["CMD", "curl", "--fail", "http://localhost:8080/health"]
      interval: 30s
      timeout: 10s
      retries: 5
      start_period: 30s
    restart: always
    depends_on:
      <<: *airflow-common-depends-on
      airflow-init:
        condition: service_completed_successfully
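    # UI: http://localhost:8080, using the admin account created by airflow-init
    # (_AIRFLOW_WWW_USER_USERNAME / _AIRFLOW_WWW_USER_PASSWORD).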

  airflow-scheduler:
    <<: *airflow-common
    command: scheduler
    healthcheck:
      test: ["CMD", "curl", "--fail", "http://localhost:8974/health"]
      interval: 30s
      timeout: 10s
      retries: 5
      start_period: 30s
    restart: always
    depends_on:
      <<: *airflow-common-depends-on
      airflow-init:
        condition: service_completed_successfully

  airflow-worker:
    <<: *airflow-common
    command: celery worker
    healthcheck:
      # yamllint disable rule:line-length
      test:
        - "CMD-SHELL"
        - 'celery --app airflow.providers.celery.executors.celery_executor.app inspect ping -d "celery@$${HOSTNAME}" || celery --app airflow.executors.celery_executor.app inspect ping -d "celery@$${HOSTNAME}"'
      interval: 30s
      timeout: 10s
      retries: 5
      start_period: 30s
    environment:
      <<: *airflow-common-env
      # Required to handle warm shutdown of the celery workers properly
      # See https://airflow.apache.org/docs/docker-stack/entrypoint.html#signal-propagation
      DUMB_INIT_SETSID: "0"
    restart: always
    depends_on:
      <<: *airflow-common-depends-on
      airflow-init:
        condition: service_completed_successfully

  airflow-triggerer:
    <<: *airflow-common
    command: triggerer
    healthcheck:
      test:
        [
          "CMD-SHELL",
          'airflow jobs check --job-type TriggererJob --hostname "$${HOSTNAME}"',
        ]
      interval: 30s
      timeout: 10s
      retries: 5
      start_period: 30s
    restart: always
    depends_on:
      <<: *airflow-common-depends-on
      airflow-init:
        condition: service_completed_successfully

  airflow-init:
    <<: *airflow-common
    entrypoint: /bin/bash
    # yamllint disable rule:line-length
    command:
      - -c
      - |
        if [[ -z "${AIRFLOW_UID}" ]]; then
          echo
          echo -e "\033[1;33mWARNING!!!: AIRFLOW_UID not set!\e[0m"
          # ... (rest of the init messages from your original file) ...
          echo
        fi
        mkdir -p /sources/logs /sources/dags /sources/plugins
        chown -R "${AIRFLOW_UID}:0" /sources/{logs,dags,plugins}
        exec /entrypoint airflow version
    # yamllint enable rule:line-length
    environment:
      <<: *airflow-common-env
      _AIRFLOW_DB_MIGRATE: "true"
      _AIRFLOW_WWW_USER_CREATE: "true"
      _AIRFLOW_WWW_USER_USERNAME: ${_AIRFLOW_WWW_USER_USERNAME:-imai_master}
      _AIRFLOW_WWW_USER_PASSWORD: ${_AIRFLOW_WWW_USER_PASSWORD:-imai240510!}
      _PIP_ADDITIONAL_REQUIREMENTS: ""
    user: "0:0"
    volumes:
      - ${AIRFLOW_PROJ_DIR:-.}:/sources
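  # Typical first run, as in the upstream quick-start: initialize once, then start the stack.
  #   docker compose up airflow-init
  #   docker compose up -d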

  airflow-cli:
    <<: *airflow-common
    profiles:
      - debug
    environment:
      <<: *airflow-common-env
      CONNECTION_CHECK_MAX_COUNT: "0"
    # Workaround for entrypoint issue. See: https://github.com/apache/airflow/issues/16252
    command:
      - bash
      - -c
      - airflow
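  # Example one-off CLI call (explicitly targeting the service activates its "debug" profile):
  #   docker compose run --rm airflow-cli airflow dags list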

  # You can enable flower by adding the "--profile flower" option, e.g. docker-compose --profile flower up,
  # or by explicitly targeting it on the command line, e.g. docker-compose up flower.
  # See: https://docs.docker.com/compose/profiles/
  flower:
    <<: *airflow-common
    command: celery flower
    profiles:
      - flower
    ports:
      - "5555:5555"
    healthcheck:
      test: ["CMD", "curl", "--fail", "http://localhost:5555/"]
      interval: 30s
      timeout: 10s
      retries: 5
      start_period: 30s
    restart: always
    depends_on:
      <<: *airflow-common-depends-on
      airflow-init:
        condition: service_completed_successfully

volumes:
  postgres-db-volume: