# docker-compose-worker.yaml
version: '3'
x-airflow-common:
  &airflow-common
  # In order to add custom dependencies or upgrade provider packages you can use your extended image.
  # Comment the image line, place your Dockerfile in the directory where you placed the docker-compose.yaml,
  # and uncomment the "build" line below, then run `docker-compose build` to build the images.
  image: ${AIRFLOW_IMAGE_NAME:-extending_airflow:2.5.1}
  # build: .
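  # A minimal sketch of such an extended image (assumed Dockerfile contents; the
  # requirements.txt name and the apache/airflow:2.5.1 base image are illustrative):
  #   FROM apache/airflow:2.5.1
  #   COPY requirements.txt /requirements.txt
  #   RUN pip install --no-cache-dir -r /requirements.txt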
  environment:
    &airflow-common-env
    AIRFLOW__CORE__EXECUTOR: CeleryExecutor
    # DB that stores the Airflow metadata (variables, connections, etc.) used by the webserver
    AIRFLOW__DATABASE__SQL_ALCHEMY_CONN: postgresql+psycopg2://airflow:airflow@rds_endpoint:5432/airflow
    # For backward compatibility with Airflow <2.3
    AIRFLOW__CORE__SQL_ALCHEMY_CONN: postgresql+psycopg2://airflow:airflow@rds_endpoint:5432/airflow
    # DB that records Celery Executor task state, allowing work to be distributed across multiple workers
    AIRFLOW__CELERY__RESULT_BACKEND: db+postgresql://airflow:airflow@rds_endpoint:5432/airflow
    # Queue that holds tasks until a worker picks them up
    AIRFLOW__CELERY__BROKER_URL: redis://redis_endpoint:6379/0
    # Ship task logs to S3 (remote logging)
    AIRFLOW__CORE__REMOTE_LOGGING: 'true'
    AIRFLOW__CORE__REMOTE_LOG_CONN_ID: s3_conn
    AIRFLOW__CORE__REMOTE_BASE_LOG_FOLDER: s3://s3_bucket_name
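    # The s3_conn connection is not created by this file. One way to define it is via the
    # Airflow CLI (a sketch, assuming AWS credentials come from the environment or an instance role):
    #   airflow connections add s3_conn --conn-type aws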
    # Fernet key used to encrypt sensitive values (connections, passwords) stored in the DB
    AIRFLOW__CORE__FERNET_KEY: '...'
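    # A key can be generated, for example, with the cryptography package:
    #   python -c "from cryptography.fernet import Fernet; print(Fernet.generate_key().decode())"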
    # Newly created DAGs start in the paused state
    AIRFLOW__CORE__DAGS_ARE_PAUSED_AT_CREATION: 'true'
    # Do not load the example DAGs
    AIRFLOW__CORE__LOAD_EXAMPLES: 'false'
    AIRFLOW__API__AUTH_BACKENDS: 'airflow.api.auth.backend.basic_auth,airflow.api.auth.backend.session'
    # Convenience variable for installing extra pip packages at container start; experimental and not recommended for production use
    _PIP_ADDITIONAL_REQUIREMENTS: ${_PIP_ADDITIONAL_REQUIREMENTS:-}
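    # Example usage (hypothetical package list): _PIP_ADDITIONAL_REQUIREMENTS="selenium boto3"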
  volumes:
    - ${AIRFLOW_PROJ_DIR:-.}/dags:/opt/airflow/dags
    - ${AIRFLOW_PROJ_DIR:-.}/logs:/opt/airflow/logs
    - ${AIRFLOW_PROJ_DIR:-.}/plugins:/opt/airflow/plugins
    # Temporary file storage used by the DAGs
    - ${AIRFLOW_PROJ_DIR:-.}/data:/opt/airflow/data
  user: "${AIRFLOW_UID:-50000}:0"

services:
  airflow-worker:
    <<: *airflow-common
    command: celery worker
    healthcheck:
      test:
        - "CMD-SHELL"
        - 'celery --app airflow.executors.celery_executor.app inspect ping -d "celery@$${HOSTNAME}"'
      interval: 10s
      timeout: 60s
      retries: 5
    environment:
      <<: *airflow-common-env
      C_FORCE_ROOT: 'true'
      # Required to handle warm shutdown of the celery workers properly
      # See https://airflow.apache.org/docs/docker-stack/entrypoint.html#signal-propagation
      DUMB_INIT_SETSID: "0"
    restart: always

  selenium:
    container_name: remote_chromedriver
    image: seleniarm/standalone-chromium:latest
    ports:
      - 4444:4444
    restart: always
    volumes:
      - ${AIRFLOW_PROJ_DIR:-.}/data:/opt/airflow/data
    user: "${AIRFLOW_UID:-50000}:0"