-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathdocker-compose.yml
More file actions
94 lines (87 loc) · 3.32 KB
/
docker-compose.yml
File metadata and controls
94 lines (87 loc) · 3.32 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
# docker-compose.yml
#
# Local Airflow 2.9.2 stack for the EDC / ENTSO-E pipeline:
#   airflow-db        Postgres 16 metadata database
#   airflow-init      one-shot DB migration + admin-user bootstrap
#   airflow-webserver UI on host port 8080
#   airflow-scheduler DAG scheduler
# Secrets and per-machine overrides come from .env (see env_file below).

# Environment shared by every Airflow container. Kept as its own anchor
# because `<<:` is a SHALLOW merge: a service that declares an explicit
# `environment:` key (the webserver does, to add its own vars) would
# otherwise replace this whole map instead of extending it.
x-airflow-env: &airflow-env
  # Airflow metadata DB — `airflow-db` resolves via the compose network.
  AIRFLOW__DATABASE__SQL_ALCHEMY_CONN: postgresql+psycopg2://airflow:airflow@airflow-db:5432/airflow
  AIRFLOW__CORE__LOAD_EXAMPLES: "false"
  AIRFLOW__CORE__FERNET_KEY: ${AIRFLOW__CORE__FERNET_KEY}
  # App vars
  ENTSOE_API_TOKEN: ${ENTSOE_API_TOKEN}
  EDC_OUTPUT_DIR: ${EDC_OUTPUT_DIR:-/opt/airflow/data}
  EDC_BIDDING_ZONES: ${EDC_BIDDING_ZONES:-ES,PT,FR}
  EDC_STD_OUTPUT_DIR: ${EDC_STD_OUTPUT_DIR:-/opt/airflow/data/standard}
  # Disable the image's startup pip install; deps are installed per-service.
  _PIP_ADDITIONAL_REQUIREMENTS: ""
  PIP_DISABLE_PIP_VERSION_CHECK: "1"

# Service skeleton shared by init / webserver / scheduler.
x-airflow-common: &airflow-common
  image: apache/airflow:2.9.2
  env_file: .env
  environment:
    <<: *airflow-env
  volumes:
    - ./airflow/dags:/opt/airflow/dags
    - ./airflow/plugins:/opt/airflow/plugins
    - ./data:/opt/airflow/data
    - ./edc:/opt/airflow/edc
    # pyproject.toml + README.md let `pip install -e /opt/airflow` resolve
    # the project package without baking a custom image.
    - ./pyproject.toml:/opt/airflow/pyproject.toml
    - ./README.md:/opt/airflow/README.md
  depends_on:
    airflow-db:
      # Requires the healthcheck declared on airflow-db below.
      condition: service_healthy
  restart: unless-stopped

services:
  airflow-db:
    image: postgres:16
    environment:
      POSTGRES_USER: airflow
      POSTGRES_PASSWORD: airflow
      POSTGRES_DB: airflow
    volumes:
      - pgdata:/var/lib/postgresql/data
    healthcheck:
      test: ["CMD-SHELL", "pg_isready -U airflow"]
      interval: 5s
      retries: 5
    restart: unless-stopped
    ports:
      # Exposed on the host for local inspection; quoted to avoid the
      # YAML 1.1 sexagesimal-number trap on port mappings.
      - "5432:5432"

  # One-shot bootstrap: migrate the metadata DB and create the admin user.
  airflow-init:
    <<: *airflow-common
    # Quoted: a bare `no` is YAML false. Explicit key beats the anchor's
    # `restart: unless-stopped` regardless of position in the mapping.
    restart: "no"
    # `|| true` keeps re-runs idempotent when the user already exists.
    command: >-
      bash -lc "pip install --no-cache-dir -q xmltodict==0.14.2 python-dotenv==1.0.1 &&
      pip install -e /opt/airflow --no-deps &&
      airflow db migrate &&
      airflow users create --username ${_AIRFLOW_WWW_USER_USERNAME:-admin} --firstname ${_AIRFLOW_WWW_USER_FIRSTNAME:-Admin} --lastname ${_AIRFLOW_WWW_USER_LASTNAME:-User} --role Admin --email ${_AIRFLOW_WWW_USER_EMAIL:-admin@example.com} --password ${_AIRFLOW_WWW_USER_PASSWORD:-admin} || true"

  airflow-webserver:
    <<: *airflow-common
    environment:
      # Re-merge the shared env: this explicit `environment:` key replaces
      # the anchor's map, so the common vars must be pulled back in here.
      <<: *airflow-env
      AIRFLOW__WEBSERVER__WEB_SERVER_HOST: "0.0.0.0"
      AIRFLOW__WEBSERVER__WEB_SERVER_PORT: "8080"
      AIRFLOW__WEBSERVER__WORKERS: "2"  # number of gunicorn workers (via Airflow config)
      PYTHONUNBUFFERED: "1"
    command: >-
      bash -lc "
      airflow db check &&
      pip install --no-cache-dir -q xmltodict==0.14.2 python-dotenv==1.0.1 &&
      pip install -e /opt/airflow --no-deps || true &&
      exec airflow webserver --access-logfile - --error-logfile -
      "
    ports:
      - "8080:8080"
    healthcheck:
      # 24 retries x 10s gives the webserver up to ~4 min to come up.
      test: ["CMD-SHELL", "curl -sf http://localhost:8080/health || exit 1"]
      interval: 10s
      timeout: 5s
      retries: 24

  airflow-scheduler:
    <<: *airflow-common
    command: >-
      bash -lc "pip install --no-cache-dir -q xmltodict==0.14.2 python-dotenv==1.0.1 && pip install -e /opt/airflow --no-deps || true && exec airflow scheduler"

volumes:
  # Named volume so Postgres data survives `docker compose down`.
  pgdata: