# docker-compose.yml
# Stack: PostgreSQL 15 (shared metadata DB), Airflow (LocalExecutor) with a
# one-shot init container, and a backend API service.
---
services:
  # PostgreSQL 15 — shared database for Airflow metadata and the backend API.
  postgres:
    image: postgres:15
    ports:
      # Published on host port 5433 so a locally installed Postgres on the
      # default 5432 does not conflict.
      - "5433:5432"
    environment:
      POSTGRES_USER: ${DATABASE_USER}
      POSTGRES_PASSWORD: ${DATABASE_PASSWORD}
      POSTGRES_DB: ${DATABASE_NAME}
    healthcheck:
      # pg_isready exits 0 once the server accepts connections; ${...} values
      # are interpolated by Compose from the host environment / .env file.
      test: ["CMD", "pg_isready", "-U", "${DATABASE_USER}", "-d", "${DATABASE_NAME}"]
      interval: 5s
      timeout: 5s
      retries: 5
    volumes:
      # Named volume (declared at the bottom of this file) persists data
      # across container recreation.
      - postgres-db:/var/lib/postgresql/data
  # One-shot initialization container: runs init_airflow.sh once and exits
  # (presumably DB migration / admin-user creation — script not visible here,
  # confirm against airflow/init_airflow.sh). Other services gate on its
  # successful completion via service_completed_successfully.
  airflow-init:
    build:
      context: .
      dockerfile: airflow/Dockerfile
    container_name: airflow-init
    depends_on:
      postgres:
        condition: service_healthy
    environment:
      AIRFLOW__CORE__EXECUTOR: LocalExecutor
      # NOTE(review): the connection URL carries no port, so it relies on the
      # default 5432 (or on DATABASE_HOST already including a port) — inside
      # the Compose network `postgres:5432` is correct; verify DATABASE_HOST
      # is set to the service name `postgres` in .env.
      AIRFLOW__DATABASE__SQL_ALCHEMY_CONN: postgresql+psycopg2://${DATABASE_USER}:${DATABASE_PASSWORD}@${DATABASE_HOST}/${DATABASE_NAME}
    volumes:
      - ./airflow/init_airflow.sh:/airflow/init_airflow.sh
    # Override the image entrypoint so the container runs only the init script.
    entrypoint: ["/bin/bash", "/airflow/init_airflow.sh"]
  # Long-running Airflow container: scheduler (backgrounded) + webserver
  # (foreground) in a single container, both using LocalExecutor.
  airflow:
    build:
      context: .
      dockerfile: airflow/Dockerfile
    container_name: airflow
    restart: always
    environment:
      AIRFLOW__CORE__EXECUTOR: LocalExecutor
      # Same connection string as airflow-init; see the port note there.
      AIRFLOW__DATABASE__SQL_ALCHEMY_CONN: postgresql+psycopg2://${DATABASE_USER}:${DATABASE_PASSWORD}@${DATABASE_HOST}/${DATABASE_NAME}
      # Make DAGs and Spark job modules importable inside the container.
      PYTHONPATH: "/opt/airflow:/opt/airflow/dags:/opt/airflow/spark_jobs"
      # Optional variables kept for reference (supplied via .env when needed):
      #   bronze_path_env: /opt/airflow/data/bronze_layer
      #   JDBC_JAR_PATH_env: ${JDBC_JAR_PATH_env}
      #   MODEL_OUTPUT_PATH_env: ${MODEL_OUTPUT_PATH_env}
      DATABASE_HOST: ${DATABASE_HOST}
      DATABASE_PORT: ${DATABASE_PORT}
      DATABASE_NAME: ${DATABASE_NAME}
      DATABASE_USER: ${DATABASE_USER}
      DATABASE_PASSWORD: ${DATABASE_PASSWORD}
    volumes:
      - ./airflow/dags:/opt/airflow/dags
      - ./airflow/logs:/opt/airflow/logs
      # Host data directory is configurable via DATA_SOURCE_PATH in .env.
      - ${DATA_SOURCE_PATH}:/opt/airflow/data
      - ./airflow/spark_jobs:/opt/airflow/spark_jobs
      - ./requirements.txt:/requirements.txt
      # Postgres JDBC driver for Spark jobs.
      - ./postgresql-42.6.0.jar:/opt/airflow/postgresql-42.6.0.jar
    # NOTE(review): requirements are pip-installed on every container start —
    # slow and network-dependent; consider baking them into the Dockerfile.
    # The scheduler is backgrounded with `&` and is NOT supervised: if it
    # dies, the container stays up as long as the webserver runs.
    command: >
      bash -c "
      pip install --no-cache-dir -r /requirements.txt &&
      airflow scheduler &
      airflow webserver
      "
    depends_on:
      airflow-init:
        condition: service_completed_successfully
      postgres:
        condition: service_healthy
    ports:
      - "8080:8080"
backend:
build:
context: .
dockerfile: app/Dockerfile
container_name: backend_api
ports:
- "8000:8000"
environment:
- DATABASE_HOST=postgres
- DATABASE_USER=${DATABASE_USER}
- DATABASE_PASSWORD=${DATABASE_PASSWORD}
- DATABASE_NAME=${DATABASE_NAME}
- DATABASE_PORT=${DATABASE_PORT}
- SECRET_KEY=${SECRET_KEY}
- ALGORITHM=${ALGORITHM}
- ACCESS_TOKEN_EXPIRE_MINUTES=${ACCESS_TOKEN_EXPIRE_MINUTES}
- FRONTEND_URL=${FRONTEND_URL}
depends_on:
postgres:
condition: service_healthy
airflow-init:
condition: service_completed_successfully
# Named volumes. `postgres-db` persists database files across container
# recreation; remove it explicitly (docker compose down -v) to reset the DB.
volumes:
  postgres-db: