diff --git a/.github/workflows/.unit-tests.yml b/.github/workflows/.unit-tests.yml index 0e9084a6..68b1c5c7 100644 --- a/.github/workflows/.unit-tests.yml +++ b/.github/workflows/.unit-tests.yml @@ -60,12 +60,14 @@ jobs: run: working-directory: ./client steps: - - name: Checkout + - name: Checkout uses: actions/checkout@v4 - name: Setup Node uses: actions/setup-node@v4 with: node-version: '20.18.0' + - name: Install Chrome + uses: browser-actions/setup-chrome@v1 - name: Cache NPM uses: actions/cache@v4 with: @@ -82,7 +84,7 @@ with: working-directory: ./client component: true - browser: electron + browser: chrome env: VITE_APP_MAPBOX_TOKEN: ${{ secrets.MAPBOX_TOKEN }} VITE_BASE_API_URL: http://localhost:8000 diff --git a/.github/workflows/dev-pr-run-unit-tests.yml b/.github/workflows/dev-pr-run-unit-tests.yml index e3f8559c..ebf79331 100644 --- a/.github/workflows/dev-pr-run-unit-tests.yml +++ b/.github/workflows/dev-pr-run-unit-tests.yml @@ -10,4 +10,4 @@ jobs: unit_tests: uses: ./.github/workflows/.unit-tests.yml secrets: - mapbox_token: ${{ secrets.MAPBOX_TOKEN }} + mapbox_token: ${{ secrets.CYPRESS_MAPBOX_TOKEN }} diff --git a/.github/workflows/foundry.okd.on-push.application.yml b/.github/workflows/foundry.okd.on-push.application.yml index 73d80ce1..14f402d4 100644 --- a/.github/workflows/foundry.okd.on-push.application.yml +++ b/.github/workflows/foundry.okd.on-push.application.yml @@ -191,6 +191,8 @@ jobs: --set-string secrets.bcwatFlowworks.password="${{ secrets.FOUNDRY_BCWAT_FLOWWORKS_PASSWORD }}" \ --set-string secrets.bcwatFlowworks.username="${{ secrets.FOUNDRY_BCWAT_FLOWWORKS_USERNAME }}" \ --set-string secrets.fernet.key="${{ secrets.FOUNDRY_AIRFLOW_FERNET_KEY }}" \ + --set-string secrets.api.secretKey="${{ secrets.FOUNDRY_AIRFLOW_API_SECRET_KEY }}" \ + --set-string secrets.api.jwtKey="${{ secrets.FOUNDRY_AIRFLOW_API_JWT_KEY }}" \ --set-string secrets.sendgridApiKey="${{ secrets.FOUNDRY_SENDGRID_API_KEY }}" \ --set migrations.image=${{ 
secrets.ACR_REGISTRY }}/bcwat/flyway:${{ needs.bump_version.outputs.version }} \ --set-string webserver.podAnnotations."rollout-timestamp"="$(date +%s)" \ diff --git a/airflow/Dockerfile b/airflow/Dockerfile index 5fc5506f..f0f8a756 100644 --- a/airflow/Dockerfile +++ b/airflow/Dockerfile @@ -1,4 +1,4 @@ -FROM apache/airflow:2.10.5-python3.12 +FROM apache/airflow:3.1.7-python3.12 USER root @@ -14,7 +14,15 @@ ENV PYTHONPATH = "/opt/airflow/etl_pipelines:${PYTHONPATH}" COPY requirements.txt . +# Switch to airflow user for pip installations USER airflow +# PIN AIRFLOW VERSION when installing packages +RUN pip install --no-cache-dir "apache-airflow==${AIRFLOW_VERSION}" \ + --constraint ${HOME}/constraints.txt \ + apache-airflow-providers-cncf-kubernetes \ + apache-airflow-providers-sendgrid + +RUN pip install --no-cache-dir apache-airflow-providers-fab==3.1.2 + RUN pip install --no-cache-dir -r requirements.txt -RUN pip install apache-airflow-providers-cncf-kubernetes apache-airflow-providers-sendgrid diff --git a/airflow/Dockerfile_OKD b/airflow/Dockerfile_OKD index fdef0a58..daa7aaea 100644 --- a/airflow/Dockerfile_OKD +++ b/airflow/Dockerfile_OKD @@ -1,4 +1,4 @@ -FROM apache/airflow:2.10.5-python3.12 +FROM apache/airflow:3.1.7-python3.12 USER root @@ -10,12 +10,18 @@ COPY etl_pipelines/ /opt/airflow/etl_pipelines/ RUN chown -R airflow: /opt/airflow/dags /opt/airflow/etl_pipelines /opt/airflow/shared - ENV PYTHONPATH = "/opt/airflow/etl_pipelines:${PYTHONPATH}" COPY requirements.txt . 
USER airflow +# PIN AIRFLOW VERSION when installing packages +RUN pip install --no-cache-dir "apache-airflow==${AIRFLOW_VERSION}" \ + --constraint ${HOME}/constraints.txt \ + apache-airflow-providers-cncf-kubernetes \ + apache-airflow-providers-sendgrid + +RUN pip install --no-cache-dir apache-airflow-providers-fab==3.1.2 + RUN pip install --no-cache-dir -r requirements.txt -RUN pip install apache-airflow-providers-cncf-kubernetes apache-airflow-providers-sendgrid diff --git a/airflow/README.md b/airflow/README.md index 2b75260b..b923b0d3 100644 --- a/airflow/README.md +++ b/airflow/README.md @@ -29,7 +29,7 @@ In production, we are using the `KubernetesExecutor`. This does not impact runni ```bash import os from datetime import datetime -from airflow.decorators import dag, task +from airflow.sdk import dag, task from shared.constants import default_args from shared.functions import generate_executor_config_template from dotenv import load_dotenv, find_dotenv @@ -41,7 +41,7 @@ ENVIRONMENT = os.getenv('ENVIRONMENT', 'no-env-found') # Does not prevent running locally # pod_template_file handles worker pod config @dag( - schedule_interval="@daily", + schedule="@daily", start_date=datetime(2024, 1, 1), catchup=False, tags=["example"], diff --git a/airflow/dags/asp_dag.py b/airflow/dags/asp_dag.py index d0013433..379461a1 100644 --- a/airflow/dags/asp_dag.py +++ b/airflow/dags/asp_dag.py @@ -1,6 +1,6 @@ import os -import pendulum -from airflow.decorators import dag, task +from datetime import datetime +from airflow.sdk import dag, task from shared.constants import default_args from shared.functions import generate_executor_config_template from dotenv import load_dotenv, find_dotenv @@ -10,8 +10,8 @@ @dag( dag_id="asp_dag", - schedule_interval="5 8 * * *", - start_date=pendulum.datetime(2025, 5, 7, tz="UTC"), + schedule="5 8 * * *", + start_date=datetime(2025, 5, 7), catchup=False, tags=["water","climate", "station_observations", "daily"], default_args=default_args diff 
--git a/airflow/dags/ec_xml_dag.py b/airflow/dags/ec_xml_dag.py index 6381e31b..d63ec499 100644 --- a/airflow/dags/ec_xml_dag.py +++ b/airflow/dags/ec_xml_dag.py @@ -1,6 +1,6 @@ import os -import pendulum -from airflow.decorators import dag, task +from datetime import datetime +from airflow.sdk import dag, task from shared.constants import default_args from shared.functions import generate_executor_config_template from dotenv import load_dotenv, find_dotenv @@ -10,8 +10,8 @@ @dag( dag_id="ec_xml_dag", - schedule_interval="0 8 * * *", - start_date=pendulum.datetime(2025, 5, 7, tz="UTC"), + schedule="0 8 * * *", + start_date=datetime(2025, 5, 7), catchup=False, tags=["climate", "station_observations", "daily"], default_args=default_args diff --git a/airflow/dags/env_aqn_dag.py b/airflow/dags/env_aqn_dag.py index b8854f22..4ee1c110 100644 --- a/airflow/dags/env_aqn_dag.py +++ b/airflow/dags/env_aqn_dag.py @@ -1,6 +1,6 @@ import os -import pendulum -from airflow.decorators import dag, task +from datetime import datetime +from airflow.sdk import dag, task from shared.constants import default_args from shared.functions import generate_executor_config_template from dotenv import load_dotenv, find_dotenv @@ -10,8 +10,8 @@ @dag( dag_id="env_aqn_dag", - schedule_interval="0 8 * * *", - start_date=pendulum.datetime(2025, 5, 15, tz="UTC"), + schedule="0 8 * * *", + start_date=datetime(2025, 5, 15), catchup=False, tags=["climate", "station_observations", "daily"], default_args=default_args diff --git a/airflow/dags/env_hydro_dag.py b/airflow/dags/env_hydro_dag.py index b4802013..208697a4 100644 --- a/airflow/dags/env_hydro_dag.py +++ b/airflow/dags/env_hydro_dag.py @@ -1,6 +1,6 @@ import os -import pendulum -from airflow.decorators import dag, task +from datetime import datetime +from airflow.sdk import dag, task from shared.constants import default_args from shared.functions import generate_executor_config_template from dotenv import load_dotenv, find_dotenv @@ -10,8 +10,8 @@ 
@dag( dag_id="env_hydro_dag", - schedule_interval="10 8 * * *", - start_date=pendulum.datetime(2025, 4, 17, tz="UTC"), + schedule="10 8 * * *", + start_date=datetime(2025, 4, 17), catchup=False, tags=["water", "station_observations", "daily"], default_args=default_args diff --git a/airflow/dags/flnro_dag.py b/airflow/dags/flnro_dag.py index c53d5012..4417b414 100644 --- a/airflow/dags/flnro_dag.py +++ b/airflow/dags/flnro_dag.py @@ -1,6 +1,6 @@ import os -import pendulum -from airflow.decorators import dag, task +from datetime import datetime +from airflow.sdk import dag, task from shared.constants import default_args from shared.functions import generate_executor_config_template from dotenv import load_dotenv, find_dotenv @@ -10,8 +10,8 @@ @dag( dag_id="flnro_wmb_dag", - schedule_interval="0 8 * * *", - start_date=pendulum.datetime(2025, 5, 15, tz="UTC"), + schedule="0 8 * * *", + start_date=datetime(2025, 5, 15), catchup=False, tags=["climate", "station_observations", "daily"], default_args=default_args diff --git a/airflow/dags/flow_works_dag.py b/airflow/dags/flow_works_dag.py index 7791bee0..581184de 100644 --- a/airflow/dags/flow_works_dag.py +++ b/airflow/dags/flow_works_dag.py @@ -1,6 +1,6 @@ import os -import pendulum -from airflow.decorators import dag, task +from datetime import datetime +from airflow.sdk import dag, task from shared.constants import default_args from shared.functions import generate_executor_config_template from dotenv import load_dotenv, find_dotenv @@ -10,8 +10,8 @@ @dag( dag_id="flowworks_dag", - schedule_interval="0 8 * * *", - start_date=pendulum.datetime(2025, 4, 17, tz="UTC"), + schedule="0 8 * * *", + start_date=datetime(2025, 4, 17), catchup=False, tags=["water","climate", "station_observations", "daily"], default_args=default_args diff --git a/airflow/dags/gw_moe_dag.py b/airflow/dags/gw_moe_dag.py index e112c4c4..f7685138 100644 --- a/airflow/dags/gw_moe_dag.py +++ b/airflow/dags/gw_moe_dag.py @@ -1,6 +1,6 @@ import os 
-import pendulum -from airflow.decorators import dag, task +from datetime import datetime +from airflow.sdk import dag, task from shared.constants import default_args from shared.functions import generate_executor_config_template from dotenv import load_dotenv, find_dotenv @@ -10,8 +10,8 @@ @dag( dag_id="gw_moe_dag", - schedule_interval="15 8 * * *", - start_date=pendulum.datetime(2025, 4, 17, tz="UTC"), + schedule="15 8 * * *", + start_date=datetime(2025, 4, 17), catchup=False, tags=["groundwater", "station_observations", "daily"], default_args=default_args diff --git a/airflow/dags/msp_dag.py b/airflow/dags/msp_dag.py index eb9ee136..3a585f60 100644 --- a/airflow/dags/msp_dag.py +++ b/airflow/dags/msp_dag.py @@ -1,6 +1,6 @@ import os -import pendulum -from airflow.decorators import dag, task +from datetime import datetime +from airflow.sdk import dag, task from shared.constants import default_args from shared.functions import generate_executor_config_template from dotenv import load_dotenv, find_dotenv @@ -10,8 +10,8 @@ @dag( dag_id="msp_dag", - schedule_interval="0 8 * * *", - start_date=pendulum.datetime(2025, 5, 7, tz="UTC"), + schedule="0 8 * * *", + start_date=datetime(2025, 5, 7), catchup=False, tags=["water","climate", "station_observations", "daily"], default_args=default_args diff --git a/airflow/dags/quarterly_climate_ec_update_dag.py b/airflow/dags/quarterly_climate_ec_update_dag.py index 120bf658..4c094781 100644 --- a/airflow/dags/quarterly_climate_ec_update_dag.py +++ b/airflow/dags/quarterly_climate_ec_update_dag.py @@ -1,6 +1,6 @@ import os -import pendulum -from airflow.decorators import dag, task +from datetime import datetime +from airflow.sdk import dag, task from shared.constants import default_args from shared.functions import generate_executor_config_template from dotenv import load_dotenv, find_dotenv @@ -11,8 +11,8 @@ @dag( dag_id="quarterly_ec_update_dag", # Cron for At 08:30 UTC (00:30 PST) on day-of-month 1 in every 3rd month. 
- schedule_interval="30 8 1 */3 *", - start_date=pendulum.datetime(2025, 6, 13, tz="UTC"), + schedule="30 8 1 */3 *", + start_date=datetime(2025, 6, 13), catchup=False, tags=["climate", "quarterly"], default_args=default_args diff --git a/airflow/dags/quarterly_ems_water_quality_dag.py b/airflow/dags/quarterly_ems_water_quality_dag.py index 149f76ba..e7790a83 100644 --- a/airflow/dags/quarterly_ems_water_quality_dag.py +++ b/airflow/dags/quarterly_ems_water_quality_dag.py @@ -1,6 +1,6 @@ import os -import pendulum -from airflow.decorators import dag, task +from datetime import datetime +from airflow.sdk import dag, task from shared.constants import default_args from shared.functions import generate_executor_config_template from dotenv import load_dotenv, find_dotenv @@ -11,8 +11,8 @@ @dag( dag_id="quarterly_ems_water_quality_dag", # Cron for At 08:30 UTC (00:30 PST) on day-of-month 2 in every 3rd month. - schedule_interval="30 8 2 */3 *", - start_date=pendulum.datetime(2025, 7, 3, tz="UTC"), + schedule="30 8 2 */3 *", + start_date=datetime(2025, 7, 3), catchup=False, tags=["waterquality", "quarterly"], default_args=default_args diff --git a/airflow/dags/quarterly_gw_moe_dag.py b/airflow/dags/quarterly_gw_moe_dag.py index dd414bfc..0d0bb22a 100644 --- a/airflow/dags/quarterly_gw_moe_dag.py +++ b/airflow/dags/quarterly_gw_moe_dag.py @@ -1,6 +1,6 @@ import os -import pendulum -from airflow.decorators import dag, task +from datetime import datetime +from airflow.sdk import dag, task from shared.constants import default_args from shared.functions import generate_executor_config_template from dotenv import load_dotenv, find_dotenv @@ -11,8 +11,8 @@ @dag( dag_id="quarterly_moe_gw_update", # Cron for At 09:00 UTC (01:00 PST) on day-of-month 1 in every 3rd month. 
- schedule_interval="0 9 1 */3 *", - start_date=pendulum.datetime(2025, 6, 13, tz="UTC"), + schedule="0 9 1 */3 *", + start_date=datetime(2025, 6, 13), catchup=False, tags=["groundwater", "quarterly"], default_args=default_args diff --git a/airflow/dags/quarterly_hydat_import_dag.py b/airflow/dags/quarterly_hydat_import_dag.py index 47439ac6..58dfda14 100644 --- a/airflow/dags/quarterly_hydat_import_dag.py +++ b/airflow/dags/quarterly_hydat_import_dag.py @@ -1,6 +1,6 @@ import os -import pendulum -from airflow.decorators import dag, task +from datetime import datetime +from airflow.sdk import dag, task from shared.constants import default_args from shared.functions import generate_executor_config_template from dotenv import load_dotenv, find_dotenv @@ -13,8 +13,8 @@ # Cron for At 01:30 on day-of-month 1 and 15 of each month. # This was done instead of checking every quarter because there is not consistent schedule for Hydat. If there is not a new version of # Hydat available. It will not scrape it. - schedule_interval="30 9 1,15 * *", - start_date=pendulum.datetime(2025, 6, 13, tz="UTC"), + schedule="30 9 1,15 * *", + start_date=datetime(2025, 6, 13), catchup=False, tags=["water", "quarterly", "hydat"], default_args=default_args diff --git a/airflow/dags/quarterly_moe_hydrometric_historic_dag.py b/airflow/dags/quarterly_moe_hydrometric_historic_dag.py index 6cceed92..0e6e1ff3 100644 --- a/airflow/dags/quarterly_moe_hydrometric_historic_dag.py +++ b/airflow/dags/quarterly_moe_hydrometric_historic_dag.py @@ -1,6 +1,6 @@ import os -import pendulum -from airflow.decorators import dag, task +from datetime import datetime +from airflow.sdk import dag, task from shared.constants import default_args from shared.functions import generate_executor_config_template from dotenv import load_dotenv, find_dotenv @@ -11,8 +11,8 @@ @dag( dag_id="quarterly_moe_hydrometric_historic_update_dag", # Cron for At 10:00 UTC (02:00 PST) on day-of-month 1 in every 3rd month. 
- schedule_interval="0 10 1 */3 *", - start_date=pendulum.datetime(2025, 6, 13, tz="UTC"), + schedule="0 10 1 */3 *", + start_date=datetime(2025, 6, 13), catchup=False, tags=["water", "quarterly"], default_args=default_args diff --git a/airflow/dags/quarterly_water_quality_eccc_dag.py b/airflow/dags/quarterly_water_quality_eccc_dag.py index 99c76522..487318f4 100644 --- a/airflow/dags/quarterly_water_quality_eccc_dag.py +++ b/airflow/dags/quarterly_water_quality_eccc_dag.py @@ -1,6 +1,6 @@ import os -import pendulum -from airflow.decorators import dag, task +from datetime import datetime +from airflow.sdk import dag, task from shared.constants import default_args from shared.functions import generate_executor_config_template from dotenv import load_dotenv, find_dotenv @@ -11,8 +11,8 @@ @dag( dag_id="quarterly_water_quality_eccc_dag", # Cron for At 10:15 UTC (02:15 PST) on day-of-month 1 in every 3rd month. - schedule_interval="15 10 1 */3 *", - start_date=pendulum.datetime(2025, 6, 13, tz="UTC"), + schedule="15 10 1 */3 *", + start_date=datetime(2025, 6, 13), catchup=False, tags=["waterquality", "quarterly"], default_args=default_args diff --git a/airflow/dags/update_station_year_var_status_dag.py b/airflow/dags/update_station_year_var_status_dag.py index 0c7d1c53..e2ae7870 100644 --- a/airflow/dags/update_station_year_var_status_dag.py +++ b/airflow/dags/update_station_year_var_status_dag.py @@ -1,6 +1,6 @@ import os -import pendulum -from airflow.decorators import dag, task +from datetime import datetime +from airflow.sdk import dag, task from shared.constants import default_args from shared.functions import generate_executor_config_template from dotenv import load_dotenv, find_dotenv @@ -10,8 +10,8 @@ @dag( dag_id="update_station_year_var_status_dag", - schedule_interval="30 13 * * *", - start_date=pendulum.datetime(2025, 7, 15, tz="UTC"), + schedule="30 13 * * *", + start_date=datetime(2025, 7, 15), catchup=False, tags=["utility", "daily"], 
default_args=default_args diff --git a/airflow/dags/water_approval_dag.py b/airflow/dags/water_approval_dag.py index 98a0671a..df4de288 100644 --- a/airflow/dags/water_approval_dag.py +++ b/airflow/dags/water_approval_dag.py @@ -1,6 +1,6 @@ import os -import pendulum -from airflow.decorators import dag, task +from datetime import datetime +from airflow.sdk import dag, task from shared.constants import default_args from shared.functions import generate_executor_config_template from dotenv import load_dotenv, find_dotenv @@ -10,8 +10,8 @@ @dag( dag_id="wls_water_approval_dag", - schedule_interval="0 6 * * *", - start_date=pendulum.datetime(2025, 5, 29, tz="UTC"), + schedule="0 6 * * *", + start_date=datetime(2025, 5, 29), catchup=False, tags=["licence", "databc", "daily"], default_args=default_args diff --git a/airflow/dags/water_licences_bcer_dag.py b/airflow/dags/water_licences_bcer_dag.py index fd535968..1c729698 100644 --- a/airflow/dags/water_licences_bcer_dag.py +++ b/airflow/dags/water_licences_bcer_dag.py @@ -1,6 +1,6 @@ import os -import pendulum -from airflow.decorators import dag, task +from datetime import datetime +from airflow.sdk import dag, task from shared.constants import default_args from shared.functions import generate_executor_config_template from dotenv import load_dotenv, find_dotenv @@ -10,8 +10,8 @@ @dag( dag_id="bc_ogc_dag", - schedule_interval="0 6 * * *", - start_date=pendulum.datetime(2025, 5, 29, tz="UTC"), + schedule="0 6 * * *", + start_date=datetime(2025, 5, 29), catchup=False, tags=["licence", "databc", "daily"], default_args=default_args diff --git a/airflow/dags/weather_farm_prd_dag.py b/airflow/dags/weather_farm_prd_dag.py index e8dc4570..89fb697c 100644 --- a/airflow/dags/weather_farm_prd_dag.py +++ b/airflow/dags/weather_farm_prd_dag.py @@ -1,6 +1,6 @@ import os -import pendulum -from airflow.decorators import dag, task +from datetime import datetime +from airflow.sdk import dag, task from shared.constants import default_args 
from shared.functions import generate_executor_config_template from dotenv import load_dotenv, find_dotenv @@ -10,8 +10,8 @@ @dag( dag_id="weather_farm_prd_dag", - schedule_interval="0 8 * * *", - start_date=pendulum.datetime(2025, 5, 15, tz="UTC"), + schedule="0 8 * * *", + start_date=datetime(2025, 5, 15), catchup=False, tags=["climate", "station_observations", "daily"], default_args=default_args diff --git a/airflow/dags/wra_wrl_dag.py b/airflow/dags/wra_wrl_dag.py index b2d4a9ac..7ebb56d3 100644 --- a/airflow/dags/wra_wrl_dag.py +++ b/airflow/dags/wra_wrl_dag.py @@ -1,6 +1,6 @@ import os -import pendulum -from airflow.decorators import dag, task +from datetime import datetime +from airflow.sdk import dag, task from shared.constants import default_args from shared.functions import generate_executor_config_template from dotenv import load_dotenv, find_dotenv @@ -10,8 +10,8 @@ @dag( dag_id="wra_wrl_dag", - schedule_interval="5 6 * * *", - start_date=pendulum.datetime(2025, 6, 5, tz="UTC"), + schedule="5 6 * * *", + start_date=datetime(2025, 6, 5), catchup=False, tags=["licence","databc", "daily"], default_args=default_args diff --git a/airflow/dags/wsc_hydro_dag.py b/airflow/dags/wsc_hydro_dag.py index 037be908..70502542 100644 --- a/airflow/dags/wsc_hydro_dag.py +++ b/airflow/dags/wsc_hydro_dag.py @@ -1,6 +1,6 @@ import os -import pendulum -from airflow.decorators import dag, task +from datetime import datetime +from airflow.sdk import dag, task from shared.constants import default_args from shared.functions import generate_executor_config_template from dotenv import load_dotenv, find_dotenv @@ -10,8 +10,8 @@ @dag( dag_id="wsc_hydro_dag", - schedule_interval="0 8 * * *", - start_date=pendulum.datetime(2025, 4, 17, tz="UTC"), + schedule="0 8 * * *", + start_date=datetime(2025, 4, 17), catchup=False, tags=["water", "station_observations", "daily"], default_args=default_args diff --git a/airflow/docker-compose.yaml b/airflow/docker-compose.yaml index 
348af40b..a083183d 100644 --- a/airflow/docker-compose.yaml +++ b/airflow/docker-compose.yaml @@ -24,7 +24,7 @@ # The following variables are supported: # # AIRFLOW_IMAGE_NAME - Docker image name used to run Airflow. -# Default: apache/airflow:2.10.5 +# Default: apache/airflow:3.1.7 # AIRFLOW_UID - User ID in Airflow containers # Default: 50000 # AIRFLOW_PROJ_DIR - Base path to which all the files will be volumed. @@ -46,37 +46,38 @@ --- x-airflow-common: &airflow-common - # In order to add custom dependencies or upgrade provider packages you can use your extended image. + # In order to add custom dependencies or upgrade provider distributions you can use your extended image. # Comment the image line, place your Dockerfile in the directory where you placed the docker-compose.yaml # and uncomment the "build" line below, Then run `docker-compose build` to build the images. + # image: ${AIRFLOW_IMAGE_NAME:-apache/airflow:3.1.7} build: . - # build: . + env_file: + - ${ENV_FILE_PATH:-.env} environment: &airflow-common-env AIRFLOW__CORE__EXECUTOR: LocalExecutor + AIRFLOW__CORE__AUTH_MANAGER: airflow.providers.fab.auth_manager.fab_auth_manager.FabAuthManager AIRFLOW__DATABASE__SQL_ALCHEMY_CONN: postgresql+psycopg2://airflow:airflow@postgres/airflow - AIRFLOW__CORE__FERNET_KEY: '' + AIRFLOW__CORE__DAGS_ARE_PAUSED_AT_CREATION: 'true' AIRFLOW__CORE__LOAD_EXAMPLES: 'false' - AIRFLOW__API__AUTH_BACKENDS: 'airflow.api.auth.backend.basic_auth,airflow.api.auth.backend.session' + AIRFLOW__CORE__EXECUTION_API_SERVER_URL: 'http://airflow-apiserver:8080/execution/' + AIRFLOW__EMAIL__SUBJECT_TEMPLATE: "/opt/airflow/email_templates/subject/local_subject.html" + AIRFLOW__EMAIL__HTML_CONTENT_TEMPLATE: "/opt/airflow/email_templates/content/okd_content.html" + # yamllint disable rule:line-length # Use simple http server on scheduler for health checks # See 
https://airflow.apache.org/docs/apache-airflow/stable/administration-and-deployment/logging-monitoring/check-health.html#scheduler-health-check-server # yamllint enable rule:line-length AIRFLOW__SCHEDULER__ENABLE_HEALTH_CHECK: 'true' - # WARNING: Use _PIP_ADDITIONAL_REQUIREMENTS option ONLY for a quick checks - # for other purpose (development, test and especially production usage) build/extend Airflow image. - _PIP_ADDITIONAL_REQUIREMENTS: ${_PIP_ADDITIONAL_REQUIREMENTS:-} # The following line can be used to set a custom config file, stored in the local config folder - # If you want to use it, outcomment it and replace airflow.cfg with the name of your config file AIRFLOW_CONFIG: '/opt/airflow/config/airflow.cfg' volumes: - ${AIRFLOW_PROJ_DIR:-.}/dags:/opt/airflow/dags + - ${AIRFLOW_PROJ_DIR:-.}/data:/opt/airflow/data - ${AIRFLOW_PROJ_DIR:-.}/logs:/opt/airflow/logs - ${AIRFLOW_PROJ_DIR:-.}/config:/opt/airflow/config - ${AIRFLOW_PROJ_DIR:-.}/plugins:/opt/airflow/plugins - env_file: - - .env user: "${AIRFLOW_UID:-50000}:0" depends_on: &airflow-common-depends-on @@ -85,7 +86,7 @@ x-airflow-common: services: postgres: - image: postgres:13 + image: postgres:16 environment: POSTGRES_USER: airflow POSTGRES_PASSWORD: airflow @@ -99,13 +100,14 @@ services: start_period: 5s restart: always - airflow-webserver: + + airflow-apiserver: <<: *airflow-common - command: webserver + command: api-server ports: - "8080:8080" healthcheck: - test: ["CMD", "curl", "--fail", "http://localhost:8080/health"] + test: ["CMD", "curl", "--fail", "http://localhost:8080/api/v2/version"] interval: 30s timeout: 10s retries: 5 @@ -130,6 +132,22 @@ services: <<: *airflow-common-depends-on airflow-init: condition: service_completed_successfully + + airflow-dag-processor: + <<: *airflow-common + command: dag-processor + healthcheck: + test: ["CMD-SHELL", 'airflow jobs check --job-type DagProcessorJob --hostname "$${HOSTNAME}"'] + interval: 30s + timeout: 10s + retries: 5 + start_period: 30s + restart: 
always + depends_on: + <<: *airflow-common-depends-on + airflow-init: + condition: service_completed_successfully + airflow-triggerer: <<: *airflow-common command: triggerer @@ -160,6 +178,7 @@ services: echo "For other operating systems you can get rid of the warning with manually created .env file:" echo " See: https://airflow.apache.org/docs/apache-airflow/stable/howto/docker-compose/index.html#setting-the-right-airflow-user" echo + export AIRFLOW_UID=$$(id -u) fi one_meg=1048576 mem_available=$$(($$(getconf _PHYS_PAGES) * $$(getconf PAGE_SIZE) / one_meg)) @@ -194,9 +213,38 @@ services: echo " https://airflow.apache.org/docs/apache-airflow/stable/howto/docker-compose/index.html#before-you-begin" echo fi - mkdir -p /sources/logs /sources/dags /sources/plugins - chown -R "${AIRFLOW_UID}:0" /sources/{logs,dags,plugins} - exec /entrypoint airflow version + echo + echo "Creating missing opt dirs if missing:" + echo + mkdir -v -p /opt/airflow/{logs,dags,data,plugins,config} + echo + echo "Airflow version:" + /entrypoint airflow version + echo + echo "Files in shared volumes:" + echo + ls -la /opt/airflow/{logs,dags,data,plugins,config} + echo + echo "Running airflow config list to create default config file if missing." 
+ echo + /entrypoint airflow config list >/dev/null + echo + echo "Files in shared volumes:" + echo + ls -la /opt/airflow/{logs,dags,data,plugins,config} + echo + echo "Change ownership of files in /opt/airflow to ${AIRFLOW_UID}:0" + echo + chown -R "${AIRFLOW_UID}:0" /opt/airflow/ + echo + echo "Change ownership of files in shared volumes to ${AIRFLOW_UID}:0" + echo + chown -v -R "${AIRFLOW_UID}:0" /opt/airflow/{logs,dags,data,plugins,config} + echo + echo "Files in shared volumes:" + echo + ls -la /opt/airflow/{logs,dags,data,plugins,config} + # yamllint enable rule:line-length environment: <<: *airflow-common-env @@ -206,8 +254,6 @@ services: _AIRFLOW_WWW_USER_PASSWORD: ${_AIRFLOW_WWW_USER_PASSWORD:-airflow} _PIP_ADDITIONAL_REQUIREMENTS: '' user: "0:0" - volumes: - - ${AIRFLOW_PROJ_DIR:-.}:/sources airflow-cli: <<: *airflow-common @@ -221,6 +267,8 @@ services: - bash - -c - airflow + depends_on: + <<: *airflow-common-depends-on volumes: postgres-db-volume: diff --git a/airflow/email_templates/subject/local_subject.html b/airflow/email_templates/subject/local_subject.html new file mode 100644 index 00000000..9e1fe24e --- /dev/null +++ b/airflow/email_templates/subject/local_subject.html @@ -0,0 +1 @@ +BCWAT- Local Airflow Alert: {{ti.dag_id}} {{ti.state}} diff --git a/airflow/pod_templates/okd/heavy_task_template.yaml b/airflow/pod_templates/okd/heavy_task_template.yaml index ede8bffb..426b919c 100644 --- a/airflow/pod_templates/okd/heavy_task_template.yaml +++ b/airflow/pod_templates/okd/heavy_task_template.yaml @@ -42,11 +42,16 @@ spec: secretKeyRef: name: airflow-database-connection key: connection - - name: AIRFLOW__WEBSERVER__SECRET_KEY + - name: AIRFLOW__API__SECRET_KEY valueFrom: secretKeyRef: - name: airflow-webserver-secret-key - key: webserver-secret-key + name: airflow-api-secret-key + key: api-secret-key + - name: AIRFLOW__API_AUTH__JWT_SECRET + valueFrom: + secretKeyRef: + name: airflow-jwt-secret + key: jwt-secret - name: 
AIRFLOW__KUBERNETES__LOGS_SERVICE_HOST value: airflow-webserver - name: AIRFLOW__KUBERNETES__LOGS_SERVICE_PORT @@ -79,7 +84,7 @@ spec: value: airflow@foundryspatial.com - name: AIRFLOW__EMAIL__HTML_CONTENT_TEMPLATE value: "/opt/airflow/email_templates/content/okd_content.html" - - name: AIRFLOW__WEBSERVER__BASE_URL + - name: AIRFLOW__API__BASE_URL value: "http://bcwat.airflow.fspatial" resources: requests: diff --git a/airflow/pod_templates/okd/largest_task_template.yaml b/airflow/pod_templates/okd/largest_task_template.yaml index ecb50d45..d237a88e 100644 --- a/airflow/pod_templates/okd/largest_task_template.yaml +++ b/airflow/pod_templates/okd/largest_task_template.yaml @@ -42,11 +42,16 @@ spec: secretKeyRef: name: airflow-database-connection key: connection - - name: AIRFLOW__WEBSERVER__SECRET_KEY + - name: AIRFLOW__API__SECRET_KEY valueFrom: secretKeyRef: - name: airflow-webserver-secret-key - key: webserver-secret-key + name: airflow-api-secret-key + key: api-secret-key + - name: AIRFLOW__API_AUTH__JWT_SECRET + valueFrom: + secretKeyRef: + name: airflow-jwt-secret + key: jwt-secret - name: AIRFLOW__KUBERNETES__LOGS_SERVICE_HOST value: airflow-webserver - name: AIRFLOW__KUBERNETES__LOGS_SERVICE_PORT @@ -79,7 +84,7 @@ spec: value: airflow@foundryspatial.com - name: AIRFLOW__EMAIL__HTML_CONTENT_TEMPLATE value: "/opt/airflow/email_templates/content/okd_content.html" - - name: AIRFLOW__WEBSERVER__BASE_URL + - name: AIRFLOW__API__BASE_URL value: "http://bcwat.airflow.fspatial" resources: requests: diff --git a/airflow/pod_templates/okd/medium_task_template.yaml b/airflow/pod_templates/okd/medium_task_template.yaml index a930ff45..0bab2562 100644 --- a/airflow/pod_templates/okd/medium_task_template.yaml +++ b/airflow/pod_templates/okd/medium_task_template.yaml @@ -42,11 +42,16 @@ spec: secretKeyRef: name: airflow-database-connection key: connection - - name: AIRFLOW__WEBSERVER__SECRET_KEY + - name: AIRFLOW__API__SECRET_KEY valueFrom: secretKeyRef: - name: 
airflow-webserver-secret-key - key: webserver-secret-key + name: airflow-api-secret-key + key: api-secret-key + - name: AIRFLOW__API_AUTH__JWT_SECRET + valueFrom: + secretKeyRef: + name: airflow-jwt-secret + key: jwt-secret - name: AIRFLOW__KUBERNETES__LOGS_SERVICE_HOST value: airflow-webserver - name: AIRFLOW__KUBERNETES__LOGS_SERVICE_PORT @@ -79,7 +84,7 @@ spec: value: airflow@foundryspatial.com - name: AIRFLOW__EMAIL__HTML_CONTENT_TEMPLATE value: "/opt/airflow/email_templates/content/okd_content.html" - - name: AIRFLOW__WEBSERVER__BASE_URL + - name: AIRFLOW__API__BASE_URL value: "http://bcwat.airflow.fspatial" resources: requests: diff --git a/airflow/pod_templates/okd/small_task_template.yaml b/airflow/pod_templates/okd/small_task_template.yaml index efe96b81..1ba2be31 100644 --- a/airflow/pod_templates/okd/small_task_template.yaml +++ b/airflow/pod_templates/okd/small_task_template.yaml @@ -42,11 +42,16 @@ spec: secretKeyRef: name: airflow-database-connection key: connection - - name: AIRFLOW__WEBSERVER__SECRET_KEY + - name: AIRFLOW__API__SECRET_KEY valueFrom: secretKeyRef: - name: airflow-webserver-secret-key - key: webserver-secret-key + name: airflow-api-secret-key + key: api-secret-key + - name: AIRFLOW__API_AUTH__JWT_SECRET + valueFrom: + secretKeyRef: + name: airflow-jwt-secret + key: jwt-secret - name: AIRFLOW__KUBERNETES__LOGS_SERVICE_HOST value: airflow-webserver - name: AIRFLOW__KUBERNETES__LOGS_SERVICE_PORT @@ -79,7 +84,7 @@ spec: value: airflow@foundryspatial.com - name: AIRFLOW__EMAIL__HTML_CONTENT_TEMPLATE value: "/opt/airflow/email_templates/content/okd_content.html" - - name: AIRFLOW__WEBSERVER__BASE_URL + - name: AIRFLOW__API__BASE_URL value: "http://bcwat.airflow.fspatial" resources: requests: diff --git a/airflow/pod_templates/okd/tiny_task_template.yaml b/airflow/pod_templates/okd/tiny_task_template.yaml index e9e51863..c4a45171 100644 --- a/airflow/pod_templates/okd/tiny_task_template.yaml +++ 
b/airflow/pod_templates/okd/tiny_task_template.yaml @@ -42,11 +42,16 @@ spec: secretKeyRef: name: airflow-database-connection key: connection - - name: AIRFLOW__WEBSERVER__SECRET_KEY + - name: AIRFLOW__API__SECRET_KEY valueFrom: secretKeyRef: - name: airflow-webserver-secret-key - key: webserver-secret-key + name: airflow-api-secret-key + key: api-secret-key + - name: AIRFLOW__API_AUTH__JWT_SECRET + valueFrom: + secretKeyRef: + name: airflow-jwt-secret + key: jwt-secret - name: AIRFLOW__KUBERNETES__LOGS_SERVICE_HOST value: airflow-webserver - name: AIRFLOW__KUBERNETES__LOGS_SERVICE_PORT @@ -79,7 +84,7 @@ spec: value: airflow@foundryspatial.com - name: AIRFLOW__EMAIL__HTML_CONTENT_TEMPLATE value: "/opt/airflow/email_templates/content/okd_content.html" - - name: AIRFLOW__WEBSERVER__BASE_URL + - name: AIRFLOW__API__BASE_URL value: "http://bcwat.airflow.fspatial" resources: requests: diff --git a/airflow/unused_dags/drive_bc_dag.py b/airflow/unused_dags/drive_bc_dag.py index 17d49152..bf5c7eed 100644 --- a/airflow/unused_dags/drive_bc_dag.py +++ b/airflow/unused_dags/drive_bc_dag.py @@ -1,6 +1,6 @@ import os import pendulum -from airflow.decorators import dag, task +from airflow.sdk import dag, task from shared.constants import default_args from shared.functions import generate_executor_config_template from dotenv import load_dotenv, find_dotenv @@ -10,8 +10,8 @@ @dag( dag_id="drive_bc_dag", - schedule_interval="30 * * * *", - start_date=pendulum.datetime(2025, 5, 7, tz="UTC"), + schedule="30 * * * *", + start_date=pendulum.datetime(2025, 5, 7, tz="UTC"), catchup=False, tags=["climate", "station_observations", "hourly"], default_args=default_args diff --git a/airflow/unused_dags/hourly_to_daily_dag.py b/airflow/unused_dags/hourly_to_daily_dag.py index fcfa3647..9af27556 100644 --- a/airflow/unused_dags/hourly_to_daily_dag.py +++ b/airflow/unused_dags/hourly_to_daily_dag.py @@ -1,6 +1,6 @@ import os import pendulum -from airflow.decorators import dag, task +from airflow.sdk import dag,
task from shared.constants import default_args from shared.functions import generate_executor_config_template from dotenv import load_dotenv, find_dotenv @@ -10,8 +10,8 @@ @dag( dag_id="convert_hourly_to_daily_dag", - schedule_interval="0 13 * * *", - start_date=pendulum.datetime(2025, 5, 9, tz="UTC"), + schedule="0 13 * * *", + start_date=pendulum.datetime(2025, 5, 9, tz="UTC"), catchup=False, tags=["climate", "conversions", "daily"] ) diff --git a/charts/okd/airflow/values.yaml b/charts/okd/airflow/values.yaml index 25b310b0..454ed59a 100644 --- a/charts/okd/airflow/values.yaml +++ b/charts/okd/airflow/values.yaml @@ -10,6 +10,14 @@ extraSecrets: type: Opaque stringData: | fernet-key: "{{ .Values.secrets.fernet.key }}" + airflow-api-secret-key: + type: Opaque + stringData: | + api-secret-key: "{{ .Values.secrets.api.secretKey }}" + airflow-jwt-secret: + type: Opaque + stringData: | + jwt-secret: "{{ .Values.secrets.api.jwtKey }}" airflow-sendgrid-default: type: Opaque stringData: | @@ -17,19 +25,20 @@ extraSecrets: "conn_type": "sendgrid", "password": "{{ .Values.secrets.sendgridApiKey }}" }' -# We are manaully creating a fernet-key before initializing Airflow.
+ fernetKeySecretName: airflow-fernet-key +apiSecretKeySecretName: airflow-api-secret-key +jwtSecretName: airflow-jwt-secret # Airflow database config # Requires Airflow-metadata secret to have been created, pointing to the external database handling airflow metadata info data: metadataSecretName: airflow-database-connection - resultBackendSecretName: airflow-database-connection registry: secretName: acr-pull-secret -airflowVersion: "2.10.5" +airflowVersion: "3.1.7" images: airflow: @@ -81,7 +90,7 @@ createUserJob: type: RuntimeDefault ttlSecondsAfterFinished: 180 -webserver: +apiServer: startupProbe: timeoutSeconds: 60 env: @@ -119,6 +128,10 @@ webserver: runAsNonRoot: true seccompProfile: type: RuntimeDefault +dagProcessor: + env: + - name: ENVIRONMENT + value: OKD scheduler: env: diff --git a/client/cypress.config.js b/client/cypress.config.js index b68a7f58..931bcd3f 100644 --- a/client/cypress.config.js +++ b/client/cypress.config.js @@ -30,6 +30,14 @@ export default defineConfig({ return null } }) + on('before:browser:launch', (browser, launchOptions) => { + if (browser.name === 'chrome') { + launchOptions.args.push('--ignore-gpu-blocklist') + launchOptions.args.push('--use-gl=swiftshader') + launchOptions.args.push('--enable-unsafe-swiftshader') + } + return launchOptions + }) // It's IMPORTANT to return the config object // with any changed environment variables return config diff --git a/client/package.json b/client/package.json index 78f89bfe..894e090d 100644 --- a/client/package.json +++ b/client/package.json @@ -28,8 +28,8 @@ "@quasar/vite-plugin": "^1.9.0", "@vitejs/plugin-vue": "^5.2.4", "@vue/test-utils": "^2.4.6", - "cypress": "^14.4.0", - "cypress-vite": "^1.6.0", + "cypress": "14.4.0", + "cypress-vite": "1.6.0", "jsdom": "^26.1.0", "sass-embedded": "^1.86.1", "vite": "^6.2.0",