Skip to content

Commit f6a8f65

Browse files
authored
Merge pull request #2763 from dlt-hub/devel
master merge for 1.12.0 release
2 parents 7bcc707 + 3de8a1e commit f6a8f65

File tree

445 files changed

+18900
-6510
lines changed

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

445 files changed

+18900
-6510
lines changed

.github/workflows/lint.yml

Lines changed: 2 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -2,25 +2,13 @@
22
name: lint | code & tests
33

44
on:
5-
pull_request:
6-
branches:
7-
- master
8-
- devel
5+
workflow_call:
96
workflow_dispatch:
107

11-
concurrency:
12-
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
13-
cancel-in-progress: true
14-
158
jobs:
16-
get_docs_changes:
17-
name: docs changes
18-
uses: ./.github/workflows/get_docs_changes.yml
199

2010
run_lint:
2111
name: lint
22-
needs: get_docs_changes
23-
if: needs.get_docs_changes.outputs.changes_outside_docs == 'true'
2412
strategy:
2513
fail-fast: true
2614
matrix:
@@ -60,7 +48,7 @@ jobs:
6048

6149
- name: Install dependencies
6250
# if: steps.cached-poetry-dependencies.outputs.cache-hit != 'true'
63-
run: poetry install --all-extras --with airflow,providers,pipeline,sentry-sdk,dbt
51+
run: poetry install --all-extras --with airflow,providers,pipeline,sentry-sdk,dbt,marimo,streamlit
6452

6553
- name: Run make lint
6654
run: |

.github/workflows/main.yml

Lines changed: 105 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,105 @@
1+
name: dlt
2+
3+
on:
4+
pull_request:
5+
branches: [ master, devel ]
6+
7+
concurrency:
8+
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
9+
cancel-in-progress: true
10+
11+
jobs:
12+
13+
# checks whether anything outside docs has changed, if not, most of the tests can be skipped
14+
# lint will check this and if lint is not run, no other tests will be run
15+
get_docs_changes:
16+
name: docs changes
17+
uses: ./.github/workflows/get_docs_changes.yml
18+
19+
# if the PR is from a fork, we need to authorize the secrets access and remote destinations run with a label
20+
authorize_run_from_fork:
21+
name: check if fork and if so whether secrets are available
22+
# run when label is assigned OR when we are not a fork
23+
if: ${{ github.event.label.name == 'ci from fork' || (github.event.pull_request.head.repo.full_name == github.repository && (github.event.action == 'opened' || github.event.action == 'synchronize'))}}
24+
runs-on: ubuntu-latest
25+
steps:
26+
- run: true
27+
28+
# testing and linting of docs snippets is always run to catch problems in the docs
29+
# NOTE: we could split linting and testing of docs so linter can also always run for fork PRs
30+
test_docs_snippets:
31+
name: test snippets in docs
32+
uses: ./.github/workflows/test_docs_snippets.yml
33+
secrets: inherit
34+
35+
lint:
36+
name: lint on all python versions
37+
needs: get_docs_changes
38+
if: needs.get_docs_changes.outputs.changes_outside_docs == 'true'
39+
uses: ./.github/workflows/lint.yml
40+
41+
test_common:
42+
name: run common tests on all python versions and OSes
43+
needs: lint
44+
uses: ./.github/workflows/test_common.yml
45+
46+
#
47+
# Destination and Sources local tests, do not provide secrets
48+
# Other tests that do not require remote connections
49+
#
50+
test_destinations_local:
51+
name: run local destination tests without secrets
52+
needs: test_common
53+
uses: ./.github/workflows/test_destinations_local.yml
54+
55+
test_sources_local:
56+
name: run local source tests without secrets
57+
needs: test_common
58+
uses: ./.github/workflows/test_sources_local.yml
59+
60+
test_plus:
61+
name: test dlt+ connection
62+
needs: lint
63+
uses: ./.github/workflows/test_plus.yml
64+
65+
test_tools_airflow:
66+
name: test airflow helpers
67+
needs: lint
68+
uses: ./.github/workflows/test_tools_airflow.yml
69+
70+
test_tools_build_images:
71+
name: test build images
72+
needs: lint
73+
uses: ./.github/workflows/test_tools_build_images.yml
74+
75+
#
76+
# Remote destination tests and docs snippets, needs secrets,
77+
# so we depend on authorize and forward secrets
78+
#
79+
80+
81+
test_destinations_remote:
82+
name: test remote destinations with secrets
83+
needs: [authorize_run_from_fork, test_common]
84+
uses: ./.github/workflows/test_destinations_remote.yml
85+
secrets: inherit
86+
with:
87+
run_full_test_suite: ${{ contains(github.event.pull_request.labels.*.name, 'ci full') || github.event_name == 'schedule'}}
88+
89+
#
90+
# Other tools and tests that require secrets
91+
#
92+
test_tools_dbt_runner:
93+
name: test dbt runner
94+
needs: [test_common, authorize_run_from_fork]
95+
uses: ./.github/workflows/test_tools_dbt_runner.yml
96+
secrets: inherit
97+
98+
# dbt cloud tests currently are disabled, TODO: explain why
99+
# test_tools_dbt_cloud:
100+
# needs: [test_common, authorize_run_from_fork]
101+
# uses: ./.github/workflows/test_tools_dbt_cloud.yml
102+
# secrets: inherit
103+
104+
105+

.github/workflows/test_common.yml

Lines changed: 75 additions & 97 deletions
Original file line numberDiff line numberDiff line change
@@ -1,19 +1,11 @@
11
name: common | common
22

33
on:
4-
pull_request:
5-
branches:
6-
- master
7-
- devel
8-
workflow_dispatch:
4+
workflow_call:
95

10-
concurrency:
11-
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
12-
cancel-in-progress: true
136

147
env:
158
RUNTIME__LOG_LEVEL: ERROR
16-
RUNTIME__DLTHUB_TELEMETRY_ENDPOINT: ${{ secrets.RUNTIME__DLTHUB_TELEMETRY_ENDPOINT }}
179

1810
# we need the secrets only for the rest_api_pipeline tests which are in tests/sources
1911
# so we inject them only at the end
@@ -22,35 +14,48 @@ env:
2214
SOURCES__GITHUB_API_PIPELINE__ACCESS_TOKEN: ${{ secrets.GITHUB_TOKEN }}
2315

2416
jobs:
25-
get_docs_changes:
26-
name: docs changes
27-
uses: ./.github/workflows/get_docs_changes.yml
28-
2917
run_common:
3018
name: test
31-
needs: get_docs_changes
32-
if: needs.get_docs_changes.outputs.changes_outside_docs == 'true'
3319
strategy:
3420
fail-fast: false
3521
matrix:
36-
os: ["ubuntu-latest", "macos-latest", "windows-latest"]
37-
python-version: ["3.11.x"]
38-
# Test all python versions on ubuntu only
3922
include:
40-
- python-version: "3.9.x"
41-
os: "ubuntu-latest"
42-
- python-version: "3.10.x"
43-
os: "ubuntu-latest"
44-
- python-version: "3.12.x"
45-
os: "ubuntu-latest"
46-
- python-version: "3.13.x"
47-
os: "ubuntu-latest"
48-
- python-version: "3.13.x"
49-
os: "windows-latest"
23+
24+
# macos tests
25+
- os: macos-latest
26+
python-version: "3.11.x"
27+
shell: bash
28+
29+
# linux tests
30+
- os: ubuntu-latest
31+
python-version: "3.9.x"
32+
shell: bash
33+
- os: ubuntu-latest
34+
python-version: "3.10.x"
35+
shell: bash
36+
- os: ubuntu-latest
37+
python-version: "3.11.x"
38+
shell: bash
39+
- os: ubuntu-latest
40+
python-version: "3.12.x"
41+
shell: bash
42+
- os: ubuntu-latest
43+
python-version: "3.13.x"
44+
shell: bash
45+
46+
# windows tests
47+
- os: windows-latest
48+
python-version: "3.11.x"
49+
shell: cmd
50+
pytest_args: '-m "not forked"'
51+
- os: windows-latest
52+
python-version: "3.13.x"
53+
shell: cmd
54+
pytest_args: '-m "not forked"'
5055

5156
defaults:
5257
run:
53-
shell: bash
58+
shell: ${{ matrix.shell }}
5459
runs-on: ${{ matrix.os }}
5560

5661
steps:
@@ -72,7 +77,6 @@ jobs:
7277
copy tzdata %USERPROFILE%\Downloads\tzdata
7378
curl https://raw.githubusercontent.com/unicode-org/cldr/master/common/supplemental/windowsZones.xml --output %USERPROFILE%\Downloads\tzdata\windowsZones.xml
7479
if: runner.os == 'Windows'
75-
shell: cmd
7680

7781
- name: Install Poetry
7882
# https://github.com/snok/install-poetry#running-on-windows
@@ -83,93 +87,67 @@ jobs:
8387
installer-parallel: true
8488
version: 1.8.5
8589

86-
# NOTE: do not cache. we want to have a clean state each run and we upgrade dependencies later
87-
# - name: Load cached venv
88-
# id: cached-poetry-dependencies
89-
# uses: actions/cache@v3
90-
# with:
91-
# # path: ${{ steps.pip-cache.outputs.dir }}
92-
# path: .venv
93-
# key: venv-${{ matrix.os }}-${{ matrix.python-version }}-${{ hashFiles('**/poetry.lock') }}
94-
9590
- name: Install dependencies
9691
run: poetry install --no-interaction --with sentry-sdk
97-
98-
- run: |
99-
poetry run pytest tests/common tests/normalize tests/reflection tests/plugins tests/load/test_dummy_client.py tests/extract/test_extract.py tests/extract/test_sources.py tests/pipeline/test_pipeline_state.py
100-
if: runner.os != 'Windows'
101-
name: Run common tests with minimum dependencies Linux/MAC
102-
- run: |
103-
poetry run pytest tests/common tests/normalize tests/reflection tests/plugins tests/load/test_dummy_client.py tests/extract/test_extract.py tests/extract/test_sources.py tests/pipeline/test_pipeline_state.py -m "not forked"
104-
if: runner.os == 'Windows'
105-
name: Run common tests with minimum dependencies Windows
106-
shell: cmd
92+
93+
- name: Run common tests with minimum dependencies
94+
run: |
95+
poetry run pytest tests/common tests/normalize tests/reflection tests/plugins tests/load/test_dummy_client.py tests/extract/test_extract.py tests/extract/test_sources.py tests/pipeline/test_pipeline_state.py ${{ matrix.pytest_args }}
96+
10797
10898
- name: Install duckdb dependencies
10999
run: poetry install --no-interaction -E duckdb --with sentry-sdk
110100

111-
- run: |
112-
poetry run pytest tests/pipeline/test_pipeline.py tests/pipeline/test_import_export_schema.py
113-
if: runner.os != 'Windows'
114-
name: Run pipeline smoke tests with minimum deps Linux/MAC
115-
- run: |
116-
poetry run pytest tests/pipeline/test_pipeline.py tests/pipeline/test_import_export_schema.py -m "not forked"
117-
if: runner.os == 'Windows'
118-
name: Run smoke tests with minimum deps Windows
119-
shell: cmd
101+
- name: Run pipeline smoke tests with minimum deps
102+
run: |
103+
poetry run pytest tests/pipeline/test_pipeline.py tests/pipeline/test_import_export_schema.py ${{ matrix.pytest_args }}
120104
121105
- name: Install pyarrow
122106
run: poetry install --no-interaction -E duckdb -E cli -E parquet --with sentry-sdk
123107

124-
- run: |
125-
poetry run pytest tests/pipeline/test_pipeline_extra.py -k arrow
126-
if: runner.os != 'Windows'
127-
name: Run pipeline tests with pyarrow but no pandas installed
128-
- run: |
129-
poetry run pytest tests/pipeline/test_pipeline_extra.py -k arrow -m "not forked"
130-
if: runner.os == 'Windows'
131-
name: Run pipeline tests with pyarrow but no pandas installed Windows
132-
shell: cmd
108+
- name: Run pipeline tests with pyarrow but no pandas installed
109+
run: |
110+
poetry run pytest tests/pipeline/test_pipeline_extra.py -k arrow ${{ matrix.pytest_args }}
133111
134112
- name: Install pipeline and sources dependencies
135113
run: poetry install --no-interaction -E duckdb -E cli -E parquet -E deltalake -E sql_database --with sentry-sdk,pipeline,sources
136114

137-
- run: |
138-
poetry run pytest tests/extract tests/pipeline tests/libs tests/cli/common tests/destinations tests/sources
139-
if: runner.os != 'Windows'
140-
name: Run extract and pipeline tests Linux/MAC
141-
- run: |
142-
poetry run pytest tests/extract tests/pipeline tests/libs tests/cli/common tests/destinations tests/sources -m "not forked"
143-
if: runner.os == 'Windows'
144-
name: Run extract tests Windows
145-
shell: cmd
115+
- name: Run extract and pipeline tests
116+
run: |
117+
poetry run pytest tests/extract tests/pipeline tests/libs tests/cli/common tests/destinations tests/sources tests/transformations ${{ matrix.pytest_args }}
146118
147119
# here we upgrade sql alchemy to 2 an run the sql_database tests again
148120
- name: Upgrade sql alchemy
149121
run: poetry run pip install sqlalchemy==2.0.32
150122

151-
- run: |
152-
poetry run pytest tests/sources/sql_database
153-
if: runner.os != 'Windows'
154-
name: Run extract and pipeline tests Linux/MAC
155-
- run: |
123+
- name: Run extract and pipeline tests
124+
run: |
156125
poetry run pytest tests/sources/sql_database
157-
if: runner.os == 'Windows'
158-
name: Run extract tests Windows
159-
shell: cmd
160-
161-
# - name: Install Pydantic 1.0
162-
# run: pip install "pydantic<2"
163-
164-
# - run: |
165-
# poetry run pytest tests/libs
166-
# if: runner.os != 'Windows'
167-
# name: Run extract and pipeline tests Linux/MAC
168-
# - run: |
169-
# poetry run pytest tests/libs
170-
# if: runner.os == 'Windows'
171-
# name: Run extract tests Windows
172-
# shell: cmd
126+
127+
128+
129+
# test marimo app, does not work with python 3.13
130+
- name: Install dlt with duckdb and studio
131+
run: poetry install --no-interaction -E duckdb --with sentry-sdk,pipeline,sources,ibis,marimo
132+
if: matrix.python-version != '3.13.x'
133+
134+
- name: Install playwright
135+
run: poetry run playwright install
136+
if: matrix.python-version != '3.13.x'
137+
138+
# Run marimo studio unit tests
139+
- name: Run marimo studio unit tests
140+
run: |
141+
poetry run pytest tests/helpers/studio
142+
if: matrix.python-version != '3.13.x'
143+
144+
# Run marimo e2e tests (does not pass with python 3.9, does not pass on windows (playwright does not work somehow), does not pass on python 3.13 (ibis not available))
145+
- name: Run marimo e2e
146+
run: |
147+
poetry run marimo run --headless dlt/helpers/studio/app.py -- -- --pipelines_dir _storage/.dlt/pipelines/ --with_test_identifiers true & poetry run pytest --browser chromium tests/e2e
148+
if: matrix.python-version != '3.13.x' && matrix.python-version != '3.9.x' && matrix.os != 'windows-latest'
149+
150+
173151

174152
matrix_job_required_check:
175153
name: common | common tests

0 commit comments

Comments
 (0)