
Commit 4a193b1

Author: Michael Myaskovsky (committed)
Commit message: added skips
1 parent 00fbf7f commit 4a193b1

15 files changed: +64 -5 lines changed
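
The diff below repeatedly applies a custom `skip_targets` marker so that certain integration tests are skipped on the ClickHouse target. The marker's implementation is not part of this commit; as a rough, hypothetical sketch, such a marker is typically wired up in the suite's `conftest.py` via pytest hooks, assuming the warehouse target name is passed as an illustrative `--target` command-line option:

import pytest


def pytest_addoption(parser):
    # Assumed CLI option for selecting the warehouse target; the name and
    # default are illustrative, not taken from this repository.
    parser.addoption("--target", action="store", default="postgres")


def pytest_configure(config):
    # Register the custom marker so pytest does not warn about it.
    config.addinivalue_line(
        "markers",
        "skip_targets(targets): skip the test on the listed warehouse targets",
    )


def pytest_collection_modifyitems(config, items):
    target = config.getoption("--target")
    for item in items:
        marker = item.get_closest_marker("skip_targets")
        if marker and target in marker.args[0]:
            # Mark the test as skipped instead of letting it run and fail.
            item.add_marker(
                pytest.mark.skip(reason=f"Not supported on target '{target}'")
            )

With a hook along these lines, running `pytest --target clickhouse` would report the marked tests as skipped rather than failed.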

integration_tests/tests/test_all_columns_anomalies.py

Lines changed: 3 additions & 0 deletions
@@ -1,5 +1,6 @@
 from datetime import datetime, timedelta
 from typing import Any, Dict, List
+import pytest

 from data_generator import DATE_FORMAT, generate_dates
 from dbt_project import DbtProject
@@ -28,6 +29,7 @@ def test_anomalyless_all_columns_anomalies(test_id: str, dbt_project: DbtProject
     assert all([res["status"] == "pass" for res in test_results])


+@pytest.mark.skip_targets(["clickhouse"])
 def test_anomalous_all_columns_anomalies(test_id: str, dbt_project: DbtProject):
     utc_today = datetime.utcnow().date()
     test_date, *training_dates = generate_dates(base_date=utc_today - timedelta(1))
@@ -52,6 +54,7 @@ def test_anomalous_all_columns_anomalies(test_id: str, dbt_project: DbtProject):
     assert col_to_status == {"superhero": "fail", TIMESTAMP_COLUMN: "pass"}


+@pytest.mark.skip_targets(["clickhouse"])
 def test_all_columns_anomalies_with_where_expression(
     test_id: str, dbt_project: DbtProject
 ):

integration_tests/tests/test_anomalies_backfill_logic.py

Lines changed: 7 additions & 0 deletions
@@ -1,5 +1,6 @@
 import json
 from datetime import datetime, time, timedelta
+import pytest

 import dateutil.parser
 from data_generator import DATE_FORMAT, generate_dates
@@ -106,6 +107,7 @@ def test_full_backfill_for_non_incremental_model(dbt_project: DbtProject, test_i
     }


+@pytest.mark.skip_targets(["clickhouse"])
 def test_partial_backfill_for_incremental_models(dbt_project: DbtProject, test_id: str):
     utc_today = datetime.utcnow().date()
     data_dates = generate_dates(base_date=utc_today - timedelta(1))
@@ -152,6 +154,7 @@ def test_partial_backfill_for_incremental_models(dbt_project: DbtProject, test_i
     }


+@pytest.mark.skip_targets(["clickhouse"])
 def test_longer_backfill_in_case_of_a_gap(dbt_project: DbtProject, test_id: str):
     date_gap_size = 5
     utc_today = datetime.utcnow().date()
@@ -204,6 +207,7 @@ def test_longer_backfill_in_case_of_a_gap(dbt_project: DbtProject, test_id: str)
     }


+@pytest.mark.skip_targets(["clickhouse"])
 def test_full_backfill_if_metric_not_updated_for_a_long_time(
     dbt_project: DbtProject, test_id: str
 ):
@@ -263,6 +267,7 @@ def test_full_backfill_if_metric_not_updated_for_a_long_time(
     }


+@pytest.mark.skip_targets(["clickhouse"])
 def test_backfill_when_metric_doesnt_exist_back_enough(
     dbt_project: DbtProject, test_id: str
 ):
@@ -307,6 +312,7 @@ def test_backfill_when_metric_doesnt_exist_back_enough(
     }


+@pytest.mark.skip_targets(["clickhouse"])
 def test_backfill_with_middle_buckets_gap(dbt_project: DbtProject, test_id: str):
     utc_today = datetime.utcnow().date()
     data_start = utc_today - timedelta(21)
@@ -375,6 +381,7 @@ def test_backfill_with_middle_buckets_gap(dbt_project: DbtProject, test_id: str)
     }


+@pytest.mark.skip_targets(["clickhouse"])
 def test_bucket_size_not_aligned_with_days(dbt_project: DbtProject, test_id: str):
     """
     In this test we choose a bucket size that is not aligned with one day - specifically 7 hours.

integration_tests/tests/test_anomalies_ranges.py

Lines changed: 3 additions & 0 deletions
@@ -1,6 +1,7 @@
 import json
 from datetime import datetime, timedelta
 from typing import Any, Dict, List
+import pytest

 from data_generator import DATE_FORMAT, generate_dates
 from dbt_project import DbtProject
@@ -33,6 +34,7 @@ def get_latest_anomaly_test_points(dbt_project: DbtProject, test_id: str):
     return [json.loads(result["result_row"]) for result in results]


+@pytest.mark.skip_targets(["clickhouse"])
 def test_anomaly_ranges_are_valid(test_id: str, dbt_project: DbtProject):
     utc_today = datetime.utcnow().date()
     test_date, *training_dates = generate_dates(base_date=utc_today - timedelta(1))
@@ -66,6 +68,7 @@ def test_anomaly_ranges_are_valid(test_id: str, dbt_project: DbtProject):
     assert all([row["min_value"] == row["max_value"] for row in anomaly_test_points])


+@pytest.mark.skip_targets(["clickhouse"])
 def test_anomaly_ranges_are_valid_with_seasonality(
     test_id: str, dbt_project: DbtProject
 ):

integration_tests/tests/test_anomaly_exclude_metrics.py

Lines changed: 4 additions & 0 deletions
@@ -1,5 +1,6 @@
 from datetime import datetime, timedelta
 from typing import Any, Dict, List
+import pytest

 from data_generator import DATE_FORMAT, generate_dates
 from dbt_project import DbtProject
@@ -24,6 +25,7 @@
     time_bucket={"period": "hour", "count": 6},
     dates_step=timedelta(hours=6),
 )
+@pytest.mark.skip_targets(["clickhouse"])
 def test_exclude_specific_dates(
     test_id: str, dbt_project: DbtProject, time_bucket: dict, dates_step: timedelta
 ):
@@ -123,6 +125,7 @@ def test_exclude_specific_timestamps(test_id: str, dbt_project: DbtProject):
     assert test_result["status"] == "fail"


+@pytest.mark.skip_targets(["clickhouse"])
 def test_exclude_date_range(test_id: str, dbt_project: DbtProject):
     utc_today = datetime.utcnow().date()
     test_date, *training_dates = generate_dates(base_date=utc_today - timedelta(1))
@@ -158,6 +161,7 @@ def test_exclude_date_range(test_id: str, dbt_project: DbtProject):
     assert test_result["status"] == "fail"


+@pytest.mark.skip_targets(["clickhouse"])
 def test_exclude_by_metric_value(test_id: str, dbt_project: DbtProject):
     utc_today = datetime.utcnow().date()
     test_date, *training_dates = generate_dates(base_date=utc_today - timedelta(1))

integration_tests/tests/test_collect_metrics.py

Lines changed: 2 additions & 0 deletions
@@ -1,6 +1,7 @@
 from collections import defaultdict
 from datetime import datetime, timedelta
 from typing import Any, Dict, List
+import pytest

 from data_generator import DATE_FORMAT, generate_dates
 from dbt_project import DbtProject
@@ -91,6 +92,7 @@ def test_collect_no_timestamp_metrics(test_id: str, dbt_project: DbtProject):
     assert col_to_metric_names == EXPECTED_COL_TO_METRIC_NAMES


+@pytest.mark.skip_targets(["clickhouse"])
 def test_collect_group_by_metrics(test_id: str, dbt_project: DbtProject):
     utc_today = datetime.utcnow().date()
     data: List[Dict[str, Any]] = [

integration_tests/tests/test_column_anomalies.py

Lines changed: 10 additions & 0 deletions
@@ -1,5 +1,6 @@
 from datetime import datetime, timedelta
 from typing import Any, Dict, List
+import pytest

 from data_generator import DATE_FORMAT, generate_dates
 from dbt_project import DbtProject
@@ -29,6 +30,7 @@ def test_anomalyless_column_anomalies(test_id: str, dbt_project: DbtProject):
     assert test_result["status"] == "pass"


+@pytest.mark.skip_targets(["clickhouse"])
 def test_anomalyless_no_timestamp_column_anomalies(
     test_id: str, dbt_project: DbtProject
 ):
@@ -49,6 +51,7 @@ def test_anomalyless_no_timestamp_column_anomalies(
     assert test_result["status"] == "pass"


+@pytest.mark.skip_targets(["clickhouse"])
 def test_anomalous_column_anomalies(test_id: str, dbt_project: DbtProject):
     utc_today = datetime.utcnow().date()
     test_date, *training_dates = generate_dates(base_date=utc_today - timedelta(1))
@@ -72,6 +75,7 @@ def test_anomalous_column_anomalies(test_id: str, dbt_project: DbtProject):
     assert test_result["status"] == "fail"


+@pytest.mark.skip_targets(["clickhouse"])
 def test_column_anomalies_with_where_parameter(test_id: str, dbt_project: DbtProject):
     utc_today = datetime.utcnow().date()
     test_date, *training_dates = generate_dates(base_date=utc_today - timedelta(1))
@@ -172,6 +176,7 @@ def test_column_anomalies_with_timestamp_as_sql_expression(
     drop_failure_percent_threshold=5,
     metric_value=1,
 )
+@pytest.mark.skip_targets(["clickhouse"])
 def test_volume_anomaly_static_data_drop(
     test_id: str,
     dbt_project: DbtProject,
@@ -236,6 +241,7 @@ def test_anomalyless_column_anomalies_group(test_id: str, dbt_project: DbtProjec
     assert test_result["status"] == "pass"


+@pytest.mark.skip_targets(["clickhouse"])
 def test_column_anomalies_group_by(test_id: str, dbt_project: DbtProject):
     utc_today = datetime.utcnow().date()
     test_date, *training_dates = generate_dates(base_date=utc_today - timedelta(1))
@@ -289,6 +295,7 @@ def test_column_anomalies_group_by(test_id: str, dbt_project: DbtProject):
     assert test_result["failures"] == 2


+@pytest.mark.skip_targets(["clickhouse"])
 def test_anomalyless_column_anomalies_group_by_none_dimension(
     test_id: str, dbt_project: DbtProject
 ):
@@ -333,6 +340,7 @@ def test_anomalyless_column_anomalies_group_by_none_dimension(
     assert test_result["failures"] == 2


+@pytest.mark.skip_targets(["clickhouse"])
 def test_anomalyless_column_anomalies_group_by_multi(
     test_id: str, dbt_project: DbtProject
 ):
@@ -384,6 +392,7 @@ def test_anomalyless_column_anomalies_group_by_multi(
     assert test_result["failures"] == 3


+@pytest.mark.skip_targets(["clickhouse"])
 def test_anomalyless_column_anomalies_group_by_description(
     test_id: str, dbt_project: DbtProject
 ):
@@ -418,6 +427,7 @@ def test_anomalyless_column_anomalies_group_by_description(
     assert "not enough data" not in test_result["test_results_description"].lower()


+@pytest.mark.skip_targets(["clickhouse"])
 def test_anomalous_boolean_column_anomalies(test_id: str, dbt_project: DbtProject):
     utc_today = datetime.utcnow().date()
     test_date, *training_dates = generate_dates(base_date=utc_today - timedelta(1))

integration_tests/tests/test_dbt_artifacts/test_artifacts.py

Lines changed: 1 addition & 0 deletions
@@ -23,6 +23,7 @@ def test_artifacts_caching(dbt_project: DbtProject):
     assert first_row == second_row, "Artifacts are not cached at the on-run-end."


+@pytest.mark.skip_targets(["clickhouse"])
 def test_artifacts_updating(dbt_project: DbtProject):
     # Disabled by default in the tests for performance reasons.
     dbt_project.dbt_runner.vars["disable_dbt_artifacts_autoupload"] = False

integration_tests/tests/test_dimension_anomalies.py

Lines changed: 4 additions & 0 deletions
@@ -1,6 +1,7 @@
 import json
 from datetime import datetime, timedelta
 from typing import Any, Dict, List
+import pytest

 from data_generator import DATE_FORMAT, generate_dates
 from dbt_project import DbtProject
@@ -68,6 +69,7 @@ def test_dimension_anomalies_with_timestamp_as_sql_expression(
     assert test_result["status"] == "pass"


+@pytest.mark.skip_targets(["clickhouse"])
 def test_anomalous_dimension_anomalies(test_id: str, dbt_project: DbtProject):
     utc_today = datetime.utcnow().date()
     test_date, *training_dates = generate_dates(base_date=utc_today - timedelta(1))
@@ -99,6 +101,7 @@ def test_anomalous_dimension_anomalies(test_id: str, dbt_project: DbtProject):
     assert anomaly_test_points[0]["dimension_value"] == "Superman"


+@pytest.mark.skip_targets(["clickhouse"])
 def test_dimensions_anomalies_with_where_parameter(
     test_id: str, dbt_project: DbtProject
 ):
@@ -144,6 +147,7 @@ def test_dimensions_anomalies_with_where_parameter(
     assert test_result["status"] == "fail"


+@pytest.mark.skip_targets(["clickhouse"])
 def test_dimension_anomalies_with_timestamp_exclude_final_results(
     test_id: str, dbt_project: DbtProject
 ):

integration_tests/tests/test_event_freshness_anomalies.py

Lines changed: 3 additions & 0 deletions
@@ -1,4 +1,5 @@
 from datetime import datetime, timedelta
+import pytest

 from data_generator import DATE_FORMAT, generate_dates
 from dbt_project import DbtProject
@@ -29,6 +30,7 @@ def test_anomalyless_event_freshness(test_id: str, dbt_project: DbtProject):
     assert result["status"] == "pass"


+@pytest.mark.skip_targets(["clickhouse"])
 def test_stop_event_freshness(test_id: str, dbt_project: DbtProject):
     anomaly_date = datetime.now() - timedelta(days=2)
     data = [
@@ -50,6 +52,7 @@ def test_stop_event_freshness(test_id: str, dbt_project: DbtProject):
     assert result["status"] == "fail"


+@pytest.mark.skip_targets(["clickhouse"])
 def test_slower_rate_event_freshness(test_id: str, dbt_project: DbtProject):
     # To avoid races, set the "custom_started_at" to the beginning of the day
     test_started_at = datetime.utcnow().replace(hour=0, minute=0, second=0)

integration_tests/tests/test_exposure_schema_validity.py

Lines changed: 2 additions & 2 deletions
@@ -50,7 +50,7 @@ def test_exposure_schema_validity_no_exposures(test_id: str, dbt_project: DbtPro
     assert test_result["status"] == "pass"


-@pytest.mark.skip_targets(["spark"])
+@pytest.mark.skip_targets(["spark", "clickhouse"])
 def test_exposure_schema_validity_correct_columns_and_types(
     test_id: str, dbt_project: DbtProject
 ):
@@ -115,7 +115,7 @@ def test_exposure_schema_validity_correct_columns_and_invalid_type(
     assert test_result["status"] == "fail"


-@pytest.mark.skip_targets(["spark"])
+@pytest.mark.skip_targets(["spark", "clickhouse"])
 def test_exposure_schema_validity_invalid_type_name_present_in_error(
     test_id: str, dbt_project: DbtProject
 ):
