Skip to content

Commit c0b0dbf

Browse files
author
Michael Myaskovsky
committed
dbt run works on clickhouse
1 parent cc226bf commit c0b0dbf

File tree

6 files changed

+43
-2
lines changed

6 files changed

+43
-2
lines changed

dbt_project.yml

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -3,6 +3,9 @@ version: "0.18.1"
33

44
require-dbt-version: [">=1.0.0", "<2.0.0"]
55

6+
flags:
7+
require_explicit_package_overrides_for_builtin_materializations: false
8+
69
config-version: 2
710
profile: "elementary"
811

macros/edr/data_monitoring/monitors/column_numeric_monitors.sql

Lines changed: 8 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -23,9 +23,17 @@
2323
{% endmacro %}
2424

2525
{# Entry point for the standard-deviation SQL expression.
   Resolves to the adapter-specific implementation via dbt's dispatch
   (default__standard_deviation unless an adapter override exists). #}
{% macro standard_deviation(column_name) -%}
    {%- set implementation = adapter.dispatch('standard_deviation', 'elementary') -%}
    {{- implementation(column_name) -}}
{%- endmacro %}
28+
29+
{# Default cross-database implementation: renders a standard-deviation
   aggregate over the column, cast to the adapter's float type so that
   integer columns aggregate correctly. #}
{% macro default__standard_deviation(column_name) -%}
    stddev(cast({{ column_name }} as {{ elementary.edr_type_float() }}))
{%- endmacro %}
2832

33+
{# ClickHouse implementation of the standard-deviation expression.
   Uses stddevSamp (sample standard deviation) to match the semantics of the
   default implementation's stddev(), which computes the sample standard
   deviation on the other supported warehouses. The previous stddevPop
   (population standard deviation) divides by n instead of n - 1 and would
   understate the training stddev for small anomaly-detection windows,
   producing inconsistent anomaly scores across adapters. #}
{% macro clickhouse__standard_deviation(column_name) -%}
    stddevSamp(cast({{ column_name }} as {{ elementary.edr_type_float() }}))
{%- endmacro %}
36+
2937
{% macro variance(column_name) -%}
3038
variance(cast({{ column_name }} as {{ elementary.edr_type_float() }}))
3139
{%- endmacro %}

macros/edr/dbt_artifacts/upload_artifacts_to_table.sql

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,6 @@
11
{% macro upload_artifacts_to_table(table_relation, artifacts, flatten_artifact_callback, append=False, should_commit=False, metadata_hashes=None, on_query_exceed=none) %}
22
{% set context_name = 'upload_artifacts_to_table[' ~ table_relation.name ~ ']'%}
33
{% do elementary.begin_duration_measure_context(context_name) %}
4-
54
{% set flatten_artifact_dicts = [] %}
65
{% do elementary.file_log("[{}] Flattening the artifacts.".format(table_relation.identifier)) %}
76
{% do elementary.begin_duration_measure_context('artifacts_flatten') %}
@@ -18,8 +17,10 @@
1817

1918
{% if append %}
2019
{# In append mode, just insert, and no need to be atomic #}
20+
2121
{% do elementary.insert_rows(table_relation, flatten_artifact_dicts, should_commit, elementary.get_config_var('dbt_artifacts_chunk_size'), on_query_exceed) %}
2222
{% else %}
23+
2324
{% set upload_artifact_method = get_upload_artifact_method(table_relation, metadata_hashes) %}
2425
{% if upload_artifact_method.type == "diff" %}
2526
{% do elementary.file_log("[{}] Comparing the artifacts state.".format(table_relation.identifier)) %}

macros/utils/cross_db_utils/current_timestamp.sql

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -22,6 +22,9 @@
2222
cast(current_timestamp() as timestamp)
2323
{% endmacro %}
2424

25+
{# ClickHouse implementation of edr_current_timestamp.
   now() returns the current DateTime in the server's time zone. #}
{% macro clickhouse__edr_current_timestamp() %}
now()
{% endmacro %}
2528

2629
{% macro edr_current_timestamp_in_utc() -%}
2730
{{ adapter.dispatch('edr_current_timestamp_in_utc','elementary')() }}
@@ -47,6 +50,10 @@
4750
cast(unix_timestamp() as timestamp)
4851
{% endmacro %}
4952

53+
{# ClickHouse implementation of edr_current_timestamp_in_utc.
   now('UTC') returns the current DateTime pinned to the UTC time zone,
   regardless of the server's configured time zone. #}
{% macro clickhouse__edr_current_timestamp_in_utc() %}
now('UTC')
{% endmacro %}
56+
5057
{% macro athena__edr_current_timestamp() -%}
5158
CURRENT_TIMESTAMP
5259
{%- endmacro -%}

macros/utils/table_operations/delete_and_insert.sql

Lines changed: 22 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -54,6 +54,28 @@
5454
{% do return([query]) %}
5555
{% endmacro %}
5656

57+
{# ClickHouse implementation of get_delete_and_insert_queries.
   Builds the ordered statement list for a "delete matching keys, then insert"
   refresh of `relation`:
     1. ALTER TABLE ... DELETE removes rows whose key is null or whose key
        appears in `delete_relation`.
     2. INSERT INTO ... SELECT copies all rows from `insert_relation`.
   Either step is skipped when its relation argument is none/empty, so the
   macro can be used for insert-only or delete-only refreshes as well.
   NOTE(review): ALTER TABLE ... DELETE is an asynchronous mutation in
   ClickHouse by default (mutations_sync = 0), so the subsequent insert may
   become visible before the delete has finished applying — confirm the
   callers tolerate that window, or consider SETTINGS mutations_sync = 1. #}
{% macro clickhouse__get_delete_and_insert_queries(relation, insert_relation, delete_relation, delete_column_key) %}
    {% set statements = [] %}

    {# Step 1: purge rows that are about to be replaced (or have no key). #}
    {% if delete_relation %}
        {% set delete_statement %}
            alter table {{ relation }} delete where
                {{ delete_column_key }} is null
                or {{ delete_column_key }} in (select {{ delete_column_key }} from {{ delete_relation }});
        {% endset %}
        {% do statements.append(delete_statement) %}
    {% endif %}

    {# Step 2: load the fresh rows. #}
    {% if insert_relation %}
        {% set insert_statement %}
            insert into {{ relation }} select * from {{ insert_relation }};
        {% endset %}
        {% do statements.append(insert_statement) %}
    {% endif %}

    {% do return(statements) %}
{% endmacro %}
78+
5779
{% macro spark__get_delete_and_insert_queries(relation, insert_relation, delete_relation, delete_column_key) %}
5880
{% set queries = [] %}
5981

models/edr/data_monitoring/anomaly_detection/metrics_anomaly_score.sql

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -27,7 +27,7 @@ time_window_aggregation as (
2727
bucket_duration_hours,
2828
updated_at,
2929
avg(metric_value) over (partition by metric_name, full_table_name, column_name order by bucket_start asc rows between unbounded preceding and current row) as training_avg,
30-
stddev(metric_value) over (partition by metric_name, full_table_name, column_name order by bucket_start asc rows between unbounded preceding and current row) as training_stddev,
30+
{{ elementary.standard_deviation('metric_value') }} over (partition by metric_name, full_table_name, column_name order by bucket_start asc rows between unbounded preceding and current row) as training_stddev,
3131
count(metric_value) over (partition by metric_name, full_table_name, column_name order by bucket_start asc rows between unbounded preceding and current row) as training_set_size,
3232
last_value(bucket_end) over (partition by metric_name, full_table_name, column_name order by bucket_start asc rows between unbounded preceding and current row) training_end,
3333
first_value(bucket_end) over (partition by metric_name, full_table_name, column_name order by bucket_start asc rows between unbounded preceding and current row) as training_start

0 commit comments

Comments
 (0)