Skip to content
This repository was archived by the owner on Sep 2, 2025. It is now read-only.

Commit 4d255b2

Browse files
authored
Fix: Cast to timestamp prior to event time comparison (#1422)
1 parent 26c19e9 commit 4d255b2

File tree

4 files changed

+78
-4
lines changed

4 files changed

+78
-4
lines changed
Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,7 @@
kind: Fixes
body: Cast `event_time` to a timestamp prior to comparing against microbatch start/end time
time: 2024-12-04T10:58:46.573608-05:00
custom:
  Author: michelleark
  Issue: "1422"

dbt/adapters/bigquery/relation.py

Lines changed: 24 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,12 @@
44

55
from dbt_common.exceptions import CompilationError
66
from dbt_common.utils.dict import filter_null_values
7-
from dbt.adapters.base.relation import BaseRelation, ComponentName, InformationSchema
7+
from dbt.adapters.base.relation import (
8+
BaseRelation,
9+
ComponentName,
10+
InformationSchema,
11+
EventTimeFilter,
12+
)
813
from dbt.adapters.contracts.relation import RelationConfig, RelationType
914
from dbt.adapters.relation_configs import RelationConfigChangeAction
1015

@@ -116,6 +121,24 @@ def materialized_view_config_changeset(
116121
def information_schema(self, identifier: Optional[str] = None) -> "BigQueryInformationSchema":
117122
return BigQueryInformationSchema.from_relation(self, identifier)
118123

124+
def _render_event_time_filtered(self, event_time_filter: EventTimeFilter) -> str:
125+
"""
126+
Returns "" if start and end are both None
127+
"""
128+
filter = ""
129+
if event_time_filter.start and event_time_filter.end:
130+
filter = f"cast({event_time_filter.field_name} as timestamp) >= '{event_time_filter.start}' and cast({event_time_filter.field_name} as timestamp) < '{event_time_filter.end}'"
131+
elif event_time_filter.start:
132+
filter = (
133+
f"cast({event_time_filter.field_name} as timestamp) >= '{event_time_filter.start}'"
134+
)
135+
elif event_time_filter.end:
136+
filter = (
137+
f"cast({event_time_filter.field_name} as timestamp) < '{event_time_filter.end}'"
138+
)
139+
140+
return filter
141+
119142

120143
@dataclass(frozen=True, eq=False, repr=False)
121144
class BigQueryInformationSchema(InformationSchema):

tests/functional/adapter/incremental/incremental_strategy_fixtures.py

Lines changed: 19 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -570,7 +570,7 @@
570570
begin=modules.datetime.datetime(2020, 1, 1, 0, 0, 0)
571571
)
572572
}}
573-
select * from {{ ref('input_model') }}
573+
select id, cast(event_time as timestamp) as event_time from {{ ref('input_model') }}
574574
"""
575575

576576
microbatch_input_sql = """
@@ -582,6 +582,24 @@
582582
select 3 as id, TIMESTAMP '2020-01-03 00:00:00-0' as event_time
583583
"""
584584

# Input model whose event_time column is DATE-typed (not a timestamp);
# used to verify that microbatch event-time filtering casts the column
# to a timestamp before comparing against batch start/end bounds.
microbatch_input_event_time_date_sql = """
{{ config(materialized='table', event_time='event_time') }}
select 1 as id, DATE '2020-01-01' as event_time
union all
select 2 as id, DATE '2020-01-02' as event_time
union all
select 3 as id, DATE '2020-01-03' as event_time
"""

# Same as above but with a DATETIME-typed event_time column, covering the
# other non-timestamp event-time type the cast must handle.
microbatch_input_event_time_datetime_sql = """
{{ config(materialized='table', event_time='event_time') }}
select 1 as id, DATETIME '2020-01-01' as event_time
union all
select 2 as id, DATETIME '2020-01-02' as event_time
union all
select 3 as id, DATETIME '2020-01-03' as event_time
"""
602+
585603
microbatch_model_no_partition_by_sql = """
586604
{{ config(
587605
materialized='incremental',

tests/functional/adapter/incremental/test_incremental_microbatch.py

Lines changed: 28 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -13,6 +13,8 @@
1313
microbatch_input_sql,
1414
microbatch_model_no_partition_by_sql,
1515
microbatch_model_invalid_partition_by_sql,
16+
microbatch_input_event_time_date_sql,
17+
microbatch_input_event_time_datetime_sql,
1618
)
1719

1820

@@ -22,6 +24,32 @@ def microbatch_model_sql(self) -> str:
2224
return microbatch_model_no_unique_id_sql
2325

2426

class TestBigQueryMicrobatchInputWithDate(TestBigQueryMicrobatch):
    """Microbatch run against an input model whose event_time is DATE-typed.

    Inherits the base microbatch test flow and only swaps in the DATE
    fixtures; exercises the cast-to-timestamp event-time comparison.
    """

    @pytest.fixture(scope="class")
    def input_model_sql(self) -> str:
        # Override the base input model with one whose event_time is a DATE.
        return microbatch_input_event_time_date_sql

    @pytest.fixture(scope="class")
    def insert_two_rows_sql(self, project) -> str:
        # Render a fully-qualified relation name for the test schema, then
        # insert two later-dated rows (as DATE literals) for incremental runs.
        test_schema_relation = project.adapter.Relation.create(
            database=project.database, schema=project.test_schema
        )
        return f"insert into {test_schema_relation}.input_model (id, event_time) values (4, DATE '2020-01-04'), (5, DATE '2020-01-05')"
39+
class TestBigQueryMicrobatchInputWithDatetime(TestBigQueryMicrobatch):
    """Microbatch run against an input model whose event_time is DATETIME-typed.

    Inherits the base microbatch test flow and only swaps in the DATETIME
    fixtures; exercises the cast-to-timestamp event-time comparison.
    """

    @pytest.fixture(scope="class")
    def input_model_sql(self) -> str:
        # Override the base input model with one whose event_time is a DATETIME.
        return microbatch_input_event_time_datetime_sql

    @pytest.fixture(scope="class")
    def insert_two_rows_sql(self, project) -> str:
        # Render a fully-qualified relation name for the test schema, then
        # insert two later-dated rows (as DATETIME literals) for incremental runs.
        test_schema_relation = project.adapter.Relation.create(
            database=project.database, schema=project.test_schema
        )
        return f"insert into {test_schema_relation}.input_model (id, event_time) values (4, DATETIME '2020-01-04'), (5, DATETIME '2020-01-05')"
52+
2553
class TestBigQueryMicrobatchMissingPartitionBy:
2654
@pytest.fixture(scope="class")
2755
def models(self) -> str:
@@ -30,7 +58,6 @@ def models(self) -> str:
3058
"input_model.sql": microbatch_input_sql,
3159
}
3260

33-
@mock.patch.dict(os.environ, {"DBT_EXPERIMENTAL_MICROBATCH": "True"})
3461
def test_execution_failure_no_partition_by(self, project):
3562
with patch_microbatch_end_time("2020-01-03 13:57:00"):
3663
_, stdout = run_dbt_and_capture(["run"], expect_pass=False)
@@ -45,7 +72,6 @@ def models(self) -> str:
4572
"input_model.sql": microbatch_input_sql,
4673
}
4774

48-
@mock.patch.dict(os.environ, {"DBT_EXPERIMENTAL_MICROBATCH": "True"})
4975
def test_execution_failure_no_partition_by(self, project):
5076
with patch_microbatch_end_time("2020-01-03 13:57:00"):
5177
_, stdout = run_dbt_and_capture(["run"], expect_pass=False)

0 commit comments

Comments
 (0)