From 1fa600a3b6eea1edf6f61f42dc4a44d8480466f1 Mon Sep 17 00:00:00 2001 From: fivetran-amrutabhimsenayachit Date: Mon, 22 Dec 2025 15:27:15 -0500 Subject: [PATCH 1/4] feat(duckdb): Add transpilation support for nanoseconds used in date/time functions --- sqlglot/dialects/duckdb.py | 84 +++++++++++++++++++++++++++++++- tests/dialects/test_snowflake.py | 77 +++++++++++++++++++++++++++++ 2 files changed, 159 insertions(+), 2 deletions(-) diff --git a/sqlglot/dialects/duckdb.py b/sqlglot/dialects/duckdb.py index 2007cfe660..da3156b311 100644 --- a/sqlglot/dialects/duckdb.py +++ b/sqlglot/dialects/duckdb.py @@ -8,6 +8,7 @@ from sqlglot import exp, generator, parser, tokens, transforms from sqlglot.dialects.dialect import ( + DATETIME_DELTA, Dialect, JSON_EXTRACT_TYPE, NormalizationStrategy, @@ -17,7 +18,7 @@ bool_xor_sql, build_default_decimal_type, count_if_to_sum, - date_delta_to_binary_interval_op, + date_delta_to_binary_interval_op as base_date_delta_to_binary_interval_op, date_trunc_to_time, datestrtodate_sql, no_datetime_sql, @@ -142,6 +143,53 @@ def _last_day_sql(self: DuckDB.Generator, expression: exp.LastDay) -> str: return self.function_fallback_sql(expression) +def _unwrap_cast(expr: exp.Expression) -> exp.Expression: + """Unwrap Cast expression to avoid double-casting that loses nanosecond precision. + + Nested casts can lose precision when converting between + timestamp types. By unwrapping the inner cast, we go directly from the source + expression to TIMESTAMP_NS, preserving nanosecond precision. + """ + return expr.this if isinstance(expr, exp.Cast) else expr + + +def _is_nanosecond_unit(unit: t.Optional[exp.Expression]) -> bool: + """Check if unit is NANOSECOND.""" + return isinstance(unit, (exp.Var, exp.Literal)) and unit.name.upper() == "NANOSECOND" + + +def _handle_nanosecond_diff( + self: DuckDB.Generator, + end_time: exp.Expression, + start_time: exp.Expression, +) -> str: + """Generate NANOSECOND diff using EPOCH_NS since DATE_DIFF doesn't support it.""" + end_ns = exp.cast(_unwrap_cast(end_time), exp.DataType.Type.TIMESTAMP_NS) + start_ns = exp.cast(_unwrap_cast(start_time), exp.DataType.Type.TIMESTAMP_NS) + + # Build expression tree: EPOCH_NS(end) - EPOCH_NS(start) + return self.sql( + exp.Sub(this=exp.func("EPOCH_NS", end_ns), expression=exp.func("EPOCH_NS", start_ns)) + ) + + +def _handle_nanosecond_add( + self: DuckDB.Generator, + timestamp: exp.Expression, + nanoseconds: exp.Expression, +) -> str: + """Generate NANOSECOND add using EPOCH_NS and make_timestamp_ns since INTERVAL doesn't support it.""" + timestamp_ns = exp.cast(_unwrap_cast(timestamp), exp.DataType.Type.TIMESTAMP_NS) + + # Build expression tree: make_timestamp_ns(EPOCH_NS(timestamp) + nanoseconds) + return self.sql( + exp.func( + "make_timestamp_ns", + exp.Add(this=exp.func("EPOCH_NS", timestamp_ns), expression=nanoseconds), + ) + ) + + def _to_boolean_sql(self: DuckDB.Generator, expression: exp.ToBoolean) -> str: """ Transpile TO_BOOLEAN and TRY_TO_BOOLEAN functions from Snowflake to DuckDB equivalent. 
@@ -215,6 +263,13 @@ def _date_sql(self: DuckDB.Generator, expression: exp.Date) -> str: # BigQuery -> DuckDB conversion for the TIME_DIFF function def _timediff_sql(self: DuckDB.Generator, expression: exp.TimeDiff) -> str: + unit = expression.args.get("unit") + + if _is_nanosecond_unit(unit): + this_ts = exp.cast(expression.this, exp.DataType.Type.TIMESTAMP_NS) + expr_ts = exp.cast(expression.expression, exp.DataType.Type.TIMESTAMP_NS) + return _handle_nanosecond_diff(self, expr_ts, this_ts) + this = exp.cast(expression.this, exp.DataType.Type.TIME) expr = exp.cast(expression.expression, exp.DataType.Type.TIME) @@ -223,6 +278,27 @@ def _timediff_sql(self: DuckDB.Generator, expression: exp.TimeDiff) -> str: return self.func("DATE_DIFF", unit_to_str(expression), expr, this) +def date_delta_to_binary_interval_op( + cast: bool = True, +) -> t.Callable[[DuckDB.Generator, DATETIME_DELTA], str]: + """DuckDB override to handle NANOSECOND operations; delegates other units to base.""" + base_impl = base_date_delta_to_binary_interval_op(cast=cast) + + def duckdb_date_delta_sql(self: DuckDB.Generator, expression: DATETIME_DELTA) -> str: + unit = expression.args.get("unit") + + # Handle NANOSECOND unit (DuckDB doesn't support INTERVAL ... NANOSECOND) + if _is_nanosecond_unit(unit): + interval_value = expression.expression + if isinstance(interval_value, exp.Interval): + interval_value = interval_value.this + return _handle_nanosecond_add(self, expression.this, interval_value) + + return base_impl(self, expression) + + return duckdb_date_delta_sql + + @unsupported_args(("expression", "DuckDB's ARRAY_SORT does not support a comparator.")) def _array_sort_sql(self: DuckDB.Generator, expression: exp.ArraySort) -> str: return self.func("ARRAY_SORT", expression.this) @@ -439,9 +515,13 @@ def _build_week_trunc_expression(date_expr: exp.Expression, start_dow: int) -> e def _date_diff_sql(self: DuckDB.Generator, expression: exp.DateDiff) -> str: + unit = expression.args.get("unit") + + if _is_nanosecond_unit(unit): + return _handle_nanosecond_diff(self, expression.this, expression.expression) + this = _implicit_datetime_cast(expression.this) expr = _implicit_datetime_cast(expression.expression) - unit = expression.args.get("unit") # DuckDB's WEEK diff does not respect Monday crossing (week boundaries), it checks (end_day - start_day) / 7: # SELECT DATE_DIFF('WEEK', CAST('2024-12-13' AS DATE), CAST('2024-12-17' AS DATE)) --> 0 (Monday crossed) diff --git a/tests/dialects/test_snowflake.py b/tests/dialects/test_snowflake.py index 5365796d86..bc54605c29 100644 --- a/tests/dialects/test_snowflake.py +++ b/tests/dialects/test_snowflake.py @@ -2475,6 +2475,83 @@ def test_timestamps(self): }, ) + # Test DATEDIFF with NANOSECOND - DuckDB uses EPOCH_NS since DATE_DIFF doesn't support NANOSECOND + self.validate_all( + "DATEDIFF(NANOSECOND, '2023-01-01 10:00:00.000000000', '2023-01-01 10:00:00.123456789')", + write={ + "duckdb": "EPOCH_NS(CAST('2023-01-01 10:00:00.123456789' AS TIMESTAMP_NS)) - EPOCH_NS(CAST('2023-01-01 10:00:00.000000000' AS TIMESTAMP_NS))", + "snowflake": "DATEDIFF(NANOSECOND, '2023-01-01 10:00:00.000000000', '2023-01-01 10:00:00.123456789')", + }, + ) + self.validate_all( + "DATEDIFF(NANOSECOND, CAST('2023-01-01 10:00:00.000000000' AS TIMESTAMP), CAST('2023-01-01 10:00:00.987654321' AS TIMESTAMP))", + write={ + "duckdb": "EPOCH_NS(CAST('2023-01-01 10:00:00.987654321' AS TIMESTAMP_NS)) - EPOCH_NS(CAST('2023-01-01 10:00:00.000000000' AS TIMESTAMP_NS))", + "snowflake": "DATEDIFF(NANOSECOND, 
CAST('2023-01-01 10:00:00.000000000' AS TIMESTAMP), CAST('2023-01-01 10:00:00.987654321' AS TIMESTAMP))", + }, + ) + + # Test TIMEDIFF with NANOSECOND - DuckDB uses EPOCH_NS for nanosecond precision + self.validate_all( + "TIMEDIFF(NANOSECOND, '10:00:00.000000000', '10:00:00.123456789')", + write={ + "duckdb": "EPOCH_NS(CAST('10:00:00.123456789' AS TIMESTAMP_NS)) - EPOCH_NS(CAST('10:00:00.000000000' AS TIMESTAMP_NS))", + "snowflake": "DATEDIFF(NANOSECOND, '10:00:00.000000000', '10:00:00.123456789')", + }, + ) + + # Test DATEADD with NANOSECOND - DuckDB uses MAKE_TIMESTAMP_NS since INTERVAL doesn't support NANOSECOND + self.validate_all( + "DATEADD(NANOSECOND, 123456789, '2023-01-01 10:00:00.000000000')", + write={ + "duckdb": "MAKE_TIMESTAMP_NS(EPOCH_NS(CAST('2023-01-01 10:00:00.000000000' AS TIMESTAMP_NS)) + 123456789)", + "snowflake": "DATEADD(NANOSECOND, 123456789, '2023-01-01 10:00:00.000000000')", + }, + ) + self.validate_all( + "DATEADD(NANOSECOND, 999999999, CAST('2023-01-01 10:00:00.000000000' AS TIMESTAMP))", + write={ + "duckdb": "MAKE_TIMESTAMP_NS(EPOCH_NS(CAST('2023-01-01 10:00:00.000000000' AS TIMESTAMP_NS)) + 999999999)", + "snowflake": "DATEADD(NANOSECOND, 999999999, CAST('2023-01-01 10:00:00.000000000' AS TIMESTAMP))", + }, + ) + + # Test TIMEADD with NANOSECOND - DuckDB uses MAKE_TIMESTAMP_NS + self.validate_all( + "TIMEADD(NANOSECOND, 123456789, '10:00:00.000000000')", + write={ + "duckdb": "MAKE_TIMESTAMP_NS(EPOCH_NS(CAST('10:00:00.000000000' AS TIMESTAMP_NS)) + 123456789)", + "snowflake": "TIMEADD(NANOSECOND, 123456789, '10:00:00.000000000')", + }, + ) + + # Test negative NANOSECOND values (subtraction) + self.validate_all( + "DATEADD(NANOSECOND, -123456789, '2023-01-01 10:00:00.500000000')", + write={ + "duckdb": "MAKE_TIMESTAMP_NS(EPOCH_NS(CAST('2023-01-01 10:00:00.500000000' AS TIMESTAMP_NS)) + -123456789)", + "snowflake": "DATEADD(NANOSECOND, -123456789, '2023-01-01 10:00:00.500000000')", + }, + ) + + # Test TIMESTAMPDIFF with NANOSECOND - Snowflake parser converts to DATEDIFF + self.validate_all( + "TIMESTAMPDIFF(NANOSECOND, '2023-01-01 10:00:00.000000000', '2023-01-01 10:00:00.123456789')", + write={ + "duckdb": "EPOCH_NS(CAST('2023-01-01 10:00:00.123456789' AS TIMESTAMP_NS)) - EPOCH_NS(CAST('2023-01-01 10:00:00.000000000' AS TIMESTAMP_NS))", + "snowflake": "DATEDIFF(NANOSECOND, '2023-01-01 10:00:00.000000000', '2023-01-01 10:00:00.123456789')", + }, + ) + + # Test TIMESTAMPADD with NANOSECOND - Snowflake parser converts to DATEADD + self.validate_all( + "TIMESTAMPADD(NANOSECOND, 123456789, '2023-01-01 10:00:00.000000000')", + write={ + "duckdb": "MAKE_TIMESTAMP_NS(EPOCH_NS(CAST('2023-01-01 10:00:00.000000000' AS TIMESTAMP_NS)) + 123456789)", + "snowflake": "DATEADD(NANOSECOND, 123456789, '2023-01-01 10:00:00.000000000')", + }, + ) + self.validate_identity("DATEADD(y, 5, x)", "DATEADD(YEAR, 5, x)") self.validate_identity("DATEADD(y, 5, x)", "DATEADD(YEAR, 5, x)") self.validate_identity("DATE_PART(yyy, x)", "DATE_PART(YEAR, x)") From c8a9ce5cdceaaf1b6718683dc6ed5ed2b495f6b4 Mon Sep 17 00:00:00 2001 From: fivetran-amrutabhimsenayachit Date: Tue, 23 Dec 2025 10:49:19 -0500 Subject: [PATCH 2/4] Address review comments: remove redundant casting and fix test cases - Remove redundant TIMESTAMP_NS casting in _timediff_sql (lines 268-271) * Previously cast to TIMESTAMP_NS before calling _handle_nanosecond_diff * _handle_nanosecond_diff already handles casting via exp.cast (which avoids recasting) * Now passes expressions directly, matching _date_diff_sql pattern - Fix 
TIMEDIFF test case to use valid Snowflake syntax * Changed from TIME literal '10:00:00.000000000' * To TIMESTAMP with CAST: CAST('2023-01-01 10:00:00.000000000' AS TIMESTAMP) * TIME literals don't work with TIMEDIFF in Snowflake - Fix TIMEADD test case to use valid Snowflake syntax * Changed from TIME literal '10:00:00.000000000' * To TIMESTAMP with CAST: CAST('2023-01-01 10:00:00.000000000' AS TIMESTAMP) * TIME literals don't work with TIMEADD in Snowflake Addresses review comments from VaggelisD on PR #6617 --- sqlglot/dialects/duckdb.py | 17 ++++++++++------- tests/dialects/test_snowflake.py | 12 ++++++------ 2 files changed, 16 insertions(+), 13 deletions(-) diff --git a/sqlglot/dialects/duckdb.py b/sqlglot/dialects/duckdb.py index da3156b311..a9f55f3c4c 100644 --- a/sqlglot/dialects/duckdb.py +++ b/sqlglot/dialects/duckdb.py @@ -144,11 +144,16 @@ def _last_day_sql(self: DuckDB.Generator, expression: exp.LastDay) -> str: def _unwrap_cast(expr: exp.Expression) -> exp.Expression: - """Unwrap Cast expression to avoid double-casting that loses nanosecond precision. + """Unwrap Cast expression to avoid nested casts when recasting to different types. - Nested casts can lose precision when converting between - timestamp types. By unwrapping the inner cast, we go directly from the source - expression to TIMESTAMP_NS, preserving nanosecond precision. + While exp.cast avoids recasting to the SAME type, it doesn't unwrap casts to + DIFFERENT types. This helper extracts the inner expression before casting to + avoid nested casts like CAST(CAST(x AS TIMESTAMP) AS TIMESTAMP_NS). + + Example: + Input: CAST('2023-01-01' AS TIMESTAMP) + Without unwrap: CAST(CAST('2023-01-01' AS TIMESTAMP) AS TIMESTAMP_NS) + With unwrap: CAST('2023-01-01' AS TIMESTAMP_NS) """ return expr.this if isinstance(expr, exp.Cast) else expr @@ -266,9 +271,7 @@ def _timediff_sql(self: DuckDB.Generator, expression: exp.TimeDiff) -> str: unit = expression.args.get("unit") if _is_nanosecond_unit(unit): - this_ts = exp.cast(expression.this, exp.DataType.Type.TIMESTAMP_NS) - expr_ts = exp.cast(expression.expression, exp.DataType.Type.TIMESTAMP_NS) - return _handle_nanosecond_diff(self, expr_ts, this_ts) + return _handle_nanosecond_diff(self, expression.expression, expression.this) this = exp.cast(expression.this, exp.DataType.Type.TIME) expr = exp.cast(expression.expression, exp.DataType.Type.TIME) diff --git a/tests/dialects/test_snowflake.py b/tests/dialects/test_snowflake.py index bc54605c29..d8c7b0a7dd 100644 --- a/tests/dialects/test_snowflake.py +++ b/tests/dialects/test_snowflake.py @@ -2493,10 +2493,10 @@ def test_timestamps(self): # Test TIMEDIFF with NANOSECOND - DuckDB uses EPOCH_NS for nanosecond precision self.validate_all( - "TIMEDIFF(NANOSECOND, '10:00:00.000000000', '10:00:00.123456789')", + "TIMEDIFF(NANOSECOND, CAST('2023-01-01 10:00:00.000000000' AS TIMESTAMP), CAST('2023-01-01 10:00:00.123456789' AS TIMESTAMP))", write={ - "duckdb": "EPOCH_NS(CAST('10:00:00.123456789' AS TIMESTAMP_NS)) - EPOCH_NS(CAST('10:00:00.000000000' AS TIMESTAMP_NS))", - "snowflake": "DATEDIFF(NANOSECOND, '10:00:00.000000000', '10:00:00.123456789')", + "duckdb": "EPOCH_NS(CAST('2023-01-01 10:00:00.123456789' AS TIMESTAMP_NS)) - EPOCH_NS(CAST('2023-01-01 10:00:00.000000000' AS TIMESTAMP_NS))", + "snowflake": "DATEDIFF(NANOSECOND, CAST('2023-01-01 10:00:00.000000000' AS TIMESTAMP), CAST('2023-01-01 10:00:00.123456789' AS TIMESTAMP))", }, ) @@ -2518,10 +2518,10 @@ def test_timestamps(self): # Test TIMEADD with NANOSECOND - DuckDB uses 
MAKE_TIMESTAMP_NS self.validate_all( - "TIMEADD(NANOSECOND, 123456789, '10:00:00.000000000')", + "TIMEADD(NANOSECOND, 123456789, CAST('2023-01-01 10:00:00.000000000' AS TIMESTAMP))", write={ - "duckdb": "MAKE_TIMESTAMP_NS(EPOCH_NS(CAST('10:00:00.000000000' AS TIMESTAMP_NS)) + 123456789)", - "snowflake": "TIMEADD(NANOSECOND, 123456789, '10:00:00.000000000')", + "duckdb": "MAKE_TIMESTAMP_NS(EPOCH_NS(CAST('2023-01-01 10:00:00.000000000' AS TIMESTAMP_NS)) + 123456789)", + "snowflake": "TIMEADD(NANOSECOND, 123456789, CAST('2023-01-01 10:00:00.000000000' AS TIMESTAMP))", }, ) From 6555304fc9a6f77c4584eaa0e6e4fb2433043239 Mon Sep 17 00:00:00 2001 From: fivetran-amrutabhimsenayachit Date: Tue, 23 Dec 2025 13:26:09 -0500 Subject: [PATCH 3/4] Address review comments: simplify code and improve clarity MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Use expression.unit property accessor instead of expression.args.get('unit') * Updated in _timediff_sql, date_delta_to_binary_interval_op, _date_diff_sql * More concise and idiomatic - Remove unnecessary docstring from _is_nanosecond_unit * Function name is self-explanatory - Keep _unwrap_cast helper function * Necessary to avoid nested casts like CAST(CAST(x AS TIMESTAMP) AS TIMESTAMP_NS) * exp.cast only avoids recasting to SAME type, not DIFFERENT types * Example: CAST('2023-01-01' AS TIMESTAMP) → without unwrap → CAST(CAST(...) AS TIMESTAMP_NS) * With unwrap: extracts '2023-01-01' → CAST('2023-01-01' AS TIMESTAMP_NS) - cast parameter not needed for NANOSECOND handling * NANOSECOND operations require EPOCH_NS/make_timestamp_ns * These functions require TIMESTAMP_NS type * Must always cast regardless of cast parameter * cast parameter only applies to base implementation's interval operations Addresses review comments from georgesittas on PR #6617 --- sqlglot/dialects/duckdb.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/sqlglot/dialects/duckdb.py b/sqlglot/dialects/duckdb.py index a9f55f3c4c..bc168671f5 100644 --- a/sqlglot/dialects/duckdb.py +++ b/sqlglot/dialects/duckdb.py @@ -159,7 +159,6 @@ def _unwrap_cast(expr: exp.Expression) -> exp.Expression: def _is_nanosecond_unit(unit: t.Optional[exp.Expression]) -> bool: - """Check if unit is NANOSECOND.""" return isinstance(unit, (exp.Var, exp.Literal)) and unit.name.upper() == "NANOSECOND" @@ -268,7 +267,7 @@ def _date_sql(self: DuckDB.Generator, expression: exp.Date) -> str: # BigQuery -> DuckDB conversion for the TIME_DIFF function def _timediff_sql(self: DuckDB.Generator, expression: exp.TimeDiff) -> str: - unit = expression.args.get("unit") + unit = expression.unit if _is_nanosecond_unit(unit): return _handle_nanosecond_diff(self, expression.expression, expression.this) @@ -288,7 +287,7 @@ def date_delta_to_binary_interval_op( base_impl = base_date_delta_to_binary_interval_op(cast=cast) def duckdb_date_delta_sql(self: DuckDB.Generator, expression: DATETIME_DELTA) -> str: - unit = expression.args.get("unit") + unit = expression.unit # Handle NANOSECOND unit (DuckDB doesn't support INTERVAL ... 
NANOSECOND) if _is_nanosecond_unit(unit): @@ -518,7 +517,7 @@ def _build_week_trunc_expression(date_expr: exp.Expression, start_dow: int) -> e def _date_diff_sql(self: DuckDB.Generator, expression: exp.DateDiff) -> str: - unit = expression.args.get("unit") + unit = expression.unit if _is_nanosecond_unit(unit): return _handle_nanosecond_diff(self, expression.this, expression.expression) From 5dc6181389372fc842f80e743be4a9bc9753326e Mon Sep 17 00:00:00 2001 From: fivetran-amrutabhimsenayachit Date: Tue, 23 Dec 2025 15:19:14 -0500 Subject: [PATCH 4/4] feat(duckdb): Remove the unwrapping logic and remove the corresponding tests for simplicity --- sqlglot/dialects/duckdb.py | 21 +++------------------ tests/dialects/test_snowflake.py | 30 ++++++++---------------------- 2 files changed, 11 insertions(+), 40 deletions(-) diff --git a/sqlglot/dialects/duckdb.py b/sqlglot/dialects/duckdb.py index bc168671f5..6bf0cb85d2 100644 --- a/sqlglot/dialects/duckdb.py +++ b/sqlglot/dialects/duckdb.py @@ -143,21 +143,6 @@ def _last_day_sql(self: DuckDB.Generator, expression: exp.LastDay) -> str: return self.function_fallback_sql(expression) -def _unwrap_cast(expr: exp.Expression) -> exp.Expression: - """Unwrap Cast expression to avoid nested casts when recasting to different types. - - While exp.cast avoids recasting to the SAME type, it doesn't unwrap casts to - DIFFERENT types. This helper extracts the inner expression before casting to - avoid nested casts like CAST(CAST(x AS TIMESTAMP) AS TIMESTAMP_NS). - - Example: - Input: CAST('2023-01-01' AS TIMESTAMP) - Without unwrap: CAST(CAST('2023-01-01' AS TIMESTAMP) AS TIMESTAMP_NS) - With unwrap: CAST('2023-01-01' AS TIMESTAMP_NS) - """ - return expr.this if isinstance(expr, exp.Cast) else expr - - def _is_nanosecond_unit(unit: t.Optional[exp.Expression]) -> bool: return isinstance(unit, (exp.Var, exp.Literal)) and unit.name.upper() == "NANOSECOND" @@ -168,8 +153,8 @@ def _handle_nanosecond_diff( start_time: exp.Expression, ) -> str: """Generate NANOSECOND diff using EPOCH_NS since DATE_DIFF doesn't support it.""" - end_ns = exp.cast(_unwrap_cast(end_time), exp.DataType.Type.TIMESTAMP_NS) - start_ns = exp.cast(_unwrap_cast(start_time), exp.DataType.Type.TIMESTAMP_NS) + end_ns = exp.cast(end_time, exp.DataType.Type.TIMESTAMP_NS) + start_ns = exp.cast(start_time, exp.DataType.Type.TIMESTAMP_NS) # Build expression tree: EPOCH_NS(end) - EPOCH_NS(start) return self.sql( @@ -183,7 +168,7 @@ def _handle_nanosecond_add( nanoseconds: exp.Expression, ) -> str: """Generate NANOSECOND add using EPOCH_NS and make_timestamp_ns since INTERVAL doesn't support it.""" - timestamp_ns = exp.cast(_unwrap_cast(timestamp), exp.DataType.Type.TIMESTAMP_NS) + timestamp_ns = exp.cast(timestamp, exp.DataType.Type.TIMESTAMP_NS) # Build expression tree: make_timestamp_ns(EPOCH_NS(timestamp) + nanoseconds) return self.sql( diff --git a/tests/dialects/test_snowflake.py b/tests/dialects/test_snowflake.py index d8c7b0a7dd..f7ad99ec1a 100644 --- a/tests/dialects/test_snowflake.py +++ b/tests/dialects/test_snowflake.py @@ -2483,20 +2483,13 @@ def test_timestamps(self): "snowflake": "DATEDIFF(NANOSECOND, '2023-01-01 10:00:00.000000000', '2023-01-01 10:00:00.123456789')", }, ) - self.validate_all( - "DATEDIFF(NANOSECOND, CAST('2023-01-01 10:00:00.000000000' AS TIMESTAMP), CAST('2023-01-01 10:00:00.987654321' AS TIMESTAMP))", - write={ - "duckdb": "EPOCH_NS(CAST('2023-01-01 10:00:00.987654321' AS TIMESTAMP_NS)) - EPOCH_NS(CAST('2023-01-01 10:00:00.000000000' AS TIMESTAMP_NS))", - "snowflake": 
"DATEDIFF(NANOSECOND, CAST('2023-01-01 10:00:00.000000000' AS TIMESTAMP), CAST('2023-01-01 10:00:00.987654321' AS TIMESTAMP))", - }, - ) - # Test TIMEDIFF with NANOSECOND - DuckDB uses EPOCH_NS for nanosecond precision + # Test DATEDIFF with NANOSECOND on columns self.validate_all( - "TIMEDIFF(NANOSECOND, CAST('2023-01-01 10:00:00.000000000' AS TIMESTAMP), CAST('2023-01-01 10:00:00.123456789' AS TIMESTAMP))", + "DATEDIFF(NANOSECOND, start_time, end_time)", write={ - "duckdb": "EPOCH_NS(CAST('2023-01-01 10:00:00.123456789' AS TIMESTAMP_NS)) - EPOCH_NS(CAST('2023-01-01 10:00:00.000000000' AS TIMESTAMP_NS))", - "snowflake": "DATEDIFF(NANOSECOND, CAST('2023-01-01 10:00:00.000000000' AS TIMESTAMP), CAST('2023-01-01 10:00:00.123456789' AS TIMESTAMP))", + "duckdb": "EPOCH_NS(CAST(end_time AS TIMESTAMP_NS)) - EPOCH_NS(CAST(start_time AS TIMESTAMP_NS))", + "snowflake": "DATEDIFF(NANOSECOND, start_time, end_time)", }, ) @@ -2508,20 +2501,13 @@ def test_timestamps(self): "snowflake": "DATEADD(NANOSECOND, 123456789, '2023-01-01 10:00:00.000000000')", }, ) - self.validate_all( - "DATEADD(NANOSECOND, 999999999, CAST('2023-01-01 10:00:00.000000000' AS TIMESTAMP))", - write={ - "duckdb": "MAKE_TIMESTAMP_NS(EPOCH_NS(CAST('2023-01-01 10:00:00.000000000' AS TIMESTAMP_NS)) + 999999999)", - "snowflake": "DATEADD(NANOSECOND, 999999999, CAST('2023-01-01 10:00:00.000000000' AS TIMESTAMP))", - }, - ) - # Test TIMEADD with NANOSECOND - DuckDB uses MAKE_TIMESTAMP_NS + # Test DATEADD with NANOSECOND on columns self.validate_all( - "TIMEADD(NANOSECOND, 123456789, CAST('2023-01-01 10:00:00.000000000' AS TIMESTAMP))", + "DATEADD(NANOSECOND, nano_offset, timestamp_col)", write={ - "duckdb": "MAKE_TIMESTAMP_NS(EPOCH_NS(CAST('2023-01-01 10:00:00.000000000' AS TIMESTAMP_NS)) + 123456789)", - "snowflake": "TIMEADD(NANOSECOND, 123456789, CAST('2023-01-01 10:00:00.000000000' AS TIMESTAMP))", + "duckdb": "MAKE_TIMESTAMP_NS(EPOCH_NS(CAST(timestamp_col AS TIMESTAMP_NS)) + nano_offset)", + "snowflake": "DATEADD(NANOSECOND, nano_offset, timestamp_col)", }, )