Skip to content

Commit 0222725

Browse files
committed
Rework DYNAMIC_TABLE, use SHOW AS RESOURCE DYNAMIC TABLES for metadata checks; Implement checks for is_transient, retention_time, column comments, refresh_mode
1 parent e0303f0 commit 0222725

File tree

8 files changed

+165
-34
lines changed

8 files changed

+165
-34
lines changed

CHANGELOG.md

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,12 @@
11
# Changelog
22

3+
## [0.55.0] - 2025-06-26
4+
5+
- Reworked `DYNAMIC_TABLE` to use `SHOW AS RESOURCE DYNAMIC TABLES` command.
6+
- Implemented additional dynamic table checks for column comments, is_transient, retention_time, refresh_mode.
7+
- Added explicit "replace reasons" comments for `DYNAMIC_TABLE`, similar to `TABLE`.
8+
- `TABLE` comments can now be fully "unset" to NULL instead of an empty string.
9+
310
## [0.54.2] - 2025-06-25
411

512
- Added check for `cluster_by` parameter for existing dynamic tables (thanks to gcv-epalmer).

snowddl/parser/dynamic_table.py

Lines changed: 23 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,5 @@
1+
from functools import partial
2+
13
from snowddl.blueprint import (
24
AccountObjectIdent,
35
DynamicTableBlueprint,
@@ -16,6 +18,7 @@
1618
RowAccessPolicyReference,
1719
)
1820
from snowddl.parser.abc_parser import AbstractParser, ParsedFile
21+
from snowddl.parser.schema import database_json_schema, schema_json_schema
1922

2023

2124
# fmt: off
@@ -155,9 +158,25 @@ class DynamicTableParser(AbstractParser):
155158
unit_plural_to_singular = {v: k for k, v in unit_singular_to_plural.items()}
156159

157160
def load_blueprints(self):
158-
self.parse_schema_object_files("dynamic_table", dynamic_table_json_schema, self.process_dynamic_table)
161+
combined_params = {}
162+
163+
for database_name in self.get_database_names():
164+
database_params = self.parse_single_entity_file(f"{database_name}/params", database_json_schema)
165+
combined_params[database_name] = {}
166+
167+
for schema_name in self.get_schema_names_in_database(database_name):
168+
schema_params = self.parse_single_entity_file(f"{database_name}/{schema_name}/params", schema_json_schema)
169+
170+
combined_params[database_name][schema_name] = {
171+
"is_transient": database_params.get("is_transient", False) or schema_params.get("is_transient", False),
172+
"retention_time": schema_params.get("retention_time"),
173+
}
174+
175+
self.parse_schema_object_files(
176+
"dynamic_table", dynamic_table_json_schema, partial(self.process_dynamic_table, combined_params=combined_params)
177+
)
159178

160-
def process_dynamic_table(self, f: ParsedFile):
179+
def process_dynamic_table(self, f: ParsedFile, combined_params: dict):
161180
column_blueprints = []
162181

163182
for col_name, col_comment in f.params.get("columns", {}).items():
@@ -177,8 +196,8 @@ def process_dynamic_table(self, f: ParsedFile):
177196
refresh_mode=f.params.get("refresh_mode").upper() if f.params.get("refresh_mode") else None,
178197
initialize=f.params.get("initialize").upper() if f.params.get("initialize") else None,
179198
cluster_by=f.params.get("cluster_by"),
180-
is_transient=f.params.get("is_transient", False),
181-
retention_time=f.params.get("retention_time"),
199+
is_transient=f.params.get("is_transient", combined_params[f.database][f.schema].get("is_transient", False)),
200+
retention_time=f.params.get("retention_time", combined_params[f.database][f.schema].get("retention_time", None)),
182201
depends_on=set(
183202
build_schema_object_ident(self.env_prefix, d, f.database, f.schema) for d in f.params.get("depends_on", [])
184203
),

snowddl/resolver/dynamic_table.py

Lines changed: 95 additions & 19 deletions
Original file line numberDiff line numberDiff line change
@@ -1,44 +1,54 @@
1-
from re import compile
1+
from json import loads as json_loads
22

33
from snowddl.blueprint import DynamicTableBlueprint
44
from snowddl.resolver.abc_schema_object_resolver import AbstractSchemaObjectResolver, ResolveResult, ObjectType
55

6-
cluster_by_syntax_re = compile(r"^(\w+)?\((.*)\)$")
7-
86

97
class DynamicTableResolver(AbstractSchemaObjectResolver):
10-
# Dynamic tables are available for all accounts during preview, including STANDARD edition
11-
# skip_min_edition = Edition.ENTERPRISE
128
skip_on_empty_blueprints = True
139

10+
unit_to_seconds_multiplier = {
11+
"second": 1,
12+
"seconds": 1,
13+
"minute": 60,
14+
"minutes": 60,
15+
"hour": 3600,
16+
"hours": 3600,
17+
"day": 86400,
18+
"days": 86400,
19+
}
20+
1421
def get_object_type(self) -> ObjectType:
1522
return ObjectType.DYNAMIC_TABLE
1623

1724
def get_existing_objects_in_schema(self, schema: dict):
1825
existing_objects = {}
1926

2027
cur = self.engine.execute_meta(
21-
"SHOW DYNAMIC TABLES IN SCHEMA {database:i}.{schema:i}",
28+
"SHOW AS RESOURCE DYNAMIC TABLES IN SCHEMA {database:i}.{schema:i}",
2229
{
2330
"database": schema["database"],
2431
"schema": schema["schema"],
2532
},
2633
)
2734

2835
for r in cur:
36+
r = json_loads(r["As Resource"])
37+
2938
existing_objects[f"{r['database_name']}.{r['schema_name']}.{r['name']}"] = {
3039
"database": r["database_name"],
3140
"schema": r["schema_name"],
3241
"name": r["name"],
3342
"owner": r["owner"],
34-
# Extract SQL query text only, skip the initial "CREATE DYNAMIC TABLE ..." part
35-
# Snowflake modifies original SQL text in this column, it cannot be compared directly
36-
"text": r["text"].partition("\nAS\n")[2].rstrip(";"),
37-
"cluster_by": r["cluster_by"] if r["cluster_by"] else None,
43+
"is_transient": r["kind"] == "TRANSIENT",
44+
"retention_time": r["data_retention_time_in_days"],
45+
"columns": r["columns"],
46+
"text": r["query"].rstrip(";"),
47+
"cluster_by": r["cluster_by"],
3848
"target_lag": r["target_lag"],
3949
"refresh_mode": r["refresh_mode"],
4050
"warehouse": r["warehouse"],
41-
"comment": r["comment"] if r["comment"] else None,
51+
"comment": r["comment"],
4252
}
4353

4454
return existing_objects
@@ -66,16 +76,25 @@ def create_object(self, bp: DynamicTableBlueprint):
6676

6777
return ResolveResult.CREATE
6878

69-
def _compare_cluster_by(self, bp: DynamicTableBlueprint, row: dict):
70-
bp_cluster_by = ", ".join(bp.cluster_by) if bp.cluster_by else None
71-
snow_cluster_by = cluster_by_syntax_re.sub(r"\2", row["cluster_by"]) if row["cluster_by"] else None
72-
73-
return bp_cluster_by == snow_cluster_by
74-
7579
def compare_object(self, bp: DynamicTableBlueprint, row: dict):
7680
result = ResolveResult.NOCHANGE
81+
replace_reasons = []
82+
83+
if bp.columns and [str(c.name) for c in bp.columns] != [str(c["name"]) for c in row["columns"]]:
84+
replace_reasons.append("Column definition was changed")
7785

7886
if bp.text != row["text"]:
87+
replace_reasons.append("SQL text was changed")
88+
89+
if bp.is_transient is True and row["is_transient"] is False:
90+
replace_reasons.append("Dynamic table type was changed to TRANSIENT")
91+
elif bp.is_transient is False and row["is_transient"] is True:
92+
replace_reasons.append("Dynamic table type was changed to PERMANENT")
93+
94+
if bp.refresh_mode and bp.refresh_mode != "AUTO" and bp.refresh_mode != row["refresh_mode"]:
95+
replace_reasons.append(f"Refresh mode was changed to {bp.refresh_mode}")
96+
97+
if replace_reasons:
7998
query = self.engine.query_builder()
8099
query.append("CREATE OR REPLACE")
81100

@@ -90,11 +109,11 @@ def compare_object(self, bp: DynamicTableBlueprint, row: dict):
90109
)
91110

92111
query.append(self._build_common_dynamic_table_sql(bp))
93-
self.engine.execute_unsafe_ddl(query)
112+
self.engine.execute_unsafe_ddl("\n".join(f"-- {r}" for r in replace_reasons) + "\n" + str(query))
94113

95114
return ResolveResult.REPLACE
96115

97-
if bp.target_lag != row["target_lag"]:
116+
if not self._compare_target_lag(bp, row):
98117
self.engine.execute_safe_ddl(
99118
"ALTER DYNAMIC TABLE {full_name:i} SET TARGET_LAG = {target_lag}",
100119
{
@@ -133,6 +152,17 @@ def compare_object(self, bp: DynamicTableBlueprint, row: dict):
133152

134153
result = ResolveResult.ALTER
135154

155+
if bp.retention_time is not None and bp.retention_time != row["retention_time"]:
156+
self.engine.execute_unsafe_ddl(
157+
"ALTER DYNAMIC TABLE {full_name:i} SET DATA_RETENTION_TIME_IN_DAYS = {retention_time:d}",
158+
{
159+
"full_name": bp.full_name,
160+
"retention_time": bp.retention_time,
161+
},
162+
)
163+
164+
result = ResolveResult.ALTER
165+
136166
if bp.comment != row["comment"]:
137167
self.engine.execute_safe_ddl(
138168
"ALTER DYNAMIC TABLE {full_name:i} SET COMMENT = {comment}",
@@ -144,6 +174,30 @@ def compare_object(self, bp: DynamicTableBlueprint, row: dict):
144174

145175
result = ResolveResult.ALTER
146176

177+
for idx, c in enumerate(row["columns"]):
178+
bp_col_comment = bp.columns[idx].comment if bp.columns else None
179+
180+
if bp_col_comment != c["comment"]:
181+
if bp_col_comment:
182+
self.engine.execute_safe_ddl(
183+
"ALTER DYNAMIC TABLE {full_name:i} MODIFY COLUMN {column_name:i} COMMENT {comment}",
184+
{
185+
"full_name": bp.full_name,
186+
"column_name": c["name"],
187+
"comment": bp_col_comment,
188+
},
189+
)
190+
else:
191+
self.engine.execute_safe_ddl(
192+
"ALTER DYNAMIC TABLE {full_name:i} MODIFY COLUMN {column_name:i} UNSET COMMENT",
193+
{
194+
"full_name": bp.full_name,
195+
"column_name": c["name"],
196+
},
197+
)
198+
199+
result = ResolveResult.ALTER
200+
147201
return result
148202

149203
def drop_object(self, row: dict):
@@ -241,3 +295,25 @@ def _build_common_dynamic_table_sql(self, bp: DynamicTableBlueprint):
241295
query.append_nl(bp.text)
242296

243297
return query
298+
299+
def _compare_cluster_by(self, bp: DynamicTableBlueprint, row: dict):
300+
bp_cluster_by = ", ".join(bp.cluster_by).upper() if bp.cluster_by else None
301+
snow_cluster_by = ", ".join(row["cluster_by"]).upper() if row["cluster_by"] else None
302+
303+
return bp_cluster_by == snow_cluster_by
304+
305+
def _compare_target_lag(self, bp: DynamicTableBlueprint, row: dict):
306+
if bp.target_lag == "DOWNSTREAM":
307+
return row["target_lag"]["type"] == "DOWNSTREAM"
308+
309+
num, _, unit = bp.target_lag.partition(" ")
310+
311+
num = int(num)
312+
unit = unit.lower()
313+
314+
num_in_seconds = num * self.unit_to_seconds_multiplier[unit]
315+
316+
if row["target_lag"]["type"] == "USER_DEFINED" and row["target_lag"]["seconds"] == num_in_seconds:
317+
return True
318+
319+
return False

snowddl/resolver/table.py

Lines changed: 19 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -165,16 +165,25 @@ def compare_object(self, bp: TableBlueprint, row: dict):
165165

166166
# Comments
167167
if snow_c.comment != bp_c.comment:
168-
# UNSET COMMENT is currently not supported for columns, we can only set it to empty string
169-
safe_alters.append(
170-
self.engine.format(
171-
"MODIFY COLUMN {col_name:i} COMMENT {comment}",
172-
{
173-
"col_name": col_name,
174-
"comment": bp_c.comment if bp_c.comment else "",
175-
},
168+
if bp_c.comment:
169+
safe_alters.append(
170+
self.engine.format(
171+
"MODIFY COLUMN {col_name:i} COMMENT {comment}",
172+
{
173+
"col_name": col_name,
174+
"comment": bp_c.comment,
175+
},
176+
)
177+
)
178+
else:
179+
safe_alters.append(
180+
self.engine.format(
181+
"MODIFY COLUMN {col_name:i} UNSET COMMENT",
182+
{
183+
"col_name": col_name,
184+
},
185+
)
176186
)
177-
)
178187

179188
# If type matches exactly, skip all other checks
180189
if snow_c.type == bp_c.type:
@@ -266,7 +275,7 @@ def compare_object(self, bp: TableBlueprint, row: dict):
266275
if bp.is_transient is True and row["is_transient"] is False:
267276
replace_reasons.append("Table type was changed to TRANSIENT")
268277
elif bp.is_transient is False and row["is_transient"] is True:
269-
replace_reasons.append("Table type was changed to no longer being TRANSIENT")
278+
replace_reasons.append("Table type was changed to PERMANENT")
270279

271280
# Retention time
272281
if bp.retention_time is not None and bp.retention_time != row["retention_time"]:

snowddl/version.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1 +1 @@
1-
__version__ = "0.54.2"
1+
__version__ = "0.55.0"

test/_config/step1/db1/sc1/dynamic_table/dt004_dt1.yaml

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,7 @@
1+
columns:
2+
id: aaa
3+
name: ~
4+
15
text: |-
26
SELECT id, name
37
FROM dt004_tb1

test/_config/step2/db1/sc1/dynamic_table/dt004_dt1.yaml

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,7 @@
1+
columns:
2+
id: ~
3+
name: bbb
4+
15
text: |-
26
SELECT id, name
37
FROM dt004_tb1

test/dynamic_table/dt004.py

Lines changed: 12 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,19 +1,31 @@
11
def test_step1(helper):
22
dynamic_table_show = helper.show_dynamic_table("db1", "sc1", "dt004_dt1")
3+
dynamic_table_desc = helper.desc_table("db1", "sc1", "dt004_dt1")
34

45
assert dynamic_table_show["automatic_clustering"] == "ON"
56
assert dynamic_table_show["cluster_by"] == "LINEAR(id)"
67

8+
assert dynamic_table_desc["ID"]["comment"] == "aaa"
9+
assert dynamic_table_desc["NAME"]["comment"] is None
10+
711

812
def test_step2(helper):
913
dynamic_table_show = helper.show_dynamic_table("db1", "sc1", "dt004_dt1")
14+
dynamic_table_desc = helper.desc_table("db1", "sc1", "dt004_dt1")
1015

1116
assert dynamic_table_show["automatic_clustering"] == "ON"
1217
assert dynamic_table_show["cluster_by"] == "LINEAR(id, name)"
1318

19+
assert dynamic_table_desc["ID"]["comment"] is None
20+
assert dynamic_table_desc["NAME"]["comment"] == "bbb"
21+
1422

1523
def test_step3(helper):
1624
dynamic_table_show = helper.show_dynamic_table("db1", "sc1", "dt004_dt1")
25+
dynamic_table_desc = helper.desc_table("db1", "sc1", "dt004_dt1")
1726

1827
assert dynamic_table_show["automatic_clustering"] == "OFF"
1928
assert dynamic_table_show["cluster_by"] == ""
29+
30+
assert dynamic_table_desc["ID"]["comment"] is None
31+
assert dynamic_table_desc["NAME"]["comment"] is None

0 commit comments

Comments
 (0)