Skip to content

Commit 7abde4d

Browse files
committed
Switch TABLE metadata from SHOW TABLES to SHOW AS RESOURCE TABLES
1 parent 6911d43 commit 7abde4d

File tree

3 files changed

+52
-46
lines changed

3 files changed

+52
-46
lines changed

snowddl/app/base.py

Lines changed: 9 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -277,6 +277,12 @@ def init_arguments_parser(self):
277277
"--destroy-without-prefix", help="Allow {destroy} action without --env-prefix", default=False, action="store_true"
278278
)
279279

280+
# Legacy compatibility flags
281+
parser.add_argument(
282+
"--legacy-column-expression",
283+
help="Enable handling for undocumented table column expression parameter",
284+
)
285+
280286
# Subparsers
281287
subparsers = parser.add_subparsers(dest="action")
282288
subparsers.required = True
@@ -537,6 +543,9 @@ def init_settings(self):
537543
if self.args.get("max_workers"):
538544
settings.max_workers = int(self.args.get("max_workers"))
539545

546+
if self.args.get("legacy_column_expression"):
547+
settings.legacy_column_expression = True
548+
540549
return settings
541550

542551
def get_engine(self):

snowddl/resolver/table.py

Lines changed: 40 additions & 46 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
11
from itertools import islice
2-
from re import compile
2+
from json import loads as json_loads
33

44
from snowddl.blueprint import (
55
Ident,
@@ -12,9 +12,6 @@
1212
)
1313
from snowddl.resolver.abc_schema_object_resolver import AbstractSchemaObjectResolver, ResolveResult, ObjectType
1414

15-
cluster_by_syntax_re = compile(r"^(\w+)?\((.*)\)$")
16-
collate_type_syntax_re = compile(r"^(.*) COLLATE \'(.*)\'$")
17-
1815

1916
class TableResolver(AbstractSchemaObjectResolver):
2017
def get_object_type(self) -> ObjectType:
@@ -24,22 +21,18 @@ def get_existing_objects_in_schema(self, schema: dict):
2421
existing_objects = {}
2522

2623
cur = self.engine.execute_meta(
27-
"SHOW TABLES IN SCHEMA {database:i}.{schema:i}",
24+
"SHOW AS RESOURCE TABLES IN SCHEMA {database:i}.{schema:i}",
2825
{
2926
"database": schema["database"],
3027
"schema": schema["schema"],
3128
},
3229
)
3330

3431
for r in cur:
32+
r = json_loads(r["As Resource"])
33+
3534
# Skip other table types
36-
if (
37-
r.get("is_external") == "Y"
38-
or r.get("is_event") == "Y"
39-
or r.get("is_hybrid") == "Y"
40-
or r.get("is_iceberg") == "Y"
41-
or r.get("is_dynamic") == "Y"
42-
):
35+
if r["table_type"] != "NORMAL":
4336
continue
4437

4538
full_name = f"{r['database_name']}.{r['schema_name']}.{r['name']}"
@@ -49,11 +42,13 @@ def get_existing_objects_in_schema(self, schema: dict):
4942
"name": r["name"],
5043
"owner": r["owner"],
5144
"is_transient": r["kind"] == "TRANSIENT",
52-
"retention_time": int(r["retention_time"]),
53-
"cluster_by": r["cluster_by"] if r["cluster_by"] else None,
54-
"change_tracking": bool(r["change_tracking"] == "ON"),
55-
"search_optimization": bool(r.get("search_optimization") == "ON"),
56-
"comment": r["comment"] if r["comment"] else None,
45+
"retention_time": r["data_retention_time_in_days"],
46+
"cluster_by": r["cluster_by"],
47+
"change_tracking": r["change_tracking"],
48+
"search_optimization": r["search_optimization"],
49+
"comment": r["comment"],
50+
"columns": r["columns"],
51+
"constraints": r["constraints"],
5752
}
5853

5954
return existing_objects
@@ -77,7 +72,7 @@ def compare_object(self, bp: TableBlueprint, row: dict):
7772
replace_notices = []
7873

7974
bp_cols = {str(c.name): c for c in bp.columns}
80-
snow_cols = self._get_existing_columns(bp)
75+
snow_cols = self._get_existing_columns(bp, row)
8176

8277
dropping_col_names = []
8378
remaining_col_names = list(snow_cols.keys())
@@ -381,36 +376,35 @@ def drop_object(self, row: dict):
381376

382377
return ResolveResult.DROP
383378

384-
def _get_existing_columns(self, bp: TableBlueprint):
379+
def _get_existing_columns(self, bp: TableBlueprint, row: dict):
385380
existing_columns = {}
386381

387-
cur = self.engine.execute_meta(
388-
"DESC TABLE {full_name:i}",
389-
{
390-
"full_name": bp.full_name,
391-
},
392-
)
393-
394-
for r in cur:
395-
m = collate_type_syntax_re.match(r["type"])
382+
for c in row["columns"]:
383+
existing_columns[c["name"]] = TableColumn(
384+
name=Ident(c["name"]),
385+
type=DataType(c["datatype"]),
386+
not_null=c["nullable"] is False,
387+
default=c["default"],
388+
collate=c["collate"],
389+
comment=c["comment"],
390+
)
396391

397-
if m:
398-
dtype = m.group(1)
399-
collate = m.group(2)
400-
else:
401-
dtype = r["type"]
402-
collate = None
403-
404-
existing_columns[r["name"]] = TableColumn(
405-
name=Ident(r["name"]),
406-
type=DataType(dtype),
407-
not_null=bool(r["null?"] == "N"),
408-
default=r["default"] if r["default"] else None,
409-
expression=r["expression"] if r["expression"] else None,
410-
collate=collate,
411-
comment=r["comment"] if r["comment"] else None,
392+
# Expression is not available in SHOW AS RESOURCE TABLES output
393+
# DESC TABLE must be used explicitly in order to read expression value
394+
if self.engine.settings.legacy_column_expression:
395+
cur = self.engine.execute_meta(
396+
"DESC TABLE {full_name:i}",
397+
{
398+
"full_name": bp.full_name,
399+
},
412400
)
413401

402+
for r in cur:
403+
if r["expression"] is None:
404+
continue
405+
406+
existing_columns[r["name"]].expression = r["expression"]
407+
414408
return existing_columns
415409

416410
def _build_create_table(self, bp: TableBlueprint, snow_cols=None):
@@ -574,10 +568,10 @@ def _build_create_table(self, bp: TableBlueprint, snow_cols=None):
574568
return query
575569

576570
def _compare_cluster_by(self, bp: TableBlueprint, row: dict):
577-
bp_cluster_by = ", ".join(bp.cluster_by) if bp.cluster_by else None
578-
snow_cluster_by = cluster_by_syntax_re.sub(r"\2", row["cluster_by"]) if row["cluster_by"] else None
571+
bp_normalized_cluster_by = [c.upper() for c in bp.cluster_by] if bp.cluster_by else None
572+
snow_normalized_cluster_by = [c.upper() for c in row["cluster_by"]] if row["cluster_by"] else None
579573

580-
return bp_cluster_by == snow_cluster_by
574+
return bp_normalized_cluster_by == snow_normalized_cluster_by
581575

582576
def _create_search_optimization(self, bp: TableBlueprint, condition=True):
583577
# Legacy search optimization on an entire table

snowddl/settings.py

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -31,6 +31,9 @@ class SnowDDLSettings(BaseModelWithConfig):
3131
ignore_ownership: bool = False
3232
max_workers: int = 32
3333

34+
# Legacy compatibility flags
35+
legacy_column_expression: bool = False
36+
3437
# Options specific for snowddl-convert
3538
convert_function_body_to_file: bool = False
3639
convert_view_text_to_file: bool = False

0 commit comments

Comments (0)