Skip to content

Commit 2c9e6ce

Browse files
committed
Add CLI option --clone-source-env-prefix
1 parent c905999 commit 2c9e6ce

File tree

6 files changed

+120
-23
lines changed

6 files changed

+120
-23
lines changed

CHANGELOG.md

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,11 @@
11
# Changelog
22

3+
## [0.53.0] - 2025-05-26
4+
5+
- Added policies for `DYNAMIC_TABLE` config.
6+
- Fixed incorrect object type names while dropping policy references.
7+
- Added CLI option `--clone-source-env-prefix`. It allows cloning tables from another environment that uses an env prefix. Previously it was only possible to clone from databases without a prefix.
8+
39
## [0.52.0] - 2025-05-23
410

511
- Reworked data type parsing logic for `SHOW FUNCTIONS` and `SHOW PROCEDURES` to prepare for bundle `2025_03` [changes](https://docs.snowflake.com/en/release-notes/bcr-bundles/2025_03/bcr-1944).

snowddl/app/base.py

Lines changed: 11 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -258,10 +258,14 @@ def init_arguments_parser(self):
258258
# Cloning
259259
parser.add_argument(
260260
"--clone-table",
261-
help="Clone all tables from source databases (without env_prefix) to destination databases (with env_prefix)",
261+
help="Clone all tables from source databases to destination databases (with env_prefix)",
262262
default=False,
263263
action="store_true",
264264
)
265+
parser.add_argument(
266+
"--clone-source-env-prefix",
267+
help="Clone from another environment with different env_prefix",
268+
)
265269

266270
# Destroy without env prefix
267271
parser.add_argument(
@@ -497,6 +501,12 @@ def init_settings(self):
497501

498502
settings.clone_table = True
499503

504+
if self.args.get("clone_source_env_prefix"):
505+
env_prefix = self.args.get("clone_source_env_prefix")
506+
env_prefix_separator = self.args.get("env_prefix_separator")
507+
508+
settings.clone_source_env_prefix = f"{env_prefix}{env_prefix_separator}".upper()
509+
500510
if self.args.get("env_admin_role"):
501511
settings.env_admin_role = Ident(self.args.get("env_admin_role"))
502512

snowddl/app/singledb.py

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -216,6 +216,10 @@ def init_arguments_parser(self):
216216
default=False,
217217
action="store_true",
218218
)
219+
parser.add_argument(
220+
"--clone-source-env-prefix",
221+
help="Clone from another environment with different env_prefix",
222+
)
219223

220224
# Subparsers
221225
subparsers = parser.add_subparsers(dest="action")

snowddl/resolver/clone_table.py

Lines changed: 97 additions & 21 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
from snowddl.blueprint import TableBlueprint
1+
from snowddl.blueprint import TableBlueprint, SchemaObjectIdent
22
from snowddl.resolver.abc_schema_object_resolver import AbstractResolver, ResolveResult, ObjectType
33

44

@@ -26,6 +26,7 @@ def get_existing_objects(self):
2626

2727
def get_databases_for_clone(self):
2828
databases_for_clone = {}
29+
clone_source_env_prefix = self.engine.settings.clone_source_env_prefix
2930

3031
cur = self.engine.execute_meta("SHOW DATABASES")
3132

@@ -34,12 +35,24 @@ def get_databases_for_clone(self):
3435
if r["origin"]:
3536
continue
3637

37-
# Skip databases without destination for cloning
38-
if f"{self.config.env_prefix}{r['name']}" not in self.engine.schema_cache.databases:
38+
src_database = str(r["name"])
39+
40+
if clone_source_env_prefix:
41+
# Skip everything which does not start with source prefix
42+
if not src_database.startswith(clone_source_env_prefix):
43+
continue
44+
45+
dst_database = f"{self.config.env_prefix}{src_database.removeprefix(clone_source_env_prefix)}"
46+
else:
47+
dst_database = f"{self.config.env_prefix}{src_database}"
48+
49+
# Skip every source database without destination database for cloning
50+
if dst_database not in self.engine.schema_cache.databases:
3951
continue
4052

41-
databases_for_clone[r["name"]] = {
42-
"database": r["name"],
53+
databases_for_clone[dst_database] = {
54+
"src_database": src_database,
55+
"dst_database": dst_database,
4356
}
4457

4558
return databases_for_clone
@@ -48,9 +61,9 @@ def get_schemas_for_clone(self, database):
4861
schemas_for_clone = {}
4962

5063
cur = self.engine.execute_meta(
51-
"SHOW SCHEMAS IN DATABASE {database:i}",
64+
"SHOW SCHEMAS IN DATABASE {src_database:i}",
5265
{
53-
"database": database["database"],
66+
"src_database": database["src_database"],
5467
},
5568
)
5669

@@ -59,12 +72,15 @@ def get_schemas_for_clone(self, database):
5972
if r["name"] == "INFORMATION_SCHEMA":
6073
continue
6174

62-
# Skip schemas without destination for cloning
63-
if f"{self.config.env_prefix}{r['database_name']}.{r['name']}" not in self.engine.schema_cache.schemas:
75+
dst_schema = f"{database['dst_database']}.{r['name']}"
76+
77+
# Skip every source schema without destination schema for cloning
78+
if dst_schema not in self.engine.schema_cache.schemas:
6479
continue
6580

66-
schemas_for_clone[f"{r['database_name']}.{r['name']}"] = {
67-
"database": r["database_name"],
81+
schemas_for_clone[dst_schema] = {
82+
"src_database": database["src_database"],
83+
"dst_database": database["dst_database"],
6884
"schema": r["name"],
6985
}
7086

@@ -74,9 +90,9 @@ def get_tables_for_clone(self, schema):
7490
tables_for_clone = {}
7591

7692
cur = self.engine.execute_meta(
77-
"SHOW TABLES IN SCHEMA {database:i}.{schema:i}",
93+
"SHOW TABLES IN SCHEMA {src_database:i}.{schema:i}",
7894
{
79-
"database": schema["database"],
95+
"src_database": schema["src_database"],
8096
"schema": schema["schema"],
8197
},
8298
)
@@ -92,8 +108,9 @@ def get_tables_for_clone(self, schema):
92108
):
93109
continue
94110

95-
tables_for_clone[f"{self.config.env_prefix}{r['database_name']}.{r['schema_name']}.{r['name']}"] = {
96-
"database": r["database_name"],
111+
tables_for_clone[f"{schema['dst_database']}.{r['schema_name']}.{r['name']}"] = {
112+
"src_database": schema["src_database"],
113+
"dst_database": schema["dst_database"],
97114
"schema": r["schema_name"],
98115
"name": r["name"],
99116
"is_transient": r["kind"] == "TRANSIENT",
@@ -119,27 +136,30 @@ def drop_object(self, row: dict):
119136
query.append("TRANSIENT")
120137

121138
query.append(
122-
"TABLE IF NOT EXISTS {database_with_prefix:i}.{schema:i}.{table_name:i}",
139+
"TABLE IF NOT EXISTS {dst_database:i}.{schema:i}.{name:i}",
123140
{
124-
"database_with_prefix": f"{self.config.env_prefix}{row['database']}",
141+
"dst_database": row["dst_database"],
125142
"schema": row["schema"],
126-
"table_name": row["name"],
143+
"name": row["name"],
127144
},
128145
)
129146

130147
query.append_nl(
131-
"CLONE {database:i}.{schema:i}.{table_name:i}",
148+
"CLONE {src_database:i}.{schema:i}.{name:i}",
132149
{
133-
"database": row["database"],
150+
"src_database": row["src_database"],
134151
"schema": row["schema"],
135-
"table_name": row["name"],
152+
"name": row["name"],
136153
},
137154
)
138155

139156
cur = self.engine.execute_clone(query)
140157
r = cur.fetchone()
141158

142159
if str(r["status"]).endswith("successfully created."):
160+
self._drop_existing_policy_refs(
161+
ObjectType.TABLE, SchemaObjectIdent("", row["dst_database"], row["schema"], row["name"])
162+
)
143163
return ResolveResult.CREATE
144164

145165
return ResolveResult.NOCHANGE
@@ -155,3 +175,59 @@ def _is_skipped(self):
155175
return True
156176

157177
return False
178+
179+
def _drop_existing_policy_refs(self, object_type: ObjectType, object_name: SchemaObjectIdent):
180+
cur = self.engine.execute_meta(
181+
"SELECT * FROM TABLE(snowflake.information_schema.policy_references(ref_entity_domain => {object_type}, ref_entity_name => {object_name}))",
182+
{
183+
"object_type": object_type.singular_for_ref,
184+
"object_name": object_name,
185+
},
186+
)
187+
188+
for r in cur:
189+
if r["POLICY_KIND"] == "AGGREGATION_POLICY":
190+
self.engine.execute_clone(
191+
"ALTER {object_type:r} {object_name:i} UNSET AGGREGATION POLICY",
192+
{
193+
"object_type": object_type.singular_for_ref,
194+
"object_name": object_name,
195+
},
196+
)
197+
198+
elif r["POLICY_KIND"] == "MASKING_POLICY":
199+
self.engine.execute_clone(
200+
"ALTER {object_type:r} {object_name:i} MODIFY COLUMN {column:i} UNSET MASKING POLICY",
201+
{
202+
"object_type": object_type.singular_for_ref,
203+
"object_name": object_name,
204+
"column": r["REF_COLUMN_NAME"],
205+
},
206+
)
207+
208+
elif r["POLICY_KIND"] == "PROJECTION_POLICY":
209+
self.engine.execute_clone(
210+
"ALTER {object_type:r} {object_name:i} MODIFY COLUMN {column:i} UNSET PROJECTION POLICY",
211+
{
212+
"object_type": object_type.singular_for_ref,
213+
"object_name": object_name,
214+
"column": r["REF_COLUMN_NAME"],
215+
},
216+
)
217+
218+
elif r["POLICY_KIND"] == "ROW_ACCESS_POLICY":
219+
policy_name = SchemaObjectIdent("", r["POLICY_DB"], r["POLICY_SCHEMA"], r["POLICY_NAME"])
220+
221+
self.engine.execute_clone(
222+
"ALTER {object_type:r} {object_name:i} DROP ROW ACCESS POLICY {policy_name:i}",
223+
{
224+
"object_type": object_type.singular_for_ref,
225+
"object_name": object_name,
226+
"policy_name": policy_name,
227+
},
228+
)
229+
230+
else:
231+
self.engine.logger.warning(
232+
f"Detected unknown policy type [{r['POLICY_KIND']}] attached to cloned object [{object_name}]"
233+
)

snowddl/settings.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -24,6 +24,7 @@ class SnowDDLSettings(BaseModelWithConfig):
2424
refresh_stage_encryption: bool = False
2525
refresh_secrets: bool = False
2626
clone_table: bool = False
27+
clone_source_env_prefix: str = ""
2728
exclude_object_types: List[ObjectType] = []
2829
include_object_types: List[ObjectType] = []
2930
include_databases: List[DatabaseIdent] = []

snowddl/version.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1 +1 @@
1-
__version__ = "0.52.0"
1+
__version__ = "0.53.0"

0 commit comments

Comments
 (0)