Skip to content

Commit 1715608

Browse files
authored
M kovalsky/smmnotebook (#382)
* added notebook * updated restore function to only take dataset name.
1 parent 29729a5 commit 1715608

File tree

2 files changed

+149
-6
lines changed

2 files changed

+149
-6
lines changed
Lines changed: 144 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,144 @@
1+
{
2+
"cells": [
3+
{
4+
"cell_type": "markdown",
5+
"id": "5c27dfd1-4fe0-4a97-92e6-ddf78889aa93",
6+
"metadata": {
7+
"nteract": {
8+
"transient": {
9+
"deleting": false
10+
}
11+
}
12+
},
13+
"source": [
14+
"### Install the latest .whl package\n",
15+
"\n",
16+
"Check [here](https://pypi.org/project/semantic-link-labs/) to see the latest version."
17+
]
18+
},
19+
{
20+
"cell_type": "code",
21+
"execution_count": null,
22+
"id": "d5cae9db-cef9-48a8-a351-9c5fcc99645c",
23+
"metadata": {
24+
"jupyter": {
25+
"outputs_hidden": true,
26+
"source_hidden": false
27+
},
28+
"nteract": {
29+
"transient": {
30+
"deleting": false
31+
}
32+
}
33+
},
34+
"outputs": [],
35+
"source": [
36+
"%pip install semantic-link-labs"
37+
]
38+
},
39+
{
40+
"cell_type": "markdown",
41+
"id": "b195eae8",
42+
"metadata": {},
43+
"source": [
44+
"### Import the library and necessary packages"
45+
]
46+
},
47+
{
48+
"cell_type": "code",
49+
"execution_count": null,
50+
"id": "1344e286",
51+
"metadata": {},
52+
"outputs": [],
53+
"source": [
54+
"import sempy_labs as labs\n",
55+
"source_dataset = '' # Name of the semantic model to backup\n",
56+
"target_dataset = '' # Name of the semantic model to restore\n",
57+
"source_workspace = '' # Name of the workspace in which the semantic model resides\n",
58+
"target_workspace = '' # Destination workspace of the semantic model\n",
59+
"source_file_path = '' # Name/path of the backup file to create\n",
60+
"target_file_path = '' # Name/path of the backup file to be copied to the target workspace\n",
61+
"storage_account = '' # Name of the ADLS Gen2 storage account associated with both source & target workspaces"
62+
]
63+
},
64+
{
65+
"cell_type": "markdown",
66+
"id": "d4f5356a",
67+
"metadata": {},
68+
"source": [
69+
"#### Prerequisites\n",
70+
"* [Create an ADLS Gen2 storage account (in the Azure Portal)](https://learn.microsoft.com/azure/storage/common/storage-account-create?tabs=azure-portal)\n",
71+
"* Assign the ADLS Gen2 storage account to both source and target workspaces\n",
72+
" * Navigate to your workspace.\n",
73+
" * Select 'Workspace settings'.\n",
74+
" * Select 'Azure connections'.\n",
75+
" * Within 'Azure Data Lake Gen2 Storage' click 'Configure'.\n",
76+
" * Enter your Subscription, Resource Group and Storage Account.\n",
77+
" * Click 'Save'.\n"
78+
]
79+
},
80+
{
81+
"cell_type": "markdown",
82+
"id": "55e5ca67",
83+
"metadata": {},
84+
"source": [
85+
"### Backup, copy and restore a semantic model to a new workspace"
86+
]
87+
},
88+
{
89+
"cell_type": "code",
90+
"execution_count": null,
91+
"id": "5a985c1f",
92+
"metadata": {},
93+
"outputs": [],
94+
"source": [
95+
"labs.backup_semantic_model(\n",
96+
" dataset=source_dataset,\n",
97+
" file_path=source_file_path,\n",
98+
" workspace=source_workspace,\n",
99+
")\n",
100+
"labs.copy_semantic_model_backup_file(\n",
101+
" source_workspace=source_workspace,\n",
102+
" target_workspace=target_workspace,\n",
103+
" source_file_name=source_file_path,\n",
104+
" target_file_name=target_file_path,\n",
105+
" storage_account=storage_account,\n",
106+
")\n",
107+
"labs.restore_semantic_model(\n",
108+
" dataset=target_dataset,\n",
109+
" file_path=target_file_path,\n",
110+
" workspace=target_workspace,\n",
111+
")"
112+
]
113+
}
114+
],
115+
"metadata": {
116+
"kernel_info": {
117+
"name": "synapse_pyspark"
118+
},
119+
"kernelspec": {
120+
"display_name": "Synapse PySpark",
121+
"language": "Python",
122+
"name": "synapse_pyspark"
123+
},
124+
"language_info": {
125+
"name": "python"
126+
},
127+
"microsoft": {
128+
"language": "python"
129+
},
130+
"nteract": {
131+
"version": "nteract-front-end@1.0.0"
132+
},
133+
"spark_compute": {
134+
"compute_id": "/trident/default"
135+
},
136+
"synapse_widget": {
137+
"state": {},
138+
"version": "0.1"
139+
},
140+
"widgets": {}
141+
},
142+
"nbformat": 4,
143+
"nbformat_minor": 5
144+
}

src/sempy_labs/_clear_cache.py

Lines changed: 5 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -105,7 +105,7 @@ def backup_semantic_model(
105105

106106
@log
107107
def restore_semantic_model(
108-
dataset: str | UUID,
108+
dataset: str,
109109
file_path: str,
110110
allow_overwrite: bool = True,
111111
ignore_incompatibilities: bool = True,
@@ -118,8 +118,8 @@ def restore_semantic_model(
118118
119119
Parameters
120120
----------
121-
dataset : str | uuid.UUID
122-
Name or ID of the semantic model.
121+
dataset : str
122+
Name of the semantic model.
123123
file_path : str
124124
The location in which to backup the semantic model. Must end in '.abf'.
125125
Example 1: file_path = 'MyModel.abf'
@@ -142,11 +142,10 @@ def restore_semantic_model(
142142
)
143143

144144
(workspace_name, workspace_id) = resolve_workspace_name_and_id(workspace)
145-
(dataset_name, dataset_id) = resolve_dataset_name_and_id(dataset, workspace_id)
146145

147146
tmsl = {
148147
"restore": {
149-
"database": dataset_name,
148+
"database": dataset,
150149
"file": file_path,
151150
"allowOverwrite": allow_overwrite,
152151
"security": "copyAll",
@@ -160,7 +159,7 @@ def restore_semantic_model(
160159
fabric.execute_tmsl(script=tmsl, workspace=workspace_id)
161160

162161
print(
163-
f"{icons.green_dot} The '{dataset_name}' semantic model has been restored to the '{workspace_name}' workspace based on the '{file_path}' backup file."
162+
f"{icons.green_dot} The '{dataset}' semantic model has been restored to the '{workspace_name}' workspace based on the '{file_path}' backup file."
164163
)
165164

166165

0 commit comments

Comments
 (0)