Skip to content

Commit e7b69d6

Browse files
committed
Added list_capacities; assorted fixes.
1 parent caf9ea2 commit e7b69d6

File tree

4 files changed

+77
-33
lines changed

4 files changed

+77
-33
lines changed

src/sempy_labs/__init__.py

Lines changed: 4 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -9,14 +9,15 @@
99
from sempy_labs._generate_semantic_model import (
1010
create_blank_semantic_model,
1111
create_semantic_model_from_bim,
12-
# deploy_semantic_model,
12+
deploy_semantic_model,
1313
get_semantic_model_bim,
1414
)
1515
from sempy_labs._list_functions import (
1616
delete_custom_pool,
1717
list_semantic_model_objects,
1818
list_shortcuts,
1919
get_object_level_security,
20+
list_capacities,
2021
# list_annotations,
2122
# list_columns,
2223
list_dashboards,
@@ -108,7 +109,7 @@
108109
"evaluate_dax_impersonation",
109110
"create_blank_semantic_model",
110111
"create_semantic_model_from_bim",
111-
#'deploy_semantic_model',
112+
"deploy_semantic_model",
112113
"get_semantic_model_bim",
113114
"get_object_level_security",
114115
#'list_annotations',
@@ -179,4 +180,5 @@
179180
"update_workspace_user",
180181
"list_workspace_users",
181182
"assign_workspace_to_dataflow_storage",
183+
"list_capacities",
182184
]

src/sempy_labs/_generate_semantic_model.py

Lines changed: 25 additions & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -153,55 +153,63 @@ def conv_b64(file):
153153

154154

155155
def deploy_semantic_model(
156-
dataset: str,
157-
new_dataset: Optional[str] = None,
158-
workspace: Optional[str] = None,
159-
new_dataset_workspace: Optional[str] = None,
156+
source_dataset: str,
157+
source_workspace: Optional[str] = None,
158+
target_dataset: Optional[str] = None,
159+
target_workspace: Optional[str] = None,
160+
refresh_target_dataset: Optional[bool] = True,
160161
):
161162
"""
162163
Deploys a semantic model based on an existing semantic model.
163164
164165
Parameters
165166
----------
166-
dataset : str
167+
source_dataset : str
167168
Name of the semantic model to deploy.
168-
new_dataset: str
169-
Name of the new semantic model to be created.
170-
workspace : str, default=None
169+
source_workspace : str, default=None
171170
The Fabric workspace name.
172171
Defaults to None which resolves to the workspace of the attached lakehouse
173172
or if no lakehouse attached, resolves to the workspace of the notebook.
174-
new_dataset_workspace : str, default=None
173+
target_dataset : str
174+
Name of the new semantic model to be created.
175+
target_workspace : str, default=None
175176
The Fabric workspace name in which the new semantic model will be deployed.
176177
Defaults to None which resolves to the workspace of the attached lakehouse
177178
or if no lakehouse attached, resolves to the workspace of the notebook.
179+
refresh_target_dataset : bool, default=True
180+
If set to True, this will initiate a full refresh of the target semantic model in the target workspace.
178181
179182
Returns
180183
-------
181184
182185
"""
183186

184-
workspace = fabric.resolve_workspace_name(workspace)
187+
from sempy_labs import refresh_semantic_model
185188

186-
if new_dataset_workspace is None:
187-
new_dataset_workspace = workspace
189+
source_workspace = fabric.resolve_workspace_name(source_workspace)
188190

189-
if new_dataset is None:
190-
new_dataset = dataset
191+
if target_workspace is None:
192+
target_workspace = source_workspace
191193

192-
if new_dataset == dataset and new_dataset_workspace == workspace:
194+
if target_dataset is None:
195+
target_dataset = source_dataset
196+
197+
if target_dataset == source_dataset and target_workspace == source_workspace:
193198
print(
194199
f"{icons.red_dot} The 'dataset' and 'new_dataset' parameters have the same value. And, the 'workspace' and 'new_dataset_workspace' "
195200
f"parameters have the same value. At least one of these must be different. Please update the parameters."
196201
)
197202
return
198203

199-
bim = get_semantic_model_bim(dataset=dataset, workspace=workspace)
204+
bim = get_semantic_model_bim(dataset=source_dataset, workspace=source_workspace)
200205

201206
create_semantic_model_from_bim(
202-
dataset=new_dataset, bim_file=bim, workspace=new_dataset_workspace
207+
dataset=target_dataset, bim_file=bim, workspace=target_workspace
203208
)
204209

210+
if refresh_target_dataset:
211+
refresh_semantic_model(dataset=target_dataset, workspace=target_workspace)
212+
205213

206214
def get_semantic_model_bim(
207215
dataset: str,

src/sempy_labs/_list_functions.py

Lines changed: 46 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -11,6 +11,7 @@
1111
from pyspark.sql import SparkSession
1212
from typing import Optional
1313
import sempy_labs._icons as icons
14+
from sempy.fabric.exceptions import FabricHTTPException
1415

1516

1617
def get_object_level_security(
@@ -1835,12 +1836,11 @@ def update_custom_pool(
18351836
f"/v1/workspaces/{workspace_id}/spark/pools", json=request_body
18361837
)
18371838

1838-
if response.status_code == 200:
1839-
print(
1840-
f"{icons.green_dot} The '{pool_name}' spark pool within the '{workspace}' workspace has been updated."
1841-
)
1842-
else:
1843-
raise ValueError(f"{icons.red_dot} {response.status_code}")
1839+
if response.status_code != 200:
1840+
raise FabricHTTPException(response)
1841+
print(
1842+
f"{icons.green_dot} The '{pool_name}' spark pool within the '{workspace}' workspace has been updated."
1843+
)
18441844

18451845

18461846
def delete_custom_pool(pool_name: str, workspace: Optional[str | None] = None):
@@ -1874,12 +1874,11 @@ def delete_custom_pool(pool_name: str, workspace: Optional[str | None] = None):
18741874
client = fabric.FabricRestClient()
18751875
response = client.delete(f"/v1/workspaces/{workspace_id}/spark/pools/{poolId}")
18761876

1877-
if response.status_code == 200:
1878-
print(
1879-
f"{icons.green_dot} The '{pool_name}' spark pool has been deleted from the '{workspace}' workspace."
1880-
)
1881-
else:
1882-
print(f"{icons.red_dot} {response.status_code}")
1877+
if response.status_code != 200:
1878+
raise FabricHTTPException(response)
1879+
print(
1880+
f"{icons.green_dot} The '{pool_name}' spark pool has been deleted from the '{workspace}' workspace."
1881+
)
18831882

18841883

18851884
def assign_workspace_to_capacity(capacity_name: str, workspace: Optional[str] = None):
@@ -1899,11 +1898,10 @@ def assign_workspace_to_capacity(capacity_name: str, workspace: Optional[str] =
18991898
-------
19001899
"""
19011900

1902-
# https://learn.microsoft.com/en-us/rest/api/fabric/core/workspaces/assign-to-capacity?tabs=HTTP
19031901
(workspace, workspace_id) = resolve_workspace_name_and_id(workspace)
19041902

19051903
dfC = fabric.list_capacities()
1906-
dfC_filt = dfC[dfC["Name"] == capacity_name]
1904+
dfC_filt = dfC[dfC["Display Name"] == capacity_name]
19071905
capacity_id = dfC_filt["Id"].iloc[0]
19081906

19091907
request_body = {"capacityId": capacity_id}
@@ -2315,3 +2313,37 @@ def assign_workspace_to_dataflow_storage(
23152313
)
23162314
else:
23172315
print(f"{icons.red_dot} {response.status_code}")
2316+
2317+
2318+
def list_capacities() -> pd.DataFrame:
2319+
"""
2320+
Shows the capacities and their properties.
2321+
2322+
Parameters
2323+
----------
2324+
2325+
Returns
2326+
-------
2327+
pandas.DataFrame
2328+
A pandas dataframe showing the capacities and their properties.
2329+
"""
2330+
2331+
df = pd.DataFrame(
2332+
columns=["Id", "Display Name", "Sku", "Region", "State", "Admins"]
2333+
)
2334+
2335+
client = fabric.PowerBIRestClient()
2336+
response = client.get("/v1.0/myorg/capacities")
2337+
2338+
for i in response.json()["value"]:
2339+
new_data = {
2340+
"Id": i.get("id", {}).lower(),
2341+
"Display Name": i.get("displayName", {}),
2342+
"Sku": i.get("sku", {}),
2343+
"Region": i.get("region", {}),
2344+
"State": i.get("state", {}),
2345+
"Admins": [i.get("admins", [])],
2346+
}
2347+
df = pd.concat([df, pd.DataFrame(new_data, index=[0])], ignore_index=True)
2348+
2349+
return df

src/sempy_labs/_model_bpa_rules.py

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,4 @@
1+
import sempy
12
import sempy.fabric as fabric
23
import pandas as pd
34
import re
@@ -30,6 +31,7 @@ def model_bpa_rules(
3031
A pandas dataframe containing the default rules for the run_model_bpa function.
3132
"""
3233

34+
sempy.fabric._client._utils._init_analysis_services()
3335
import Microsoft.AnalysisServices.Tabular as TOM
3436

3537
workspace = fabric.resolve_workspace_name(workspace)

0 commit comments

Comments (0)