Each item type has its own set of APIs, listed below by item type. All of these APIs are also available at the workspace level; a short sketch of the workspace-level pattern follows the list below.
Go to:
- Dashboards, DataMarts, Mirrored Warehouses, Paginated Reports
- Anomaly Detectors
- Apache Airflow Jobs
- Copy Jobs
- Cosmos DB Databases
- Data Agents
- Dataflows
- Data Pipelines
- Digital Twin Builder
- Digital Twin Builder Flow
- Environments
- Eventhouses
- Eventstreams
- Event Schema Sets
- Eventstream Topology
- GraphQL APIs
- Graph Models
- Graph Query Sets
- KQL Dashboards
- KQL Databases
- KQL Querysets
- Lakehouse
- Maps
- Mirrored Azure Databricks Catalogs
- Mirrored Database
- ML Experiments
- ML Models
- Mounted Data Factories
- Notebooks
- Ontologies
- Operations Agents
- Reflexes
- Reports
- Semantic Models
- Snowflake Databases
- Spark Livy Sessions
- Spark Custom Pools
- Spark Workspace Settings
- Spark Job Definitions
- SQL Databases
- SQL Endpoints
- User Data Functions
- Variable Libraries
- Warehouses
- Warehouse Snapshots
- NL To KQL
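For example, the same item APIs can be called on the client with an explicit workspace_id or directly on a workspace object. A minimal sketch, assuming the Workspace object returned by get_workspace_by_name exposes the same item methods (here list_lakehouses and create_lakehouse) without the workspace_id argument:

from msfabricpysdkcore import FabricClientCore

fc = FabricClientCore()
workspace = fc.get_workspace_by_name("testitems")
# Client-level call with an explicit workspace_id
lakehouses = fc.list_lakehouses(workspace.id)
# Assumed workspace-level equivalents of the same item APIs
lakehouses = workspace.list_lakehouses()
lakehouse = workspace.create_lakehouse(display_name="lakehouse1")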
from msfabricpysdkcore import FabricClientCore
fc = FabricClientCore()
workspace = fc.get_workspace_by_name("testitems")
workspace_id = workspace.id
# List dashboards
list_dashboards = fc.list_dashboards(workspace_id)
# List datamarts
list_datamarts = fc.list_datamarts(workspace_id)
# List mirrored warehouses
list_mirrored_warehouses = fc.list_mirrored_warehouses(workspace_id)
# List paginated reports
list_paginated_reports = fc.list_paginated_reports(workspace_id)
# Update paginated report
fc.update_paginated_report(workspace_id="1232", paginated_report_id="12312",
display_name = "newname", description = "newdescription", return_item=False)fcc = FabricClientCore()
workspace_id = "0asdfasdfsdf3"
item_id = "9asdfasdfasdf2"
# get anomaly detector definition
anomaly_detector_definition = fcc.get_anomaly_detector_definition(workspace_id=workspace_id, anomaly_detector_id=item_id)
definition = anomaly_detector_definition["definition"]
# create anomaly detector
date_str = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
date_str = date_str.replace(" ", "T").replace(":", "").replace("-", "")
date_str = f"anomalydetector{date_str}"
anomaly_detector_new = fcc.create_anomaly_detector(workspace_id=workspace_id, display_name=date_str, definition=definition)
# get anomaly detector
anomaly_detector_get = fcc.get_anomaly_detector(workspace_id=workspace_id, anomaly_detector_id=anomaly_detector_new.id)
# list anomaly detectors
anomaly_detectors = fcc.list_anomaly_detectors(workspace_id=workspace_id)
# update anomaly detector
date_str_updated = date_str + "_updated"
anomaly_detector_updated = fcc.update_anomaly_detector(workspace_id=workspace_id, anomaly_detector_id=anomaly_detector_new.id, display_name=date_str_updated, return_item=True)
# update anomaly detector definition
anomaly_detector_updated = fcc.update_anomaly_detector_definition(workspace_id=workspace_id, anomaly_detector_id=anomaly_detector_new.id, definition=definition)
# delete anomaly detector
resp = fcc.delete_anomaly_detector(workspace_id=workspace_id, anomaly_detector_id=anomaly_detector_new.id)

from msfabricpysdkcore import FabricClientCore
from datetime import datetime
fcc = FabricClientCore()
workspace_id = "05bc5bsdfs478151d3"
item_id = "4e68dfgd3c3df14"
# List Apache Airflow Jobs
apache_airflow_job = fcc.list_apache_airflow_jobs(workspace_id=workspace_id)
# Get Apache Airflow Job Definition
apache_airflow_job_definition = fcc.get_apache_airflow_job_definition(workspace_id=workspace_id, apache_airflow_job_id=item_id)
definition = apache_airflow_job_definition["definition"]
# Create Apache Airflow Job
date_str = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
date_str = date_str.replace(" ", "T").replace(":", "").replace("-", "")
date_str = f"copyjob{date_str}"
apache_airflow_job_new = fcc.create_apache_airflow_job(workspace_id=workspace_id, display_name=date_str, definition=definition)
# Get Apache Airflow Job
apache_airflow_job_get = fcc.get_apache_airflow_job(workspace_id=workspace_id, apache_airflow_job_id=apache_airflow_job_new.id)
assert apache_airflow_job_get.display_name == date_str
# Update Apache Airflow Job
date_str_updated = date_str + "_updated"
apache_airflow_job_updated = fcc.update_apache_airflow_job(workspace_id=workspace_id, apache_airflow_job_id=apache_airflow_job_new.id, display_name=date_str_updated, return_item=True)
# Update Apache Airflow Job Definition
apache_airflow_job_updated = fcc.update_apache_airflow_job_definition(workspace_id=workspace_id, apache_airflow_job_id=apache_airflow_job_new.id, definition=definition)
# Delete Apache Airflow Job
resp = fcc.delete_apache_airflow_job(workspace_id=workspace_id, apache_airflow_job_id=apache_airflow_job_updated.id)
# Get Apache Airflow Job Compute (beta)
compute = fcc.get_apache_airflow_job_compute(workspace_id=workspace_id, apache_airflow_job_id=apache_airflow_job_new.id)
# Get Apache Airflow Job Environment (beta)
environment = fcc.get_apache_airflow_job_environment(workspace_id=workspace_id, apache_airflow_job_id=apache_airflow_job_new.id)
# Create or Update Apache Airflow Job File (beta)
resp = fcc.create_or_update_apache_airflow_job_file(workspace_id=workspace_id, apache_airflow_job_id=apache_airflow_job_new.id,
file_path="dags/my_dag.py", file_contents="print('hello')")
# Get Apache Airflow Job File (beta)
file_resp = fcc.get_apache_airflow_job_file(workspace_id=workspace_id, apache_airflow_job_id=apache_airflow_job_new.id, file_path="dags/my_dag.py")
# List Apache Airflow Job Files (beta)
files = fcc.list_apache_airflow_job_files(workspace_id=workspace_id, apache_airflow_job_id=apache_airflow_job_new.id)
# Delete Apache Airflow Job File (beta)
resp = fcc.delete_apache_airflow_job_file(workspace_id=workspace_id, apache_airflow_job_id=apache_airflow_job_new.id, file_path="dags/my_dag.py")
# Create Airflow Pool Template (beta)
pool = fcc.create_airflow_pool_template(workspace_id=workspace_id, name="my_pool", node_size="Small")
# Get Airflow Pool Template (beta)
pool = fcc.get_airflow_pool_template(workspace_id=workspace_id, pool_template_id=pool['id'])
# List Airflow Pool Templates (beta)
pools = fcc.list_airflow_pool_templates(workspace_id=workspace_id)
# Delete Airflow Pool Template (beta)
resp = fcc.delete_airflow_pool_template(workspace_id=workspace_id, pool_template_id=pool['id'])
# List Apache Airflow Job Libraries (beta)
libraries = fcc.list_apache_airflow_job_libraries(workspace_id=workspace_id, apache_airflow_job_id=apache_airflow_job_new.id)
# Get Apache Airflow Job Settings (beta)
settings = fcc.get_apache_airflow_job_settings(workspace_id=workspace_id, apache_airflow_job_id=apache_airflow_job_new.id)
# Get Airflow Workspace Settings (beta)
ws_settings = fcc.get_airflow_workspace_settings(workspace_id=workspace_id)
# Update Airflow Workspace Settings (beta)
resp = fcc.update_airflow_workspace_settings(workspace_id=workspace_id, default_pool_template_id="pool_template_id")

from msfabricpysdkcore import FabricClientCore
fcc = FabricClientCore()
workspace_id = "asdfasdf"
item_id = "asdfasdf9"
# Get copy job definition
copy_job_definition = fcc.get_copy_job_definition(workspace_id=workspace_id, copy_job_id=item_id)
definition = copy_job_definition["definition"]
# Create copy job
copy_job_new = fcc.create_copy_job(workspace_id=workspace_id, display_name="name", definition=definition)
# Get copy job
copy_job_get = fcc.get_copy_job(workspace_id=workspace_id, copy_job_id=copy_job_new.id)
# List copy jobs
copy_jobs = fcc.list_copy_jobs(workspace_id=workspace_id)
# Update copy job
copy_job_updated = fcc.update_copy_job(workspace_id=workspace_id, copy_job_id=copy_job_new.id, display_name="date_str_updated", return_item=True)
# Update copy job definition
copy_job_updated = fcc.update_copy_job_definition(workspace_id=workspace_id, copy_job_id=copy_job_new.id, definition=definition)
# Delete copy job
resp = fcc.delete_copy_job(workspace_id=workspace_id, copy_job_id=copy_job_new.id)
from msfabricpysdkcore import FabricClientCore
from datetime import datetime
fcc = FabricClientCore()
workspace_id = "0asdfasdfd3"
item_id = "8bsadf4088"
# List dataflows
dataflows = fcc.list_dataflows(workspace_id=workspace_id)
# Get dataflow definition
dataflow_definition = fcc.get_dataflow_definition(workspace_id=workspace_id, dataflow_id=item_id)
definition = dataflow_definition["definition"]
date_str = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
date_str = date_str.replace(" ", "T").replace(":", "").replace("-", "")
date_str = f"dataflow{date_str}"
# Create dataflow
dataflow_new = fcc.create_dataflow(workspace_id=workspace_id, display_name=date_str, definition=definition)
# Get dataflow
dataflow_get = fcc.get_dataflow(workspace_id=workspace_id, dataflow_id=dataflow_new.id)
# Update dataflow
date_str_updated = date_str + "_updated"
dataflow_updated = fcc.update_dataflow(workspace_id=workspace_id, dataflow_id=dataflow_new.id, display_name=date_str_updated, return_item=True)
# Update dataflow definition
dataflow_updated = fcc.update_dataflow_definition(workspace_id=workspace_id, dataflow_id=dataflow_new.id, definition=definition)
# Delete dataflow
resp = fcc.delete_dataflow(workspace_id=workspace_id, dataflow_id=dataflow_new.id)
# Apply changes to dataflow
resp = fcc.run_on_demand_apply_changes(workspace_id=workspace_id, dataflow_id=dataflow_new.id,
                                       job_type = "ApplyChanges", wait_for_completion = False)
# Execute dataflow
resp = fcc.run_on_demand_execute(workspace_id=workspace_id, dataflow_id=dataflow_new.id,
                                 job_type = "Execute", wait_for_completion = False)
configuration = {
"startDateTime": "2025-04-28T00:00:00",
"endDateTime": "2025-04-30T23:59:00",
"localTimeZoneId": "Central Standard Time",
"type": "Cron",
"interval": 10
}
# Schedule apply changes job for a dataflow
resp = fcc.schedule_apply_changes(workspace_id=workspace_id, dataflow_id=dataflow_new.id,
configuration=configuration, enabled=True)
# Schedule execute job for a dataflow
resp = fcc.schedule_execute(workspace_id=workspace_id, dataflow_id=dataflow_new.id,
configuration=configuration, enabled=True)
# Discover dataflow parameters
params = fcc.discover_dataflow_parameters(workspace_id=workspace_id, dataflow_id=dataflow_new.id)
# Execute dataflow query
result = fcc.execute_dataflow_query(workspace_id=workspace_id, dataflow_id=dataflow_new.id, query_name="MyQuery")

from msfabricpysdkcore import FabricClientCore
from datetime import datetime

fcc = FabricClientCore()
workspace_id = "asdfasdf"
item_id = "b7dasfasf26b5d3"
# List data pipelines
data_pipelines = fcc.list_data_pipelines(workspace_id=workspace_id)
# Get data pipeline definition
data_pipeline_definition = fcc.get_data_pipeline_definition(workspace_id=workspace_id, data_pipeline_id=item_id)
definition = data_pipeline_definition["definition"]
date_str = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
date_str = date_str.replace(" ", "T").replace(":", "").replace("-", "")
date_str = f"data_pipeline{date_str}"
# Create data pipeline
data_pipeline_new = fcc.create_data_pipeline(workspace_id=workspace_id, display_name=date_str, definition=definition)
# Get data pipeline
data_pipeline_get = fcc.get_data_pipeline(workspace_id=workspace_id, data_pipeline_id=data_pipeline_new.id)
# Update data pipeline
date_str_updated = date_str + "_updated"
data_pipeline_updated = fcc.update_data_pipeline(workspace_id=workspace_id, data_pipeline_id=data_pipeline_new.id, display_name=date_str_updated, return_item=True)
# Update data pipeline definition
data_pipeline_updated = fcc.update_data_pipeline_definition(workspace_id=workspace_id, data_pipeline_id=data_pipeline_new.id, definition=definition)
# Delete data pipeline
resp = fcc.delete_data_pipeline(workspace_id=workspace_id, data_pipeline_id=data_pipeline_updated.id)
from msfabricpysdkcore import FabricClientCore
from datetime import datetime
fcc = FabricClientCore()
workspace_id = "05bc5ba128a478151d3"
item_id = "d726asdfa1723931d1"
# List Digital Twin Builders
digital_twin_builders = fcc.list_digital_twin_builders(workspace_id=workspace_id)
# Get Digital Twin Builder Definition
digital_twin_builder_definition = fcc.get_digital_twin_builder_definition(workspace_id=workspace_id, digital_twin_builder_id=item_id)
definition = digital_twin_builder_definition["definition"]
# Create Digital Twin Builder
date_str = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
date_str = date_str.replace(" ", "T").replace(":", "").replace("-", "")
date_str = f"copyjob{date_str}"
digital_twin_builder_new = fcc.create_digital_twin_builder(workspace_id=workspace_id, display_name=date_str, definition=definition)
# Get Digital Twin Builder
digital_twin_builder_get = fcc.get_digital_twin_builder(workspace_id=workspace_id, digital_twin_builder_id=digital_twin_builder_new.id)
# Update Digital Twin Builder
date_str_updated = date_str + "_updated"
digital_twin_builder_updated = fcc.update_digital_twin_builder(workspace_id=workspace_id, digital_twin_builder_id=digital_twin_builder_new.id, display_name=date_str_updated, return_item=True)
# Update Digital Twin Builder Definition
digital_twin_builder_updated = fcc.update_digital_twin_builder_definition(workspace_id=workspace_id, digital_twin_builder_id=digital_twin_builder_new.id, definition=definition)
# Delete Digital Twin Builder
resp = fcc.delete_digital_twin_builder(workspace_id=workspace_id, digital_twin_builder_id=digital_twin_builder_updated.id)

from msfabricpysdkcore import FabricClientCore
fcc = FabricClientCore()
workspace_id = "05basdf51d3"
item_id = "d726asasdf3931d1"
# Create Digital Twin Builder Flow
creation_payload = {"digitalTwinBuilderItemReference": {
"referenceType": "ById",
"itemId": "d96de2f4-7dd1-45ad-9ff6-37a2d6aa9861",
"workspaceId": "cfafbeb1-8037-4d0c-896e-a46fb27ff229"
}}
digital_twin_builder_flow_new = fcc.create_digital_twin_builder_flow(workspace_id=workspace_id, display_name="New Digital Twin Builder Flow", creation_payload=creation_payload, description="This is a new digital twin builder flow")
# Get Digital Twin Builder Flow
digital_twin_builder_flow_get = fcc.get_digital_twin_builder_flow(workspace_id=workspace_id, digital_twin_builder_flow_id=digital_twin_builder_flow_new.id)
# Get Digital Twin Builder Flow Definition
digital_twin_builder_flow_definition = fcc.get_digital_twin_builder_flow_definition(workspace_id=workspace_id, digital_twin_builder_flow_id=digital_twin_builder_flow_new.id)
# List Digital Twin Builder Flows
dtwbfs = fcc.list_digital_twin_builder_flows(workspace_id=workspace_id, with_properties=False)
# Update Digital Twin Builder Flow
digital_twin_builder_flow_updated = fcc.update_digital_twin_builder_flow(workspace_id=workspace_id, digital_twin_builder_flow_id=digital_twin_builder_flow_new.id, display_name="Updated Digital Twin Builder Flow", return_item=True)
# Update Digital Twin Builder Flow Definition
digital_twin_builder_flow_definition_updated = fcc.update_digital_twin_builder_flow_definition(workspace_id=workspace_id, digital_twin_builder_flow_id=digital_twin_builder_flow_new.id, definition=digital_twin_builder_flow_definition)
# Delete Digital Twin Builder Flow
status_code = fcc.delete_digital_twin_builder_flow(workspace_id=workspace_id, digital_twin_builder_flow_id=digital_twin_builder_flow_definition_updated.id)

from msfabricpysdkcore import FabricClientCore

fcc = FabricClientCore()
workspace_id = 'd8asd'
env_id = "4hb"
# Create Environment
environment1 = fcc.create_environment(workspace_id, display_name="environment1", description="My first environment")
# Get Environment
env = fcc.get_environment(workspace_id, environment_id=env_id)
# Get Environment Definition
env = fcc.get_environment_definition(workspace_id, environment_id=env_id)
# List Environments
environments = fcc.list_environments(workspace_id)
# Update Environment
env2 = fcc.update_environment(workspace_id, env_id, display_name="environment2", return_item=True)
# Update Environment Definition
definition= {
"parts": [
{
"path": "Libraries/PublicLibraries/environment.yml",
"payload": "ZTAK",
"payloadType": "I64"
},
{..},
{..}]
}
env2 = fcc.update_environment_definition(workspace_id, env_id, definition=definition)
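The payload values in definition parts are base64-encoded file contents. A minimal sketch of building such a part, assuming the environment.yml content is plain UTF-8 text (the yaml_text value here is only a hypothetical example):

import base64

# Hypothetical environment.yml content, encoded as an InlineBase64 payload
yaml_text = "dependencies:\n- pandas\n"
payload = base64.b64encode(yaml_text.encode("utf-8")).decode("utf-8")
part = {"path": "Libraries/PublicLibraries/environment.yml",
        "payload": payload,
        "payloadType": "InlineBase64"}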
# Delete Environment
status_code = fcc.delete_environment(workspace_id, env_id)
# Publish Environment
resp = fcc.publish_environment(workspace_id, environment_id=env_id)
# Cancel Publish Environment
resp = fcc.cancel_publish_environment(workspace_id, environment_id=env_id)
# Export Staging External Libraries
resp = fcc.export_staging_external_libraries(workspace_id, environment_id=env_id)
# Import External Libraries to Staging
resp = fcc.import_external_libraries_to_staging(workspace_id, environment_id=env_id,
file_path="blubb.yml")
# Remove External Library
resp = fcc.remove_external_library(workspace_id, environment_id=env_id, name="msfabricpysdkcore", version="0.2.9")
# List Staging Libraries
resp = fcc.list_staging_libraries(workspace_id, environment_id=env_id)
# Get Staging Spark Compute
resp = fcc.get_staging_spark_compute(workspace_id, environment_id=env_id)
# Update Staging Spark Compute
resp = fcc.update_staging_spark_compute(workspace_id, environment_id=env_id, driver_cores=4)
# Upload Custom Library
resp = fcc.upload_custom_library(workspace_id, environment_id=env_id, library_name="msfabricpysdkcore.whl", file_path="msfabricpysdkcore-0.2.10-py3-none-any.whl")
# Delete Custom Library
resp = fcc.delete_custom_library(workspace_id, environment_id=env_id, library_name="msfabricpysdkcore.whl")
# Get Published Spark Compute
resp = fcc.get_published_spark_compute(workspace_id, environment_id=env_id)
# List Published Libraries
resp = fcc.list_published_libraries(workspace_id, environment_id=env_id)
# Export Published External Libraries
resp = fcc.export_published_external_libraries(workspace_id, environment_id=env_id)

from msfabricpysdkcore import FabricClientCore
fc = FabricClientCore()
workspace_id = 'd8a5abeieojfsdf-ab46-343bc57ddbe5'
# Create Eventhouse
eventhouse1 = fc.create_eventhouse(workspace_id, display_name="eventhouse1")
# List Eventhouses
eventhouses = fc.list_eventhouses(workspace_id)
eventhouse_names = [eh.display_name for eh in eventhouses]
# Get Eventhouse
eh = fc.get_eventhouse(workspace_id, eventhouse_name="eventhouse1")
# Update Eventhouse
eh2 = fc.update_eventhouse(workspace_id, eh.id, display_name="eventhouse2", return_item=True)
# Delete Eventhouse
status_code = fc.delete_eventhouse(workspace_id, eh.id)
# Get Eventhouse Definition
eventhouse_definition = fc.get_eventhouse_definition(workspace_id, eventhouse_id=eh.id, format=None)
eventhouse_definition = eventhouse_definition["definition"]
# Update Eventhouse Definition
fc.update_eventhouse_definition(workspace_id, eventhouse_id=eh.id, definition=eventhouse_definition, update_metadata=None)
from msfabricpysdkcore import FabricClientCore
fc = FabricClientCore()
workspace = fc.get_workspace_by_name("testitems")
workspace_id = workspace.id
# List Eventstreams
eventstreams = fc.list_eventstreams(workspace_id)
# Create Eventstream
es = fc.create_eventstream(workspace_id, display_name="es1")
# Get Eventstream
es = fc.get_eventstream(workspace_id, eventstream_name="es1")
# Update Eventstream
es2 = fc.update_eventstream(workspace_id, es.id, display_name="es2", return_item=True)
# Delete Eventstream
fc.delete_eventstream(workspace_id, es.id)
# Get Eventstream Definition
eventstream_definition = fc.get_eventstream_definition(workspace_id, eventstream_id=es.id, format = None)
eventstream_definition = eventstream_definition["definition"]
# Update Eventstream Definition
fc.update_eventstream_definition(workspace_id, eventstream_id=es.id, definition=eventstream_definition, update_metadata = None)
from msfabricpysdkcore import FabricClientCore
fcc = FabricClientCore()
workspace_id = "05basdfasdf1d3"
item_id = "94f4adfsff1b9"
custom_destination_id = "acdasdfasff2fb984d"
custom_source_id = "9f3829asdfasdf2ad3ecd"
source_id = "e58dasdfasf0540b17"
destination_id = "2446e6asdfasdfa3eb257"
# Get Eventstream Topology
topology = fcc.get_eventstream_topology(workspace_id, item_id)
# Get Eventstream Destination
destination = fcc.get_eventstream_destination(workspace_id, item_id, destination_id)
# Get Eventstream Destination Connection
destination_conn = fcc.get_eventstream_destination_connection(workspace_id, item_id, custom_destination_id)
# Get Eventstream Source
source = fcc.get_eventstream_source(workspace_id, item_id, source_id)
# Get Eventstream Source Connection
source_conn = fcc.get_eventstream_source_connection(workspace_id, item_id, custom_source_id)
# Pause Eventstream
resp = fcc.pause_eventstream(workspace_id, item_id)
# Resume Eventstream
resp = fcc.resume_eventstream(workspace_id, item_id, start_type="Now")
# Pause Eventstream Source
resp = fcc.pause_eventstream_source(workspace_id, item_id, source_id)
# Pause Eventstream Destination
resp = fcc.pause_eventstream_destination(workspace_id, item_id, destination_id)
# Resume Eventstream Source
resp = fcc.resume_eventstream_source(workspace_id, item_id, source_id, start_type="Now")
# Resume Eventstream Destination
resp = fcc.resume_eventstream_destination(workspace_id, item_id, destination_id, start_type="Now")

from msfabricpysdkcore import FabricClientCore
fc = FabricClientCore()
workspace = fc.get_workspace_by_name("testitems")
workspace_id = workspace.id
# List GraphQL APIs
graphql_apis = fc.list_graphql_apis(workspace_id = workspace_id)
# Create GraphQL API
graphql_api = fc.create_graphql_api(workspace_id = workspace_id, display_name="graphql_api1", description="description")
# Get GraphQL API
graphql_api = fc.get_graphql_api(workspace_id = workspace_id, graphql_api_name="graphql_api1", graphql_api_id=None)
# Update GraphQL API
graphql_api2 = fc.update_graphql_api(workspace_id = workspace_id, graphql_api_id = graphql_api.id, display_name="graphql_api2", description="description", return_item=True)
# Delete GraphQL API
fc.delete_graphql_api(workspace_id = workspace_id, graphql_api_id = graphql_api.id)
# Get GraphQL API Definition
graphql_api_definition = fc.get_graphql_api_definition(workspace_id = workspace_id, graphql_api_id = graphql_api.id)
# Update GraphQL API Definition
fc.update_graphql_api_definition(workspace_id = workspace_id, graphql_api_id = graphql_api.id, definition = graphql_api_definition)

from msfabricpysdkcore import FabricClientCore

fc = FabricClientCore()
# Create KQL Dashboard
kql_dash = fc.create_kql_dashboard(display_name="kql_dash_name", workspace_id="workspace_id")
# Delete KQL Dashboard
resp_code = fc.delete_kql_dashboard(workspace_id="w123", kql_dashboard_id="123123")
# Get KQL Dashboard
kql_dash2 = fc.get_kql_dashboard(workspace_id="w123", kql_dashboard_name="kql_dash_name")
kql_dash2 = fc.get_kql_dashboard(workspace_id="w123", kql_dashboard_id="123123")
# Get KQL Dashboard Definition
definition_orig = fc.get_kql_dashboard_definition(workspace_id="w123", kql_dashboard_id="dsfsf")
# Update KQL Dashboard
kql_dash3 = fc.update_kql_dashboard(workspace_id="w123", kql_dashboard_id="123123",
display_name="new_name", return_item=True)
# Update KQL Dashboard Definition
definition = fc.update_kql_dashboard_definition(workspace_id="w123", kql_dashboard_id="2323", definition=definition_orig)
# List KQL Dashboards
kql_dashs = fc.list_kql_dashboards(workspace_id="w123")

from msfabricpysdkcore import FabricClientCore
fc = FabricClientCore()
workspace_id = 'd8a5abe89jufojafds3bc57ddbe5'
eventhouse_id = "1482adfa290348238423428510a9197"
creation_payload = {"databaseType" : "ReadWrite",
                    "parentEventhouseItemId" : eventhouse_id}
# Create KQL Database
kqldb = fc.create_kql_database(workspace_id = workspace_id, display_name="kqldatabase12",
creation_payload=creation_payload)
# List KQL Databases
kql_databases = fc.list_kql_databases(workspace_id)
kql_database_names = [kqldb.display_name for kqldb in kql_databases]
# Get KQL Database
kqldb = fc.get_kql_database(workspace_id, kql_database_name="kqldatabase12")
# Update KQL Database
kqldb2 = fc.update_kql_database(workspace_id, kqldb.id, display_name="kqldb23", return_item=True)
# Delete KQL Database
status_code = fc.delete_kql_database(workspace_id, kqldb.id)
# Get KQL Database Definition
kql_database_definition = fc.get_kql_database_definition(workspace_id, kql_database_id=kqldb.id, format = None)
# Update KQL Database Definition
fc.update_kql_database_definition(workspace_id, kql_database_id=kqldb.id, definition=kql_database_definition, update_metadata = None)
# Create KQL Database Table Shortcut
target = {"oneLake": {"itemId": "item_id_target", "path": "Tables/MyTable", "workspaceId": "ws_id_target"}}
shortcut = fc.create_kql_database_shortcut(workspace_id, kql_database_id=kqldb.id, name="my_shortcut", target=target, enable_query_acceleration=True)
# Get KQL Database Table Shortcut
shortcut = fc.get_kql_database_shortcut(workspace_id, kql_database_id=kqldb.id, shortcut_name="my_shortcut")
# List KQL Database Table Shortcuts
shortcuts = fc.list_kql_database_shortcuts(workspace_id, kql_database_id=kqldb.id)
# Delete KQL Database Table Shortcut
status_code = fc.delete_kql_database_shortcut(workspace_id, kql_database_id=kqldb.id, shortcut_name="my_shortcut")
from msfabricpysdkcore import FabricClientCore
fc = FabricClientCore()
kqlq_w_content = fc.get_kql_queryset(workspace_id="workspace_id", kql_queryset_name="kql_queryset_name")
definition = fc.get_kql_queryset_definition(workspace_id="workspace_id", kql_queryset_id=kqlq_w_content.id)
definition = definition["definition"]
# Create KQL Queryset
kqlq = fc.create_kql_queryset(workspace_id="workspace_id", definition=definition, display_name="kql_queryset_new")
# Delete KQL Queryset
status_code = fc.delete_kql_queryset(workspace_id="workspace_id", kql_queryset_id="kqlq.id")
# Get KQL Queryset
kqlq = fc.get_kql_queryset(workspace_id="workspace_id", kql_queryset_id="kqlq.id")
kqlq_w_content = fc.get_kql_queryset(workspace_id="workspace_id", kql_queryset_name="kql_queryset_name")
# Get KQL Queryset Definition
definition = fc.get_kql_queryset_definition(workspace_id="workspace_id", kql_queryset_id="kqlq.id")
# List KQL Querysets
kqlqs = fc.list_kql_querysets(workspace_id="workspace_id")
# Update KQL Queryset
kqlq2 = fc.update_kql_queryset(workspace_id="workspace_id", kql_queryset_id="kqlq.id", display_name="new_name", return_item=True)
# Update KQL Queryset Definition
fc.update_kql_queryset_definition(workspace_id="workspace_id", kql_queryset_id="kqlq.id", definition=definition)

from msfabricpysdkcore import FabricClientCore
from datetime import datetime
fc = FabricClientCore()
workspace = fc.get_workspace_by_name("testitems")
workspace_id = workspace.id
# Get Lakehouse
lakehouse = fc.get_lakehouse(workspace_id=workspace_id, item_name="lakehouse1")
lakehouse_id = lakehouse.id
date_str = datetime.now().strftime("%Y%m%d%H%M%S")
table_name = f"table{date_str}"
# Load Table
status_code = fc.load_table(workspace_id=workspace_id, lakehouse_id=lakehouse_id, table_name=table_name,
path_type="File", relative_path="Files/folder1/titanic.csv")
# List Tables
table_list = fc.list_tables(workspace_id=workspace_id, lakehouse_id=lakehouse_id)
# Run on demand table maintenance
execution_data = {
"tableName": table_name,
"optimizeSettings": {
"vOrder": True,
"zOrderBy": [
"tipAmount"
]
},
"vacuumSettings": {
"retentionPeriod": "7:01:00:00"
}
}
fc.run_on_demand_table_maintenance(workspace_id=workspace_id, lakehouse_id=lakehouse_id,
execution_data = execution_data,
job_type = "TableMaintenance", wait_for_completion = True)
# Create Lakehouse
lakehouse = fc.create_lakehouse(workspace_id=workspace_id, display_name="lakehouse2")
# (Preview Feature) Create Lakehouse with Schema
creation_payload = {
"enableSchemas": True
}
lakehouse = fc.create_lakehouse(workspace_id=workspace_id, display_name="lakehouse2", creation_payload=creation_payload)
# List Lakehouses
lakehouses = fc.list_lakehouses(workspace_id)
# Get Lakehouse
lakehouse2 = fc.get_lakehouse(workspace_id=workspace_id, lakehouse_id=lakehouse.id)
# Update Lakehouse
lakehouse2 = fc.update_lakehouse(workspace_id=workspace_id, lakehouse_id=lakehouse.id, display_name="lakehouse3", return_item=True)
# Delete Lakehouse
fc.delete_lakehouse(workspace_id=workspace_id, lakehouse_id=lakehouse.id)
# Get Lakehouse Definition
lakehouse_definition = fc.get_lakehouse_definition(workspace_id=workspace_id, lakehouse_id=lakehouse.id)
# Update Lakehouse Definition
fc.update_lakehouse_definition(workspace_id=workspace_id, lakehouse_id=lakehouse.id, definition=lakehouse_definition)
# List Livy Sessions
livy_sessions = fc.list_lakehouse_livy_sessions(workspace_id=workspace_id, lakehouse_id=lakehouse.id)
# Get Livy Session
livy_session = fc.get_lakehouse_livy_session(workspace_id=workspace_id, lakehouse_id=lakehouse.id, livy_id=livy_id)
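# Materialized lake view APIs below use separate placeholder workspace and lakehouse ids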
workspace_id = "f8xxxxxxxxxxxx"
lakehouse_id = "bxxxxxxxxxxxx"
# Run On-Demand Refresh Materialized Lake View
resp = fc.run_on_demand_refresh_materialized_lake_view(workspace_id=workspace_id, lakehouse_id=lakehouse_id)
# Create Refresh Materialized Lake View Schedule
configuration = {
"startDateTime": "2025-04-28T00:00:00",
"endDateTime": "2025-04-30T23:59:00",
"localTimeZoneId": "Central Standard Time",
"type": "Cron",
"interval": 10
}
resp = fc.create_refresh_materialized_lake_view_schedule(workspace_id=workspace_id, lakehouse_id=lakehouse_id, enabled=True, configuration=configuration)
# Delete Refresh Materialized Lake View Schedule
resp = fc.delete_refresh_materialized_lake_view_schedule(workspace_id=workspace_id, lakehouse_id=lakehouse_id, schedule_id="a2xxxxxxxxxxxx")
# Update Refresh Materialized Lake View Schedule
resp = fc.update_refresh_materialized_lake_view_schedule(workspace_id=workspace_id, lakehouse_id=lakehouse_id, schedule_id="a2xxxxxxxxxxxx",
enabled=False, configuration=configuration)

from msfabricpysdkcore import FabricClientCore
from datetime import datetime
fcc = FabricClientCore()
workspace_id = "05bc5baa-ef02-4a31-ab20-158a478151d3"
item_id = "eb5a54af-f282-4612-97c1-95120620b5d3"
connection_id = "f7ac4f29-a70e-4868-87a1-9cdd92eacfa0"
catalog_name = "unitycatalogdbxsweden"
schema_name = "testinternal"
table_name = "internal_customer"
# List Mirrored Azure Databricks Catalogs
mirrored_azure_databricks_catalog = fcc.list_mirrored_azure_databricks_catalogs(workspace_id=workspace_id)
# Get Mirrored Azure Databricks Catalog Definition
mirrored_azure_databricks_catalog_definition = fcc.get_mirrored_azure_databricks_catalog_definition(workspace_id=workspace_id, mirrored_azure_databricks_catalog_id=item_id)
definition = mirrored_azure_databricks_catalog_definition["definition"]
# Create Mirrored Azure Databricks Catalog
date_str = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
date_str = date_str.replace(" ", "T").replace(":", "").replace("-", "")
date_str = f"copyjob{date_str}"
creation_payload = {
"catalogName": "catalog_1",
"databricksWorkspaceConnectionId": "c1128asdfas1e35f86",
"mirroringMode": "Full",
"storageConnectionId": "c1128fasfdb91e35f87"
}
mirrored_azure_databricks_catalog_new = fcc.create_mirrored_azure_databricks_catalog(workspace_id=workspace_id, display_name=date_str, creation_payload=creation_payload)
# Get Mirrored Azure Databricks Catalog
mirrored_azure_databricks_catalog_get = fcc.get_mirrored_azure_databricks_catalog(workspace_id=workspace_id, mirrored_azure_databricks_catalog_id=mirrored_azure_databricks_catalog_new.id)
# Update Mirrored Azure Databricks Catalog
date_str_updated = date_str + "_updated"
mirrored_azure_databricks_catalog_updated = fcc.update_mirrored_azure_databricks_catalog(workspace_id=workspace_id, mirrored_azure_databricks_catalog_id=mirrored_azure_databricks_catalog_new.id, display_name=date_str_updated, return_item=True)
# Update Mirrored Azure Databricks Catalog Definition
mirrored_azure_databricks_catalog_updated = fcc.update_mirrored_azure_databricks_catalog_definition(workspace_id=workspace_id, mirrored_azure_databricks_catalog_id=mirrored_azure_databricks_catalog_new.id, definition=definition)
# Delete Mirrored Azure Databricks Catalog
resp = fcc.delete_mirrored_azure_databricks_catalog(workspace_id=workspace_id, mirrored_azure_databricks_catalog_id=mirrored_azure_databricks_catalog_updated.id)
# Discover Mirrored Azure Databricks Catalogs, Schemas, and Tables
catalogs = fcc.discover_mirrored_azure_databricks_catalogs(workspace_id=workspace_id, databricks_workspace_connection_id=connection_id)
schemas = fcc.discover_mirrored_azure_databricks_catalog_schemas(workspace_id=workspace_id, catalog_name=catalog_name, databricks_workspace_connection_id=connection_id)
tables = fcc.discover_mirrored_azure_databricks_catalog_tables(workspace_id=workspace_id, catalog_name=catalog_name, schema_name=schema_name, databricks_workspace_connection_id=connection_id)
# Refresh Mirrored Azure Databricks Catalog Metadata
status = fcc.refresh_mirrored_azure_databricks_catalog_metadata(workspace_id=workspace_id,
item_id=item_id, wait_for_completion=False)

from msfabricpysdkcore import FabricClientCore

fcc = FabricClientCore()
workspace_id = "05bbbbbbbbbbbbbbb3"
item_id = "9dccccccccccccccccccc0"
# Get Map Definition
map_definition = fcc.get_map_definition(workspace_id=workspace_id, map_id=item_id)
definition = map_definition["definition"]
# Create Map
map_new = fcc.create_map(workspace_id=workspace_id, display_name="asdfs", definition=definition)
# Get Map
map_get = fcc.get_map(workspace_id=workspace_id, map_id=map_new.id)
# List Maps
maps = fcc.list_maps(workspace_id=workspace_id)
# Update Map
map_updated = fcc.update_map(workspace_id=workspace_id, map_id=map_new.id, display_name="asdfasdfasdf", return_item=True)
# Update Map Definition
map_updated = fcc.update_map_definition(workspace_id=workspace_id, map_id=map_new.id, definition=definition)
# Delete Map
resp = fcc.delete_map(workspace_id=workspace_id, map_id=map_new.id)

from msfabricpysdkcore import FabricClientCore
fc = FabricClientCore()
mirrored_db_w_content = fc.get_mirrored_database(workspace_id="workspace_id", mirrored_database_name="dbdemo")
# Get Mirroring Status
status = fc.get_mirroring_status(workspace_id="workspace_id", mirrored_database_id="mirrored_db_w_content.id")
# Get tables mirroring status
table_status = fc.get_tables_mirroring_status(workspace_id="workspace_id", mirrored_database_id="mirrored_db_w_content.id")
# Start Mirroring
fc.start_mirroring(workspace_id="workspace_id", mirrored_database_id="mirrored_db_w_content.id")
# Stop Mirroring
fc.stop_mirroring(workspace_id="workspace_id", mirrored_database_id="mirrored_db_w_content.id")
# Create Mirrored Database
mirrored_db = fc.create_mirrored_database(workspace_id="workspace_id", display_name="mirrored_db_name")
# Delete Mirrored Database
status_code = fc.delete_mirrored_database(workspace_id="workspace_id", mirrored_database_id="mirrored_db_check.id")
# Get Mirrored Database
mirrored_db_check = fc.get_mirrored_database(workspace_id="workspace_id", mirrored_database_id="mirrored_db.id")
# Get mirrored database definition
definition = fc.get_mirrored_database_definition(workspace_id="workspace_id", mirrored_database_id="mirrored_db_w_content.id")
# List Mirrored Databases
mirrored_dbs = fc.list_mirrored_databases(workspace_id="workspace_id")
# Update Mirrored Database
mirrored_db_2 = fc.update_mirrored_database(workspace_id="workspace_id", mirrored_database_id="mirrored_db_check.id",
display_name="new_name", return_item=True)
# Update Mirrored Database Definition
fc.update_mirrored_database_definition(workspace_id="workspace_id", mirrored_database_id="mirrored_db_check.id", definition=definition)

from msfabricpysdkcore import FabricClientCore
fc = FabricClientCore()
workspace = fc.get_workspace_by_name("testitems")
workspace_id = workspace.id
# List ML Experiments
ml_experiments = fc.list_ml_experiments(workspace_id)
# Create ML Experiment
mle = fc.create_ml_experiment(workspace_id, display_name="mlexperiment1")
# Get ML Experiment
mle = fc.get_ml_experiment(workspace_id, ml_experiment_name="mlexperiment1")
# Update ML Experiment
mle2 = fc.update_ml_experiment(workspace_id, mle.id, display_name="mlexperiment2", return_item=True)
# Delete ML Experiment
fc.delete_ml_experiment(workspace_id, mle.id)

from msfabricpysdkcore import FabricClientCore
fc = FabricClientCore()
workspace = fc.get_workspace_by_name("testitems")
workspace_id = workspace.id
# List ML Models
ml_models = fc.list_ml_models(workspace_id)
# Create ML Model
ml_model = fc.create_ml_model(workspace_id, display_name="mlmodel1")
# Get ML Model
ml_model = fc.get_ml_model(workspace_id, ml_model_name="mlmodel1")
# Update ML Model
ml_model2 = fc.update_ml_model(workspace_id, ml_model_id=ml_model.id, display_name="mlmodel2", return_item=True)
# Delete ML Model
fc.delete_ml_model(workspace_id, ml_model.id)
## Endpoints
workspace_id = "123123"
model_id = "123123"
# Activate ML Model Endpoint Version
resp = fcc.activate_ml_model_endpoint_version(workspace_id=workspace_id, model_id=model_id, name="1")
# Deactivate all ML Model Endpoint Versions
resp = fcc.deactivate_all_ml_model_endpoint_versions(workspace_id=workspace_id, model_id=model_id)
# Deactivate ML Model Endpoint Version
resp = fcc.deactivate_ml_model_endpoint_version(workspace_id=workspace_id, model_id=model_id, name="1")
# Get ML Model Endpoint
resp = fcc.get_ml_model_endpoint(workspace_id=workspace_id, model_id=model_id)
# Get ML Model Endpoint Version
resp = fcc.get_ml_model_endpoint_version(workspace_id=workspace_id, model_id=model_id, name="1")
# List ML Model Endpoint Versions
resp = fcc.list_ml_model_endpoint_versions(workspace_id=workspace_id, model_id=model_id)
# Score ML Model Endpoint
format_type= "dataframe"
orientation= "values"
inputs= [
[
-0.00188201652779,
-0.04464163650698,
-0.0514740612388,
-0.0263275281478529,
-0.00844872411121,
-0.01916333974822,
0.07441156407875721,
-0.03949338287409329,
-0.0683315470939731,
-0.092204049626824
]]
resp = fcc.score_ml_model_endpoint(workspace_id=workspace_id, model_id=model_id, format_type=format_type, orientation=orientation, inputs=inputs)
# Score ML Model Endpoint Version
resp = fcc.score_ml_model_endpoint_version(workspace_id=workspace_id, model_id=model_id, name="1", format_type=format_type, orientation=orientation, inputs=inputs)
# Update ML Model Endpoint
defaultVersionAssignmentBehavior = "StaticallyConfigured"
defaultVersionName = "1"
resp = fcc.update_ml_model_endpoint(workspace_id=workspace_id, model_id=model_id,
default_version_assignment_behavior=defaultVersionAssignmentBehavior,
default_version_name=defaultVersionName)
# Update ML Model Endpoint Version
resp = fcc.update_ml_model_endpoint_version(workspace_id=workspace_id, model_id=model_id, name="1", scale_rule="AllowScaleToZero")
from msfabricpysdkcore import FabricClientCore
fc = FabricClientCore()
workspace = fc.get_workspace_by_name("testitems")
workspace_id = workspace.id
# List Mounted Data Factories
mounted_data_factories = fc.list_mounted_data_factories(workspace_id = workspace_id)
mounted_data_factory_w_content = fc.get_mounted_data_factory(workspace_id = workspace_id, mounted_data_factory_name="HelloWorld")
# Create Mounted Data Factory
mounted_data_factory = fc.create_mounted_data_factory(workspace_id = workspace_id, display_name="mounted_data_factory1", description="description", definition=mounted_data_factory_w_content.definition)
# Get Mounted Data Factory
mounted_data_factory = fc.get_mounted_data_factory(workspace_id = workspace_id, mounted_data_factory_name="mounted_data_factory1", mounted_data_factory_id=None)
# Update Mounted Data Factory
mounted_data_factory2 = fc.update_mounted_data_factory(workspace_id = workspace_id, mounted_data_factory_id = mounted_data_factory.id, display_name="mounted_data_factory2", return_item=True)
# Delete Mounted Data Factory
fc.delete_mounted_data_factory(workspace_id = workspace_id, mounted_data_factory_id = mounted_data_factory.id)
# Get Mounted Data Factory Definition
mounted_data_factory_definition = fc.get_mounted_data_factory_definition(workspace_id = workspace_id, mounted_data_factory_id = mounted_data_factory.id, format=None)
# Update Mounted Data Factory Definition
fc.update_mounted_data_factory_definition(workspace_id = workspace_id, mounted_data_factory_id = mounted_data_factory.id, definition=mounted_data_factory_w_content.definition, update_metadata=None)

from msfabricpysdkcore import FabricClientCore
fc = FabricClientCore()
workspace = fc.get_workspace_by_name("testitems")
workspace_id = workspace.id
# List Notebooks
notebooks = fc.list_notebooks(workspace_id)
# Create Notebook
notebook_w_content = fc.get_notebook(workspace_id, notebook_name="HelloWorld")
definition = notebook_w_content.definition
notebook = fc.create_notebook(workspace_id, definition = definition, display_name="notebook1")
# Get Notebook
notebook = fc.get_notebook(workspace_id, notebook_name="notebook1")
# Update Notebook
notebook2 = fc.update_notebook(workspace_id, notebook.id, display_name="notebook2", return_item=True)
# Get Notebook Definition
fc.get_notebook_definition(workspace_id, notebook.id, format=None)
# Update Notebook Definition
fc.update_notebook_definition(workspace_id, notebook.id, definition=definition)
# Delete Notebook
fc.delete_notebook(workspace_id, notebook.id)
# List Notebook Livy Sessions
notebook_livy_sessions = fc.list_notebook_livy_sessions(workspace_id, notebook.id)
# Get Notebook Livy Session
notebook_livy_session = fc.get_notebook_livy_session(workspace_id, notebook.id, livy_id=livy_id)

from msfabricpysdkcore import FabricClientCore
fc = FabricClientCore()
workspace = fc.get_workspace_by_name("testitems")
workspace_id = workspace.id
# List Reports
reports = fc.list_reports(workspace_id = workspace_id)
# Create Report
report_w_content = fc.get_report(workspace_id = workspace_id, report_name="HelloWorldReport")
definition = report_w_content.definition
report = fc.create_report(workspace_id = workspace_id, display_name="report1", definition=definition)
# Get Report
report = fc.get_report(workspace_id = workspace_id, report_name="report1")
# Get Report Definition
fc.get_report_definition(workspace_id = workspace_id, report_id = report.id, format=None)
# Update Report Definition
fc.update_report_definition(workspace_id = workspace_id, report_id = report.id, definition=definition)
# Update Report
fc.update_report(workspace_id = workspace_id, report_id = report.id, display_name = "name", description = "Description", return_item=False)
# Delete Report
fc.delete_report(workspace_id = workspace_id, report_id = report.id)

from msfabricpysdkcore import FabricClientCore
fc = FabricClientCore()
workspace = fc.get_workspace_by_name("testitems")
workspace_id = workspace.id
# List Reflexes
reflexes = fc.list_reflexes(workspace_id=workspace_id)
# Create Reflex
reflex_w_content = fc.get_reflex(workspace_id=workspace_id, reflex_name="HelloWorld")
definition = reflex_w_content.definition
reflex = fc.create_reflex(workspace_id=workspace_id, display_name="reflex1", description = "Description", definition=definition)
# Get Reflex
reflex = fc.get_reflex(workspace_id=workspace_id, reflex_name="reflex1")
# Get Reflex Definition
definition = fc.get_reflex_definition(workspace_id=workspace_id, reflex_id=reflex.id, format=None)
# Update Reflex
reflex2 = fc.update_reflex(workspace_id=workspace_id, reflex_id= reflex.id, display_name="reflex2", description = "Description", return_item=True)
# Update Reflex Definition
fc.update_reflex_definition(workspace_id=workspace_id, reflex_id= reflex.id, definition=definition)
# Delete Reflex
fc.delete_reflex(workspace_id=workspace_id, reflex_id=reflex.id)

from msfabricpysdkcore import FabricClientCore
fc = FabricClientCore()
workspace = fc.get_workspace_by_name("testitems")
workspace_id = workspace.id
# List Semantic Models
semantic_models = fc.list_semantic_models(workspace_id="1232")
# Create Semantic Model
semantic_model_w_content = fc.get_semantic_model(workspace_id="1232", semantic_model_name="Table")
definition = semantic_model_w_content.definition
semantic_model = fc.create_semantic_model(workspace_id="1232", display_name="semanticmodel1", definition=definition)
# Get Semantic Model
semantic_model = fc.get_semantic_model(workspace_id="1232", semantic_model_name="semanticmodel1")
semantic_model = fc.get_semantic_model(workspace_id="1232", semantic_model_id="semantic_model.id")
# Get Semantic Model Definition
definition = fc.get_semantic_model_definition(workspace_id="1232", semantic_model_id="semantic_model.id", format=None)
# Update Semantic Model
fc.update_semantic_model(workspace_id="1232", semantic_model_id="semantic_model.id", display_name="new_name", return_item=True)
# Update Semantic Model Definition
fc.update_semantic_model_definition(workspace_id="1232", semantic_model_id="semantic_model.id", definition=definition)
# Delete Semantic Model
fc.delete_semantic_model(workspace_id="1232", semantic_model_id="semantic_model.id")
# Bind Semantic Model Connection
connection_binding = {
"id": "0xxxxxxxxxxxxxxxxxx",
"connectivityType": "OnPremisesDataGateway",
"connectionDetails": {
"type": "SQL",
"path": "contoso.database.windows.net;sales"
}
}
fc.bind_semantic_model_connection(workspace_id="workspace_id", semantic_model_id="semantic_model_id", connection_binding=connection_binding)

from msfabricpysdkcore import FabricClientCore
fc = FabricClientCore()
workspace = fc.get_workspace_by_name("testitems")
workspace_id = workspace.id
# List Spark Livy Sessions
spark_livy_sessions = fc.list_livy_sessions(workspace_id=workspace_id)
# List Livy Sessions for a specific item
spark_livy_sessions = fc.list_livy_sessions(workspace_id=workspace_id, item_id="item_id", item_type="lakehouses")
# Get a specific Livy Session
livy_session = fc.get_livy_session(workspace_id=workspace_id, item_id="item_id", item_type="lakehouses", livy_id="livy_id")

workspace_id = "sfgsdfgs34234"
# List spark custom pools
pools = fc.list_workspace_custom_pools(workspace_id=workspace_id)
pool1 = [p for p in pools if p.name == "pool1"][0]
# Get a spark custom pool
pool1_clone = fc.get_workspace_custom_pool(workspace_id=workspace_id, pool_id=pool1.id)
# Create a spark custom pool
pool2 = fc.create_workspace_custom_pool(workspace_id=workspace_id,
name="pool2",
node_family="MemoryOptimized",
node_size="Small",
auto_scale = {"enabled": True, "minNodeCount": 1, "maxNodeCount": 2},
dynamic_executor_allocation = {"enabled": True, "minExecutors": 1, "maxExecutors": 1})
# Update a spark custom pool
pool2 = fc.update_workspace_custom_pool(workspace_id=workspace_id, pool_id=pool2.id,
auto_scale = {"enabled": True, "minNodeCount": 1, "maxNodeCount": 7}, return_item=True)
# Delete a spark custom pool
status_code = fc.delete_workspace_custom_pool(workspace_id=workspace_id, pool_id=pool2.id)

workspace_id = "io4i34t0sfg"
# Get spark settings
settings = fc.get_spark_settings(workspace_id)
# Update
settings["automaticLog"]["enabled"] = not settings["automaticLog"]["enabled"]
settings = fc.update_spark_settings(workspace_id, automatic_log=settings["automaticLog"])
from msfabricpysdkcore import FabricClientCore
fc = FabricClientCore()
workspace = fc.get_workspace_by_name("testitems")
workspace_id = workspace.id
# List Spark Job Definitions
spark_job_definitions = fc.list_spark_job_definitions(workspace_id)
# Create Spark Job Definition
spark_job_definition_w_content = fc.get_spark_job_definition(workspace_id, spark_job_definition_name="helloworld")
definition = spark_job_definition_w_content.definition
spark_job_definition = fc.create_spark_job_definition(workspace_id, display_name="sparkjobdefinition1", definition=definition)
# Get Spark Job Definition
spark_job_definition = fc.get_spark_job_definition(workspace_id, spark_job_definition_name="helloworld")
# Run on demand spark job definition
job_instance = fc.run_on_demand_spark_job_definition(workspace_id, spark_job_definition.id, job_type="sparkjob")
# Update Spark Job Definition
spark_job_definition2 = fc.update_spark_job_definition(workspace_id, spark_job_definition.id, display_name="sparkjobdefinition2", return_item=True)
# Get Spark Job Definition Definition
fc.get_spark_job_definition_definition(workspace_id, spark_job_definition.id, format=None)
# Update Spark Job Definition Definition
fc.update_spark_job_definition_definition(workspace_id, spark_job_definition.id, definition=definition)
# Delete Spark Job Definition
fc.delete_spark_job_definition(workspace_id, spark_job_definition.id)
# List Spark Job Definition Livy Sessions
spark_job_definition_livy_sessions = fc.list_spark_job_definition_livy_sessions(workspace_id, spark_job_definition.id)
# Get Spark Job Definition Livy Session
spark_job_definition_livy_session = fc.get_spark_job_definition_livy_session(workspace_id, spark_job_definition.id, livy_id=livy_id)
from msfabricpysdkcore import FabricClientCore
fc = FabricClientCore()
workspace = fc.get_workspace_by_name("testitems")
workspace_id = workspace.id
# List SQL Databases
sql_databases = fc.list_sql_databases(workspace_id=workspace_id)
# Create SQL Database
sql_database = fc.create_sql_database(workspace_id=workspace_id, display_name="sqldb1", description="description")
# Get SQL Database
sql_database = fc.get_sql_database(workspace_id=workspace_id, sql_database_name="sqldb1")
# Update SQL Database
sql_database2 = fc.update_sql_database(workspace_id=workspace_id, sql_database_id=sql_database.id, display_name="sqldb2", description="description", return_item=True)
# Delete SQL Database
fc.delete_sql_database(workspace_id=workspace_id, sql_database_id=sql_database.id)
# Get SQL Database Definition
definition = fc.get_sql_database_definition(workspace_id=workspace_id, sql_database_id=sql_database.id)
# Update SQL Database Definition
resp = fc.update_sql_database_definition(workspace_id=workspace_id, sql_database_id=sql_database.id, definition=definition)
# Revalidate SQL Database CMK
resp = fc.revalidate_sql_database_cmk(workspace_id=workspace_id, sql_database_id=sql_database.id)
# Start SQL Database Mirroring
resp = fc.start_sql_database_mirroring(workspace_id=workspace_id, sql_database_id=sql_database.id)
# Stop SQL Database Mirroring
resp = fc.stop_sql_database_mirroring(workspace_id=workspace_id, sql_database_id=sql_database.id)

from msfabricpysdkcore import FabricClientCore

fcc = FabricClientCore()
workspace_id = "0asdfasdf8151d3"
# List sql endpoints
list_sql_endpoints = fcc.list_sql_endpoints(workspace_id)
# Refresh SQL Endpoint Metadata
sql_endpoint_id = "123123"
resp = fcc.refresh_sql_endpoint_metadata(workspace_id, sql_endpoint_id, preview=True, timeout=None, wait_for_completion=False)
# Get SQL Endpoint Audit Settings
audit_settings = fcc.get_sql_endpoint_audit_settings(workspace_id=workspace_id, sql_endpoint_id=sql_endpoint_id)
# Update SQL Endpoint Audit Settings
respo = fcc.update_sql_endpoint_audit_settings(workspace_id=workspace_id, sql_endpoint_id=sql_endpoint_id,
state="Enabled", retention_days=10)
# Set SQL Endpoint Audit Actions and Groups
actionsandgroups = ["SUCCESSFUL_DATABASE_AUTHENTICATION_GROUP", "FAILED_DATABASE_AUTHENTICATION_GROUP", "BATCH_COMPLETED_GROUP"]
respo = fcc.set_sql_endpoint_audit_actions_and_groups(workspace_id=workspace_id, sql_endpoint_id=sql_endpoint_id,
set_audit_actions_and_groups_request=actionsandgroups)
# Get connection string
connection_string = fcc.get_sql_endpoint_connection_string(workspace_id=workspace_id, sql_endpoint_id=sql_endpoint_id)
# Optional parameters for get connection string
guestTenantId="6------------------2"
privateLinkType="Workspace"
connection_string = fcc.get_sql_endpoint_connection_string(workspace_id=workspace_id, sql_endpoint_id=sql_endpoint_id,
guest_tenant_id=guestTenantId, private_link_type=privateLinkType)

from msfabricpysdkcore import FabricClientCore

fcc = FabricClientCore()
workspace = fcc.get_workspace_by_name("testitems")
workspace_id = workspace.id
item_id = "08-----------ec"
# Get an existing User Data Function definition to reuse
resp = fcc.get_user_data_function_definition(workspace_id, item_id)
# Create User Data Function
resp = fcc.create_user_data_function(
    workspace_id=workspace_id, display_name="MyFunction",
    definition=resp["definition"],
    description="Created via SDK")
# Delete User Data Function
udf_id = resp.id
resp = fcc.delete_user_data_function(workspace_id=workspace_id, user_data_function_id=udf_id)
# Get User Data Function
resp = fcc.get_user_data_function(workspace_id, item_id)
# Get User Data Function Definition
resp = fcc.get_user_data_function_definition(workspace_id, item_id)
# List User Data Functions
resp = fcc.list_user_data_functions(workspace_id=workspace_id)
# Update User Data Function
resp = fcc.update_user_data_function(workspace_id=workspace_id, user_data_function_id=udf_id, display_name="MyFunctionUpdated")
# Update User Data Function Definition
definition = {'parts': [{'path': 'definition.json',
'payload': 'ew0KICAiJHNjaGVtYSI6ICJodHRwczovL2RldmVsb3Blci5taWNyb3NvZnQuY29tL2pzb24tc2NoZW1hcy9mYWJyaWMvaXRlbS91c2VyRGF0YUZ1bmN0aW9uL2RlZmluaXRpb24vMS4xLjAvc2NoZW1hLmpzb24iLA0KICAicnVudGltZSI6ICJQWVRIT04iLA0KICAiY29ubmVjdGVkRGF0YVNvdXJjZXMiOiBbXSwNCiAgImZ1bmN0aW9ucyI6IFtdLA0KICAibGlicmFyaWVzIjogew0KICAgICJwdWJsaWMiOiBbXSwNCiAgICAicHJpdmF0ZSI6IFtdDQogIH0NCn0=',
'payloadType': 'InlineBase64'},
{'path': '.platform',
'payload': 'ewogICIkc2NoZW1hIjogImh0dHBzOi8vZGV2ZWxvcGVyLm1pY3Jvc29mdC5jb20vanNvbi1zY2hlbWFzL2ZhYnJpYy9naXRJbnRlZ3JhdGlvbi9wbGF0Zm9ybVByb3BlcnRpZXMvMi4wLjAvc2NoZW1hLmpzb24iLAogICJtZXRhZGF0YSI6IHsKICAgICJ0eXBlIjogIlVzZXJEYXRhRnVuY3Rpb24iLAogICAgImRpc3BsYXlOYW1lIjogIlVzZXJEYXRhRnVuY3Rpb25fMSIKICB9LAogICJjb25maWciOiB7CiAgICAidmVyc2lvbiI6ICIyLjAiLAogICAgImxvZ2ljYWxJZCI6ICIwMDAwMDAwMC0wMDAwLTAwMDAtMDAwMC0wMDAwMDAwMDAwMDAiCiAgfQp9',
'payloadType': 'InlineBase64'}]}
resp = fcc.update_user_data_function_definition(workspace_id=workspace_id, user_data_function_id=udf_id, definition=definition)

from msfabricpysdkcore import FabricClientCore
from datetime import datetime
fcc = FabricClientCore()
workspace_id = "0asdfasdf8151d3"
item_id = "0812fasdfsdf2709ec"
# get variable library definition
variable_library_definition = fcc.get_variable_library_definition(workspace_id=workspace_id, variable_library_id=item_id)
definition = variable_library_definition["definition"]
# create variable library
date_str = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
date_str = date_str.replace(" ", "T").replace(":", "").replace("-", "")
date_str = f"variablelibrary{date_str}"
variable_library_new = fcc.create_variable_library(workspace_id=workspace_id, display_name=date_str, definition=definition)
# get variable library
variable_library_get = fcc.get_variable_library(workspace_id=workspace_id, variable_library_id=variable_library_new.id)
# list variable libraries
variable_librarys = fcc.list_variable_libraries(workspace_id=workspace_id)
# update variable library
date_str_updated = date_str + "_updated"
variable_library_updated = fcc.update_variable_library(workspace_id=workspace_id, variable_library_id=variable_library_new.id, display_name=date_str_updated, return_item=True)
# update variable library definition
variable_library_updated = fcc.update_variable_library_definition(workspace_id=workspace_id, variable_library_id=variable_library_new.id, definition=definition)
# delete variable library
resp = fcc.delete_variable_library(workspace_id=workspace_id, variable_library_id=variable_library_updated.id)

from msfabricpysdkcore import FabricClientCore
fc = FabricClientCore()
workspace = fc.get_workspace_by_name("testitems")
workspace_id = workspace.id
# List Warehouses
warehouses = fc.list_warehouses(workspace_id)
# Create Warehouse
warehouse = fc.create_warehouse(workspace_id=workspace_id, display_name="wh1")
# Get Warehouse
warehouse = fc.get_warehouse(workspace_id=workspace_id, warehouse_id="123123")
# Update Warehouse
warehouse2 = fc.update_warehouse(workspace_id=workspace_id, warehouse_id=warehouse.id, display_name="wh2", return_item=True)
# Delete Warehouse
fc.delete_warehouse(workspace_id=workspace_id, warehouse_id=warehouse.id)
# Get Warehouse Connection String
resp = fcc.get_warehouse_connection_string(workspace_id=workspace_id, warehouse_id=item_id)
# List Warehouse Restore Points
resp = fcc.list_warehouse_restore_points(workspace_id=workspace_id, warehouse_id=item_id)
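# rp_to_delete used below is a placeholder for the id of one of the restore points returned above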
# Create Warehouse Restore Point
resp = fcc.create_warehouse_restore_point(workspace_id, item_id, "my second restore point", wait_for_completion=False)
# Get Warehouse Restore Point
resp = fcc.get_warehouse_restore_point(workspace_id, item_id, "1760452482000")
# Update Warehouse Restore Point
resp = fcc.update_warehouse_restore_point(workspace_id, item_id, rp_to_delete, description="updated description")
# Delete Warehouse Restore Point
resp = fcc.delete_warehouse_restore_point(workspace_id, item_id, rp_to_delete)
# Restore Warehouse to Restore Point
resp = fcc.restore_warehouse_to_restore_point(workspace_id, item_id, "1760452482000")
# Get Warehouse SQL Audit Settings
audit_settings = fcc.get_warehouse_sql_audit_settings(workspace_id=workspace_id, warehouse_id=item_id)
# Update Warehouse SQL Audit Settings
respo = fcc.update_warehouse_sql_audit_settings(workspace_id=workspace_id, warehouse_id=item_id,
state="Enabled", retention_days=10)
# Set Warehouse SQL Audit Actions and Groups
actionsandgroups = ["SUCCESSFUL_DATABASE_AUTHENTICATION_GROUP", "FAILED_DATABASE_AUTHENTICATION_GROUP", "BATCH_COMPLETED_GROUP"]
respo = fcc.set_warehouse_audit_actions_and_groups(workspace_id=workspace_id, warehouse_id=item_id,
set_audit_actions_and_groups_request=actionsandgroups)from msfabricpysdkcore import FabricClientCore
fc = FabricClientCore()
workspace = fc.get_workspace_by_name("testitems")
workspace_id = workspace.id
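# creation_payload identifies the source warehouse and the (UTC) point in time to snapshot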
creation_payload = {
"parentWarehouseId": "7332259c-fb34-4975-99db-85818fb8664f",
"snapshotDateTime": "2024-10-15T13:00:00Z"
}
# Create Warehouse Snapshot
warehouse_sn = fc.create_warehouse_snapshot(workspace_id=workspace_id, display_name="warehouse_snapshot1",
creation_payload=creation_payload, description="Description")
# Get Warehouse Snapshot
warehouse_sn2 = fc.get_warehouse_snapshot(workspace_id=workspace_id, warehouse_snapshot_name="warehouse_snapshot1")
# Get Warehouse Snapshot by ID
warehouse_sn2 = fc.get_warehouse_snapshot(workspace_id=workspace_id, warehouse_snapshot_id=warehouse_sn.id)
# List Warehouse Snapshots
warehouse_snapshots = fc.list_warehouse_snapshots(workspace_id=workspace_id, with_properties=False)
# Update Warehouse Snapshot
properties = {
"snapshotDateTime": "2024-10-10T15:20:15Z"
}
warehouse_sn3 = fc.update_warehouse_snapshot(workspace_id=workspace_id, warehouse_snapshot_id=warehouse_sn.id,
display_name="warehouse_snapshot2", description="Description", properties=properties, return_item=True)
# Delete Warehouse Snapshot
resp = fc.delete_warehouse_snapshot(workspace_id=workspace_id, warehouse_snapshot_id=warehouse_sn.id)
from msfabricpysdkcore import FabricClientCore
fcc = FabricClientCore()
workspace_id = "your_workspace_id"
# Create Cosmos DB database
cosmos_db = fcc.create_cosmos_db_database(workspace_id=workspace_id, display_name="my_cosmos_db", definition=definition)
# Get Cosmos DB database
cosmos_db_get = fcc.get_cosmos_db_database(workspace_id=workspace_id, cosmos_db_database_id=cosmos_db.id)
# Get Cosmos DB database by name
cosmos_db_get = fcc.get_cosmos_db_database(workspace_id=workspace_id, cosmos_db_database_name="my_cosmos_db")
# Get Cosmos DB database definition
cosmos_db_def = fcc.get_cosmos_db_database_definition(workspace_id=workspace_id, cosmos_db_database_id=cosmos_db.id)
# List Cosmos DB databases
cosmos_dbs = fcc.list_cosmos_db_databases(workspace_id=workspace_id)
# Update Cosmos DB database
cosmos_db_updated = fcc.update_cosmos_db_database(workspace_id=workspace_id, cosmos_db_database_id=cosmos_db.id, display_name="updated_name", return_item=True)
# Update Cosmos DB database definition
fcc.update_cosmos_db_database_definition(workspace_id=workspace_id, cosmos_db_database_id=cosmos_db.id, definition=definition)
# Delete Cosmos DB database
resp = fcc.delete_cosmos_db_database(workspace_id=workspace_id, cosmos_db_database_id=cosmos_db.id)
from msfabricpysdkcore import FabricClientCore
fcc = FabricClientCore()
workspace_id = "your_workspace_id"
# Create data agent
data_agent = fcc.create_data_agent(workspace_id=workspace_id, display_name="my_data_agent", definition=definition)
# Get data agent
data_agent_get = fcc.get_data_agent(workspace_id=workspace_id, data_agent_id=data_agent.id)
# Get data agent by name
data_agent_get = fcc.get_data_agent(workspace_id=workspace_id, data_agent_name="my_data_agent")
# Get data agent definition
data_agent_def = fcc.get_data_agent_definition(workspace_id=workspace_id, data_agent_id=data_agent.id)
# List data agents
data_agents = fcc.list_data_agents(workspace_id=workspace_id)
# Update data agent
data_agent_updated = fcc.update_data_agent(workspace_id=workspace_id, data_agent_id=data_agent.id, display_name="updated_name", return_item=True)
# Update data agent definition
fcc.update_data_agent_definition(workspace_id=workspace_id, data_agent_id=data_agent.id, definition=definition)
# Delete data agent
resp = fcc.delete_data_agent(workspace_id=workspace_id, data_agent_id=data_agent.id)
from msfabricpysdkcore import FabricClientCore
fcc = FabricClientCore()
workspace_id = "your_workspace_id"
# Create event schema set
event_schema_set = fcc.create_event_schema_set(workspace_id=workspace_id, display_name="my_event_schema_set", definition=definition)
# Get event schema set
event_schema_set_get = fcc.get_event_schema_set(workspace_id=workspace_id, event_schema_set_id=event_schema_set.id)
# Get event schema set by name
event_schema_set_get = fcc.get_event_schema_set(workspace_id=workspace_id, event_schema_set_name="my_event_schema_set")
# Get event schema set definition
event_schema_set_def = fcc.get_event_schema_set_definition(workspace_id=workspace_id, event_schema_set_id=event_schema_set.id)
# List event schema sets
event_schema_sets = fcc.list_event_schema_sets(workspace_id=workspace_id)
# Update event schema set
event_schema_set_updated = fcc.update_event_schema_set(workspace_id=workspace_id, event_schema_set_id=event_schema_set.id, display_name="updated_name", return_item=True)
# Update event schema set definition
fcc.update_event_schema_set_definition(workspace_id=workspace_id, event_schema_set_id=event_schema_set.id, definition=definition)
# Delete event schema set
resp = fcc.delete_event_schema_set(workspace_id=workspace_id, event_schema_set_id=event_schema_set.id)
from msfabricpysdkcore import FabricClientCore
fcc = FabricClientCore()
workspace_id = "your_workspace_id"
# Create graph model
graph_model = fcc.create_graph_model(workspace_id=workspace_id, display_name="my_graph_model", definition=definition)
# Get graph model
graph_model_get = fcc.get_graph_model(workspace_id=workspace_id, graph_model_id=graph_model.id)
# Get graph model by name
graph_model_get = fcc.get_graph_model(workspace_id=workspace_id, graph_model_name="my_graph_model")
# Get graph model definition
graph_model_def = fcc.get_graph_model_definition(workspace_id=workspace_id, graph_model_id=graph_model.id)
# List graph models
graph_models = fcc.list_graph_models(workspace_id=workspace_id)
# Update graph model
graph_model_updated = fcc.update_graph_model(workspace_id=workspace_id, graph_model_id=graph_model.id, display_name="updated_name", return_item=True)
# Update graph model definition
fcc.update_graph_model_definition(workspace_id=workspace_id, graph_model_id=graph_model.id, definition=definition)
# Run on-demand refresh graph
resp = fcc.run_on_demand_refresh_graph(workspace_id=workspace_id, graph_model_id=graph_model.id)
# Execute graph model query (beta)
result = fcc.execute_graph_model_query(workspace_id=workspace_id, graph_model_id=graph_model.id, query="MATCH (n) RETURN n LIMIT 10")
# Get queryable graph type (beta)
graph_type = fcc.get_queryable_graph_type(workspace_id=workspace_id, graph_model_id=graph_model.id)
# Delete graph model
resp = fcc.delete_graph_model(workspace_id=workspace_id, graph_model_id=graph_model.id)
from msfabricpysdkcore import FabricClientCore
fcc = FabricClientCore()
workspace_id = "your_workspace_id"
# Create graph query set
graph_query_set = fcc.create_graph_query_set(workspace_id=workspace_id, display_name="my_graph_query_set", definition=definition)
# Get graph query set
graph_query_set_get = fcc.get_graph_query_set(workspace_id=workspace_id, graph_query_set_id=graph_query_set.id)
# Get graph query set by name
graph_query_set_get = fcc.get_graph_query_set(workspace_id=workspace_id, graph_query_set_name="my_graph_query_set")
# Get graph query set definition
graph_query_set_def = fcc.get_graph_query_set_definition(workspace_id=workspace_id, graph_query_set_id=graph_query_set.id)
# List graph query sets
graph_query_sets = fcc.list_graph_query_sets(workspace_id=workspace_id)
# Update graph query set
graph_query_set_updated = fcc.update_graph_query_set(workspace_id=workspace_id, graph_query_set_id=graph_query_set.id, display_name="updated_name", return_item=True)
# Update graph query set definition
fcc.update_graph_query_set_definition(workspace_id=workspace_id, graph_query_set_id=graph_query_set.id, definition=definition)
# Delete graph query set
resp = fcc.delete_graph_query_set(workspace_id=workspace_id, graph_query_set_id=graph_query_set.id)
from msfabricpysdkcore import FabricClientCore
fcc = FabricClientCore()
workspace_id = "your_workspace_id"
# Create ontology
ontology = fcc.create_ontology(workspace_id=workspace_id, display_name="my_ontology", definition=definition)
# Get ontology
ontology_get = fcc.get_ontology(workspace_id=workspace_id, ontology_id=ontology.id)
# Get ontology by name
ontology_get = fcc.get_ontology(workspace_id=workspace_id, ontology_name="my_ontology")
# Get ontology definition
ontology_def = fcc.get_ontology_definition(workspace_id=workspace_id, ontology_id=ontology.id)
# List ontologies
ontologies = fcc.list_ontologies(workspace_id=workspace_id)
# Update ontology
ontology_updated = fcc.update_ontology(workspace_id=workspace_id, ontology_id=ontology.id, display_name="updated_name", return_item=True)
# Update ontology definition
fcc.update_ontology_definition(workspace_id=workspace_id, ontology_id=ontology.id, definition=definition)
# Delete ontology
resp = fcc.delete_ontology(workspace_id=workspace_id, ontology_id=ontology.id)
from msfabricpysdkcore import FabricClientCore
fcc = FabricClientCore()
workspace_id = "your_workspace_id"
# Create operations agent
ops_agent = fcc.create_operations_agent(workspace_id=workspace_id, display_name="my_ops_agent", definition=definition)
# Get operations agent
ops_agent_get = fcc.get_operations_agent(workspace_id=workspace_id, operations_agent_id=ops_agent.id)
# Get operations agent by name
ops_agent_get = fcc.get_operations_agent(workspace_id=workspace_id, operations_agent_name="my_ops_agent")
# Get operations agent definition
ops_agent_def = fcc.get_operations_agent_definition(workspace_id=workspace_id, operations_agent_id=ops_agent.id)
# List operations agents
ops_agents = fcc.list_operations_agents(workspace_id=workspace_id)
# Update operations agent
ops_agent_updated = fcc.update_operations_agent(workspace_id=workspace_id, operations_agent_id=ops_agent.id, display_name="updated_name", return_item=True)
# Update operations agent definition
fcc.update_operations_agent_definition(workspace_id=workspace_id, operations_agent_id=ops_agent.id, definition=definition)
# Delete operations agent
resp = fcc.delete_operations_agent(workspace_id=workspace_id, operations_agent_id=ops_agent.id)
from msfabricpysdkcore import FabricClientCore
fcc = FabricClientCore()
workspace_id = "your_workspace_id"
# Create Snowflake database
snowflake_db = fcc.create_snowflake_database(workspace_id=workspace_id, display_name="my_snowflake_db", definition=definition)
# Get Snowflake database
snowflake_db_get = fcc.get_snowflake_database(workspace_id=workspace_id, snowflake_database_id=snowflake_db.id)
# Get Snowflake database by name
snowflake_db_get = fcc.get_snowflake_database(workspace_id=workspace_id, snowflake_database_name="my_snowflake_db")
# Get Snowflake database definition
snowflake_db_def = fcc.get_snowflake_database_definition(workspace_id=workspace_id, snowflake_database_id=snowflake_db.id)
# List Snowflake databases
snowflake_dbs = fcc.list_snowflake_databases(workspace_id=workspace_id)
# Update Snowflake database
snowflake_db_updated = fcc.update_snowflake_database(workspace_id=workspace_id, snowflake_database_id=snowflake_db.id, display_name="updated_name", return_item=True)
# Update Snowflake database definition
fcc.update_snowflake_database_definition(workspace_id=workspace_id, snowflake_database_id=snowflake_db.id, definition=definition)
# Delete Snowflake database
resp = fcc.delete_snowflake_database(workspace_id=workspace_id, snowflake_database_id=snowflake_db.id)
from msfabricpysdkcore import FabricClientCore
fcc = FabricClientCore()
workspace_id = "your_workspace_id"
# Convert natural language to KQL (beta)
result = fcc.nl_to_kql(workspace_id=workspace_id,
cluster_url="https://mycluster.kusto.windows.net",
database_name="mydb",
item_id_for_billing="item_id",
natural_language="Show me the top 10 errors in the last hour")
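# The shape of the returned payload is not shown here; as a minimal, hedged usage note,
# inspect the response and extract the generated KQL from it before running the query.
print(result)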