diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha index 79f2d92b6..62eb1dbba 100644 --- a/.codegen/_openapi_sha +++ b/.codegen/_openapi_sha @@ -1 +1 @@ -7aade78d7c1b9f56b56f546480acb516ee93d98d \ No newline at end of file +69902d1abe35bd9e78e0231927bf14d11b383a16 \ No newline at end of file diff --git a/NEXT_CHANGELOG.md b/NEXT_CHANGELOG.md index 5f3d0433c..8aec0cda1 100644 --- a/NEXT_CHANGELOG.md +++ b/NEXT_CHANGELOG.md @@ -11,3 +11,31 @@ ### Internal Changes ### API Changes +* Added [w.service_principal_secrets_proxy](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/oauth2/service_principal_secrets_proxy.html) workspace-level service. +* Added [w.default_warehouse_id](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/settings/default_warehouse_id.html) workspace-level service. +* Added `database` field for `databricks.sdk.service.apps.AppResource`. +* Added `environment_settings` field for `databricks.sdk.service.catalog.ConnectionInfo`. +* Added `environment_settings` field for `databricks.sdk.service.catalog.CreateConnection`. +* Added `environment_settings` field for `databricks.sdk.service.catalog.UpdateConnection`. +* Added `read_replica_count` field for `databricks.sdk.service.ml.OnlineStore`. +* Added `page_size` field for `databricks.sdk.service.oauth2.ListServicePrincipalSecretsRequest`. +* Added `query_based_connector_config` field for `databricks.sdk.service.pipelines.TableSpecificConfig`. +* Added `projected_remaining_task_total_time_ms`, `remaining_task_count`, `runnable_tasks` and `work_to_be_done` fields for `databricks.sdk.service.sql.QueryMetrics`. +* Added `is_default_for_provider` and `name` fields for `databricks.sdk.service.workspace.CreateCredentialsRequest`. +* Added `is_default_for_provider` and `name` fields for `databricks.sdk.service.workspace.CreateCredentialsResponse`. +* Added `is_default_for_provider` and `name` fields for `databricks.sdk.service.workspace.CredentialInfo`. 
+* Added `is_default_for_provider` and `name` fields for `databricks.sdk.service.workspace.GetCredentialsResponse`. +* Added `is_default_for_provider` and `name` fields for `databricks.sdk.service.workspace.UpdateCredentialsRequest`. +* Added `databricks` enum value for `databricks.sdk.service.catalog.SystemType`. +* Added `driver_dns_resolution_failure` enum value for `databricks.sdk.service.compute.TerminationReasonCode`. +* Added `confluence` and `meta_marketing` enum values for `databricks.sdk.service.pipelines.IngestionSourceType`. +* Added `delta_iceberg_table` enum value for `databricks.sdk.service.sharing.TableInternalAttributesSharedTableType`. +* [Breaking] Changed `delete()` method for [w.table_constraints](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/catalog/table_constraints.html) workspace-level service to start returning `databricks.sdk.service.catalog.DeleteTableConstraintResponse` dataclass. +* [Breaking] Changed `service_principal_id` field for `databricks.sdk.service.oauth2.CreateServicePrincipalSecretRequest` to type `str` dataclass. +* [Breaking] Changed `service_principal_id` field for `databricks.sdk.service.oauth2.DeleteServicePrincipalSecretRequest` to type `str` dataclass. +* [Breaking] Changed `service_principal_id` field for `databricks.sdk.service.oauth2.ListServicePrincipalSecretsRequest` to type `str` dataclass. +* [Breaking] Changed `calls` field for `databricks.sdk.service.serving.AiGatewayRateLimit` to no longer be required. +* Changed `calls` field for `databricks.sdk.service.serving.AiGatewayRateLimit` to no longer be required. +* [Breaking] Removed `create()` method for [w.dashboards](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/sql/dashboards.html) workspace-level service. +* [Breaking] Removed `range` and `if_unmodified_since` fields for `databricks.sdk.service.files.DownloadRequest`. 
+* [Breaking] Removed `range` and `if_unmodified_since` fields for `databricks.sdk.service.files.GetMetadataRequest`. \ No newline at end of file diff --git a/databricks/sdk/__init__.py b/databricks/sdk/__init__.py index f0d9efc29..deb1f7785 100755 --- a/databricks/sdk/__init__.py +++ b/databricks/sdk/__init__.py @@ -97,7 +97,8 @@ OAuthPublishedAppsAPI, PublishedAppIntegrationAPI, ServicePrincipalFederationPolicyAPI, - ServicePrincipalSecretsAPI) + ServicePrincipalSecretsAPI, + ServicePrincipalSecretsProxyAPI) from databricks.sdk.service.pipelines import PipelinesAPI from databricks.sdk.service.provisioning import (CredentialsAPI, EncryptionKeysAPI, @@ -113,10 +114,11 @@ AibiDashboardEmbeddingApprovedDomainsAPI, AutomaticClusterUpdateAPI, ComplianceSecurityProfileAPI, CredentialsManagerAPI, CspEnablementAccountAPI, DashboardEmailSubscriptionsAPI, - DefaultNamespaceAPI, DisableLegacyAccessAPI, DisableLegacyDbfsAPI, - DisableLegacyFeaturesAPI, EnableExportNotebookAPI, EnableIpAccessListsAPI, - EnableNotebookTableClipboardAPI, EnableResultsDownloadingAPI, - EnhancedSecurityMonitoringAPI, EsmEnablementAccountAPI, IpAccessListsAPI, + DefaultNamespaceAPI, DefaultWarehouseIdAPI, DisableLegacyAccessAPI, + DisableLegacyDbfsAPI, DisableLegacyFeaturesAPI, EnableExportNotebookAPI, + EnableIpAccessListsAPI, EnableNotebookTableClipboardAPI, + EnableResultsDownloadingAPI, EnhancedSecurityMonitoringAPI, + EsmEnablementAccountAPI, IpAccessListsAPI, LlmProxyPartnerPoweredAccountAPI, LlmProxyPartnerPoweredEnforceAPI, LlmProxyPartnerPoweredWorkspaceAPI, NetworkConnectivityAPI, NetworkPoliciesAPI, NotificationDestinationsAPI, PersonalComputeAPI, @@ -323,6 +325,7 @@ def __init__( self._resource_quotas = pkg_catalog.ResourceQuotasAPI(self._api_client) self._schemas = pkg_catalog.SchemasAPI(self._api_client) self._secrets = pkg_workspace.SecretsAPI(self._api_client) + self._service_principal_secrets_proxy = pkg_oauth2.ServicePrincipalSecretsProxyAPI(self._api_client) 
self._service_principals = pkg_iam.ServicePrincipalsAPI(self._api_client) self._serving_endpoints = serving_endpoints serving_endpoints_data_plane_token_source = DataPlaneTokenSource( @@ -788,6 +791,11 @@ def secrets(self) -> pkg_workspace.SecretsAPI: """The Secrets API allows you to manage secrets, secret scopes, and access permissions.""" return self._secrets + @property + def service_principal_secrets_proxy(self) -> pkg_oauth2.ServicePrincipalSecretsProxyAPI: + """These APIs enable administrators to manage service principal secrets at the workspace level.""" + return self._service_principal_secrets_proxy + @property def service_principals(self) -> pkg_iam.ServicePrincipalsAPI: """Identities for use with jobs, automated tools, and systems such as scripts, apps, and CI/CD platforms.""" diff --git a/databricks/sdk/service/aibuilder.py b/databricks/sdk/service/aibuilder.py index e71040b56..7008a0da9 100755 --- a/databricks/sdk/service/aibuilder.py +++ b/databricks/sdk/service/aibuilder.py @@ -15,73 +15,6 @@ # all definitions in this file are in alphabetical order -@dataclass -class CancelCustomLlmOptimizationRunRequest: - id: Optional[str] = None - - -@dataclass -class CreateCustomLlmRequest: - name: str - """Name of the custom LLM. Only alphanumeric characters and dashes allowed.""" - - instructions: str - """Instructions for the custom LLM to follow""" - - agent_artifact_path: Optional[str] = None - """Optional: UC path for agent artifacts. If you are using a dataset that you only have read - permissions, please provide a destination path where you have write permissions. Please provide - this in catalog.schema format.""" - - datasets: Optional[List[Dataset]] = None - """Datasets used for training and evaluating the model, not for inference. 
Currently, only 1 - dataset is accepted.""" - - guidelines: Optional[List[str]] = None - """Guidelines for the custom LLM to adhere to""" - - def as_dict(self) -> dict: - """Serializes the CreateCustomLlmRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.agent_artifact_path is not None: - body["agent_artifact_path"] = self.agent_artifact_path - if self.datasets: - body["datasets"] = [v.as_dict() for v in self.datasets] - if self.guidelines: - body["guidelines"] = [v for v in self.guidelines] - if self.instructions is not None: - body["instructions"] = self.instructions - if self.name is not None: - body["name"] = self.name - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreateCustomLlmRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.agent_artifact_path is not None: - body["agent_artifact_path"] = self.agent_artifact_path - if self.datasets: - body["datasets"] = self.datasets - if self.guidelines: - body["guidelines"] = self.guidelines - if self.instructions is not None: - body["instructions"] = self.instructions - if self.name is not None: - body["name"] = self.name - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateCustomLlmRequest: - """Deserializes the CreateCustomLlmRequest from a dictionary.""" - return cls( - agent_artifact_path=d.get("agent_artifact_path", None), - datasets=_repeated_dict(d, "datasets", Dataset), - guidelines=d.get("guidelines", None), - instructions=d.get("instructions", None), - name=d.get("name", None), - ) - - @dataclass class CustomLlm: name: str @@ -203,12 +136,6 @@ def from_dict(cls, d: Dict[str, Any]) -> Dataset: return cls(table=_from_dict(d, "table", Table)) -@dataclass -class StartCustomLlmOptimizationRunRequest: - id: Optional[str] = None - """The Id of the tile.""" - - class State(Enum): """States of Custom LLM optimization lifecycle.""" @@ -263,60 +190,6 @@ def from_dict(cls, d: Dict[str, Any]) 
-> Table: ) -@dataclass -class UpdateCustomLlmRequest: - custom_llm: CustomLlm - """The CustomLlm containing the fields which should be updated.""" - - update_mask: str - """The list of the CustomLlm fields to update. These should correspond to the values (or lack - thereof) present in `custom_llm`. - - The field mask must be a single string, with multiple fields separated by commas (no spaces). - The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields - (e.g., `author.given_name`). Specification of elements in sequence or map fields is not allowed, - as only the entire collection field can be specified. Field names must exactly match the - resource field names. - - A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the - fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the - API changes in the future.""" - - id: Optional[str] = None - """The id of the custom llm""" - - def as_dict(self) -> dict: - """Serializes the UpdateCustomLlmRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.custom_llm: - body["custom_llm"] = self.custom_llm.as_dict() - if self.id is not None: - body["id"] = self.id - if self.update_mask is not None: - body["update_mask"] = self.update_mask - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateCustomLlmRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.custom_llm: - body["custom_llm"] = self.custom_llm - if self.id is not None: - body["id"] = self.id - if self.update_mask is not None: - body["update_mask"] = self.update_mask - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateCustomLlmRequest: - """Deserializes the UpdateCustomLlmRequest from a dictionary.""" - return cls( - custom_llm=_from_dict(d, "custom_llm", CustomLlm), - id=d.get("id", None), - update_mask=d.get("update_mask", None), - ) - - 
class AiBuilderAPI: """The Custom LLMs service manages state and powers the UI for the Custom LLM product.""" diff --git a/databricks/sdk/service/apps.py b/databricks/sdk/service/apps.py index 1a83022e6..22caa3809 100755 --- a/databricks/sdk/service/apps.py +++ b/databricks/sdk/service/apps.py @@ -602,45 +602,13 @@ def from_dict(cls, d: Dict[str, Any]) -> AppPermissionsDescription: ) -@dataclass -class AppPermissionsRequest: - access_control_list: Optional[List[AppAccessControlRequest]] = None - - app_name: Optional[str] = None - """The app for which to get or manage permissions.""" - - def as_dict(self) -> dict: - """Serializes the AppPermissionsRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.access_control_list: - body["access_control_list"] = [v.as_dict() for v in self.access_control_list] - if self.app_name is not None: - body["app_name"] = self.app_name - return body - - def as_shallow_dict(self) -> dict: - """Serializes the AppPermissionsRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.access_control_list: - body["access_control_list"] = self.access_control_list - if self.app_name is not None: - body["app_name"] = self.app_name - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> AppPermissionsRequest: - """Deserializes the AppPermissionsRequest from a dictionary.""" - return cls( - access_control_list=_repeated_dict(d, "access_control_list", AppAccessControlRequest), - app_name=d.get("app_name", None), - ) - - @dataclass class AppResource: name: str """Name of the App Resource.""" + database: Optional[AppResourceDatabase] = None + description: Optional[str] = None """Description of the App Resource.""" @@ -657,6 +625,8 @@ class AppResource: def as_dict(self) -> dict: """Serializes the AppResource into a dictionary suitable for use as a JSON request body.""" body = {} + if self.database: + body["database"] = self.database.as_dict() if self.description is 
not None: body["description"] = self.description if self.job: @@ -676,6 +646,8 @@ def as_dict(self) -> dict: def as_shallow_dict(self) -> dict: """Serializes the AppResource into a shallow dictionary of its immediate attributes.""" body = {} + if self.database: + body["database"] = self.database if self.description is not None: body["description"] = self.description if self.job: @@ -696,6 +668,7 @@ def as_shallow_dict(self) -> dict: def from_dict(cls, d: Dict[str, Any]) -> AppResource: """Deserializes the AppResource from a dictionary.""" return cls( + database=_from_dict(d, "database", AppResourceDatabase), description=d.get("description", None), job=_from_dict(d, "job", AppResourceJob), name=d.get("name", None), @@ -706,6 +679,51 @@ def from_dict(cls, d: Dict[str, Any]) -> AppResource: ) +@dataclass +class AppResourceDatabase: + instance_name: str + + database_name: str + + permission: AppResourceDatabaseDatabasePermission + + def as_dict(self) -> dict: + """Serializes the AppResourceDatabase into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.database_name is not None: + body["database_name"] = self.database_name + if self.instance_name is not None: + body["instance_name"] = self.instance_name + if self.permission is not None: + body["permission"] = self.permission.value + return body + + def as_shallow_dict(self) -> dict: + """Serializes the AppResourceDatabase into a shallow dictionary of its immediate attributes.""" + body = {} + if self.database_name is not None: + body["database_name"] = self.database_name + if self.instance_name is not None: + body["instance_name"] = self.instance_name + if self.permission is not None: + body["permission"] = self.permission + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> AppResourceDatabase: + """Deserializes the AppResourceDatabase from a dictionary.""" + return cls( + database_name=d.get("database_name", None), + instance_name=d.get("instance_name", None), + 
permission=_enum(d, "permission", AppResourceDatabaseDatabasePermission), + ) + + +class AppResourceDatabaseDatabasePermission(Enum): + + CAN_CONNECT_AND_CREATE = "CAN_CONNECT_AND_CREATE" + + @dataclass class AppResourceJob: id: str @@ -1109,18 +1127,6 @@ def from_dict(cls, d: Dict[str, Any]) -> ListAppsResponse: return cls(apps=_repeated_dict(d, "apps", App), next_page_token=d.get("next_page_token", None)) -@dataclass -class StartAppRequest: - name: Optional[str] = None - """The name of the app.""" - - -@dataclass -class StopAppRequest: - name: Optional[str] = None - """The name of the app.""" - - class AppsAPI: """Apps run directly on a customer’s Databricks instance, integrate with their data, use and extend Databricks services, and enable users to interact through single sign-on.""" diff --git a/databricks/sdk/service/billing.py b/databricks/sdk/service/billing.py index d779cc24f..2e118457a 100755 --- a/databricks/sdk/service/billing.py +++ b/databricks/sdk/service/billing.py @@ -416,41 +416,6 @@ def from_dict(cls, d: Dict[str, Any]) -> BudgetPolicy: ) -@dataclass -class CreateBillingUsageDashboardRequest: - dashboard_type: Optional[UsageDashboardType] = None - """Workspace level usage dashboard shows usage data for the specified workspace ID. 
Global level - usage dashboard shows usage data for all workspaces in the account.""" - - workspace_id: Optional[int] = None - """The workspace ID of the workspace in which the usage dashboard is created.""" - - def as_dict(self) -> dict: - """Serializes the CreateBillingUsageDashboardRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.dashboard_type is not None: - body["dashboard_type"] = self.dashboard_type.value - if self.workspace_id is not None: - body["workspace_id"] = self.workspace_id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreateBillingUsageDashboardRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.dashboard_type is not None: - body["dashboard_type"] = self.dashboard_type - if self.workspace_id is not None: - body["workspace_id"] = self.workspace_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateBillingUsageDashboardRequest: - """Deserializes the CreateBillingUsageDashboardRequest from a dictionary.""" - return cls( - dashboard_type=_enum(d, "dashboard_type", UsageDashboardType), workspace_id=d.get("workspace_id", None) - ) - - @dataclass class CreateBillingUsageDashboardResponse: dashboard_id: Optional[str] = None @@ -628,31 +593,6 @@ def from_dict(cls, d: Dict[str, Any]) -> CreateBudgetConfigurationBudgetAlertCon ) -@dataclass -class CreateBudgetConfigurationRequest: - budget: CreateBudgetConfigurationBudget - """Properties of the new budget configuration.""" - - def as_dict(self) -> dict: - """Serializes the CreateBudgetConfigurationRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.budget: - body["budget"] = self.budget.as_dict() - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreateBudgetConfigurationRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.budget: - body["budget"] = self.budget - return body - - 
@classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateBudgetConfigurationRequest: - """Deserializes the CreateBudgetConfigurationRequest from a dictionary.""" - return cls(budget=_from_dict(d, "budget", CreateBudgetConfigurationBudget)) - - @dataclass class CreateBudgetConfigurationResponse: budget: Optional[BudgetConfiguration] = None @@ -678,43 +618,6 @@ def from_dict(cls, d: Dict[str, Any]) -> CreateBudgetConfigurationResponse: return cls(budget=_from_dict(d, "budget", BudgetConfiguration)) -@dataclass -class CreateBudgetPolicyRequest: - """A request to create a BudgetPolicy.""" - - policy: Optional[BudgetPolicy] = None - """The policy to create. `policy_id` needs to be empty as it will be generated `policy_name` must - be provided, custom_tags may need to be provided depending on the cloud provider. All other - fields are optional.""" - - request_id: Optional[str] = None - """A unique identifier for this request. Restricted to 36 ASCII characters. A random UUID is - recommended. 
This request is only idempotent if a `request_id` is provided.""" - - def as_dict(self) -> dict: - """Serializes the CreateBudgetPolicyRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.policy: - body["policy"] = self.policy.as_dict() - if self.request_id is not None: - body["request_id"] = self.request_id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreateBudgetPolicyRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.policy: - body["policy"] = self.policy - if self.request_id is not None: - body["request_id"] = self.request_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateBudgetPolicyRequest: - """Deserializes the CreateBudgetPolicyRequest from a dictionary.""" - return cls(policy=_from_dict(d, "policy", BudgetPolicy), request_id=d.get("request_id", None)) - - @dataclass class CreateLogDeliveryConfigurationParams: """* Log Delivery Configuration""" @@ -1500,38 +1403,6 @@ def from_dict(cls, d: Dict[str, Any]) -> UpdateBudgetConfigurationBudget: ) -@dataclass -class UpdateBudgetConfigurationRequest: - budget: UpdateBudgetConfigurationBudget - """The updated budget. 
This will overwrite the budget specified by the budget ID.""" - - budget_id: Optional[str] = None - """The Databricks budget configuration ID.""" - - def as_dict(self) -> dict: - """Serializes the UpdateBudgetConfigurationRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.budget: - body["budget"] = self.budget.as_dict() - if self.budget_id is not None: - body["budget_id"] = self.budget_id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateBudgetConfigurationRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.budget: - body["budget"] = self.budget - if self.budget_id is not None: - body["budget_id"] = self.budget_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateBudgetConfigurationRequest: - """Deserializes the UpdateBudgetConfigurationRequest from a dictionary.""" - return cls(budget=_from_dict(d, "budget", UpdateBudgetConfigurationBudget), budget_id=d.get("budget_id", None)) - - @dataclass class UpdateBudgetConfigurationResponse: budget: Optional[BudgetConfiguration] = None @@ -1557,80 +1428,12 @@ def from_dict(cls, d: Dict[str, Any]) -> UpdateBudgetConfigurationResponse: return cls(budget=_from_dict(d, "budget", BudgetConfiguration)) -@dataclass -class UpdateLogDeliveryConfigurationStatusRequest: - """* Update Log Delivery Configuration""" - - status: LogDeliveryConfigStatus - """Status of log delivery configuration. Set to `ENABLED` (enabled) or `DISABLED` (disabled). - Defaults to `ENABLED`. You can [enable or disable the - configuration](#operation/patch-log-delivery-config-status) later. 
Deletion of a configuration - is not supported, so disable a log delivery configuration that is no longer needed.""" - - log_delivery_configuration_id: Optional[str] = None - """The log delivery configuration id of customer""" - - def as_dict(self) -> dict: - """Serializes the UpdateLogDeliveryConfigurationStatusRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.log_delivery_configuration_id is not None: - body["log_delivery_configuration_id"] = self.log_delivery_configuration_id - if self.status is not None: - body["status"] = self.status.value - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateLogDeliveryConfigurationStatusRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.log_delivery_configuration_id is not None: - body["log_delivery_configuration_id"] = self.log_delivery_configuration_id - if self.status is not None: - body["status"] = self.status - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateLogDeliveryConfigurationStatusRequest: - """Deserializes the UpdateLogDeliveryConfigurationStatusRequest from a dictionary.""" - return cls( - log_delivery_configuration_id=d.get("log_delivery_configuration_id", None), - status=_enum(d, "status", LogDeliveryConfigStatus), - ) - - class UsageDashboardType(Enum): USAGE_DASHBOARD_TYPE_GLOBAL = "USAGE_DASHBOARD_TYPE_GLOBAL" USAGE_DASHBOARD_TYPE_WORKSPACE = "USAGE_DASHBOARD_TYPE_WORKSPACE" -@dataclass -class WrappedCreateLogDeliveryConfiguration: - """* Properties of the new log delivery configuration.""" - - log_delivery_configuration: CreateLogDeliveryConfigurationParams - - def as_dict(self) -> dict: - """Serializes the WrappedCreateLogDeliveryConfiguration into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.log_delivery_configuration: - body["log_delivery_configuration"] = self.log_delivery_configuration.as_dict() - return body - - def 
as_shallow_dict(self) -> dict: - """Serializes the WrappedCreateLogDeliveryConfiguration into a shallow dictionary of its immediate attributes.""" - body = {} - if self.log_delivery_configuration: - body["log_delivery_configuration"] = self.log_delivery_configuration - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> WrappedCreateLogDeliveryConfiguration: - """Deserializes the WrappedCreateLogDeliveryConfiguration from a dictionary.""" - return cls( - log_delivery_configuration=_from_dict(d, "log_delivery_configuration", CreateLogDeliveryConfigurationParams) - ) - - @dataclass class WrappedLogDeliveryConfiguration: log_delivery_configuration: Optional[LogDeliveryConfiguration] = None @@ -1701,16 +1504,22 @@ def __init__(self, api_client): def download(self, start_month: str, end_month: str, *, personal_data: Optional[bool] = None) -> DownloadResponse: """Returns billable usage logs in CSV format for the specified account and date range. For the data - schema, see [CSV file schema]. Note that this method might take multiple minutes to complete. + schema, see: + + - AWS: [CSV file schema]. - GCP: [CSV file schema]. + + Note that this method might take multiple minutes to complete. **Warning**: Depending on the queried date range, the number of workspaces in the account, the size of the response and the internet speed of the caller, this API may hit a timeout after a few minutes. If you experience this, try to mitigate by calling the API with narrower date ranges. - [CSV file schema]: https://docs.databricks.com/administration-guide/account-settings/usage-analysis.html#schema + [CSV file schema]: https://docs.gcp.databricks.com/administration-guide/account-settings/usage-analysis.html#csv-file-schema :param start_month: str - Format: `YYYY-MM`. First month to return billable usage logs for. This field is required. + Format specification for month in the format `YYYY-MM`. 
This is used to specify billable usage + `start_month` and `end_month` properties. **Note**: Billable usage logs are unavailable before March + 2019 (`2019-03`). :param end_month: str Format: `YYYY-MM`. Last month to return billable usage logs for. This field is required. :param personal_data: bool (optional) diff --git a/databricks/sdk/service/catalog.py b/databricks/sdk/service/catalog.py index 738d13cb6..3fe7b66bb 100755 --- a/databricks/sdk/service/catalog.py +++ b/databricks/sdk/service/catalog.py @@ -19,106 +19,6 @@ # all definitions in this file are in alphabetical order -@dataclass -class AccountsCreateMetastore: - metastore_info: Optional[CreateMetastore] = None - - def as_dict(self) -> dict: - """Serializes the AccountsCreateMetastore into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.metastore_info: - body["metastore_info"] = self.metastore_info.as_dict() - return body - - def as_shallow_dict(self) -> dict: - """Serializes the AccountsCreateMetastore into a shallow dictionary of its immediate attributes.""" - body = {} - if self.metastore_info: - body["metastore_info"] = self.metastore_info - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> AccountsCreateMetastore: - """Deserializes the AccountsCreateMetastore from a dictionary.""" - return cls(metastore_info=_from_dict(d, "metastore_info", CreateMetastore)) - - -@dataclass -class AccountsCreateMetastoreAssignment: - metastore_assignment: Optional[CreateMetastoreAssignment] = None - - metastore_id: Optional[str] = None - """Unity Catalog metastore ID""" - - workspace_id: Optional[int] = None - """Workspace ID.""" - - def as_dict(self) -> dict: - """Serializes the AccountsCreateMetastoreAssignment into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.metastore_assignment: - body["metastore_assignment"] = self.metastore_assignment.as_dict() - if self.metastore_id is not None: - body["metastore_id"] = self.metastore_id - 
if self.workspace_id is not None: - body["workspace_id"] = self.workspace_id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the AccountsCreateMetastoreAssignment into a shallow dictionary of its immediate attributes.""" - body = {} - if self.metastore_assignment: - body["metastore_assignment"] = self.metastore_assignment - if self.metastore_id is not None: - body["metastore_id"] = self.metastore_id - if self.workspace_id is not None: - body["workspace_id"] = self.workspace_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> AccountsCreateMetastoreAssignment: - """Deserializes the AccountsCreateMetastoreAssignment from a dictionary.""" - return cls( - metastore_assignment=_from_dict(d, "metastore_assignment", CreateMetastoreAssignment), - metastore_id=d.get("metastore_id", None), - workspace_id=d.get("workspace_id", None), - ) - - -@dataclass -class AccountsCreateStorageCredential: - credential_info: Optional[CreateStorageCredential] = None - - metastore_id: Optional[str] = None - """Unity Catalog metastore ID""" - - def as_dict(self) -> dict: - """Serializes the AccountsCreateStorageCredential into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.credential_info: - body["credential_info"] = self.credential_info.as_dict() - if self.metastore_id is not None: - body["metastore_id"] = self.metastore_id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the AccountsCreateStorageCredential into a shallow dictionary of its immediate attributes.""" - body = {} - if self.credential_info: - body["credential_info"] = self.credential_info - if self.metastore_id is not None: - body["metastore_id"] = self.metastore_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> AccountsCreateStorageCredential: - """Deserializes the AccountsCreateStorageCredential from a dictionary.""" - return cls( - credential_info=_from_dict(d, "credential_info", 
CreateStorageCredential), - metastore_id=d.get("metastore_id", None), - ) - - @dataclass class AccountsMetastoreAssignment: metastore_assignment: Optional[MetastoreAssignment] = None @@ -191,123 +91,6 @@ def from_dict(cls, d: Dict[str, Any]) -> AccountsStorageCredentialInfo: return cls(credential_info=_from_dict(d, "credential_info", StorageCredentialInfo)) -@dataclass -class AccountsUpdateMetastore: - metastore_id: Optional[str] = None - """Unity Catalog metastore ID""" - - metastore_info: Optional[UpdateMetastore] = None - - def as_dict(self) -> dict: - """Serializes the AccountsUpdateMetastore into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.metastore_id is not None: - body["metastore_id"] = self.metastore_id - if self.metastore_info: - body["metastore_info"] = self.metastore_info.as_dict() - return body - - def as_shallow_dict(self) -> dict: - """Serializes the AccountsUpdateMetastore into a shallow dictionary of its immediate attributes.""" - body = {} - if self.metastore_id is not None: - body["metastore_id"] = self.metastore_id - if self.metastore_info: - body["metastore_info"] = self.metastore_info - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> AccountsUpdateMetastore: - """Deserializes the AccountsUpdateMetastore from a dictionary.""" - return cls( - metastore_id=d.get("metastore_id", None), metastore_info=_from_dict(d, "metastore_info", UpdateMetastore) - ) - - -@dataclass -class AccountsUpdateMetastoreAssignment: - metastore_assignment: Optional[UpdateMetastoreAssignment] = None - - metastore_id: Optional[str] = None - """Unity Catalog metastore ID""" - - workspace_id: Optional[int] = None - """Workspace ID.""" - - def as_dict(self) -> dict: - """Serializes the AccountsUpdateMetastoreAssignment into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.metastore_assignment: - body["metastore_assignment"] = self.metastore_assignment.as_dict() - if self.metastore_id is 
not None: - body["metastore_id"] = self.metastore_id - if self.workspace_id is not None: - body["workspace_id"] = self.workspace_id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the AccountsUpdateMetastoreAssignment into a shallow dictionary of its immediate attributes.""" - body = {} - if self.metastore_assignment: - body["metastore_assignment"] = self.metastore_assignment - if self.metastore_id is not None: - body["metastore_id"] = self.metastore_id - if self.workspace_id is not None: - body["workspace_id"] = self.workspace_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> AccountsUpdateMetastoreAssignment: - """Deserializes the AccountsUpdateMetastoreAssignment from a dictionary.""" - return cls( - metastore_assignment=_from_dict(d, "metastore_assignment", UpdateMetastoreAssignment), - metastore_id=d.get("metastore_id", None), - workspace_id=d.get("workspace_id", None), - ) - - -@dataclass -class AccountsUpdateStorageCredential: - credential_info: Optional[UpdateStorageCredential] = None - - metastore_id: Optional[str] = None - """Unity Catalog metastore ID""" - - storage_credential_name: Optional[str] = None - """Name of the storage credential.""" - - def as_dict(self) -> dict: - """Serializes the AccountsUpdateStorageCredential into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.credential_info: - body["credential_info"] = self.credential_info.as_dict() - if self.metastore_id is not None: - body["metastore_id"] = self.metastore_id - if self.storage_credential_name is not None: - body["storage_credential_name"] = self.storage_credential_name - return body - - def as_shallow_dict(self) -> dict: - """Serializes the AccountsUpdateStorageCredential into a shallow dictionary of its immediate attributes.""" - body = {} - if self.credential_info: - body["credential_info"] = self.credential_info - if self.metastore_id is not None: - body["metastore_id"] = self.metastore_id - if 
self.storage_credential_name is not None: - body["storage_credential_name"] = self.storage_credential_name - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> AccountsUpdateStorageCredential: - """Deserializes the AccountsUpdateStorageCredential from a dictionary.""" - return cls( - credential_info=_from_dict(d, "credential_info", UpdateStorageCredential), - metastore_id=d.get("metastore_id", None), - storage_credential_name=d.get("storage_credential_name", None), - ) - - @dataclass class ArtifactAllowlistInfo: artifact_matchers: Optional[List[ArtifactMatcher]] = None @@ -1442,6 +1225,8 @@ def from_dict(cls, d: Dict[str, Any]) -> ConnectionDependency: @dataclass class ConnectionInfo: + """Next ID: 23""" + comment: Optional[str] = None """User-provided free-form text description.""" @@ -1460,6 +1245,9 @@ class ConnectionInfo: credential_type: Optional[CredentialType] = None """The type of credential.""" + environment_settings: Optional[EnvironmentSettings] = None + """[Create,Update:OPT] Connection environment settings as EnvironmentSettings object.""" + full_name: Optional[str] = None """Full name of connection.""" @@ -1509,6 +1297,8 @@ def as_dict(self) -> dict: body["created_by"] = self.created_by if self.credential_type is not None: body["credential_type"] = self.credential_type.value + if self.environment_settings: + body["environment_settings"] = self.environment_settings.as_dict() if self.full_name is not None: body["full_name"] = self.full_name if self.metastore_id is not None: @@ -1550,6 +1340,8 @@ def as_shallow_dict(self) -> dict: body["created_by"] = self.created_by if self.credential_type is not None: body["credential_type"] = self.credential_type + if self.environment_settings: + body["environment_settings"] = self.environment_settings if self.full_name is not None: body["full_name"] = self.full_name if self.metastore_id is not None: @@ -1586,6 +1378,7 @@ def from_dict(cls, d: Dict[str, Any]) -> ConnectionInfo: 
created_at=d.get("created_at", None), created_by=d.get("created_by", None), credential_type=_enum(d, "credential_type", CredentialType), + environment_settings=_from_dict(d, "environment_settings", EnvironmentSettings), full_name=d.get("full_name", None), metastore_id=d.get("metastore_id", None), name=d.get("name", None), @@ -1602,7 +1395,7 @@ def from_dict(cls, d: Dict[str, Any]) -> ConnectionInfo: class ConnectionType(Enum): - """Next Id: 36""" + """Next Id: 37""" BIGQUERY = "BIGQUERY" DATABRICKS = "DATABRICKS" @@ -1675,190 +1468,417 @@ def from_dict(cls, d: Dict[str, Any]) -> ContinuousUpdateStatus: @dataclass -class CreateCatalog: +class CreateFunction: name: str - """Name of catalog.""" + """Name of function, relative to parent schema.""" + + catalog_name: str + """Name of parent catalog.""" + + schema_name: str + """Name of parent schema relative to its parent catalog.""" + + input_params: FunctionParameterInfos + + data_type: ColumnTypeName + """Scalar function return data type.""" + + full_data_type: str + """Pretty printed function data type.""" + + routine_body: CreateFunctionRoutineBody + """Function language. When **EXTERNAL** is used, the language of the routine function should be + specified in the __external_language__ field, and the __return_params__ of the function cannot + be used (as **TABLE** return type is not supported), and the __sql_data_access__ field must be + **NO_SQL**.""" + + routine_definition: str + """Function body.""" + + parameter_style: CreateFunctionParameterStyle + """Function parameter style. 
**S** is the value for SQL.""" + + is_deterministic: bool + """Whether the function is deterministic.""" + + sql_data_access: CreateFunctionSqlDataAccess + """Function SQL data access.""" + + is_null_call: bool + """Function null call.""" + + security_type: CreateFunctionSecurityType + """Function security type.""" + + specific_name: str + """Specific name of the function; Reserved for future use.""" comment: Optional[str] = None """User-provided free-form text description.""" - connection_name: Optional[str] = None - """The name of the connection to an external data source.""" + external_language: Optional[str] = None + """External function language.""" - options: Optional[Dict[str, str]] = None - """A map of key-value properties attached to the securable.""" + external_name: Optional[str] = None + """External function name.""" - properties: Optional[Dict[str, str]] = None - """A map of key-value properties attached to the securable.""" + properties: Optional[str] = None + """JSON-serialized key-value pair map, encoded (escaped) as a string.""" - provider_name: Optional[str] = None - """The name of delta sharing provider. 
- - A Delta Sharing catalog is a catalog that is based on a Delta share on a remote sharing server.""" + return_params: Optional[FunctionParameterInfos] = None + """Table function return parameters.""" - share_name: Optional[str] = None - """The name of the share under the share provider.""" + routine_dependencies: Optional[DependencyList] = None + """Function dependencies.""" - storage_root: Optional[str] = None - """Storage root URL for managed tables within catalog.""" + sql_path: Optional[str] = None + """List of schemes whose objects can be referenced without qualification.""" def as_dict(self) -> dict: - """Serializes the CreateCatalog into a dictionary suitable for use as a JSON request body.""" + """Serializes the CreateFunction into a dictionary suitable for use as a JSON request body.""" body = {} + if self.catalog_name is not None: + body["catalog_name"] = self.catalog_name if self.comment is not None: body["comment"] = self.comment - if self.connection_name is not None: - body["connection_name"] = self.connection_name - if self.name is not None: - body["name"] = self.name - if self.options: - body["options"] = self.options - if self.properties: - body["properties"] = self.properties - if self.provider_name is not None: - body["provider_name"] = self.provider_name - if self.share_name is not None: - body["share_name"] = self.share_name - if self.storage_root is not None: - body["storage_root"] = self.storage_root + if self.data_type is not None: + body["data_type"] = self.data_type.value + if self.external_language is not None: + body["external_language"] = self.external_language + if self.external_name is not None: + body["external_name"] = self.external_name + if self.full_data_type is not None: + body["full_data_type"] = self.full_data_type + if self.input_params: + body["input_params"] = self.input_params.as_dict() + if self.is_deterministic is not None: + body["is_deterministic"] = self.is_deterministic + if self.is_null_call is not None: + 
body["is_null_call"] = self.is_null_call + if self.name is not None: + body["name"] = self.name + if self.parameter_style is not None: + body["parameter_style"] = self.parameter_style.value + if self.properties is not None: + body["properties"] = self.properties + if self.return_params: + body["return_params"] = self.return_params.as_dict() + if self.routine_body is not None: + body["routine_body"] = self.routine_body.value + if self.routine_definition is not None: + body["routine_definition"] = self.routine_definition + if self.routine_dependencies: + body["routine_dependencies"] = self.routine_dependencies.as_dict() + if self.schema_name is not None: + body["schema_name"] = self.schema_name + if self.security_type is not None: + body["security_type"] = self.security_type.value + if self.specific_name is not None: + body["specific_name"] = self.specific_name + if self.sql_data_access is not None: + body["sql_data_access"] = self.sql_data_access.value + if self.sql_path is not None: + body["sql_path"] = self.sql_path return body def as_shallow_dict(self) -> dict: - """Serializes the CreateCatalog into a shallow dictionary of its immediate attributes.""" + """Serializes the CreateFunction into a shallow dictionary of its immediate attributes.""" body = {} + if self.catalog_name is not None: + body["catalog_name"] = self.catalog_name if self.comment is not None: body["comment"] = self.comment - if self.connection_name is not None: - body["connection_name"] = self.connection_name + if self.data_type is not None: + body["data_type"] = self.data_type + if self.external_language is not None: + body["external_language"] = self.external_language + if self.external_name is not None: + body["external_name"] = self.external_name + if self.full_data_type is not None: + body["full_data_type"] = self.full_data_type + if self.input_params: + body["input_params"] = self.input_params + if self.is_deterministic is not None: + body["is_deterministic"] = self.is_deterministic + if 
self.is_null_call is not None: + body["is_null_call"] = self.is_null_call if self.name is not None: body["name"] = self.name - if self.options: - body["options"] = self.options - if self.properties: + if self.parameter_style is not None: + body["parameter_style"] = self.parameter_style + if self.properties is not None: body["properties"] = self.properties - if self.provider_name is not None: - body["provider_name"] = self.provider_name - if self.share_name is not None: - body["share_name"] = self.share_name - if self.storage_root is not None: - body["storage_root"] = self.storage_root + if self.return_params: + body["return_params"] = self.return_params + if self.routine_body is not None: + body["routine_body"] = self.routine_body + if self.routine_definition is not None: + body["routine_definition"] = self.routine_definition + if self.routine_dependencies: + body["routine_dependencies"] = self.routine_dependencies + if self.schema_name is not None: + body["schema_name"] = self.schema_name + if self.security_type is not None: + body["security_type"] = self.security_type + if self.specific_name is not None: + body["specific_name"] = self.specific_name + if self.sql_data_access is not None: + body["sql_data_access"] = self.sql_data_access + if self.sql_path is not None: + body["sql_path"] = self.sql_path return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateCatalog: - """Deserializes the CreateCatalog from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> CreateFunction: + """Deserializes the CreateFunction from a dictionary.""" return cls( + catalog_name=d.get("catalog_name", None), comment=d.get("comment", None), - connection_name=d.get("connection_name", None), + data_type=_enum(d, "data_type", ColumnTypeName), + external_language=d.get("external_language", None), + external_name=d.get("external_name", None), + full_data_type=d.get("full_data_type", None), + input_params=_from_dict(d, "input_params", FunctionParameterInfos), + 
is_deterministic=d.get("is_deterministic", None), + is_null_call=d.get("is_null_call", None), name=d.get("name", None), - options=d.get("options", None), + parameter_style=_enum(d, "parameter_style", CreateFunctionParameterStyle), properties=d.get("properties", None), - provider_name=d.get("provider_name", None), - share_name=d.get("share_name", None), - storage_root=d.get("storage_root", None), + return_params=_from_dict(d, "return_params", FunctionParameterInfos), + routine_body=_enum(d, "routine_body", CreateFunctionRoutineBody), + routine_definition=d.get("routine_definition", None), + routine_dependencies=_from_dict(d, "routine_dependencies", DependencyList), + schema_name=d.get("schema_name", None), + security_type=_enum(d, "security_type", CreateFunctionSecurityType), + specific_name=d.get("specific_name", None), + sql_data_access=_enum(d, "sql_data_access", CreateFunctionSqlDataAccess), + sql_path=d.get("sql_path", None), ) -@dataclass -class CreateConnection: - name: str - """Name of the connection.""" +class CreateFunctionParameterStyle(Enum): + """Function parameter style. **S** is the value for SQL.""" - connection_type: ConnectionType - """The type of connection.""" + S = "S" - options: Dict[str, str] - """A map of key-value properties attached to the securable.""" - comment: Optional[str] = None - """User-provided free-form text description.""" +class CreateFunctionRoutineBody(Enum): + """Function language. 
When **EXTERNAL** is used, the language of the routine function should be + specified in the __external_language__ field, and the __return_params__ of the function cannot + be used (as **TABLE** return type is not supported), and the __sql_data_access__ field must be + **NO_SQL**.""" - properties: Optional[Dict[str, str]] = None - """A map of key-value properties attached to the securable.""" + EXTERNAL = "EXTERNAL" + SQL = "SQL" - read_only: Optional[bool] = None - """If the connection is read only.""" + +class CreateFunctionSecurityType(Enum): + """The security type of the function.""" + + DEFINER = "DEFINER" + + +class CreateFunctionSqlDataAccess(Enum): + """Function SQL data access.""" + + CONTAINS_SQL = "CONTAINS_SQL" + NO_SQL = "NO_SQL" + READS_SQL_DATA = "READS_SQL_DATA" + + +@dataclass +class CreateMetastore: + name: str + """The user-specified name of the metastore.""" + + region: Optional[str] = None + """Cloud region which the metastore serves (e.g., `us-west-2`, `westus`).""" + + storage_root: Optional[str] = None + """The storage root URL for metastore""" def as_dict(self) -> dict: - """Serializes the CreateConnection into a dictionary suitable for use as a JSON request body.""" + """Serializes the CreateMetastore into a dictionary suitable for use as a JSON request body.""" body = {} - if self.comment is not None: - body["comment"] = self.comment - if self.connection_type is not None: - body["connection_type"] = self.connection_type.value if self.name is not None: body["name"] = self.name - if self.options: - body["options"] = self.options - if self.properties: - body["properties"] = self.properties - if self.read_only is not None: - body["read_only"] = self.read_only + if self.region is not None: + body["region"] = self.region + if self.storage_root is not None: + body["storage_root"] = self.storage_root return body def as_shallow_dict(self) -> dict: - """Serializes the CreateConnection into a shallow dictionary of its immediate attributes.""" + 
"""Serializes the CreateMetastore into a shallow dictionary of its immediate attributes.""" body = {} - if self.comment is not None: - body["comment"] = self.comment - if self.connection_type is not None: - body["connection_type"] = self.connection_type if self.name is not None: body["name"] = self.name - if self.options: - body["options"] = self.options - if self.properties: - body["properties"] = self.properties - if self.read_only is not None: - body["read_only"] = self.read_only + if self.region is not None: + body["region"] = self.region + if self.storage_root is not None: + body["storage_root"] = self.storage_root return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateConnection: - """Deserializes the CreateConnection from a dictionary.""" - return cls( - comment=d.get("comment", None), - connection_type=_enum(d, "connection_type", ConnectionType), - name=d.get("name", None), - options=d.get("options", None), - properties=d.get("properties", None), - read_only=d.get("read_only", None), - ) + def from_dict(cls, d: Dict[str, Any]) -> CreateMetastore: + """Deserializes the CreateMetastore from a dictionary.""" + return cls(name=d.get("name", None), region=d.get("region", None), storage_root=d.get("storage_root", None)) @dataclass -class CreateCredentialRequest: - name: str - """The credential name. The name must be unique among storage and service credentials within the - metastore.""" - - aws_iam_role: Optional[AwsIamRole] = None - """The AWS IAM role configuration.""" +class CreateMetastoreAssignment: + metastore_id: str + """The unique ID of the metastore.""" - azure_managed_identity: Optional[AzureManagedIdentity] = None - """The Azure managed identity configuration.""" + default_catalog_name: str + """The name of the default catalog in the metastore. This field is deprecated. 
Please use "Default + Namespace API" to configure the default catalog for a Databricks workspace.""" - azure_service_principal: Optional[AzureServicePrincipal] = None - """The Azure service principal configuration.""" + workspace_id: Optional[int] = None + """A workspace ID.""" - comment: Optional[str] = None - """Comment associated with the credential.""" + def as_dict(self) -> dict: + """Serializes the CreateMetastoreAssignment into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.default_catalog_name is not None: + body["default_catalog_name"] = self.default_catalog_name + if self.metastore_id is not None: + body["metastore_id"] = self.metastore_id + if self.workspace_id is not None: + body["workspace_id"] = self.workspace_id + return body - databricks_gcp_service_account: Optional[DatabricksGcpServiceAccount] = None - """The Databricks managed GCP service account configuration.""" + def as_shallow_dict(self) -> dict: + """Serializes the CreateMetastoreAssignment into a shallow dictionary of its immediate attributes.""" + body = {} + if self.default_catalog_name is not None: + body["default_catalog_name"] = self.default_catalog_name + if self.metastore_id is not None: + body["metastore_id"] = self.metastore_id + if self.workspace_id is not None: + body["workspace_id"] = self.workspace_id + return body - purpose: Optional[CredentialPurpose] = None - """Indicates the purpose of the credential.""" + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> CreateMetastoreAssignment: + """Deserializes the CreateMetastoreAssignment from a dictionary.""" + return cls( + default_catalog_name=d.get("default_catalog_name", None), + metastore_id=d.get("metastore_id", None), + workspace_id=d.get("workspace_id", None), + ) - read_only: Optional[bool] = None - """Whether the credential is usable only for read operations. Only applicable when purpose is - **STORAGE**.""" - skip_validation: Optional[bool] = None - """Optional. 
Supplying true to this argument skips validation of the created set of credentials.""" +@dataclass +class CreateRequestExternalLineage: + source: ExternalLineageObject + """Source object of the external lineage relationship.""" + + target: ExternalLineageObject + """Target object of the external lineage relationship.""" + + columns: Optional[List[ColumnRelationship]] = None + """List of column relationships between source and target objects.""" + + id: Optional[str] = None + """Unique identifier of the external lineage relationship.""" + + properties: Optional[Dict[str, str]] = None + """Key-value properties associated with the external lineage relationship.""" + + def as_dict(self) -> dict: + """Serializes the CreateRequestExternalLineage into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.columns: + body["columns"] = [v.as_dict() for v in self.columns] + if self.id is not None: + body["id"] = self.id + if self.properties: + body["properties"] = self.properties + if self.source: + body["source"] = self.source.as_dict() + if self.target: + body["target"] = self.target.as_dict() + return body + + def as_shallow_dict(self) -> dict: + """Serializes the CreateRequestExternalLineage into a shallow dictionary of its immediate attributes.""" + body = {} + if self.columns: + body["columns"] = self.columns + if self.id is not None: + body["id"] = self.id + if self.properties: + body["properties"] = self.properties + if self.source: + body["source"] = self.source + if self.target: + body["target"] = self.target + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> CreateRequestExternalLineage: + """Deserializes the CreateRequestExternalLineage from a dictionary.""" + return cls( + columns=_repeated_dict(d, "columns", ColumnRelationship), + id=d.get("id", None), + properties=d.get("properties", None), + source=_from_dict(d, "source", ExternalLineageObject), + target=_from_dict(d, "target", ExternalLineageObject), + ) + + 
+@dataclass +class CreateResponse: + def as_dict(self) -> dict: + """Serializes the CreateResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + return body + + def as_shallow_dict(self) -> dict: + """Serializes the CreateResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> CreateResponse: + """Deserializes the CreateResponse from a dictionary.""" + return cls() + + +@dataclass +class CreateStorageCredential: + name: str + """The credential name. The name must be unique among storage and service credentials within the + metastore.""" + + aws_iam_role: Optional[AwsIamRoleRequest] = None + """The AWS IAM role configuration.""" + + azure_managed_identity: Optional[AzureManagedIdentityRequest] = None + """The Azure managed identity configuration.""" + + azure_service_principal: Optional[AzureServicePrincipal] = None + """The Azure service principal configuration.""" + + cloudflare_api_token: Optional[CloudflareApiToken] = None + """The Cloudflare API token configuration.""" + + comment: Optional[str] = None + """Comment associated with the credential.""" + + databricks_gcp_service_account: Optional[DatabricksGcpServiceAccountRequest] = None + """The Databricks managed GCP service account configuration.""" + + read_only: Optional[bool] = None + """Whether the credential is usable only for read operations. 
Only applicable when purpose is + **STORAGE**.""" + + skip_validation: Optional[bool] = None + """Supplying true to this argument skips validation of the created credential.""" def as_dict(self) -> dict: - """Serializes the CreateCredentialRequest into a dictionary suitable for use as a JSON request body.""" + """Serializes the CreateStorageCredential into a dictionary suitable for use as a JSON request body.""" body = {} if self.aws_iam_role: body["aws_iam_role"] = self.aws_iam_role.as_dict() @@ -1866,14 +1886,14 @@ def as_dict(self) -> dict: body["azure_managed_identity"] = self.azure_managed_identity.as_dict() if self.azure_service_principal: body["azure_service_principal"] = self.azure_service_principal.as_dict() + if self.cloudflare_api_token: + body["cloudflare_api_token"] = self.cloudflare_api_token.as_dict() if self.comment is not None: body["comment"] = self.comment if self.databricks_gcp_service_account: body["databricks_gcp_service_account"] = self.databricks_gcp_service_account.as_dict() if self.name is not None: body["name"] = self.name - if self.purpose is not None: - body["purpose"] = self.purpose.value if self.read_only is not None: body["read_only"] = self.read_only if self.skip_validation is not None: @@ -1881,7 +1901,7 @@ def as_dict(self) -> dict: return body def as_shallow_dict(self) -> dict: - """Serializes the CreateCredentialRequest into a shallow dictionary of its immediate attributes.""" + """Serializes the CreateStorageCredential into a shallow dictionary of its immediate attributes.""" body = {} if self.aws_iam_role: body["aws_iam_role"] = self.aws_iam_role @@ -1889,14 +1909,14 @@ def as_shallow_dict(self) -> dict: body["azure_managed_identity"] = self.azure_managed_identity if self.azure_service_principal: body["azure_service_principal"] = self.azure_service_principal + if self.cloudflare_api_token: + body["cloudflare_api_token"] = self.cloudflare_api_token if self.comment is not None: body["comment"] = self.comment if 
self.databricks_gcp_service_account: body["databricks_gcp_service_account"] = self.databricks_gcp_service_account if self.name is not None: body["name"] = self.name - if self.purpose is not None: - body["purpose"] = self.purpose if self.read_only is not None: body["read_only"] = self.read_only if self.skip_validation is not None: @@ -1904,670 +1924,458 @@ def as_shallow_dict(self) -> dict: return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateCredentialRequest: - """Deserializes the CreateCredentialRequest from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> CreateStorageCredential: + """Deserializes the CreateStorageCredential from a dictionary.""" return cls( - aws_iam_role=_from_dict(d, "aws_iam_role", AwsIamRole), - azure_managed_identity=_from_dict(d, "azure_managed_identity", AzureManagedIdentity), + aws_iam_role=_from_dict(d, "aws_iam_role", AwsIamRoleRequest), + azure_managed_identity=_from_dict(d, "azure_managed_identity", AzureManagedIdentityRequest), azure_service_principal=_from_dict(d, "azure_service_principal", AzureServicePrincipal), + cloudflare_api_token=_from_dict(d, "cloudflare_api_token", CloudflareApiToken), comment=d.get("comment", None), - databricks_gcp_service_account=_from_dict(d, "databricks_gcp_service_account", DatabricksGcpServiceAccount), + databricks_gcp_service_account=_from_dict( + d, "databricks_gcp_service_account", DatabricksGcpServiceAccountRequest + ), name=d.get("name", None), - purpose=_enum(d, "purpose", CredentialPurpose), read_only=d.get("read_only", None), skip_validation=d.get("skip_validation", None), ) @dataclass -class CreateExternalLocation: - name: str - """Name of the external location.""" - - url: str - """Path URL of the external location.""" - - credential_name: str - """Name of the storage credential used with this location.""" - - comment: Optional[str] = None - """User-provided free-form text description.""" - - enable_file_events: Optional[bool] = None - """Whether to enable 
file events on this external location.""" - - encryption_details: Optional[EncryptionDetails] = None - - fallback: Optional[bool] = None - """Indicates whether fallback mode is enabled for this external location. When fallback mode is - enabled, the access to the location falls back to cluster credentials if UC credentials are not - sufficient.""" - - file_event_queue: Optional[FileEventQueue] = None - """File event queue settings.""" - - read_only: Optional[bool] = None - """Indicates whether the external location is read-only.""" +class CredentialDependency: + """A credential that is dependent on a SQL object.""" - skip_validation: Optional[bool] = None - """Skips validation of the storage credential associated with the external location.""" + credential_name: Optional[str] = None + """Full name of the dependent credential, in the form of __credential_name__.""" def as_dict(self) -> dict: - """Serializes the CreateExternalLocation into a dictionary suitable for use as a JSON request body.""" + """Serializes the CredentialDependency into a dictionary suitable for use as a JSON request body.""" body = {} - if self.comment is not None: - body["comment"] = self.comment if self.credential_name is not None: body["credential_name"] = self.credential_name - if self.enable_file_events is not None: - body["enable_file_events"] = self.enable_file_events - if self.encryption_details: - body["encryption_details"] = self.encryption_details.as_dict() - if self.fallback is not None: - body["fallback"] = self.fallback - if self.file_event_queue: - body["file_event_queue"] = self.file_event_queue.as_dict() - if self.name is not None: - body["name"] = self.name - if self.read_only is not None: - body["read_only"] = self.read_only - if self.skip_validation is not None: - body["skip_validation"] = self.skip_validation - if self.url is not None: - body["url"] = self.url return body def as_shallow_dict(self) -> dict: - """Serializes the CreateExternalLocation into a shallow dictionary 
of its immediate attributes.""" + """Serializes the CredentialDependency into a shallow dictionary of its immediate attributes.""" body = {} - if self.comment is not None: - body["comment"] = self.comment if self.credential_name is not None: body["credential_name"] = self.credential_name - if self.enable_file_events is not None: - body["enable_file_events"] = self.enable_file_events - if self.encryption_details: - body["encryption_details"] = self.encryption_details - if self.fallback is not None: - body["fallback"] = self.fallback - if self.file_event_queue: - body["file_event_queue"] = self.file_event_queue - if self.name is not None: - body["name"] = self.name - if self.read_only is not None: - body["read_only"] = self.read_only - if self.skip_validation is not None: - body["skip_validation"] = self.skip_validation - if self.url is not None: - body["url"] = self.url return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateExternalLocation: - """Deserializes the CreateExternalLocation from a dictionary.""" - return cls( - comment=d.get("comment", None), - credential_name=d.get("credential_name", None), - enable_file_events=d.get("enable_file_events", None), - encryption_details=_from_dict(d, "encryption_details", EncryptionDetails), - fallback=d.get("fallback", None), - file_event_queue=_from_dict(d, "file_event_queue", FileEventQueue), - name=d.get("name", None), - read_only=d.get("read_only", None), - skip_validation=d.get("skip_validation", None), - url=d.get("url", None), - ) + def from_dict(cls, d: Dict[str, Any]) -> CredentialDependency: + """Deserializes the CredentialDependency from a dictionary.""" + return cls(credential_name=d.get("credential_name", None)) @dataclass -class CreateFunction: - name: str - """Name of function, relative to parent schema.""" +class CredentialInfo: + aws_iam_role: Optional[AwsIamRole] = None + """The AWS IAM role configuration.""" - catalog_name: str - """Name of parent catalog.""" + azure_managed_identity: 
Optional[AzureManagedIdentity] = None + """The Azure managed identity configuration.""" - schema_name: str - """Name of parent schema relative to its parent catalog.""" + azure_service_principal: Optional[AzureServicePrincipal] = None + """The Azure service principal configuration.""" - input_params: FunctionParameterInfos + comment: Optional[str] = None + """Comment associated with the credential.""" - data_type: ColumnTypeName - """Scalar function return data type.""" + created_at: Optional[int] = None + """Time at which this credential was created, in epoch milliseconds.""" - full_data_type: str - """Pretty printed function data type.""" + created_by: Optional[str] = None + """Username of credential creator.""" - routine_body: CreateFunctionRoutineBody - """Function language. When **EXTERNAL** is used, the language of the routine function should be - specified in the __external_language__ field, and the __return_params__ of the function cannot - be used (as **TABLE** return type is not supported), and the __sql_data_access__ field must be - **NO_SQL**.""" + databricks_gcp_service_account: Optional[DatabricksGcpServiceAccount] = None + """The Databricks managed GCP service account configuration.""" - routine_definition: str - """Function body.""" + full_name: Optional[str] = None + """The full name of the credential.""" - parameter_style: CreateFunctionParameterStyle - """Function parameter style. 
**S** is the value for SQL.""" + id: Optional[str] = None + """The unique identifier of the credential.""" - is_deterministic: bool - """Whether the function is deterministic.""" + isolation_mode: Optional[IsolationMode] = None + """Whether the current securable is accessible from all workspaces or a specific set of workspaces.""" - sql_data_access: CreateFunctionSqlDataAccess - """Function SQL data access.""" - - is_null_call: bool - """Function null call.""" - - security_type: CreateFunctionSecurityType - """Function security type.""" - - specific_name: str - """Specific name of the function; Reserved for future use.""" + metastore_id: Optional[str] = None + """Unique identifier of the parent metastore.""" - comment: Optional[str] = None - """User-provided free-form text description.""" + name: Optional[str] = None + """The credential name. The name must be unique among storage and service credentials within the + metastore.""" - external_language: Optional[str] = None - """External function language.""" + owner: Optional[str] = None + """Username of current owner of credential.""" - external_name: Optional[str] = None - """External function name.""" + purpose: Optional[CredentialPurpose] = None + """Indicates the purpose of the credential.""" - properties: Optional[str] = None - """JSON-serialized key-value pair map, encoded (escaped) as a string.""" + read_only: Optional[bool] = None + """Whether the credential is usable only for read operations. 
Only applicable when purpose is + **STORAGE**.""" - return_params: Optional[FunctionParameterInfos] = None - """Table function return parameters.""" + updated_at: Optional[int] = None + """Time at which this credential was last modified, in epoch milliseconds.""" - routine_dependencies: Optional[DependencyList] = None - """Function dependencies.""" + updated_by: Optional[str] = None + """Username of user who last modified the credential.""" - sql_path: Optional[str] = None - """List of schemes whose objects can be referenced without qualification.""" + used_for_managed_storage: Optional[bool] = None + """Whether this credential is the current metastore's root storage credential. Only applicable when + purpose is **STORAGE**.""" def as_dict(self) -> dict: - """Serializes the CreateFunction into a dictionary suitable for use as a JSON request body.""" + """Serializes the CredentialInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.catalog_name is not None: - body["catalog_name"] = self.catalog_name + if self.aws_iam_role: + body["aws_iam_role"] = self.aws_iam_role.as_dict() + if self.azure_managed_identity: + body["azure_managed_identity"] = self.azure_managed_identity.as_dict() + if self.azure_service_principal: + body["azure_service_principal"] = self.azure_service_principal.as_dict() if self.comment is not None: body["comment"] = self.comment - if self.data_type is not None: - body["data_type"] = self.data_type.value - if self.external_language is not None: - body["external_language"] = self.external_language - if self.external_name is not None: - body["external_name"] = self.external_name - if self.full_data_type is not None: - body["full_data_type"] = self.full_data_type - if self.input_params: - body["input_params"] = self.input_params.as_dict() - if self.is_deterministic is not None: - body["is_deterministic"] = self.is_deterministic - if self.is_null_call is not None: - body["is_null_call"] = self.is_null_call + if 
self.created_at is not None: + body["created_at"] = self.created_at + if self.created_by is not None: + body["created_by"] = self.created_by + if self.databricks_gcp_service_account: + body["databricks_gcp_service_account"] = self.databricks_gcp_service_account.as_dict() + if self.full_name is not None: + body["full_name"] = self.full_name + if self.id is not None: + body["id"] = self.id + if self.isolation_mode is not None: + body["isolation_mode"] = self.isolation_mode.value + if self.metastore_id is not None: + body["metastore_id"] = self.metastore_id if self.name is not None: body["name"] = self.name - if self.parameter_style is not None: - body["parameter_style"] = self.parameter_style.value - if self.properties is not None: - body["properties"] = self.properties - if self.return_params: - body["return_params"] = self.return_params.as_dict() - if self.routine_body is not None: - body["routine_body"] = self.routine_body.value - if self.routine_definition is not None: - body["routine_definition"] = self.routine_definition - if self.routine_dependencies: - body["routine_dependencies"] = self.routine_dependencies.as_dict() - if self.schema_name is not None: - body["schema_name"] = self.schema_name - if self.security_type is not None: - body["security_type"] = self.security_type.value - if self.specific_name is not None: - body["specific_name"] = self.specific_name - if self.sql_data_access is not None: - body["sql_data_access"] = self.sql_data_access.value - if self.sql_path is not None: - body["sql_path"] = self.sql_path + if self.owner is not None: + body["owner"] = self.owner + if self.purpose is not None: + body["purpose"] = self.purpose.value + if self.read_only is not None: + body["read_only"] = self.read_only + if self.updated_at is not None: + body["updated_at"] = self.updated_at + if self.updated_by is not None: + body["updated_by"] = self.updated_by + if self.used_for_managed_storage is not None: + body["used_for_managed_storage"] = 
self.used_for_managed_storage return body def as_shallow_dict(self) -> dict: - """Serializes the CreateFunction into a shallow dictionary of its immediate attributes.""" + """Serializes the CredentialInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.catalog_name is not None: - body["catalog_name"] = self.catalog_name + if self.aws_iam_role: + body["aws_iam_role"] = self.aws_iam_role + if self.azure_managed_identity: + body["azure_managed_identity"] = self.azure_managed_identity + if self.azure_service_principal: + body["azure_service_principal"] = self.azure_service_principal if self.comment is not None: body["comment"] = self.comment - if self.data_type is not None: - body["data_type"] = self.data_type - if self.external_language is not None: - body["external_language"] = self.external_language - if self.external_name is not None: - body["external_name"] = self.external_name - if self.full_data_type is not None: - body["full_data_type"] = self.full_data_type - if self.input_params: - body["input_params"] = self.input_params - if self.is_deterministic is not None: - body["is_deterministic"] = self.is_deterministic - if self.is_null_call is not None: - body["is_null_call"] = self.is_null_call + if self.created_at is not None: + body["created_at"] = self.created_at + if self.created_by is not None: + body["created_by"] = self.created_by + if self.databricks_gcp_service_account: + body["databricks_gcp_service_account"] = self.databricks_gcp_service_account + if self.full_name is not None: + body["full_name"] = self.full_name + if self.id is not None: + body["id"] = self.id + if self.isolation_mode is not None: + body["isolation_mode"] = self.isolation_mode + if self.metastore_id is not None: + body["metastore_id"] = self.metastore_id if self.name is not None: body["name"] = self.name - if self.parameter_style is not None: - body["parameter_style"] = self.parameter_style - if self.properties is not None: - body["properties"] = 
self.properties - if self.return_params: - body["return_params"] = self.return_params - if self.routine_body is not None: - body["routine_body"] = self.routine_body - if self.routine_definition is not None: - body["routine_definition"] = self.routine_definition - if self.routine_dependencies: - body["routine_dependencies"] = self.routine_dependencies - if self.schema_name is not None: - body["schema_name"] = self.schema_name - if self.security_type is not None: - body["security_type"] = self.security_type - if self.specific_name is not None: - body["specific_name"] = self.specific_name - if self.sql_data_access is not None: - body["sql_data_access"] = self.sql_data_access - if self.sql_path is not None: - body["sql_path"] = self.sql_path + if self.owner is not None: + body["owner"] = self.owner + if self.purpose is not None: + body["purpose"] = self.purpose + if self.read_only is not None: + body["read_only"] = self.read_only + if self.updated_at is not None: + body["updated_at"] = self.updated_at + if self.updated_by is not None: + body["updated_by"] = self.updated_by + if self.used_for_managed_storage is not None: + body["used_for_managed_storage"] = self.used_for_managed_storage return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateFunction: - """Deserializes the CreateFunction from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> CredentialInfo: + """Deserializes the CredentialInfo from a dictionary.""" return cls( - catalog_name=d.get("catalog_name", None), + aws_iam_role=_from_dict(d, "aws_iam_role", AwsIamRole), + azure_managed_identity=_from_dict(d, "azure_managed_identity", AzureManagedIdentity), + azure_service_principal=_from_dict(d, "azure_service_principal", AzureServicePrincipal), comment=d.get("comment", None), - data_type=_enum(d, "data_type", ColumnTypeName), - external_language=d.get("external_language", None), - external_name=d.get("external_name", None), - full_data_type=d.get("full_data_type", None), - 
input_params=_from_dict(d, "input_params", FunctionParameterInfos), - is_deterministic=d.get("is_deterministic", None), - is_null_call=d.get("is_null_call", None), + created_at=d.get("created_at", None), + created_by=d.get("created_by", None), + databricks_gcp_service_account=_from_dict(d, "databricks_gcp_service_account", DatabricksGcpServiceAccount), + full_name=d.get("full_name", None), + id=d.get("id", None), + isolation_mode=_enum(d, "isolation_mode", IsolationMode), + metastore_id=d.get("metastore_id", None), name=d.get("name", None), - parameter_style=_enum(d, "parameter_style", CreateFunctionParameterStyle), - properties=d.get("properties", None), - return_params=_from_dict(d, "return_params", FunctionParameterInfos), - routine_body=_enum(d, "routine_body", CreateFunctionRoutineBody), - routine_definition=d.get("routine_definition", None), - routine_dependencies=_from_dict(d, "routine_dependencies", DependencyList), - schema_name=d.get("schema_name", None), - security_type=_enum(d, "security_type", CreateFunctionSecurityType), - specific_name=d.get("specific_name", None), - sql_data_access=_enum(d, "sql_data_access", CreateFunctionSqlDataAccess), - sql_path=d.get("sql_path", None), + owner=d.get("owner", None), + purpose=_enum(d, "purpose", CredentialPurpose), + read_only=d.get("read_only", None), + updated_at=d.get("updated_at", None), + updated_by=d.get("updated_by", None), + used_for_managed_storage=d.get("used_for_managed_storage", None), ) -class CreateFunctionParameterStyle(Enum): - """Function parameter style. 
**S** is the value for SQL.""" +class CredentialPurpose(Enum): - S = "S" + SERVICE = "SERVICE" + STORAGE = "STORAGE" -@dataclass -class CreateFunctionRequest: - function_info: CreateFunction - """Partial __FunctionInfo__ specifying the function to be created.""" +class CredentialType(Enum): + """Next Id: 13""" - def as_dict(self) -> dict: - """Serializes the CreateFunctionRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.function_info: - body["function_info"] = self.function_info.as_dict() - return body + ANY_STATIC_CREDENTIAL = "ANY_STATIC_CREDENTIAL" + BEARER_TOKEN = "BEARER_TOKEN" + OAUTH_ACCESS_TOKEN = "OAUTH_ACCESS_TOKEN" + OAUTH_M2M = "OAUTH_M2M" + OAUTH_REFRESH_TOKEN = "OAUTH_REFRESH_TOKEN" + OAUTH_RESOURCE_OWNER_PASSWORD = "OAUTH_RESOURCE_OWNER_PASSWORD" + OAUTH_U2M = "OAUTH_U2M" + OAUTH_U2M_MAPPING = "OAUTH_U2M_MAPPING" + OIDC_TOKEN = "OIDC_TOKEN" + PEM_PRIVATE_KEY = "PEM_PRIVATE_KEY" + SERVICE_CREDENTIAL = "SERVICE_CREDENTIAL" + UNKNOWN_CREDENTIAL_TYPE = "UNKNOWN_CREDENTIAL_TYPE" + USERNAME_PASSWORD = "USERNAME_PASSWORD" - def as_shallow_dict(self) -> dict: - """Serializes the CreateFunctionRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.function_info: - body["function_info"] = self.function_info - return body - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateFunctionRequest: - """Deserializes the CreateFunctionRequest from a dictionary.""" - return cls(function_info=_from_dict(d, "function_info", CreateFunction)) - - -class CreateFunctionRoutineBody(Enum): - """Function language. 
When **EXTERNAL** is used, the language of the routine function should be - specified in the __external_language__ field, and the __return_params__ of the function cannot - be used (as **TABLE** return type is not supported), and the __sql_data_access__ field must be - **NO_SQL**.""" +@dataclass +class CredentialValidationResult: + message: Optional[str] = None + """Error message would exist when the result does not equal to **PASS**.""" - EXTERNAL = "EXTERNAL" - SQL = "SQL" + result: Optional[ValidateCredentialResult] = None + """The results of the tested operation.""" + def as_dict(self) -> dict: + """Serializes the CredentialValidationResult into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.message is not None: + body["message"] = self.message + if self.result is not None: + body["result"] = self.result.value + return body -class CreateFunctionSecurityType(Enum): - """The security type of the function.""" + def as_shallow_dict(self) -> dict: + """Serializes the CredentialValidationResult into a shallow dictionary of its immediate attributes.""" + body = {} + if self.message is not None: + body["message"] = self.message + if self.result is not None: + body["result"] = self.result + return body - DEFINER = "DEFINER" + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> CredentialValidationResult: + """Deserializes the CredentialValidationResult from a dictionary.""" + return cls(message=d.get("message", None), result=_enum(d, "result", ValidateCredentialResult)) -class CreateFunctionSqlDataAccess(Enum): - """Function SQL data access.""" +class DataSourceFormat(Enum): + """Data source format""" - CONTAINS_SQL = "CONTAINS_SQL" - NO_SQL = "NO_SQL" - READS_SQL_DATA = "READS_SQL_DATA" + AVRO = "AVRO" + BIGQUERY_FORMAT = "BIGQUERY_FORMAT" + CSV = "CSV" + DATABRICKS_FORMAT = "DATABRICKS_FORMAT" + DATABRICKS_ROW_STORE_FORMAT = "DATABRICKS_ROW_STORE_FORMAT" + DELTA = "DELTA" + DELTASHARING = "DELTASHARING" + DELTA_UNIFORM_HUDI = 
"DELTA_UNIFORM_HUDI" + DELTA_UNIFORM_ICEBERG = "DELTA_UNIFORM_ICEBERG" + HIVE = "HIVE" + ICEBERG = "ICEBERG" + JSON = "JSON" + MONGODB_FORMAT = "MONGODB_FORMAT" + MYSQL_FORMAT = "MYSQL_FORMAT" + NETSUITE_FORMAT = "NETSUITE_FORMAT" + ORACLE_FORMAT = "ORACLE_FORMAT" + ORC = "ORC" + PARQUET = "PARQUET" + POSTGRESQL_FORMAT = "POSTGRESQL_FORMAT" + REDSHIFT_FORMAT = "REDSHIFT_FORMAT" + SALESFORCE_DATA_CLOUD_FORMAT = "SALESFORCE_DATA_CLOUD_FORMAT" + SALESFORCE_FORMAT = "SALESFORCE_FORMAT" + SNOWFLAKE_FORMAT = "SNOWFLAKE_FORMAT" + SQLDW_FORMAT = "SQLDW_FORMAT" + SQLSERVER_FORMAT = "SQLSERVER_FORMAT" + TERADATA_FORMAT = "TERADATA_FORMAT" + TEXT = "TEXT" + UNITY_CATALOG = "UNITY_CATALOG" + VECTOR_INDEX_FORMAT = "VECTOR_INDEX_FORMAT" + WORKDAY_RAAS_FORMAT = "WORKDAY_RAAS_FORMAT" @dataclass -class CreateMetastore: - name: str - """The user-specified name of the metastore.""" +class DatabricksGcpServiceAccount: + """GCP long-lived credential. Databricks-created Google Cloud Storage service account.""" - region: Optional[str] = None - """Cloud region which the metastore serves (e.g., `us-west-2`, `westus`).""" + credential_id: Optional[str] = None + """The Databricks internal ID that represents this managed identity.""" - storage_root: Optional[str] = None - """The storage root URL for metastore""" + email: Optional[str] = None + """The email of the service account.""" + + private_key_id: Optional[str] = None + """The ID that represents the private key for this Service Account""" def as_dict(self) -> dict: - """Serializes the CreateMetastore into a dictionary suitable for use as a JSON request body.""" + """Serializes the DatabricksGcpServiceAccount into a dictionary suitable for use as a JSON request body.""" body = {} - if self.name is not None: - body["name"] = self.name - if self.region is not None: - body["region"] = self.region - if self.storage_root is not None: - body["storage_root"] = self.storage_root + if self.credential_id is not None: + body["credential_id"] = 
self.credential_id + if self.email is not None: + body["email"] = self.email + if self.private_key_id is not None: + body["private_key_id"] = self.private_key_id return body def as_shallow_dict(self) -> dict: - """Serializes the CreateMetastore into a shallow dictionary of its immediate attributes.""" + """Serializes the DatabricksGcpServiceAccount into a shallow dictionary of its immediate attributes.""" body = {} - if self.name is not None: - body["name"] = self.name - if self.region is not None: - body["region"] = self.region - if self.storage_root is not None: - body["storage_root"] = self.storage_root + if self.credential_id is not None: + body["credential_id"] = self.credential_id + if self.email is not None: + body["email"] = self.email + if self.private_key_id is not None: + body["private_key_id"] = self.private_key_id return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateMetastore: - """Deserializes the CreateMetastore from a dictionary.""" - return cls(name=d.get("name", None), region=d.get("region", None), storage_root=d.get("storage_root", None)) + def from_dict(cls, d: Dict[str, Any]) -> DatabricksGcpServiceAccount: + """Deserializes the DatabricksGcpServiceAccount from a dictionary.""" + return cls( + credential_id=d.get("credential_id", None), + email=d.get("email", None), + private_key_id=d.get("private_key_id", None), + ) @dataclass -class CreateMetastoreAssignment: - metastore_id: str - """The unique ID of the metastore.""" - - default_catalog_name: str - """The name of the default catalog in the metastore. This field is deprecated. Please use "Default - Namespace API" to configure the default catalog for a Databricks workspace.""" - - workspace_id: Optional[int] = None - """A workspace ID.""" +class DatabricksGcpServiceAccountRequest: + """GCP long-lived credential. 
Databricks-created Google Cloud Storage service account.""" def as_dict(self) -> dict: - """Serializes the CreateMetastoreAssignment into a dictionary suitable for use as a JSON request body.""" + """Serializes the DatabricksGcpServiceAccountRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.default_catalog_name is not None: - body["default_catalog_name"] = self.default_catalog_name - if self.metastore_id is not None: - body["metastore_id"] = self.metastore_id - if self.workspace_id is not None: - body["workspace_id"] = self.workspace_id return body def as_shallow_dict(self) -> dict: - """Serializes the CreateMetastoreAssignment into a shallow dictionary of its immediate attributes.""" + """Serializes the DatabricksGcpServiceAccountRequest into a shallow dictionary of its immediate attributes.""" body = {} - if self.default_catalog_name is not None: - body["default_catalog_name"] = self.default_catalog_name - if self.metastore_id is not None: - body["metastore_id"] = self.metastore_id - if self.workspace_id is not None: - body["workspace_id"] = self.workspace_id return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateMetastoreAssignment: - """Deserializes the CreateMetastoreAssignment from a dictionary.""" - return cls( - default_catalog_name=d.get("default_catalog_name", None), - metastore_id=d.get("metastore_id", None), - workspace_id=d.get("workspace_id", None), - ) + def from_dict(cls, d: Dict[str, Any]) -> DatabricksGcpServiceAccountRequest: + """Deserializes the DatabricksGcpServiceAccountRequest from a dictionary.""" + return cls() @dataclass -class CreateMonitor: - assets_dir: str - """The directory to store monitoring assets (e.g. dashboard, metric tables).""" +class DatabricksGcpServiceAccountResponse: + """GCP long-lived credential. 
Databricks-created Google Cloud Storage service account.""" - output_schema_name: str - """Schema where output metric tables are created.""" + credential_id: Optional[str] = None + """The Databricks internal ID that represents this managed identity.""" - baseline_table_name: Optional[str] = None - """Name of the baseline table from which drift metrics are computed from. Columns in the monitored - table should also be present in the baseline table.""" + email: Optional[str] = None + """The email of the service account.""" - custom_metrics: Optional[List[MonitorMetric]] = None - """Custom metrics to compute on the monitored table. These can be aggregate metrics, derived - metrics (from already computed aggregate metrics), or drift metrics (comparing metrics across - time windows).""" + def as_dict(self) -> dict: + """Serializes the DatabricksGcpServiceAccountResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.credential_id is not None: + body["credential_id"] = self.credential_id + if self.email is not None: + body["email"] = self.email + return body - data_classification_config: Optional[MonitorDataClassificationConfig] = None - """The data classification config for the monitor.""" + def as_shallow_dict(self) -> dict: + """Serializes the DatabricksGcpServiceAccountResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.credential_id is not None: + body["credential_id"] = self.credential_id + if self.email is not None: + body["email"] = self.email + return body - inference_log: Optional[MonitorInferenceLog] = None - """Configuration for monitoring inference logs.""" + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> DatabricksGcpServiceAccountResponse: + """Deserializes the DatabricksGcpServiceAccountResponse from a dictionary.""" + return cls(credential_id=d.get("credential_id", None), email=d.get("email", None)) - notifications: Optional[MonitorNotifications] = None - """The 
notification settings for the monitor.""" - schedule: Optional[MonitorCronSchedule] = None - """The schedule for automatically updating and refreshing metric tables.""" +@dataclass +class DeleteAliasResponse: + def as_dict(self) -> dict: + """Serializes the DeleteAliasResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + return body - skip_builtin_dashboard: Optional[bool] = None - """Whether to skip creating a default dashboard summarizing data quality metrics.""" + def as_shallow_dict(self) -> dict: + """Serializes the DeleteAliasResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body - slicing_exprs: Optional[List[str]] = None - """List of column expressions to slice data with for targeted analysis. The data is grouped by each - expression independently, resulting in a separate slice for each predicate and its complements. - For high-cardinality columns, only the top 100 unique values by frequency will generate slices.""" + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> DeleteAliasResponse: + """Deserializes the DeleteAliasResponse from a dictionary.""" + return cls() - snapshot: Optional[MonitorSnapshot] = None - """Configuration for monitoring snapshot tables.""" - table_name: Optional[str] = None - """Full name of the table.""" +@dataclass +class DeleteCredentialResponse: + def as_dict(self) -> dict: + """Serializes the DeleteCredentialResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + return body - time_series: Optional[MonitorTimeSeries] = None - """Configuration for monitoring time series tables.""" + def as_shallow_dict(self) -> dict: + """Serializes the DeleteCredentialResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body - warehouse_id: Optional[str] = None - """Optional argument to specify the warehouse for dashboard creation. 
If not specified, the first - running warehouse will be used.""" - - def as_dict(self) -> dict: - """Serializes the CreateMonitor into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.assets_dir is not None: - body["assets_dir"] = self.assets_dir - if self.baseline_table_name is not None: - body["baseline_table_name"] = self.baseline_table_name - if self.custom_metrics: - body["custom_metrics"] = [v.as_dict() for v in self.custom_metrics] - if self.data_classification_config: - body["data_classification_config"] = self.data_classification_config.as_dict() - if self.inference_log: - body["inference_log"] = self.inference_log.as_dict() - if self.notifications: - body["notifications"] = self.notifications.as_dict() - if self.output_schema_name is not None: - body["output_schema_name"] = self.output_schema_name - if self.schedule: - body["schedule"] = self.schedule.as_dict() - if self.skip_builtin_dashboard is not None: - body["skip_builtin_dashboard"] = self.skip_builtin_dashboard - if self.slicing_exprs: - body["slicing_exprs"] = [v for v in self.slicing_exprs] - if self.snapshot: - body["snapshot"] = self.snapshot.as_dict() - if self.table_name is not None: - body["table_name"] = self.table_name - if self.time_series: - body["time_series"] = self.time_series.as_dict() - if self.warehouse_id is not None: - body["warehouse_id"] = self.warehouse_id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreateMonitor into a shallow dictionary of its immediate attributes.""" - body = {} - if self.assets_dir is not None: - body["assets_dir"] = self.assets_dir - if self.baseline_table_name is not None: - body["baseline_table_name"] = self.baseline_table_name - if self.custom_metrics: - body["custom_metrics"] = self.custom_metrics - if self.data_classification_config: - body["data_classification_config"] = self.data_classification_config - if self.inference_log: - body["inference_log"] = self.inference_log - if 
self.notifications: - body["notifications"] = self.notifications - if self.output_schema_name is not None: - body["output_schema_name"] = self.output_schema_name - if self.schedule: - body["schedule"] = self.schedule - if self.skip_builtin_dashboard is not None: - body["skip_builtin_dashboard"] = self.skip_builtin_dashboard - if self.slicing_exprs: - body["slicing_exprs"] = self.slicing_exprs - if self.snapshot: - body["snapshot"] = self.snapshot - if self.table_name is not None: - body["table_name"] = self.table_name - if self.time_series: - body["time_series"] = self.time_series - if self.warehouse_id is not None: - body["warehouse_id"] = self.warehouse_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateMonitor: - """Deserializes the CreateMonitor from a dictionary.""" - return cls( - assets_dir=d.get("assets_dir", None), - baseline_table_name=d.get("baseline_table_name", None), - custom_metrics=_repeated_dict(d, "custom_metrics", MonitorMetric), - data_classification_config=_from_dict(d, "data_classification_config", MonitorDataClassificationConfig), - inference_log=_from_dict(d, "inference_log", MonitorInferenceLog), - notifications=_from_dict(d, "notifications", MonitorNotifications), - output_schema_name=d.get("output_schema_name", None), - schedule=_from_dict(d, "schedule", MonitorCronSchedule), - skip_builtin_dashboard=d.get("skip_builtin_dashboard", None), - slicing_exprs=d.get("slicing_exprs", None), - snapshot=_from_dict(d, "snapshot", MonitorSnapshot), - table_name=d.get("table_name", None), - time_series=_from_dict(d, "time_series", MonitorTimeSeries), - warehouse_id=d.get("warehouse_id", None), - ) - - -@dataclass -class CreateRegisteredModelRequest: - catalog_name: str - """The name of the catalog where the schema and the registered model reside""" - - schema_name: str - """The name of the schema where the registered model resides""" - - name: str - """The name of the registered model""" - - comment: Optional[str] = 
None - """The comment attached to the registered model""" - - storage_location: Optional[str] = None - """The storage location on the cloud under which model version data files are stored""" - - def as_dict(self) -> dict: - """Serializes the CreateRegisteredModelRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.catalog_name is not None: - body["catalog_name"] = self.catalog_name - if self.comment is not None: - body["comment"] = self.comment - if self.name is not None: - body["name"] = self.name - if self.schema_name is not None: - body["schema_name"] = self.schema_name - if self.storage_location is not None: - body["storage_location"] = self.storage_location - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreateRegisteredModelRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.catalog_name is not None: - body["catalog_name"] = self.catalog_name - if self.comment is not None: - body["comment"] = self.comment - if self.name is not None: - body["name"] = self.name - if self.schema_name is not None: - body["schema_name"] = self.schema_name - if self.storage_location is not None: - body["storage_location"] = self.storage_location - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateRegisteredModelRequest: - """Deserializes the CreateRegisteredModelRequest from a dictionary.""" - return cls( - catalog_name=d.get("catalog_name", None), - comment=d.get("comment", None), - name=d.get("name", None), - schema_name=d.get("schema_name", None), - storage_location=d.get("storage_location", None), - ) + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> DeleteCredentialResponse: + """Deserializes the DeleteCredentialResponse from a dictionary.""" + return cls() @dataclass -class CreateRequestExternalLineage: +class DeleteRequestExternalLineage: source: ExternalLineageObject """Source object of the external lineage relationship.""" target: 
ExternalLineageObject """Target object of the external lineage relationship.""" - columns: Optional[List[ColumnRelationship]] = None - """List of column relationships between source and target objects.""" - id: Optional[str] = None """Unique identifier of the external lineage relationship.""" - properties: Optional[Dict[str, str]] = None - """Key-value properties associated with the external lineage relationship.""" - def as_dict(self) -> dict: - """Serializes the CreateRequestExternalLineage into a dictionary suitable for use as a JSON request body.""" + """Serializes the DeleteRequestExternalLineage into a dictionary suitable for use as a JSON request body.""" body = {} - if self.columns: - body["columns"] = [v.as_dict() for v in self.columns] if self.id is not None: body["id"] = self.id - if self.properties: - body["properties"] = self.properties if self.source: body["source"] = self.source.as_dict() if self.target: @@ -2575,14 +2383,10 @@ def as_dict(self) -> dict: return body def as_shallow_dict(self) -> dict: - """Serializes the CreateRequestExternalLineage into a shallow dictionary of its immediate attributes.""" + """Serializes the DeleteRequestExternalLineage into a shallow dictionary of its immediate attributes.""" body = {} - if self.columns: - body["columns"] = self.columns if self.id is not None: body["id"] = self.id - if self.properties: - body["properties"] = self.properties if self.source: body["source"] = self.source if self.target: @@ -2590,721 +2394,799 @@ def as_shallow_dict(self) -> dict: return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateRequestExternalLineage: - """Deserializes the CreateRequestExternalLineage from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> DeleteRequestExternalLineage: + """Deserializes the DeleteRequestExternalLineage from a dictionary.""" return cls( - columns=_repeated_dict(d, "columns", ColumnRelationship), id=d.get("id", None), - properties=d.get("properties", None), 
source=_from_dict(d, "source", ExternalLineageObject), target=_from_dict(d, "target", ExternalLineageObject), ) @dataclass -class CreateResponse: +class DeleteResponse: def as_dict(self) -> dict: - """Serializes the CreateResponse into a dictionary suitable for use as a JSON request body.""" + """Serializes the DeleteResponse into a dictionary suitable for use as a JSON request body.""" body = {} return body def as_shallow_dict(self) -> dict: - """Serializes the CreateResponse into a shallow dictionary of its immediate attributes.""" + """Serializes the DeleteResponse into a shallow dictionary of its immediate attributes.""" body = {} return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateResponse: - """Deserializes the CreateResponse from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> DeleteResponse: + """Deserializes the DeleteResponse from a dictionary.""" return cls() @dataclass -class CreateSchema: - name: str - """Name of schema, relative to parent catalog.""" +class DeleteTableConstraintResponse: + def as_dict(self) -> dict: + """Serializes the DeleteTableConstraintResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + return body - catalog_name: str - """Name of parent catalog.""" + def as_shallow_dict(self) -> dict: + """Serializes the DeleteTableConstraintResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body - comment: Optional[str] = None - """User-provided free-form text description.""" + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> DeleteTableConstraintResponse: + """Deserializes the DeleteTableConstraintResponse from a dictionary.""" + return cls() - properties: Optional[Dict[str, str]] = None - """A map of key-value properties attached to the securable.""" - storage_root: Optional[str] = None - """Storage root URL for managed tables within schema.""" +@dataclass +class DeltaRuntimePropertiesKvPairs: + """Properties pertaining to the current 
state of the delta table as given by the commit server. + This does not contain **delta.*** (input) properties in __TableInfo.properties__.""" + + delta_runtime_properties: Dict[str, str] + """A map of key-value properties attached to the securable.""" def as_dict(self) -> dict: - """Serializes the CreateSchema into a dictionary suitable for use as a JSON request body.""" + """Serializes the DeltaRuntimePropertiesKvPairs into a dictionary suitable for use as a JSON request body.""" body = {} - if self.catalog_name is not None: - body["catalog_name"] = self.catalog_name - if self.comment is not None: - body["comment"] = self.comment - if self.name is not None: - body["name"] = self.name - if self.properties: - body["properties"] = self.properties - if self.storage_root is not None: - body["storage_root"] = self.storage_root + if self.delta_runtime_properties: + body["delta_runtime_properties"] = self.delta_runtime_properties return body def as_shallow_dict(self) -> dict: - """Serializes the CreateSchema into a shallow dictionary of its immediate attributes.""" + """Serializes the DeltaRuntimePropertiesKvPairs into a shallow dictionary of its immediate attributes.""" body = {} - if self.catalog_name is not None: - body["catalog_name"] = self.catalog_name - if self.comment is not None: - body["comment"] = self.comment - if self.name is not None: - body["name"] = self.name - if self.properties: - body["properties"] = self.properties - if self.storage_root is not None: - body["storage_root"] = self.storage_root + if self.delta_runtime_properties: + body["delta_runtime_properties"] = self.delta_runtime_properties return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateSchema: - """Deserializes the CreateSchema from a dictionary.""" - return cls( - catalog_name=d.get("catalog_name", None), - comment=d.get("comment", None), - name=d.get("name", None), - properties=d.get("properties", None), - storage_root=d.get("storage_root", None), - ) - + def 
from_dict(cls, d: Dict[str, Any]) -> DeltaRuntimePropertiesKvPairs: + """Deserializes the DeltaRuntimePropertiesKvPairs from a dictionary.""" + return cls(delta_runtime_properties=d.get("delta_runtime_properties", None)) -@dataclass -class CreateStorageCredential: - name: str - """The credential name. The name must be unique among storage and service credentials within the - metastore.""" - aws_iam_role: Optional[AwsIamRoleRequest] = None - """The AWS IAM role configuration.""" +class DeltaSharingScopeEnum(Enum): - azure_managed_identity: Optional[AzureManagedIdentityRequest] = None - """The Azure managed identity configuration.""" + INTERNAL = "INTERNAL" + INTERNAL_AND_EXTERNAL = "INTERNAL_AND_EXTERNAL" - azure_service_principal: Optional[AzureServicePrincipal] = None - """The Azure service principal configuration.""" - cloudflare_api_token: Optional[CloudflareApiToken] = None - """The Cloudflare API token configuration.""" +@dataclass +class Dependency: + """A dependency of a SQL object. One of the following fields must be defined: __table__, + __function__, __connection__, or __credential__.""" - comment: Optional[str] = None - """Comment associated with the credential.""" + connection: Optional[ConnectionDependency] = None - databricks_gcp_service_account: Optional[DatabricksGcpServiceAccountRequest] = None - """The Databricks managed GCP service account configuration.""" + credential: Optional[CredentialDependency] = None - read_only: Optional[bool] = None - """Whether the credential is usable only for read operations. 
Only applicable when purpose is - **STORAGE**.""" + function: Optional[FunctionDependency] = None - skip_validation: Optional[bool] = None - """Supplying true to this argument skips validation of the created credential.""" + table: Optional[TableDependency] = None def as_dict(self) -> dict: - """Serializes the CreateStorageCredential into a dictionary suitable for use as a JSON request body.""" + """Serializes the Dependency into a dictionary suitable for use as a JSON request body.""" body = {} - if self.aws_iam_role: - body["aws_iam_role"] = self.aws_iam_role.as_dict() - if self.azure_managed_identity: - body["azure_managed_identity"] = self.azure_managed_identity.as_dict() - if self.azure_service_principal: - body["azure_service_principal"] = self.azure_service_principal.as_dict() - if self.cloudflare_api_token: - body["cloudflare_api_token"] = self.cloudflare_api_token.as_dict() - if self.comment is not None: - body["comment"] = self.comment - if self.databricks_gcp_service_account: - body["databricks_gcp_service_account"] = self.databricks_gcp_service_account.as_dict() - if self.name is not None: - body["name"] = self.name - if self.read_only is not None: - body["read_only"] = self.read_only - if self.skip_validation is not None: - body["skip_validation"] = self.skip_validation + if self.connection: + body["connection"] = self.connection.as_dict() + if self.credential: + body["credential"] = self.credential.as_dict() + if self.function: + body["function"] = self.function.as_dict() + if self.table: + body["table"] = self.table.as_dict() return body def as_shallow_dict(self) -> dict: - """Serializes the CreateStorageCredential into a shallow dictionary of its immediate attributes.""" + """Serializes the Dependency into a shallow dictionary of its immediate attributes.""" body = {} - if self.aws_iam_role: - body["aws_iam_role"] = self.aws_iam_role - if self.azure_managed_identity: - body["azure_managed_identity"] = self.azure_managed_identity - if 
self.azure_service_principal: - body["azure_service_principal"] = self.azure_service_principal - if self.cloudflare_api_token: - body["cloudflare_api_token"] = self.cloudflare_api_token - if self.comment is not None: - body["comment"] = self.comment - if self.databricks_gcp_service_account: - body["databricks_gcp_service_account"] = self.databricks_gcp_service_account - if self.name is not None: - body["name"] = self.name - if self.read_only is not None: - body["read_only"] = self.read_only - if self.skip_validation is not None: - body["skip_validation"] = self.skip_validation + if self.connection: + body["connection"] = self.connection + if self.credential: + body["credential"] = self.credential + if self.function: + body["function"] = self.function + if self.table: + body["table"] = self.table return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateStorageCredential: - """Deserializes the CreateStorageCredential from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> Dependency: + """Deserializes the Dependency from a dictionary.""" return cls( - aws_iam_role=_from_dict(d, "aws_iam_role", AwsIamRoleRequest), - azure_managed_identity=_from_dict(d, "azure_managed_identity", AzureManagedIdentityRequest), - azure_service_principal=_from_dict(d, "azure_service_principal", AzureServicePrincipal), - cloudflare_api_token=_from_dict(d, "cloudflare_api_token", CloudflareApiToken), - comment=d.get("comment", None), - databricks_gcp_service_account=_from_dict( - d, "databricks_gcp_service_account", DatabricksGcpServiceAccountRequest - ), - name=d.get("name", None), - read_only=d.get("read_only", None), - skip_validation=d.get("skip_validation", None), + connection=_from_dict(d, "connection", ConnectionDependency), + credential=_from_dict(d, "credential", CredentialDependency), + function=_from_dict(d, "function", FunctionDependency), + table=_from_dict(d, "table", TableDependency), ) @dataclass -class CreateTableConstraint: - full_name_arg: str - 
"""The full name of the table referenced by the constraint.""" +class DependencyList: + """A list of dependencies.""" - constraint: TableConstraint + dependencies: Optional[List[Dependency]] = None + """Array of dependencies.""" def as_dict(self) -> dict: - """Serializes the CreateTableConstraint into a dictionary suitable for use as a JSON request body.""" + """Serializes the DependencyList into a dictionary suitable for use as a JSON request body.""" body = {} - if self.constraint: - body["constraint"] = self.constraint.as_dict() - if self.full_name_arg is not None: - body["full_name_arg"] = self.full_name_arg + if self.dependencies: + body["dependencies"] = [v.as_dict() for v in self.dependencies] return body def as_shallow_dict(self) -> dict: - """Serializes the CreateTableConstraint into a shallow dictionary of its immediate attributes.""" + """Serializes the DependencyList into a shallow dictionary of its immediate attributes.""" body = {} - if self.constraint: - body["constraint"] = self.constraint - if self.full_name_arg is not None: - body["full_name_arg"] = self.full_name_arg + if self.dependencies: + body["dependencies"] = self.dependencies return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateTableConstraint: - """Deserializes the CreateTableConstraint from a dictionary.""" - return cls(constraint=_from_dict(d, "constraint", TableConstraint), full_name_arg=d.get("full_name_arg", None)) + def from_dict(cls, d: Dict[str, Any]) -> DependencyList: + """Deserializes the DependencyList from a dictionary.""" + return cls(dependencies=_repeated_dict(d, "dependencies", Dependency)) @dataclass -class CreateVolumeRequestContent: - catalog_name: str - """The name of the catalog where the schema and the volume are""" +class DisableResponse: + def as_dict(self) -> dict: + """Serializes the DisableResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + return body - schema_name: str - """The name of the schema where the 
volume is""" + def as_shallow_dict(self) -> dict: + """Serializes the DisableResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body - name: str - """The name of the volume""" + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> DisableResponse: + """Deserializes the DisableResponse from a dictionary.""" + return cls() - volume_type: VolumeType - comment: Optional[str] = None - """The comment attached to the volume""" +@dataclass +class EffectivePermissionsList: + next_page_token: Optional[str] = None + """Opaque token to retrieve the next page of results. Absent if there are no more pages. + __page_token__ should be set to this value for the next request (for the next page of results).""" - storage_location: Optional[str] = None - """The storage location on the cloud""" + privilege_assignments: Optional[List[EffectivePrivilegeAssignment]] = None + """The privileges conveyed to each principal (either directly or via inheritance)""" def as_dict(self) -> dict: - """Serializes the CreateVolumeRequestContent into a dictionary suitable for use as a JSON request body.""" + """Serializes the EffectivePermissionsList into a dictionary suitable for use as a JSON request body.""" body = {} - if self.catalog_name is not None: - body["catalog_name"] = self.catalog_name - if self.comment is not None: - body["comment"] = self.comment - if self.name is not None: - body["name"] = self.name - if self.schema_name is not None: - body["schema_name"] = self.schema_name - if self.storage_location is not None: - body["storage_location"] = self.storage_location - if self.volume_type is not None: - body["volume_type"] = self.volume_type.value + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.privilege_assignments: + body["privilege_assignments"] = [v.as_dict() for v in self.privilege_assignments] return body def as_shallow_dict(self) -> dict: - """Serializes the CreateVolumeRequestContent into a 
shallow dictionary of its immediate attributes.""" + """Serializes the EffectivePermissionsList into a shallow dictionary of its immediate attributes.""" body = {} - if self.catalog_name is not None: - body["catalog_name"] = self.catalog_name - if self.comment is not None: - body["comment"] = self.comment - if self.name is not None: - body["name"] = self.name - if self.schema_name is not None: - body["schema_name"] = self.schema_name - if self.storage_location is not None: - body["storage_location"] = self.storage_location - if self.volume_type is not None: - body["volume_type"] = self.volume_type + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.privilege_assignments: + body["privilege_assignments"] = self.privilege_assignments return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateVolumeRequestContent: - """Deserializes the CreateVolumeRequestContent from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> EffectivePermissionsList: + """Deserializes the EffectivePermissionsList from a dictionary.""" return cls( - catalog_name=d.get("catalog_name", None), - comment=d.get("comment", None), - name=d.get("name", None), - schema_name=d.get("schema_name", None), - storage_location=d.get("storage_location", None), - volume_type=_enum(d, "volume_type", VolumeType), + next_page_token=d.get("next_page_token", None), + privilege_assignments=_repeated_dict(d, "privilege_assignments", EffectivePrivilegeAssignment), ) @dataclass -class CredentialDependency: - """A credential that is dependent on a SQL object.""" +class EffectivePredictiveOptimizationFlag: + value: EnablePredictiveOptimization + """Whether predictive optimization should be enabled for this object and objects under it.""" - credential_name: Optional[str] = None - """Full name of the dependent credential, in the form of __credential_name__.""" + inherited_from_name: Optional[str] = None + """The name of the object from which the flag was 
inherited. If there was no inheritance, this + field is left blank.""" + + inherited_from_type: Optional[EffectivePredictiveOptimizationFlagInheritedFromType] = None + """The type of the object from which the flag was inherited. If there was no inheritance, this + field is left blank.""" def as_dict(self) -> dict: - """Serializes the CredentialDependency into a dictionary suitable for use as a JSON request body.""" + """Serializes the EffectivePredictiveOptimizationFlag into a dictionary suitable for use as a JSON request body.""" body = {} - if self.credential_name is not None: - body["credential_name"] = self.credential_name + if self.inherited_from_name is not None: + body["inherited_from_name"] = self.inherited_from_name + if self.inherited_from_type is not None: + body["inherited_from_type"] = self.inherited_from_type.value + if self.value is not None: + body["value"] = self.value.value return body def as_shallow_dict(self) -> dict: - """Serializes the CredentialDependency into a shallow dictionary of its immediate attributes.""" + """Serializes the EffectivePredictiveOptimizationFlag into a shallow dictionary of its immediate attributes.""" body = {} - if self.credential_name is not None: - body["credential_name"] = self.credential_name + if self.inherited_from_name is not None: + body["inherited_from_name"] = self.inherited_from_name + if self.inherited_from_type is not None: + body["inherited_from_type"] = self.inherited_from_type + if self.value is not None: + body["value"] = self.value return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CredentialDependency: - """Deserializes the CredentialDependency from a dictionary.""" - return cls(credential_name=d.get("credential_name", None)) - - -@dataclass -class CredentialInfo: - aws_iam_role: Optional[AwsIamRole] = None - """The AWS IAM role configuration.""" + def from_dict(cls, d: Dict[str, Any]) -> EffectivePredictiveOptimizationFlag: + """Deserializes the EffectivePredictiveOptimizationFlag 
from a dictionary.""" + return cls( + inherited_from_name=d.get("inherited_from_name", None), + inherited_from_type=_enum(d, "inherited_from_type", EffectivePredictiveOptimizationFlagInheritedFromType), + value=_enum(d, "value", EnablePredictiveOptimization), + ) - azure_managed_identity: Optional[AzureManagedIdentity] = None - """The Azure managed identity configuration.""" - azure_service_principal: Optional[AzureServicePrincipal] = None - """The Azure service principal configuration.""" +class EffectivePredictiveOptimizationFlagInheritedFromType(Enum): + """The type of the object from which the flag was inherited. If there was no inheritance, this + field is left blank.""" - comment: Optional[str] = None - """Comment associated with the credential.""" + CATALOG = "CATALOG" + SCHEMA = "SCHEMA" - created_at: Optional[int] = None - """Time at which this credential was created, in epoch milliseconds.""" - created_by: Optional[str] = None - """Username of credential creator.""" +@dataclass +class EffectivePrivilege: + inherited_from_name: Optional[str] = None + """The full name of the object that conveys this privilege via inheritance. This field is omitted + when privilege is not inherited (it's assigned to the securable itself).""" - databricks_gcp_service_account: Optional[DatabricksGcpServiceAccount] = None - """The Databricks managed GCP service account configuration.""" + inherited_from_type: Optional[SecurableType] = None + """The type of the object that conveys this privilege via inheritance. 
This field is omitted when + privilege is not inherited (it's assigned to the securable itself).""" - full_name: Optional[str] = None - """The full name of the credential.""" + privilege: Optional[Privilege] = None + """The privilege assigned to the principal.""" - id: Optional[str] = None - """The unique identifier of the credential.""" + def as_dict(self) -> dict: + """Serializes the EffectivePrivilege into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.inherited_from_name is not None: + body["inherited_from_name"] = self.inherited_from_name + if self.inherited_from_type is not None: + body["inherited_from_type"] = self.inherited_from_type.value + if self.privilege is not None: + body["privilege"] = self.privilege.value + return body - isolation_mode: Optional[IsolationMode] = None - """Whether the current securable is accessible from all workspaces or a specific set of workspaces.""" + def as_shallow_dict(self) -> dict: + """Serializes the EffectivePrivilege into a shallow dictionary of its immediate attributes.""" + body = {} + if self.inherited_from_name is not None: + body["inherited_from_name"] = self.inherited_from_name + if self.inherited_from_type is not None: + body["inherited_from_type"] = self.inherited_from_type + if self.privilege is not None: + body["privilege"] = self.privilege + return body - metastore_id: Optional[str] = None - """Unique identifier of the parent metastore.""" + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> EffectivePrivilege: + """Deserializes the EffectivePrivilege from a dictionary.""" + return cls( + inherited_from_name=d.get("inherited_from_name", None), + inherited_from_type=_enum(d, "inherited_from_type", SecurableType), + privilege=_enum(d, "privilege", Privilege), + ) - name: Optional[str] = None - """The credential name. 
The name must be unique among storage and service credentials within the - metastore.""" - owner: Optional[str] = None - """Username of current owner of credential.""" +@dataclass +class EffectivePrivilegeAssignment: + principal: Optional[str] = None + """The principal (user email address or group name).""" - purpose: Optional[CredentialPurpose] = None - """Indicates the purpose of the credential.""" + privileges: Optional[List[EffectivePrivilege]] = None + """The privileges conveyed to the principal (either directly or via inheritance).""" - read_only: Optional[bool] = None - """Whether the credential is usable only for read operations. Only applicable when purpose is - **STORAGE**.""" + def as_dict(self) -> dict: + """Serializes the EffectivePrivilegeAssignment into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.principal is not None: + body["principal"] = self.principal + if self.privileges: + body["privileges"] = [v.as_dict() for v in self.privileges] + return body - updated_at: Optional[int] = None - """Time at which this credential was last modified, in epoch milliseconds.""" + def as_shallow_dict(self) -> dict: + """Serializes the EffectivePrivilegeAssignment into a shallow dictionary of its immediate attributes.""" + body = {} + if self.principal is not None: + body["principal"] = self.principal + if self.privileges: + body["privileges"] = self.privileges + return body - updated_by: Optional[str] = None - """Username of user who last modified the credential.""" + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> EffectivePrivilegeAssignment: + """Deserializes the EffectivePrivilegeAssignment from a dictionary.""" + return cls(principal=d.get("principal", None), privileges=_repeated_dict(d, "privileges", EffectivePrivilege)) + + +class EnablePredictiveOptimization(Enum): + + DISABLE = "DISABLE" + ENABLE = "ENABLE" + INHERIT = "INHERIT" - used_for_managed_storage: Optional[bool] = None - """Whether this credential is the 
current metastore's root storage credential. Only applicable when - purpose is **STORAGE**.""" +@dataclass +class EnableResponse: def as_dict(self) -> dict: - """Serializes the CredentialInfo into a dictionary suitable for use as a JSON request body.""" + """Serializes the EnableResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.aws_iam_role: - body["aws_iam_role"] = self.aws_iam_role.as_dict() - if self.azure_managed_identity: - body["azure_managed_identity"] = self.azure_managed_identity.as_dict() - if self.azure_service_principal: - body["azure_service_principal"] = self.azure_service_principal.as_dict() - if self.comment is not None: - body["comment"] = self.comment - if self.created_at is not None: - body["created_at"] = self.created_at - if self.created_by is not None: - body["created_by"] = self.created_by - if self.databricks_gcp_service_account: - body["databricks_gcp_service_account"] = self.databricks_gcp_service_account.as_dict() - if self.full_name is not None: - body["full_name"] = self.full_name - if self.id is not None: - body["id"] = self.id - if self.isolation_mode is not None: - body["isolation_mode"] = self.isolation_mode.value - if self.metastore_id is not None: - body["metastore_id"] = self.metastore_id - if self.name is not None: - body["name"] = self.name - if self.owner is not None: - body["owner"] = self.owner - if self.purpose is not None: - body["purpose"] = self.purpose.value - if self.read_only is not None: - body["read_only"] = self.read_only - if self.updated_at is not None: - body["updated_at"] = self.updated_at - if self.updated_by is not None: - body["updated_by"] = self.updated_by - if self.used_for_managed_storage is not None: - body["used_for_managed_storage"] = self.used_for_managed_storage return body def as_shallow_dict(self) -> dict: - """Serializes the CredentialInfo into a shallow dictionary of its immediate attributes.""" + """Serializes the EnableResponse into a shallow dictionary 
of its immediate attributes.""" body = {} - if self.aws_iam_role: - body["aws_iam_role"] = self.aws_iam_role - if self.azure_managed_identity: - body["azure_managed_identity"] = self.azure_managed_identity - if self.azure_service_principal: - body["azure_service_principal"] = self.azure_service_principal - if self.comment is not None: - body["comment"] = self.comment - if self.created_at is not None: - body["created_at"] = self.created_at - if self.created_by is not None: - body["created_by"] = self.created_by - if self.databricks_gcp_service_account: - body["databricks_gcp_service_account"] = self.databricks_gcp_service_account - if self.full_name is not None: - body["full_name"] = self.full_name - if self.id is not None: - body["id"] = self.id - if self.isolation_mode is not None: - body["isolation_mode"] = self.isolation_mode - if self.metastore_id is not None: - body["metastore_id"] = self.metastore_id - if self.name is not None: - body["name"] = self.name - if self.owner is not None: - body["owner"] = self.owner - if self.purpose is not None: - body["purpose"] = self.purpose - if self.read_only is not None: - body["read_only"] = self.read_only - if self.updated_at is not None: - body["updated_at"] = self.updated_at - if self.updated_by is not None: - body["updated_by"] = self.updated_by - if self.used_for_managed_storage is not None: - body["used_for_managed_storage"] = self.used_for_managed_storage return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CredentialInfo: - """Deserializes the CredentialInfo from a dictionary.""" - return cls( - aws_iam_role=_from_dict(d, "aws_iam_role", AwsIamRole), - azure_managed_identity=_from_dict(d, "azure_managed_identity", AzureManagedIdentity), - azure_service_principal=_from_dict(d, "azure_service_principal", AzureServicePrincipal), - comment=d.get("comment", None), - created_at=d.get("created_at", None), - created_by=d.get("created_by", None), - databricks_gcp_service_account=_from_dict(d, 
"databricks_gcp_service_account", DatabricksGcpServiceAccount), - full_name=d.get("full_name", None), - id=d.get("id", None), - isolation_mode=_enum(d, "isolation_mode", IsolationMode), - metastore_id=d.get("metastore_id", None), - name=d.get("name", None), - owner=d.get("owner", None), - purpose=_enum(d, "purpose", CredentialPurpose), - read_only=d.get("read_only", None), - updated_at=d.get("updated_at", None), - updated_by=d.get("updated_by", None), - used_for_managed_storage=d.get("used_for_managed_storage", None), - ) + def from_dict(cls, d: Dict[str, Any]) -> EnableResponse: + """Deserializes the EnableResponse from a dictionary.""" + return cls() -class CredentialPurpose(Enum): +@dataclass +class EncryptionDetails: + """Encryption options that apply to clients connecting to cloud storage.""" - SERVICE = "SERVICE" - STORAGE = "STORAGE" + sse_encryption_details: Optional[SseEncryptionDetails] = None + """Server-Side Encryption properties for clients communicating with AWS s3.""" + def as_dict(self) -> dict: + """Serializes the EncryptionDetails into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.sse_encryption_details: + body["sse_encryption_details"] = self.sse_encryption_details.as_dict() + return body -class CredentialType(Enum): - """Next Id: 13""" + def as_shallow_dict(self) -> dict: + """Serializes the EncryptionDetails into a shallow dictionary of its immediate attributes.""" + body = {} + if self.sse_encryption_details: + body["sse_encryption_details"] = self.sse_encryption_details + return body - ANY_STATIC_CREDENTIAL = "ANY_STATIC_CREDENTIAL" - BEARER_TOKEN = "BEARER_TOKEN" - OAUTH_ACCESS_TOKEN = "OAUTH_ACCESS_TOKEN" - OAUTH_M2M = "OAUTH_M2M" - OAUTH_REFRESH_TOKEN = "OAUTH_REFRESH_TOKEN" - OAUTH_RESOURCE_OWNER_PASSWORD = "OAUTH_RESOURCE_OWNER_PASSWORD" - OAUTH_U2M = "OAUTH_U2M" - OAUTH_U2M_MAPPING = "OAUTH_U2M_MAPPING" - OIDC_TOKEN = "OIDC_TOKEN" - PEM_PRIVATE_KEY = "PEM_PRIVATE_KEY" - SERVICE_CREDENTIAL = 
"SERVICE_CREDENTIAL" - UNKNOWN_CREDENTIAL_TYPE = "UNKNOWN_CREDENTIAL_TYPE" - USERNAME_PASSWORD = "USERNAME_PASSWORD" + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> EncryptionDetails: + """Deserializes the EncryptionDetails from a dictionary.""" + return cls(sse_encryption_details=_from_dict(d, "sse_encryption_details", SseEncryptionDetails)) @dataclass -class CredentialValidationResult: - message: Optional[str] = None - """Error message would exist when the result does not equal to **PASS**.""" +class EnvironmentSettings: + environment_version: Optional[str] = None - result: Optional[ValidateCredentialResult] = None - """The results of the tested operation.""" + java_dependencies: Optional[List[str]] = None def as_dict(self) -> dict: - """Serializes the CredentialValidationResult into a dictionary suitable for use as a JSON request body.""" + """Serializes the EnvironmentSettings into a dictionary suitable for use as a JSON request body.""" body = {} - if self.message is not None: - body["message"] = self.message - if self.result is not None: - body["result"] = self.result.value + if self.environment_version is not None: + body["environment_version"] = self.environment_version + if self.java_dependencies: + body["java_dependencies"] = [v for v in self.java_dependencies] return body def as_shallow_dict(self) -> dict: - """Serializes the CredentialValidationResult into a shallow dictionary of its immediate attributes.""" + """Serializes the EnvironmentSettings into a shallow dictionary of its immediate attributes.""" body = {} - if self.message is not None: - body["message"] = self.message - if self.result is not None: - body["result"] = self.result + if self.environment_version is not None: + body["environment_version"] = self.environment_version + if self.java_dependencies: + body["java_dependencies"] = self.java_dependencies return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CredentialValidationResult: - """Deserializes the 
CredentialValidationResult from a dictionary.""" - return cls(message=d.get("message", None), result=_enum(d, "result", ValidateCredentialResult)) + def from_dict(cls, d: Dict[str, Any]) -> EnvironmentSettings: + """Deserializes the EnvironmentSettings from a dictionary.""" + return cls( + environment_version=d.get("environment_version", None), java_dependencies=d.get("java_dependencies", None) + ) -class DataSourceFormat(Enum): - """Data source format""" +@dataclass +class ExternalLineageExternalMetadata: + name: Optional[str] = None - AVRO = "AVRO" - BIGQUERY_FORMAT = "BIGQUERY_FORMAT" - CSV = "CSV" - DATABRICKS_FORMAT = "DATABRICKS_FORMAT" - DATABRICKS_ROW_STORE_FORMAT = "DATABRICKS_ROW_STORE_FORMAT" - DELTA = "DELTA" - DELTASHARING = "DELTASHARING" - DELTA_UNIFORM_HUDI = "DELTA_UNIFORM_HUDI" - DELTA_UNIFORM_ICEBERG = "DELTA_UNIFORM_ICEBERG" - HIVE = "HIVE" - ICEBERG = "ICEBERG" - JSON = "JSON" - MONGODB_FORMAT = "MONGODB_FORMAT" - MYSQL_FORMAT = "MYSQL_FORMAT" - NETSUITE_FORMAT = "NETSUITE_FORMAT" - ORACLE_FORMAT = "ORACLE_FORMAT" - ORC = "ORC" - PARQUET = "PARQUET" - POSTGRESQL_FORMAT = "POSTGRESQL_FORMAT" - REDSHIFT_FORMAT = "REDSHIFT_FORMAT" - SALESFORCE_DATA_CLOUD_FORMAT = "SALESFORCE_DATA_CLOUD_FORMAT" - SALESFORCE_FORMAT = "SALESFORCE_FORMAT" - SNOWFLAKE_FORMAT = "SNOWFLAKE_FORMAT" - SQLDW_FORMAT = "SQLDW_FORMAT" - SQLSERVER_FORMAT = "SQLSERVER_FORMAT" - TERADATA_FORMAT = "TERADATA_FORMAT" - TEXT = "TEXT" - UNITY_CATALOG = "UNITY_CATALOG" - VECTOR_INDEX_FORMAT = "VECTOR_INDEX_FORMAT" - WORKDAY_RAAS_FORMAT = "WORKDAY_RAAS_FORMAT" + def as_dict(self) -> dict: + """Serializes the ExternalLineageExternalMetadata into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.name is not None: + body["name"] = self.name + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ExternalLineageExternalMetadata into a shallow dictionary of its immediate attributes.""" + body = {} + if self.name is not None: + body["name"] = 
self.name + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ExternalLineageExternalMetadata: + """Deserializes the ExternalLineageExternalMetadata from a dictionary.""" + return cls(name=d.get("name", None)) @dataclass -class DatabricksGcpServiceAccount: - """GCP long-lived credential. Databricks-created Google Cloud Storage service account.""" +class ExternalLineageExternalMetadataInfo: + """Represents the external metadata object in the lineage event.""" - credential_id: Optional[str] = None - """The Databricks internal ID that represents this managed identity.""" + entity_type: Optional[str] = None + """Type of entity represented by the external metadata object.""" - email: Optional[str] = None - """The email of the service account.""" + event_time: Optional[str] = None + """Timestamp of the lineage event.""" - private_key_id: Optional[str] = None - """The ID that represents the private key for this Service Account""" + name: Optional[str] = None + """Name of the external metadata object.""" + + system_type: Optional[SystemType] = None + """Type of external system.""" def as_dict(self) -> dict: - """Serializes the DatabricksGcpServiceAccount into a dictionary suitable for use as a JSON request body.""" + """Serializes the ExternalLineageExternalMetadataInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.credential_id is not None: - body["credential_id"] = self.credential_id - if self.email is not None: - body["email"] = self.email - if self.private_key_id is not None: - body["private_key_id"] = self.private_key_id + if self.entity_type is not None: + body["entity_type"] = self.entity_type + if self.event_time is not None: + body["event_time"] = self.event_time + if self.name is not None: + body["name"] = self.name + if self.system_type is not None: + body["system_type"] = self.system_type.value return body def as_shallow_dict(self) -> dict: - """Serializes the DatabricksGcpServiceAccount into a shallow 
dictionary of its immediate attributes.""" + """Serializes the ExternalLineageExternalMetadataInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.credential_id is not None: - body["credential_id"] = self.credential_id - if self.email is not None: - body["email"] = self.email - if self.private_key_id is not None: - body["private_key_id"] = self.private_key_id + if self.entity_type is not None: + body["entity_type"] = self.entity_type + if self.event_time is not None: + body["event_time"] = self.event_time + if self.name is not None: + body["name"] = self.name + if self.system_type is not None: + body["system_type"] = self.system_type return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DatabricksGcpServiceAccount: - """Deserializes the DatabricksGcpServiceAccount from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> ExternalLineageExternalMetadataInfo: + """Deserializes the ExternalLineageExternalMetadataInfo from a dictionary.""" return cls( - credential_id=d.get("credential_id", None), - email=d.get("email", None), - private_key_id=d.get("private_key_id", None), + entity_type=d.get("entity_type", None), + event_time=d.get("event_time", None), + name=d.get("name", None), + system_type=_enum(d, "system_type", SystemType), ) @dataclass -class DatabricksGcpServiceAccountRequest: - """GCP long-lived credential. 
Databricks-created Google Cloud Storage service account.""" +class ExternalLineageFileInfo: + """Represents the path information in the lineage event.""" + + event_time: Optional[str] = None + """Timestamp of the lineage event.""" + + path: Optional[str] = None + """URL of the path.""" + + securable_name: Optional[str] = None + """The full name of the securable on the path.""" + + securable_type: Optional[str] = None + """The securable type of the securable on the path.""" + + storage_location: Optional[str] = None + """The storage location associated with securable on the path.""" def as_dict(self) -> dict: - """Serializes the DatabricksGcpServiceAccountRequest into a dictionary suitable for use as a JSON request body.""" + """Serializes the ExternalLineageFileInfo into a dictionary suitable for use as a JSON request body.""" body = {} + if self.event_time is not None: + body["event_time"] = self.event_time + if self.path is not None: + body["path"] = self.path + if self.securable_name is not None: + body["securable_name"] = self.securable_name + if self.securable_type is not None: + body["securable_type"] = self.securable_type + if self.storage_location is not None: + body["storage_location"] = self.storage_location return body def as_shallow_dict(self) -> dict: - """Serializes the DatabricksGcpServiceAccountRequest into a shallow dictionary of its immediate attributes.""" + """Serializes the ExternalLineageFileInfo into a shallow dictionary of its immediate attributes.""" body = {} + if self.event_time is not None: + body["event_time"] = self.event_time + if self.path is not None: + body["path"] = self.path + if self.securable_name is not None: + body["securable_name"] = self.securable_name + if self.securable_type is not None: + body["securable_type"] = self.securable_type + if self.storage_location is not None: + body["storage_location"] = self.storage_location return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> 
DatabricksGcpServiceAccountRequest: - """Deserializes the DatabricksGcpServiceAccountRequest from a dictionary.""" - return cls() + def from_dict(cls, d: Dict[str, Any]) -> ExternalLineageFileInfo: + """Deserializes the ExternalLineageFileInfo from a dictionary.""" + return cls( + event_time=d.get("event_time", None), + path=d.get("path", None), + securable_name=d.get("securable_name", None), + securable_type=d.get("securable_type", None), + storage_location=d.get("storage_location", None), + ) @dataclass -class DatabricksGcpServiceAccountResponse: - """GCP long-lived credential. Databricks-created Google Cloud Storage service account.""" +class ExternalLineageInfo: + """Lineage response containing lineage information of a data asset.""" - credential_id: Optional[str] = None - """The Databricks internal ID that represents this managed identity.""" + external_lineage_info: Optional[ExternalLineageRelationshipInfo] = None + """Information about the edge metadata of the external lineage relationship.""" - email: Optional[str] = None - """The email of the service account.""" + external_metadata_info: Optional[ExternalLineageExternalMetadataInfo] = None + """Information about external metadata involved in the lineage relationship.""" + + file_info: Optional[ExternalLineageFileInfo] = None + """Information about the file involved in the lineage relationship.""" + + model_info: Optional[ExternalLineageModelVersionInfo] = None + """Information about the model version involved in the lineage relationship.""" + + table_info: Optional[ExternalLineageTableInfo] = None + """Information about the table involved in the lineage relationship.""" def as_dict(self) -> dict: - """Serializes the DatabricksGcpServiceAccountResponse into a dictionary suitable for use as a JSON request body.""" + """Serializes the ExternalLineageInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.credential_id is not None: - body["credential_id"] = self.credential_id - 
if self.email is not None: - body["email"] = self.email + if self.external_lineage_info: + body["external_lineage_info"] = self.external_lineage_info.as_dict() + if self.external_metadata_info: + body["external_metadata_info"] = self.external_metadata_info.as_dict() + if self.file_info: + body["file_info"] = self.file_info.as_dict() + if self.model_info: + body["model_info"] = self.model_info.as_dict() + if self.table_info: + body["table_info"] = self.table_info.as_dict() return body def as_shallow_dict(self) -> dict: - """Serializes the DatabricksGcpServiceAccountResponse into a shallow dictionary of its immediate attributes.""" + """Serializes the ExternalLineageInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.credential_id is not None: - body["credential_id"] = self.credential_id - if self.email is not None: - body["email"] = self.email + if self.external_lineage_info: + body["external_lineage_info"] = self.external_lineage_info + if self.external_metadata_info: + body["external_metadata_info"] = self.external_metadata_info + if self.file_info: + body["file_info"] = self.file_info + if self.model_info: + body["model_info"] = self.model_info + if self.table_info: + body["table_info"] = self.table_info return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DatabricksGcpServiceAccountResponse: - """Deserializes the DatabricksGcpServiceAccountResponse from a dictionary.""" - return cls(credential_id=d.get("credential_id", None), email=d.get("email", None)) + def from_dict(cls, d: Dict[str, Any]) -> ExternalLineageInfo: + """Deserializes the ExternalLineageInfo from a dictionary.""" + return cls( + external_lineage_info=_from_dict(d, "external_lineage_info", ExternalLineageRelationshipInfo), + external_metadata_info=_from_dict(d, "external_metadata_info", ExternalLineageExternalMetadataInfo), + file_info=_from_dict(d, "file_info", ExternalLineageFileInfo), + model_info=_from_dict(d, "model_info", 
ExternalLineageModelVersionInfo), + table_info=_from_dict(d, "table_info", ExternalLineageTableInfo), + ) @dataclass -class DeleteAliasResponse: +class ExternalLineageModelVersion: + name: Optional[str] = None + + version: Optional[str] = None + def as_dict(self) -> dict: - """Serializes the DeleteAliasResponse into a dictionary suitable for use as a JSON request body.""" + """Serializes the ExternalLineageModelVersion into a dictionary suitable for use as a JSON request body.""" body = {} + if self.name is not None: + body["name"] = self.name + if self.version is not None: + body["version"] = self.version return body def as_shallow_dict(self) -> dict: - """Serializes the DeleteAliasResponse into a shallow dictionary of its immediate attributes.""" + """Serializes the ExternalLineageModelVersion into a shallow dictionary of its immediate attributes.""" body = {} + if self.name is not None: + body["name"] = self.name + if self.version is not None: + body["version"] = self.version return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DeleteAliasResponse: - """Deserializes the DeleteAliasResponse from a dictionary.""" - return cls() + def from_dict(cls, d: Dict[str, Any]) -> ExternalLineageModelVersion: + """Deserializes the ExternalLineageModelVersion from a dictionary.""" + return cls(name=d.get("name", None), version=d.get("version", None)) @dataclass -class DeleteCredentialResponse: +class ExternalLineageModelVersionInfo: + """Represents the model version information in the lineage event.""" + + event_time: Optional[str] = None + """Timestamp of the lineage event.""" + + model_name: Optional[str] = None + """Name of the model.""" + + version: Optional[int] = None + """Version number of the model.""" + def as_dict(self) -> dict: - """Serializes the DeleteCredentialResponse into a dictionary suitable for use as a JSON request body.""" + """Serializes the ExternalLineageModelVersionInfo into a dictionary suitable for use as a JSON request body.""" body 
= {} + if self.event_time is not None: + body["event_time"] = self.event_time + if self.model_name is not None: + body["model_name"] = self.model_name + if self.version is not None: + body["version"] = self.version return body def as_shallow_dict(self) -> dict: - """Serializes the DeleteCredentialResponse into a shallow dictionary of its immediate attributes.""" + """Serializes the ExternalLineageModelVersionInfo into a shallow dictionary of its immediate attributes.""" body = {} + if self.event_time is not None: + body["event_time"] = self.event_time + if self.model_name is not None: + body["model_name"] = self.model_name + if self.version is not None: + body["version"] = self.version return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DeleteCredentialResponse: - """Deserializes the DeleteCredentialResponse from a dictionary.""" - return cls() + def from_dict(cls, d: Dict[str, Any]) -> ExternalLineageModelVersionInfo: + """Deserializes the ExternalLineageModelVersionInfo from a dictionary.""" + return cls( + event_time=d.get("event_time", None), model_name=d.get("model_name", None), version=d.get("version", None) + ) @dataclass -class DeleteRequestExternalLineage: +class ExternalLineageObject: + external_metadata: Optional[ExternalLineageExternalMetadata] = None + + model_version: Optional[ExternalLineageModelVersion] = None + + path: Optional[ExternalLineagePath] = None + + table: Optional[ExternalLineageTable] = None + + def as_dict(self) -> dict: + """Serializes the ExternalLineageObject into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.external_metadata: + body["external_metadata"] = self.external_metadata.as_dict() + if self.model_version: + body["model_version"] = self.model_version.as_dict() + if self.path: + body["path"] = self.path.as_dict() + if self.table: + body["table"] = self.table.as_dict() + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ExternalLineageObject into a shallow 
dictionary of its immediate attributes.""" + body = {} + if self.external_metadata: + body["external_metadata"] = self.external_metadata + if self.model_version: + body["model_version"] = self.model_version + if self.path: + body["path"] = self.path + if self.table: + body["table"] = self.table + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ExternalLineageObject: + """Deserializes the ExternalLineageObject from a dictionary.""" + return cls( + external_metadata=_from_dict(d, "external_metadata", ExternalLineageExternalMetadata), + model_version=_from_dict(d, "model_version", ExternalLineageModelVersion), + path=_from_dict(d, "path", ExternalLineagePath), + table=_from_dict(d, "table", ExternalLineageTable), + ) + + +@dataclass +class ExternalLineagePath: + url: Optional[str] = None + + def as_dict(self) -> dict: + """Serializes the ExternalLineagePath into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.url is not None: + body["url"] = self.url + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ExternalLineagePath into a shallow dictionary of its immediate attributes.""" + body = {} + if self.url is not None: + body["url"] = self.url + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ExternalLineagePath: + """Deserializes the ExternalLineagePath from a dictionary.""" + return cls(url=d.get("url", None)) + + +@dataclass +class ExternalLineageRelationship: source: ExternalLineageObject """Source object of the external lineage relationship.""" target: ExternalLineageObject """Target object of the external lineage relationship.""" + columns: Optional[List[ColumnRelationship]] = None + """List of column relationships between source and target objects.""" + id: Optional[str] = None """Unique identifier of the external lineage relationship.""" + properties: Optional[Dict[str, str]] = None + """Key-value properties associated with the external lineage relationship.""" + 
def as_dict(self) -> dict: - """Serializes the DeleteRequestExternalLineage into a dictionary suitable for use as a JSON request body.""" + """Serializes the ExternalLineageRelationship into a dictionary suitable for use as a JSON request body.""" body = {} + if self.columns: + body["columns"] = [v.as_dict() for v in self.columns] if self.id is not None: body["id"] = self.id + if self.properties: + body["properties"] = self.properties if self.source: body["source"] = self.source.as_dict() if self.target: @@ -3312,10 +3194,14 @@ def as_dict(self) -> dict: return body def as_shallow_dict(self) -> dict: - """Serializes the DeleteRequestExternalLineage into a shallow dictionary of its immediate attributes.""" + """Serializes the ExternalLineageRelationship into a shallow dictionary of its immediate attributes.""" body = {} + if self.columns: + body["columns"] = self.columns if self.id is not None: body["id"] = self.id + if self.properties: + body["properties"] = self.properties if self.source: body["source"] = self.source if self.target: @@ -3323,2180 +3209,2262 @@ def as_shallow_dict(self) -> dict: return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DeleteRequestExternalLineage: - """Deserializes the DeleteRequestExternalLineage from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> ExternalLineageRelationship: + """Deserializes the ExternalLineageRelationship from a dictionary.""" return cls( + columns=_repeated_dict(d, "columns", ColumnRelationship), id=d.get("id", None), + properties=d.get("properties", None), source=_from_dict(d, "source", ExternalLineageObject), target=_from_dict(d, "target", ExternalLineageObject), ) @dataclass -class DeleteResponse: +class ExternalLineageRelationshipInfo: + source: ExternalLineageObject + """Source object of the external lineage relationship.""" + + target: ExternalLineageObject + """Target object of the external lineage relationship.""" + + columns: Optional[List[ColumnRelationship]] = None + """List 
of column relationships between source and target objects.""" + + id: Optional[str] = None + """Unique identifier of the external lineage relationship.""" + + properties: Optional[Dict[str, str]] = None + """Key-value properties associated with the external lineage relationship.""" + def as_dict(self) -> dict: - """Serializes the DeleteResponse into a dictionary suitable for use as a JSON request body.""" + """Serializes the ExternalLineageRelationshipInfo into a dictionary suitable for use as a JSON request body.""" body = {} + if self.columns: + body["columns"] = [v.as_dict() for v in self.columns] + if self.id is not None: + body["id"] = self.id + if self.properties: + body["properties"] = self.properties + if self.source: + body["source"] = self.source.as_dict() + if self.target: + body["target"] = self.target.as_dict() return body def as_shallow_dict(self) -> dict: - """Serializes the DeleteResponse into a shallow dictionary of its immediate attributes.""" + """Serializes the ExternalLineageRelationshipInfo into a shallow dictionary of its immediate attributes.""" body = {} + if self.columns: + body["columns"] = self.columns + if self.id is not None: + body["id"] = self.id + if self.properties: + body["properties"] = self.properties + if self.source: + body["source"] = self.source + if self.target: + body["target"] = self.target return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DeleteResponse: - """Deserializes the DeleteResponse from a dictionary.""" - return cls() + def from_dict(cls, d: Dict[str, Any]) -> ExternalLineageRelationshipInfo: + """Deserializes the ExternalLineageRelationshipInfo from a dictionary.""" + return cls( + columns=_repeated_dict(d, "columns", ColumnRelationship), + id=d.get("id", None), + properties=d.get("properties", None), + source=_from_dict(d, "source", ExternalLineageObject), + target=_from_dict(d, "target", ExternalLineageObject), + ) @dataclass -class DeltaRuntimePropertiesKvPairs: - """Properties pertaining 
to the current state of the delta table as given by the commit server. - This does not contain **delta.*** (input) properties in __TableInfo.properties__.""" - - delta_runtime_properties: Dict[str, str] - """A map of key-value properties attached to the securable.""" +class ExternalLineageTable: + name: Optional[str] = None def as_dict(self) -> dict: - """Serializes the DeltaRuntimePropertiesKvPairs into a dictionary suitable for use as a JSON request body.""" + """Serializes the ExternalLineageTable into a dictionary suitable for use as a JSON request body.""" body = {} - if self.delta_runtime_properties: - body["delta_runtime_properties"] = self.delta_runtime_properties + if self.name is not None: + body["name"] = self.name return body def as_shallow_dict(self) -> dict: - """Serializes the DeltaRuntimePropertiesKvPairs into a shallow dictionary of its immediate attributes.""" + """Serializes the ExternalLineageTable into a shallow dictionary of its immediate attributes.""" body = {} - if self.delta_runtime_properties: - body["delta_runtime_properties"] = self.delta_runtime_properties + if self.name is not None: + body["name"] = self.name return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DeltaRuntimePropertiesKvPairs: - """Deserializes the DeltaRuntimePropertiesKvPairs from a dictionary.""" - return cls(delta_runtime_properties=d.get("delta_runtime_properties", None)) - - -class DeltaSharingScopeEnum(Enum): - - INTERNAL = "INTERNAL" - INTERNAL_AND_EXTERNAL = "INTERNAL_AND_EXTERNAL" + def from_dict(cls, d: Dict[str, Any]) -> ExternalLineageTable: + """Deserializes the ExternalLineageTable from a dictionary.""" + return cls(name=d.get("name", None)) @dataclass -class Dependency: - """A dependency of a SQL object. 
One of the following fields must be defined: __table__, - __function__, __connection__, or __credential__.""" +class ExternalLineageTableInfo: + """Represents the table information in the lineage event.""" - connection: Optional[ConnectionDependency] = None + catalog_name: Optional[str] = None + """Name of Catalog.""" - credential: Optional[CredentialDependency] = None + event_time: Optional[str] = None + """Timestamp of the lineage event.""" - function: Optional[FunctionDependency] = None + name: Optional[str] = None + """Name of Table.""" - table: Optional[TableDependency] = None + schema_name: Optional[str] = None + """Name of Schema.""" def as_dict(self) -> dict: - """Serializes the Dependency into a dictionary suitable for use as a JSON request body.""" + """Serializes the ExternalLineageTableInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.connection: - body["connection"] = self.connection.as_dict() - if self.credential: - body["credential"] = self.credential.as_dict() - if self.function: - body["function"] = self.function.as_dict() - if self.table: - body["table"] = self.table.as_dict() + if self.catalog_name is not None: + body["catalog_name"] = self.catalog_name + if self.event_time is not None: + body["event_time"] = self.event_time + if self.name is not None: + body["name"] = self.name + if self.schema_name is not None: + body["schema_name"] = self.schema_name return body def as_shallow_dict(self) -> dict: - """Serializes the Dependency into a shallow dictionary of its immediate attributes.""" + """Serializes the ExternalLineageTableInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.connection: - body["connection"] = self.connection - if self.credential: - body["credential"] = self.credential - if self.function: - body["function"] = self.function - if self.table: - body["table"] = self.table + if self.catalog_name is not None: + body["catalog_name"] = self.catalog_name + if 
self.event_time is not None: + body["event_time"] = self.event_time + if self.name is not None: + body["name"] = self.name + if self.schema_name is not None: + body["schema_name"] = self.schema_name return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> Dependency: - """Deserializes the Dependency from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> ExternalLineageTableInfo: + """Deserializes the ExternalLineageTableInfo from a dictionary.""" return cls( - connection=_from_dict(d, "connection", ConnectionDependency), - credential=_from_dict(d, "credential", CredentialDependency), - function=_from_dict(d, "function", FunctionDependency), - table=_from_dict(d, "table", TableDependency), + catalog_name=d.get("catalog_name", None), + event_time=d.get("event_time", None), + name=d.get("name", None), + schema_name=d.get("schema_name", None), ) @dataclass -class DependencyList: - """A list of dependencies.""" - - dependencies: Optional[List[Dependency]] = None - """Array of dependencies.""" +class ExternalLocationInfo: + browse_only: Optional[bool] = None + """Indicates whether the principal is limited to retrieving metadata for the associated object + through the BROWSE privilege when include_browse is enabled in the request.""" - def as_dict(self) -> dict: - """Serializes the DependencyList into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.dependencies: - body["dependencies"] = [v.as_dict() for v in self.dependencies] - return body + comment: Optional[str] = None + """User-provided free-form text description.""" - def as_shallow_dict(self) -> dict: - """Serializes the DependencyList into a shallow dictionary of its immediate attributes.""" - body = {} - if self.dependencies: - body["dependencies"] = self.dependencies - return body + created_at: Optional[int] = None + """Time at which this external location was created, in epoch milliseconds.""" - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> 
DependencyList: - """Deserializes the DependencyList from a dictionary.""" - return cls(dependencies=_repeated_dict(d, "dependencies", Dependency)) + created_by: Optional[str] = None + """Username of external location creator.""" + credential_id: Optional[str] = None + """Unique ID of the location's storage credential.""" -@dataclass -class DisableResponse: - def as_dict(self) -> dict: - """Serializes the DisableResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body + credential_name: Optional[str] = None + """Name of the storage credential used with this location.""" - def as_shallow_dict(self) -> dict: - """Serializes the DisableResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body + enable_file_events: Optional[bool] = None + """Whether to enable file events on this external location.""" - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DisableResponse: - """Deserializes the DisableResponse from a dictionary.""" - return cls() + encryption_details: Optional[EncryptionDetails] = None + fallback: Optional[bool] = None + """Indicates whether fallback mode is enabled for this external location. When fallback mode is + enabled, the access to the location falls back to cluster credentials if UC credentials are not + sufficient.""" -@dataclass -class EffectivePermissionsList: - next_page_token: Optional[str] = None - """Opaque token to retrieve the next page of results. Absent if there are no more pages. 
- __page_token__ should be set to this value for the next request (for the next page of results).""" + file_event_queue: Optional[FileEventQueue] = None + """File event queue settings.""" - privilege_assignments: Optional[List[EffectivePrivilegeAssignment]] = None - """The privileges conveyed to each principal (either directly or via inheritance)""" + isolation_mode: Optional[IsolationMode] = None - def as_dict(self) -> dict: - """Serializes the EffectivePermissionsList into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.privilege_assignments: - body["privilege_assignments"] = [v.as_dict() for v in self.privilege_assignments] - return body + metastore_id: Optional[str] = None + """Unique identifier of metastore hosting the external location.""" - def as_shallow_dict(self) -> dict: - """Serializes the EffectivePermissionsList into a shallow dictionary of its immediate attributes.""" - body = {} - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.privilege_assignments: - body["privilege_assignments"] = self.privilege_assignments - return body + name: Optional[str] = None + """Name of the external location.""" - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> EffectivePermissionsList: - """Deserializes the EffectivePermissionsList from a dictionary.""" - return cls( - next_page_token=d.get("next_page_token", None), - privilege_assignments=_repeated_dict(d, "privilege_assignments", EffectivePrivilegeAssignment), - ) + owner: Optional[str] = None + """The owner of the external location.""" + read_only: Optional[bool] = None + """Indicates whether the external location is read-only.""" -@dataclass -class EffectivePredictiveOptimizationFlag: - value: EnablePredictiveOptimization - """Whether predictive optimization should be enabled for this object and objects under it.""" + updated_at: Optional[int] 
= None + """Time at which external location this was last modified, in epoch milliseconds.""" - inherited_from_name: Optional[str] = None - """The name of the object from which the flag was inherited. If there was no inheritance, this - field is left blank.""" + updated_by: Optional[str] = None + """Username of user who last modified the external location.""" - inherited_from_type: Optional[EffectivePredictiveOptimizationFlagInheritedFromType] = None - """The type of the object from which the flag was inherited. If there was no inheritance, this - field is left blank.""" + url: Optional[str] = None + """Path URL of the external location.""" def as_dict(self) -> dict: - """Serializes the EffectivePredictiveOptimizationFlag into a dictionary suitable for use as a JSON request body.""" + """Serializes the ExternalLocationInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.inherited_from_name is not None: - body["inherited_from_name"] = self.inherited_from_name - if self.inherited_from_type is not None: - body["inherited_from_type"] = self.inherited_from_type.value - if self.value is not None: - body["value"] = self.value.value + if self.browse_only is not None: + body["browse_only"] = self.browse_only + if self.comment is not None: + body["comment"] = self.comment + if self.created_at is not None: + body["created_at"] = self.created_at + if self.created_by is not None: + body["created_by"] = self.created_by + if self.credential_id is not None: + body["credential_id"] = self.credential_id + if self.credential_name is not None: + body["credential_name"] = self.credential_name + if self.enable_file_events is not None: + body["enable_file_events"] = self.enable_file_events + if self.encryption_details: + body["encryption_details"] = self.encryption_details.as_dict() + if self.fallback is not None: + body["fallback"] = self.fallback + if self.file_event_queue: + body["file_event_queue"] = self.file_event_queue.as_dict() + if 
self.isolation_mode is not None: + body["isolation_mode"] = self.isolation_mode.value + if self.metastore_id is not None: + body["metastore_id"] = self.metastore_id + if self.name is not None: + body["name"] = self.name + if self.owner is not None: + body["owner"] = self.owner + if self.read_only is not None: + body["read_only"] = self.read_only + if self.updated_at is not None: + body["updated_at"] = self.updated_at + if self.updated_by is not None: + body["updated_by"] = self.updated_by + if self.url is not None: + body["url"] = self.url return body def as_shallow_dict(self) -> dict: - """Serializes the EffectivePredictiveOptimizationFlag into a shallow dictionary of its immediate attributes.""" + """Serializes the ExternalLocationInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.inherited_from_name is not None: - body["inherited_from_name"] = self.inherited_from_name - if self.inherited_from_type is not None: - body["inherited_from_type"] = self.inherited_from_type - if self.value is not None: - body["value"] = self.value + if self.browse_only is not None: + body["browse_only"] = self.browse_only + if self.comment is not None: + body["comment"] = self.comment + if self.created_at is not None: + body["created_at"] = self.created_at + if self.created_by is not None: + body["created_by"] = self.created_by + if self.credential_id is not None: + body["credential_id"] = self.credential_id + if self.credential_name is not None: + body["credential_name"] = self.credential_name + if self.enable_file_events is not None: + body["enable_file_events"] = self.enable_file_events + if self.encryption_details: + body["encryption_details"] = self.encryption_details + if self.fallback is not None: + body["fallback"] = self.fallback + if self.file_event_queue: + body["file_event_queue"] = self.file_event_queue + if self.isolation_mode is not None: + body["isolation_mode"] = self.isolation_mode + if self.metastore_id is not None: + 
body["metastore_id"] = self.metastore_id + if self.name is not None: + body["name"] = self.name + if self.owner is not None: + body["owner"] = self.owner + if self.read_only is not None: + body["read_only"] = self.read_only + if self.updated_at is not None: + body["updated_at"] = self.updated_at + if self.updated_by is not None: + body["updated_by"] = self.updated_by + if self.url is not None: + body["url"] = self.url return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> EffectivePredictiveOptimizationFlag: - """Deserializes the EffectivePredictiveOptimizationFlag from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> ExternalLocationInfo: + """Deserializes the ExternalLocationInfo from a dictionary.""" return cls( - inherited_from_name=d.get("inherited_from_name", None), - inherited_from_type=_enum(d, "inherited_from_type", EffectivePredictiveOptimizationFlagInheritedFromType), - value=_enum(d, "value", EnablePredictiveOptimization), + browse_only=d.get("browse_only", None), + comment=d.get("comment", None), + created_at=d.get("created_at", None), + created_by=d.get("created_by", None), + credential_id=d.get("credential_id", None), + credential_name=d.get("credential_name", None), + enable_file_events=d.get("enable_file_events", None), + encryption_details=_from_dict(d, "encryption_details", EncryptionDetails), + fallback=d.get("fallback", None), + file_event_queue=_from_dict(d, "file_event_queue", FileEventQueue), + isolation_mode=_enum(d, "isolation_mode", IsolationMode), + metastore_id=d.get("metastore_id", None), + name=d.get("name", None), + owner=d.get("owner", None), + read_only=d.get("read_only", None), + updated_at=d.get("updated_at", None), + updated_by=d.get("updated_by", None), + url=d.get("url", None), ) -class EffectivePredictiveOptimizationFlagInheritedFromType(Enum): - """The type of the object from which the flag was inherited. 
If there was no inheritance, this - field is left blank.""" +@dataclass +class ExternalMetadata: + name: str + """Name of the external metadata object.""" - CATALOG = "CATALOG" - SCHEMA = "SCHEMA" + system_type: SystemType + """Type of external system.""" + entity_type: str + """Type of entity within the external system.""" -@dataclass -class EffectivePrivilege: - inherited_from_name: Optional[str] = None - """The full name of the object that conveys this privilege via inheritance. This field is omitted - when privilege is not inherited (it's assigned to the securable itself).""" + columns: Optional[List[str]] = None + """List of columns associated with the external metadata object.""" - inherited_from_type: Optional[SecurableType] = None - """The type of the object that conveys this privilege via inheritance. This field is omitted when - privilege is not inherited (it's assigned to the securable itself).""" + create_time: Optional[str] = None + """Time at which this external metadata object was created.""" - privilege: Optional[Privilege] = None - """The privilege assigned to the principal.""" + created_by: Optional[str] = None + """Username of external metadata object creator.""" - def as_dict(self) -> dict: - """Serializes the EffectivePrivilege into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.inherited_from_name is not None: - body["inherited_from_name"] = self.inherited_from_name - if self.inherited_from_type is not None: - body["inherited_from_type"] = self.inherited_from_type.value - if self.privilege is not None: - body["privilege"] = self.privilege.value - return body - - def as_shallow_dict(self) -> dict: - """Serializes the EffectivePrivilege into a shallow dictionary of its immediate attributes.""" - body = {} - if self.inherited_from_name is not None: - body["inherited_from_name"] = self.inherited_from_name - if self.inherited_from_type is not None: - body["inherited_from_type"] = self.inherited_from_type - if 
self.privilege is not None: - body["privilege"] = self.privilege - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> EffectivePrivilege: - """Deserializes the EffectivePrivilege from a dictionary.""" - return cls( - inherited_from_name=d.get("inherited_from_name", None), - inherited_from_type=_enum(d, "inherited_from_type", SecurableType), - privilege=_enum(d, "privilege", Privilege), - ) - - -@dataclass -class EffectivePrivilegeAssignment: - principal: Optional[str] = None - """The principal (user email address or group name).""" - - privileges: Optional[List[EffectivePrivilege]] = None - """The privileges conveyed to the principal (either directly or via inheritance).""" - - def as_dict(self) -> dict: - """Serializes the EffectivePrivilegeAssignment into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.principal is not None: - body["principal"] = self.principal - if self.privileges: - body["privileges"] = [v.as_dict() for v in self.privileges] - return body - - def as_shallow_dict(self) -> dict: - """Serializes the EffectivePrivilegeAssignment into a shallow dictionary of its immediate attributes.""" - body = {} - if self.principal is not None: - body["principal"] = self.principal - if self.privileges: - body["privileges"] = self.privileges - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> EffectivePrivilegeAssignment: - """Deserializes the EffectivePrivilegeAssignment from a dictionary.""" - return cls(principal=d.get("principal", None), privileges=_repeated_dict(d, "privileges", EffectivePrivilege)) + description: Optional[str] = None + """User-provided free-form text description.""" + id: Optional[str] = None + """Unique identifier of the external metadata object.""" -class EnablePredictiveOptimization(Enum): + metastore_id: Optional[str] = None + """Unique identifier of parent metastore.""" - DISABLE = "DISABLE" - ENABLE = "ENABLE" - INHERIT = "INHERIT" + owner: Optional[str] = None + 
"""Owner of the external metadata object.""" + properties: Optional[Dict[str, str]] = None + """A map of key-value properties attached to the external metadata object.""" -@dataclass -class EnableRequest: - catalog_name: Optional[str] = None - """the catalog for which the system schema is to enabled in""" + update_time: Optional[str] = None + """Time at which this external metadata object was last modified.""" - metastore_id: Optional[str] = None - """The metastore ID under which the system schema lives.""" + updated_by: Optional[str] = None + """Username of user who last modified external metadata object.""" - schema_name: Optional[str] = None - """Full name of the system schema.""" + url: Optional[str] = None + """URL associated with the external metadata object.""" def as_dict(self) -> dict: - """Serializes the EnableRequest into a dictionary suitable for use as a JSON request body.""" + """Serializes the ExternalMetadata into a dictionary suitable for use as a JSON request body.""" body = {} - if self.catalog_name is not None: - body["catalog_name"] = self.catalog_name + if self.columns: + body["columns"] = [v for v in self.columns] + if self.create_time is not None: + body["create_time"] = self.create_time + if self.created_by is not None: + body["created_by"] = self.created_by + if self.description is not None: + body["description"] = self.description + if self.entity_type is not None: + body["entity_type"] = self.entity_type + if self.id is not None: + body["id"] = self.id if self.metastore_id is not None: body["metastore_id"] = self.metastore_id - if self.schema_name is not None: - body["schema_name"] = self.schema_name + if self.name is not None: + body["name"] = self.name + if self.owner is not None: + body["owner"] = self.owner + if self.properties: + body["properties"] = self.properties + if self.system_type is not None: + body["system_type"] = self.system_type.value + if self.update_time is not None: + body["update_time"] = self.update_time + if 
self.updated_by is not None: + body["updated_by"] = self.updated_by + if self.url is not None: + body["url"] = self.url return body def as_shallow_dict(self) -> dict: - """Serializes the EnableRequest into a shallow dictionary of its immediate attributes.""" + """Serializes the ExternalMetadata into a shallow dictionary of its immediate attributes.""" body = {} - if self.catalog_name is not None: - body["catalog_name"] = self.catalog_name + if self.columns: + body["columns"] = self.columns + if self.create_time is not None: + body["create_time"] = self.create_time + if self.created_by is not None: + body["created_by"] = self.created_by + if self.description is not None: + body["description"] = self.description + if self.entity_type is not None: + body["entity_type"] = self.entity_type + if self.id is not None: + body["id"] = self.id if self.metastore_id is not None: body["metastore_id"] = self.metastore_id - if self.schema_name is not None: - body["schema_name"] = self.schema_name + if self.name is not None: + body["name"] = self.name + if self.owner is not None: + body["owner"] = self.owner + if self.properties: + body["properties"] = self.properties + if self.system_type is not None: + body["system_type"] = self.system_type + if self.update_time is not None: + body["update_time"] = self.update_time + if self.updated_by is not None: + body["updated_by"] = self.updated_by + if self.url is not None: + body["url"] = self.url return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> EnableRequest: - """Deserializes the EnableRequest from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> ExternalMetadata: + """Deserializes the ExternalMetadata from a dictionary.""" return cls( - catalog_name=d.get("catalog_name", None), + columns=d.get("columns", None), + create_time=d.get("create_time", None), + created_by=d.get("created_by", None), + description=d.get("description", None), + entity_type=d.get("entity_type", None), + id=d.get("id", None), 
metastore_id=d.get("metastore_id", None), - schema_name=d.get("schema_name", None), + name=d.get("name", None), + owner=d.get("owner", None), + properties=d.get("properties", None), + system_type=_enum(d, "system_type", SystemType), + update_time=d.get("update_time", None), + updated_by=d.get("updated_by", None), + url=d.get("url", None), ) @dataclass -class EnableResponse: +class FailedStatus: + """Detailed status of an online table. Shown if the online table is in the OFFLINE_FAILED or the + ONLINE_PIPELINE_FAILED state.""" + + last_processed_commit_version: Optional[int] = None + """The last source table Delta version that was synced to the online table. Note that this Delta + version may only be partially synced to the online table. Only populated if the table is still + online and available for serving.""" + + timestamp: Optional[str] = None + """The timestamp of the last time any data was synchronized from the source table to the online + table. Only populated if the table is still online and available for serving.""" + def as_dict(self) -> dict: - """Serializes the EnableResponse into a dictionary suitable for use as a JSON request body.""" + """Serializes the FailedStatus into a dictionary suitable for use as a JSON request body.""" body = {} + if self.last_processed_commit_version is not None: + body["last_processed_commit_version"] = self.last_processed_commit_version + if self.timestamp is not None: + body["timestamp"] = self.timestamp return body def as_shallow_dict(self) -> dict: - """Serializes the EnableResponse into a shallow dictionary of its immediate attributes.""" + """Serializes the FailedStatus into a shallow dictionary of its immediate attributes.""" body = {} + if self.last_processed_commit_version is not None: + body["last_processed_commit_version"] = self.last_processed_commit_version + if self.timestamp is not None: + body["timestamp"] = self.timestamp return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> EnableResponse: - 
"""Deserializes the EnableResponse from a dictionary.""" - return cls() + def from_dict(cls, d: Dict[str, Any]) -> FailedStatus: + """Deserializes the FailedStatus from a dictionary.""" + return cls( + last_processed_commit_version=d.get("last_processed_commit_version", None), + timestamp=d.get("timestamp", None), + ) @dataclass -class EncryptionDetails: - """Encryption options that apply to clients connecting to cloud storage.""" +class FileEventQueue: + managed_aqs: Optional[AzureQueueStorage] = None - sse_encryption_details: Optional[SseEncryptionDetails] = None - """Server-Side Encryption properties for clients communicating with AWS s3.""" + managed_pubsub: Optional[GcpPubsub] = None + + managed_sqs: Optional[AwsSqsQueue] = None + + provided_aqs: Optional[AzureQueueStorage] = None + + provided_pubsub: Optional[GcpPubsub] = None + + provided_sqs: Optional[AwsSqsQueue] = None def as_dict(self) -> dict: - """Serializes the EncryptionDetails into a dictionary suitable for use as a JSON request body.""" + """Serializes the FileEventQueue into a dictionary suitable for use as a JSON request body.""" body = {} - if self.sse_encryption_details: - body["sse_encryption_details"] = self.sse_encryption_details.as_dict() + if self.managed_aqs: + body["managed_aqs"] = self.managed_aqs.as_dict() + if self.managed_pubsub: + body["managed_pubsub"] = self.managed_pubsub.as_dict() + if self.managed_sqs: + body["managed_sqs"] = self.managed_sqs.as_dict() + if self.provided_aqs: + body["provided_aqs"] = self.provided_aqs.as_dict() + if self.provided_pubsub: + body["provided_pubsub"] = self.provided_pubsub.as_dict() + if self.provided_sqs: + body["provided_sqs"] = self.provided_sqs.as_dict() return body def as_shallow_dict(self) -> dict: - """Serializes the EncryptionDetails into a shallow dictionary of its immediate attributes.""" + """Serializes the FileEventQueue into a shallow dictionary of its immediate attributes.""" body = {} - if self.sse_encryption_details: - 
body["sse_encryption_details"] = self.sse_encryption_details + if self.managed_aqs: + body["managed_aqs"] = self.managed_aqs + if self.managed_pubsub: + body["managed_pubsub"] = self.managed_pubsub + if self.managed_sqs: + body["managed_sqs"] = self.managed_sqs + if self.provided_aqs: + body["provided_aqs"] = self.provided_aqs + if self.provided_pubsub: + body["provided_pubsub"] = self.provided_pubsub + if self.provided_sqs: + body["provided_sqs"] = self.provided_sqs return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> EncryptionDetails: - """Deserializes the EncryptionDetails from a dictionary.""" - return cls(sse_encryption_details=_from_dict(d, "sse_encryption_details", SseEncryptionDetails)) + def from_dict(cls, d: Dict[str, Any]) -> FileEventQueue: + """Deserializes the FileEventQueue from a dictionary.""" + return cls( + managed_aqs=_from_dict(d, "managed_aqs", AzureQueueStorage), + managed_pubsub=_from_dict(d, "managed_pubsub", GcpPubsub), + managed_sqs=_from_dict(d, "managed_sqs", AwsSqsQueue), + provided_aqs=_from_dict(d, "provided_aqs", AzureQueueStorage), + provided_pubsub=_from_dict(d, "provided_pubsub", GcpPubsub), + provided_sqs=_from_dict(d, "provided_sqs", AwsSqsQueue), + ) @dataclass -class ExternalLineageExternalMetadata: - name: Optional[str] = None - - def as_dict(self) -> dict: - """Serializes the ExternalLineageExternalMetadata into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.name is not None: - body["name"] = self.name - return body - - def as_shallow_dict(self) -> dict: - """Serializes the ExternalLineageExternalMetadata into a shallow dictionary of its immediate attributes.""" - body = {} - if self.name is not None: - body["name"] = self.name - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ExternalLineageExternalMetadata: - """Deserializes the ExternalLineageExternalMetadata from a dictionary.""" - return cls(name=d.get("name", None)) - - -@dataclass -class 
ExternalLineageExternalMetadataInfo: - """Represents the external metadata object in the lineage event.""" +class ForeignKeyConstraint: + name: str + """The name of the constraint.""" - entity_type: Optional[str] = None - """Type of entity represented by the external metadata object.""" + child_columns: List[str] + """Column names for this constraint.""" - event_time: Optional[str] = None - """Timestamp of the lineage event.""" + parent_table: str + """The full name of the parent constraint.""" - name: Optional[str] = None - """Name of the external metadata object.""" + parent_columns: List[str] + """Column names for this constraint.""" - system_type: Optional[SystemType] = None - """Type of external system.""" + rely: Optional[bool] = None + """True if the constraint is RELY, false or unset if NORELY.""" def as_dict(self) -> dict: - """Serializes the ExternalLineageExternalMetadataInfo into a dictionary suitable for use as a JSON request body.""" + """Serializes the ForeignKeyConstraint into a dictionary suitable for use as a JSON request body.""" body = {} - if self.entity_type is not None: - body["entity_type"] = self.entity_type - if self.event_time is not None: - body["event_time"] = self.event_time + if self.child_columns: + body["child_columns"] = [v for v in self.child_columns] if self.name is not None: body["name"] = self.name - if self.system_type is not None: - body["system_type"] = self.system_type.value + if self.parent_columns: + body["parent_columns"] = [v for v in self.parent_columns] + if self.parent_table is not None: + body["parent_table"] = self.parent_table + if self.rely is not None: + body["rely"] = self.rely return body def as_shallow_dict(self) -> dict: - """Serializes the ExternalLineageExternalMetadataInfo into a shallow dictionary of its immediate attributes.""" + """Serializes the ForeignKeyConstraint into a shallow dictionary of its immediate attributes.""" body = {} - if self.entity_type is not None: - body["entity_type"] = 
self.entity_type - if self.event_time is not None: - body["event_time"] = self.event_time + if self.child_columns: + body["child_columns"] = self.child_columns if self.name is not None: body["name"] = self.name - if self.system_type is not None: - body["system_type"] = self.system_type + if self.parent_columns: + body["parent_columns"] = self.parent_columns + if self.parent_table is not None: + body["parent_table"] = self.parent_table + if self.rely is not None: + body["rely"] = self.rely return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ExternalLineageExternalMetadataInfo: - """Deserializes the ExternalLineageExternalMetadataInfo from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> ForeignKeyConstraint: + """Deserializes the ForeignKeyConstraint from a dictionary.""" return cls( - entity_type=d.get("entity_type", None), - event_time=d.get("event_time", None), + child_columns=d.get("child_columns", None), name=d.get("name", None), - system_type=_enum(d, "system_type", SystemType), + parent_columns=d.get("parent_columns", None), + parent_table=d.get("parent_table", None), + rely=d.get("rely", None), ) @dataclass -class ExternalLineageFileInfo: - """Represents the path information in the lineage event.""" - - event_time: Optional[str] = None - """Timestamp of the lineage event.""" - - path: Optional[str] = None - """URL of the path.""" - - securable_name: Optional[str] = None - """The full name of the securable on the path.""" - - securable_type: Optional[str] = None - """The securable type of the securable on the path.""" +class FunctionDependency: + """A function that is dependent on a SQL object.""" - storage_location: Optional[str] = None - """The storage location associated with securable on the path.""" + function_full_name: str + """Full name of the dependent function, in the form of + __catalog_name__.__schema_name__.__function_name__.""" def as_dict(self) -> dict: - """Serializes the ExternalLineageFileInfo into a dictionary 
suitable for use as a JSON request body.""" + """Serializes the FunctionDependency into a dictionary suitable for use as a JSON request body.""" body = {} - if self.event_time is not None: - body["event_time"] = self.event_time - if self.path is not None: - body["path"] = self.path - if self.securable_name is not None: - body["securable_name"] = self.securable_name - if self.securable_type is not None: - body["securable_type"] = self.securable_type - if self.storage_location is not None: - body["storage_location"] = self.storage_location + if self.function_full_name is not None: + body["function_full_name"] = self.function_full_name return body def as_shallow_dict(self) -> dict: - """Serializes the ExternalLineageFileInfo into a shallow dictionary of its immediate attributes.""" + """Serializes the FunctionDependency into a shallow dictionary of its immediate attributes.""" body = {} - if self.event_time is not None: - body["event_time"] = self.event_time - if self.path is not None: - body["path"] = self.path - if self.securable_name is not None: - body["securable_name"] = self.securable_name - if self.securable_type is not None: - body["securable_type"] = self.securable_type - if self.storage_location is not None: - body["storage_location"] = self.storage_location + if self.function_full_name is not None: + body["function_full_name"] = self.function_full_name return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ExternalLineageFileInfo: - """Deserializes the ExternalLineageFileInfo from a dictionary.""" - return cls( - event_time=d.get("event_time", None), - path=d.get("path", None), - securable_name=d.get("securable_name", None), - securable_type=d.get("securable_type", None), - storage_location=d.get("storage_location", None), - ) + def from_dict(cls, d: Dict[str, Any]) -> FunctionDependency: + """Deserializes the FunctionDependency from a dictionary.""" + return cls(function_full_name=d.get("function_full_name", None)) @dataclass -class 
ExternalLineageInfo: - """Lineage response containing lineage information of a data asset.""" +class FunctionInfo: + browse_only: Optional[bool] = None + """Indicates whether the principal is limited to retrieving metadata for the associated object + through the BROWSE privilege when include_browse is enabled in the request.""" - external_lineage_info: Optional[ExternalLineageRelationshipInfo] = None - """Information about the edge metadata of the external lineage relationship.""" + catalog_name: Optional[str] = None + """Name of parent catalog.""" - external_metadata_info: Optional[ExternalLineageExternalMetadataInfo] = None - """Information about external metadata involved in the lineage relationship.""" + comment: Optional[str] = None + """User-provided free-form text description.""" - file_info: Optional[ExternalLineageFileInfo] = None - """Information about the file involved in the lineage relationship.""" + created_at: Optional[int] = None + """Time at which this function was created, in epoch milliseconds.""" - model_info: Optional[ExternalLineageModelVersionInfo] = None - """Information about the model version involved in the lineage relationship.""" + created_by: Optional[str] = None + """Username of function creator.""" - table_info: Optional[ExternalLineageTableInfo] = None - """Information about the table involved in the lineage relationship.""" + data_type: Optional[ColumnTypeName] = None + """Scalar function return data type.""" - def as_dict(self) -> dict: - """Serializes the ExternalLineageInfo into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.external_lineage_info: - body["external_lineage_info"] = self.external_lineage_info.as_dict() - if self.external_metadata_info: - body["external_metadata_info"] = self.external_metadata_info.as_dict() - if self.file_info: - body["file_info"] = self.file_info.as_dict() - if self.model_info: - body["model_info"] = self.model_info.as_dict() - if self.table_info: - 
body["table_info"] = self.table_info.as_dict() - return body + external_language: Optional[str] = None + """External function language.""" - def as_shallow_dict(self) -> dict: - """Serializes the ExternalLineageInfo into a shallow dictionary of its immediate attributes.""" - body = {} - if self.external_lineage_info: - body["external_lineage_info"] = self.external_lineage_info - if self.external_metadata_info: - body["external_metadata_info"] = self.external_metadata_info - if self.file_info: - body["file_info"] = self.file_info - if self.model_info: - body["model_info"] = self.model_info - if self.table_info: - body["table_info"] = self.table_info - return body + external_name: Optional[str] = None + """External function name.""" - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ExternalLineageInfo: - """Deserializes the ExternalLineageInfo from a dictionary.""" - return cls( - external_lineage_info=_from_dict(d, "external_lineage_info", ExternalLineageRelationshipInfo), - external_metadata_info=_from_dict(d, "external_metadata_info", ExternalLineageExternalMetadataInfo), - file_info=_from_dict(d, "file_info", ExternalLineageFileInfo), - model_info=_from_dict(d, "model_info", ExternalLineageModelVersionInfo), - table_info=_from_dict(d, "table_info", ExternalLineageTableInfo), - ) + full_data_type: Optional[str] = None + """Pretty printed function data type.""" + full_name: Optional[str] = None + """Full name of function, in form of __catalog_name__.__schema_name__.__function__name__""" -@dataclass -class ExternalLineageModelVersion: - name: Optional[str] = None + function_id: Optional[str] = None + """Id of Function, relative to parent schema.""" - version: Optional[str] = None + input_params: Optional[FunctionParameterInfos] = None - def as_dict(self) -> dict: - """Serializes the ExternalLineageModelVersion into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.name is not None: - body["name"] = self.name - if self.version is 
not None: - body["version"] = self.version - return body + is_deterministic: Optional[bool] = None + """Whether the function is deterministic.""" - def as_shallow_dict(self) -> dict: - """Serializes the ExternalLineageModelVersion into a shallow dictionary of its immediate attributes.""" - body = {} - if self.name is not None: - body["name"] = self.name - if self.version is not None: - body["version"] = self.version - return body + is_null_call: Optional[bool] = None + """Function null call.""" - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ExternalLineageModelVersion: - """Deserializes the ExternalLineageModelVersion from a dictionary.""" - return cls(name=d.get("name", None), version=d.get("version", None)) + metastore_id: Optional[str] = None + """Unique identifier of parent metastore.""" + name: Optional[str] = None + """Name of function, relative to parent schema.""" -@dataclass -class ExternalLineageModelVersionInfo: - """Represents the model version information in the lineage event.""" + owner: Optional[str] = None + """Username of current owner of function.""" - event_time: Optional[str] = None - """Timestamp of the lineage event.""" + parameter_style: Optional[FunctionInfoParameterStyle] = None + """Function parameter style. 
**S** is the value for SQL.""" - model_name: Optional[str] = None - """Name of the model.""" + properties: Optional[str] = None + """JSON-serialized key-value pair map, encoded (escaped) as a string.""" - version: Optional[int] = None - """Version number of the model.""" + return_params: Optional[FunctionParameterInfos] = None + """Table function return parameters.""" - def as_dict(self) -> dict: - """Serializes the ExternalLineageModelVersionInfo into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.event_time is not None: - body["event_time"] = self.event_time - if self.model_name is not None: - body["model_name"] = self.model_name - if self.version is not None: - body["version"] = self.version - return body + routine_body: Optional[FunctionInfoRoutineBody] = None + """Function language. When **EXTERNAL** is used, the language of the routine function should be + specified in the __external_language__ field, and the __return_params__ of the function cannot + be used (as **TABLE** return type is not supported), and the __sql_data_access__ field must be + **NO_SQL**.""" - def as_shallow_dict(self) -> dict: - """Serializes the ExternalLineageModelVersionInfo into a shallow dictionary of its immediate attributes.""" - body = {} - if self.event_time is not None: - body["event_time"] = self.event_time - if self.model_name is not None: - body["model_name"] = self.model_name - if self.version is not None: - body["version"] = self.version - return body + routine_definition: Optional[str] = None + """Function body.""" - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ExternalLineageModelVersionInfo: - """Deserializes the ExternalLineageModelVersionInfo from a dictionary.""" - return cls( - event_time=d.get("event_time", None), model_name=d.get("model_name", None), version=d.get("version", None) - ) + routine_dependencies: Optional[DependencyList] = None + """Function dependencies.""" + schema_name: Optional[str] = None + """Name of 
parent schema relative to its parent catalog.""" -@dataclass -class ExternalLineageObject: - external_metadata: Optional[ExternalLineageExternalMetadata] = None + security_type: Optional[FunctionInfoSecurityType] = None + """Function security type.""" - model_version: Optional[ExternalLineageModelVersion] = None + specific_name: Optional[str] = None + """Specific name of the function; Reserved for future use.""" - path: Optional[ExternalLineagePath] = None + sql_data_access: Optional[FunctionInfoSqlDataAccess] = None + """Function SQL data access.""" - table: Optional[ExternalLineageTable] = None + sql_path: Optional[str] = None + """List of schemes whose objects can be referenced without qualification.""" + + updated_at: Optional[int] = None + """Time at which this function was created, in epoch milliseconds.""" + + updated_by: Optional[str] = None + """Username of user who last modified function.""" def as_dict(self) -> dict: - """Serializes the ExternalLineageObject into a dictionary suitable for use as a JSON request body.""" + """Serializes the FunctionInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.external_metadata: - body["external_metadata"] = self.external_metadata.as_dict() - if self.model_version: - body["model_version"] = self.model_version.as_dict() - if self.path: - body["path"] = self.path.as_dict() - if self.table: - body["table"] = self.table.as_dict() + if self.browse_only is not None: + body["browse_only"] = self.browse_only + if self.catalog_name is not None: + body["catalog_name"] = self.catalog_name + if self.comment is not None: + body["comment"] = self.comment + if self.created_at is not None: + body["created_at"] = self.created_at + if self.created_by is not None: + body["created_by"] = self.created_by + if self.data_type is not None: + body["data_type"] = self.data_type.value + if self.external_language is not None: + body["external_language"] = self.external_language + if self.external_name is not 
None: + body["external_name"] = self.external_name + if self.full_data_type is not None: + body["full_data_type"] = self.full_data_type + if self.full_name is not None: + body["full_name"] = self.full_name + if self.function_id is not None: + body["function_id"] = self.function_id + if self.input_params: + body["input_params"] = self.input_params.as_dict() + if self.is_deterministic is not None: + body["is_deterministic"] = self.is_deterministic + if self.is_null_call is not None: + body["is_null_call"] = self.is_null_call + if self.metastore_id is not None: + body["metastore_id"] = self.metastore_id + if self.name is not None: + body["name"] = self.name + if self.owner is not None: + body["owner"] = self.owner + if self.parameter_style is not None: + body["parameter_style"] = self.parameter_style.value + if self.properties is not None: + body["properties"] = self.properties + if self.return_params: + body["return_params"] = self.return_params.as_dict() + if self.routine_body is not None: + body["routine_body"] = self.routine_body.value + if self.routine_definition is not None: + body["routine_definition"] = self.routine_definition + if self.routine_dependencies: + body["routine_dependencies"] = self.routine_dependencies.as_dict() + if self.schema_name is not None: + body["schema_name"] = self.schema_name + if self.security_type is not None: + body["security_type"] = self.security_type.value + if self.specific_name is not None: + body["specific_name"] = self.specific_name + if self.sql_data_access is not None: + body["sql_data_access"] = self.sql_data_access.value + if self.sql_path is not None: + body["sql_path"] = self.sql_path + if self.updated_at is not None: + body["updated_at"] = self.updated_at + if self.updated_by is not None: + body["updated_by"] = self.updated_by return body def as_shallow_dict(self) -> dict: - """Serializes the ExternalLineageObject into a shallow dictionary of its immediate attributes.""" + """Serializes the FunctionInfo into a shallow 
dictionary of its immediate attributes.""" body = {} - if self.external_metadata: - body["external_metadata"] = self.external_metadata - if self.model_version: - body["model_version"] = self.model_version - if self.path: - body["path"] = self.path - if self.table: - body["table"] = self.table + if self.browse_only is not None: + body["browse_only"] = self.browse_only + if self.catalog_name is not None: + body["catalog_name"] = self.catalog_name + if self.comment is not None: + body["comment"] = self.comment + if self.created_at is not None: + body["created_at"] = self.created_at + if self.created_by is not None: + body["created_by"] = self.created_by + if self.data_type is not None: + body["data_type"] = self.data_type + if self.external_language is not None: + body["external_language"] = self.external_language + if self.external_name is not None: + body["external_name"] = self.external_name + if self.full_data_type is not None: + body["full_data_type"] = self.full_data_type + if self.full_name is not None: + body["full_name"] = self.full_name + if self.function_id is not None: + body["function_id"] = self.function_id + if self.input_params: + body["input_params"] = self.input_params + if self.is_deterministic is not None: + body["is_deterministic"] = self.is_deterministic + if self.is_null_call is not None: + body["is_null_call"] = self.is_null_call + if self.metastore_id is not None: + body["metastore_id"] = self.metastore_id + if self.name is not None: + body["name"] = self.name + if self.owner is not None: + body["owner"] = self.owner + if self.parameter_style is not None: + body["parameter_style"] = self.parameter_style + if self.properties is not None: + body["properties"] = self.properties + if self.return_params: + body["return_params"] = self.return_params + if self.routine_body is not None: + body["routine_body"] = self.routine_body + if self.routine_definition is not None: + body["routine_definition"] = self.routine_definition + if 
self.routine_dependencies: + body["routine_dependencies"] = self.routine_dependencies + if self.schema_name is not None: + body["schema_name"] = self.schema_name + if self.security_type is not None: + body["security_type"] = self.security_type + if self.specific_name is not None: + body["specific_name"] = self.specific_name + if self.sql_data_access is not None: + body["sql_data_access"] = self.sql_data_access + if self.sql_path is not None: + body["sql_path"] = self.sql_path + if self.updated_at is not None: + body["updated_at"] = self.updated_at + if self.updated_by is not None: + body["updated_by"] = self.updated_by return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ExternalLineageObject: - """Deserializes the ExternalLineageObject from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> FunctionInfo: + """Deserializes the FunctionInfo from a dictionary.""" return cls( - external_metadata=_from_dict(d, "external_metadata", ExternalLineageExternalMetadata), - model_version=_from_dict(d, "model_version", ExternalLineageModelVersion), - path=_from_dict(d, "path", ExternalLineagePath), - table=_from_dict(d, "table", ExternalLineageTable), + browse_only=d.get("browse_only", None), + catalog_name=d.get("catalog_name", None), + comment=d.get("comment", None), + created_at=d.get("created_at", None), + created_by=d.get("created_by", None), + data_type=_enum(d, "data_type", ColumnTypeName), + external_language=d.get("external_language", None), + external_name=d.get("external_name", None), + full_data_type=d.get("full_data_type", None), + full_name=d.get("full_name", None), + function_id=d.get("function_id", None), + input_params=_from_dict(d, "input_params", FunctionParameterInfos), + is_deterministic=d.get("is_deterministic", None), + is_null_call=d.get("is_null_call", None), + metastore_id=d.get("metastore_id", None), + name=d.get("name", None), + owner=d.get("owner", None), + parameter_style=_enum(d, "parameter_style", 
FunctionInfoParameterStyle), + properties=d.get("properties", None), + return_params=_from_dict(d, "return_params", FunctionParameterInfos), + routine_body=_enum(d, "routine_body", FunctionInfoRoutineBody), + routine_definition=d.get("routine_definition", None), + routine_dependencies=_from_dict(d, "routine_dependencies", DependencyList), + schema_name=d.get("schema_name", None), + security_type=_enum(d, "security_type", FunctionInfoSecurityType), + specific_name=d.get("specific_name", None), + sql_data_access=_enum(d, "sql_data_access", FunctionInfoSqlDataAccess), + sql_path=d.get("sql_path", None), + updated_at=d.get("updated_at", None), + updated_by=d.get("updated_by", None), ) -@dataclass -class ExternalLineagePath: - url: Optional[str] = None - - def as_dict(self) -> dict: - """Serializes the ExternalLineagePath into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.url is not None: - body["url"] = self.url - return body - - def as_shallow_dict(self) -> dict: - """Serializes the ExternalLineagePath into a shallow dictionary of its immediate attributes.""" - body = {} - if self.url is not None: - body["url"] = self.url - return body +class FunctionInfoParameterStyle(Enum): + """Function parameter style. **S** is the value for SQL.""" - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ExternalLineagePath: - """Deserializes the ExternalLineagePath from a dictionary.""" - return cls(url=d.get("url", None)) + S = "S" -@dataclass -class ExternalLineageRelationship: - source: ExternalLineageObject - """Source object of the external lineage relationship.""" +class FunctionInfoRoutineBody(Enum): + """Function language. 
When **EXTERNAL** is used, the language of the routine function should be + specified in the __external_language__ field, and the __return_params__ of the function cannot + be used (as **TABLE** return type is not supported), and the __sql_data_access__ field must be + **NO_SQL**.""" - target: ExternalLineageObject - """Target object of the external lineage relationship.""" + EXTERNAL = "EXTERNAL" + SQL = "SQL" - columns: Optional[List[ColumnRelationship]] = None - """List of column relationships between source and target objects.""" - id: Optional[str] = None - """Unique identifier of the external lineage relationship.""" +class FunctionInfoSecurityType(Enum): + """The security type of the function.""" - properties: Optional[Dict[str, str]] = None - """Key-value properties associated with the external lineage relationship.""" + DEFINER = "DEFINER" - def as_dict(self) -> dict: - """Serializes the ExternalLineageRelationship into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.columns: - body["columns"] = [v.as_dict() for v in self.columns] - if self.id is not None: - body["id"] = self.id - if self.properties: - body["properties"] = self.properties - if self.source: - body["source"] = self.source.as_dict() - if self.target: - body["target"] = self.target.as_dict() - return body - def as_shallow_dict(self) -> dict: - """Serializes the ExternalLineageRelationship into a shallow dictionary of its immediate attributes.""" - body = {} - if self.columns: - body["columns"] = self.columns - if self.id is not None: - body["id"] = self.id - if self.properties: - body["properties"] = self.properties - if self.source: - body["source"] = self.source - if self.target: - body["target"] = self.target - return body +class FunctionInfoSqlDataAccess(Enum): + """Function SQL data access.""" - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ExternalLineageRelationship: - """Deserializes the ExternalLineageRelationship from a dictionary.""" - return 
cls( - columns=_repeated_dict(d, "columns", ColumnRelationship), - id=d.get("id", None), - properties=d.get("properties", None), - source=_from_dict(d, "source", ExternalLineageObject), - target=_from_dict(d, "target", ExternalLineageObject), - ) + CONTAINS_SQL = "CONTAINS_SQL" + NO_SQL = "NO_SQL" + READS_SQL_DATA = "READS_SQL_DATA" @dataclass -class ExternalLineageRelationshipInfo: - source: ExternalLineageObject - """Source object of the external lineage relationship.""" +class FunctionParameterInfo: + name: str + """Name of parameter.""" - target: ExternalLineageObject - """Target object of the external lineage relationship.""" + type_text: str + """Full data type spec, SQL/catalogString text.""" - columns: Optional[List[ColumnRelationship]] = None - """List of column relationships between source and target objects.""" + type_name: ColumnTypeName - id: Optional[str] = None - """Unique identifier of the external lineage relationship.""" + position: int + """Ordinal position of column (starting at position 0).""" - properties: Optional[Dict[str, str]] = None - """Key-value properties associated with the external lineage relationship.""" + comment: Optional[str] = None + """User-provided free-form text description.""" + + parameter_default: Optional[str] = None + """Default value of the parameter.""" + + parameter_mode: Optional[FunctionParameterMode] = None + + parameter_type: Optional[FunctionParameterType] = None + + type_interval_type: Optional[str] = None + """Format of IntervalType.""" + + type_json: Optional[str] = None + """Full data type spec, JSON-serialized.""" + + type_precision: Optional[int] = None + """Digits of precision; required on Create for DecimalTypes.""" + + type_scale: Optional[int] = None + """Digits to right of decimal; Required on Create for DecimalTypes.""" def as_dict(self) -> dict: - """Serializes the ExternalLineageRelationshipInfo into a dictionary suitable for use as a JSON request body.""" + """Serializes the FunctionParameterInfo 
into a dictionary suitable for use as a JSON request body.""" body = {} - if self.columns: - body["columns"] = [v.as_dict() for v in self.columns] - if self.id is not None: - body["id"] = self.id - if self.properties: - body["properties"] = self.properties - if self.source: - body["source"] = self.source.as_dict() - if self.target: - body["target"] = self.target.as_dict() + if self.comment is not None: + body["comment"] = self.comment + if self.name is not None: + body["name"] = self.name + if self.parameter_default is not None: + body["parameter_default"] = self.parameter_default + if self.parameter_mode is not None: + body["parameter_mode"] = self.parameter_mode.value + if self.parameter_type is not None: + body["parameter_type"] = self.parameter_type.value + if self.position is not None: + body["position"] = self.position + if self.type_interval_type is not None: + body["type_interval_type"] = self.type_interval_type + if self.type_json is not None: + body["type_json"] = self.type_json + if self.type_name is not None: + body["type_name"] = self.type_name.value + if self.type_precision is not None: + body["type_precision"] = self.type_precision + if self.type_scale is not None: + body["type_scale"] = self.type_scale + if self.type_text is not None: + body["type_text"] = self.type_text return body def as_shallow_dict(self) -> dict: - """Serializes the ExternalLineageRelationshipInfo into a shallow dictionary of its immediate attributes.""" + """Serializes the FunctionParameterInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.columns: - body["columns"] = self.columns - if self.id is not None: - body["id"] = self.id - if self.properties: - body["properties"] = self.properties - if self.source: - body["source"] = self.source - if self.target: - body["target"] = self.target + if self.comment is not None: + body["comment"] = self.comment + if self.name is not None: + body["name"] = self.name + if self.parameter_default is not None: + 
body["parameter_default"] = self.parameter_default + if self.parameter_mode is not None: + body["parameter_mode"] = self.parameter_mode + if self.parameter_type is not None: + body["parameter_type"] = self.parameter_type + if self.position is not None: + body["position"] = self.position + if self.type_interval_type is not None: + body["type_interval_type"] = self.type_interval_type + if self.type_json is not None: + body["type_json"] = self.type_json + if self.type_name is not None: + body["type_name"] = self.type_name + if self.type_precision is not None: + body["type_precision"] = self.type_precision + if self.type_scale is not None: + body["type_scale"] = self.type_scale + if self.type_text is not None: + body["type_text"] = self.type_text return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ExternalLineageRelationshipInfo: - """Deserializes the ExternalLineageRelationshipInfo from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> FunctionParameterInfo: + """Deserializes the FunctionParameterInfo from a dictionary.""" return cls( - columns=_repeated_dict(d, "columns", ColumnRelationship), - id=d.get("id", None), - properties=d.get("properties", None), - source=_from_dict(d, "source", ExternalLineageObject), - target=_from_dict(d, "target", ExternalLineageObject), + comment=d.get("comment", None), + name=d.get("name", None), + parameter_default=d.get("parameter_default", None), + parameter_mode=_enum(d, "parameter_mode", FunctionParameterMode), + parameter_type=_enum(d, "parameter_type", FunctionParameterType), + position=d.get("position", None), + type_interval_type=d.get("type_interval_type", None), + type_json=d.get("type_json", None), + type_name=_enum(d, "type_name", ColumnTypeName), + type_precision=d.get("type_precision", None), + type_scale=d.get("type_scale", None), + type_text=d.get("type_text", None), ) @dataclass -class ExternalLineageTable: - name: Optional[str] = None +class FunctionParameterInfos: + parameters: 
Optional[List[FunctionParameterInfo]] = None + """The array of __FunctionParameterInfo__ definitions of the function's parameters.""" def as_dict(self) -> dict: - """Serializes the ExternalLineageTable into a dictionary suitable for use as a JSON request body.""" + """Serializes the FunctionParameterInfos into a dictionary suitable for use as a JSON request body.""" body = {} - if self.name is not None: - body["name"] = self.name + if self.parameters: + body["parameters"] = [v.as_dict() for v in self.parameters] return body def as_shallow_dict(self) -> dict: - """Serializes the ExternalLineageTable into a shallow dictionary of its immediate attributes.""" + """Serializes the FunctionParameterInfos into a shallow dictionary of its immediate attributes.""" body = {} - if self.name is not None: - body["name"] = self.name + if self.parameters: + body["parameters"] = self.parameters return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ExternalLineageTable: - """Deserializes the ExternalLineageTable from a dictionary.""" - return cls(name=d.get("name", None)) + def from_dict(cls, d: Dict[str, Any]) -> FunctionParameterInfos: + """Deserializes the FunctionParameterInfos from a dictionary.""" + return cls(parameters=_repeated_dict(d, "parameters", FunctionParameterInfo)) -@dataclass -class ExternalLineageTableInfo: - """Represents the table information in the lineage event.""" +class FunctionParameterMode(Enum): + """The mode of the function parameter.""" - catalog_name: Optional[str] = None - """Name of Catalog.""" + IN = "IN" - event_time: Optional[str] = None - """Timestamp of the lineage event.""" - name: Optional[str] = None - """Name of Table.""" +class FunctionParameterType(Enum): + """The type of function parameter.""" - schema_name: Optional[str] = None - """Name of Schema.""" + COLUMN = "COLUMN" + PARAM = "PARAM" + + +@dataclass +class GcpOauthToken: + """GCP temporary credentials for API authentication. 
Read more at + https://developers.google.com/identity/protocols/oauth2/service-account""" + + oauth_token: Optional[str] = None def as_dict(self) -> dict: - """Serializes the ExternalLineageTableInfo into a dictionary suitable for use as a JSON request body.""" + """Serializes the GcpOauthToken into a dictionary suitable for use as a JSON request body.""" body = {} - if self.catalog_name is not None: - body["catalog_name"] = self.catalog_name - if self.event_time is not None: - body["event_time"] = self.event_time - if self.name is not None: - body["name"] = self.name - if self.schema_name is not None: - body["schema_name"] = self.schema_name + if self.oauth_token is not None: + body["oauth_token"] = self.oauth_token return body def as_shallow_dict(self) -> dict: - """Serializes the ExternalLineageTableInfo into a shallow dictionary of its immediate attributes.""" + """Serializes the GcpOauthToken into a shallow dictionary of its immediate attributes.""" body = {} - if self.catalog_name is not None: - body["catalog_name"] = self.catalog_name - if self.event_time is not None: - body["event_time"] = self.event_time - if self.name is not None: - body["name"] = self.name - if self.schema_name is not None: - body["schema_name"] = self.schema_name + if self.oauth_token is not None: + body["oauth_token"] = self.oauth_token return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ExternalLineageTableInfo: - """Deserializes the ExternalLineageTableInfo from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> GcpOauthToken: + """Deserializes the GcpOauthToken from a dictionary.""" + return cls(oauth_token=d.get("oauth_token", None)) + + +@dataclass +class GcpPubsub: + managed_resource_id: Optional[str] = None + """Unique identifier included in the name of file events managed cloud resources.""" + + subscription_name: Optional[str] = None + """The Pub/Sub subscription name in the format projects/{project}/subscriptions/{subscription name} + Required for 
provided_pubsub.""" + + def as_dict(self) -> dict: + """Serializes the GcpPubsub into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.managed_resource_id is not None: + body["managed_resource_id"] = self.managed_resource_id + if self.subscription_name is not None: + body["subscription_name"] = self.subscription_name + return body + + def as_shallow_dict(self) -> dict: + """Serializes the GcpPubsub into a shallow dictionary of its immediate attributes.""" + body = {} + if self.managed_resource_id is not None: + body["managed_resource_id"] = self.managed_resource_id + if self.subscription_name is not None: + body["subscription_name"] = self.subscription_name + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> GcpPubsub: + """Deserializes the GcpPubsub from a dictionary.""" return cls( - catalog_name=d.get("catalog_name", None), - event_time=d.get("event_time", None), - name=d.get("name", None), - schema_name=d.get("schema_name", None), + managed_resource_id=d.get("managed_resource_id", None), subscription_name=d.get("subscription_name", None) ) @dataclass -class ExternalLocationInfo: - browse_only: Optional[bool] = None - """Indicates whether the principal is limited to retrieving metadata for the associated object - through the BROWSE privilege when include_browse is enabled in the request.""" +class GenerateTemporaryServiceCredentialAzureOptions: + """The Azure cloud options to customize the requested temporary credential""" - comment: Optional[str] = None - """User-provided free-form text description.""" + resources: Optional[List[str]] = None + """The resources to which the temporary Azure credential should apply. 
These resources are the + scopes that are passed to the token provider (see + https://learn.microsoft.com/python/api/azure-core/azure.core.credentials.tokencredential?view=azure-python)""" - created_at: Optional[int] = None - """Time at which this external location was created, in epoch milliseconds.""" + def as_dict(self) -> dict: + """Serializes the GenerateTemporaryServiceCredentialAzureOptions into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.resources: + body["resources"] = [v for v in self.resources] + return body - created_by: Optional[str] = None - """Username of external location creator.""" + def as_shallow_dict(self) -> dict: + """Serializes the GenerateTemporaryServiceCredentialAzureOptions into a shallow dictionary of its immediate attributes.""" + body = {} + if self.resources: + body["resources"] = self.resources + return body - credential_id: Optional[str] = None - """Unique ID of the location's storage credential.""" + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> GenerateTemporaryServiceCredentialAzureOptions: + """Deserializes the GenerateTemporaryServiceCredentialAzureOptions from a dictionary.""" + return cls(resources=d.get("resources", None)) - credential_name: Optional[str] = None - """Name of the storage credential used with this location.""" - enable_file_events: Optional[bool] = None - """Whether to enable file events on this external location.""" +@dataclass +class GenerateTemporaryServiceCredentialGcpOptions: + """The GCP cloud options to customize the requested temporary credential""" - encryption_details: Optional[EncryptionDetails] = None + scopes: Optional[List[str]] = None + """The scopes to which the temporary GCP credential should apply. 
These resources are the scopes + that are passed to the token provider (see + https://google-auth.readthedocs.io/en/latest/reference/google.auth.html#google.auth.credentials.Credentials)""" - fallback: Optional[bool] = None - """Indicates whether fallback mode is enabled for this external location. When fallback mode is - enabled, the access to the location falls back to cluster credentials if UC credentials are not - sufficient.""" + def as_dict(self) -> dict: + """Serializes the GenerateTemporaryServiceCredentialGcpOptions into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.scopes: + body["scopes"] = [v for v in self.scopes] + return body - file_event_queue: Optional[FileEventQueue] = None - """File event queue settings.""" + def as_shallow_dict(self) -> dict: + """Serializes the GenerateTemporaryServiceCredentialGcpOptions into a shallow dictionary of its immediate attributes.""" + body = {} + if self.scopes: + body["scopes"] = self.scopes + return body - isolation_mode: Optional[IsolationMode] = None + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> GenerateTemporaryServiceCredentialGcpOptions: + """Deserializes the GenerateTemporaryServiceCredentialGcpOptions from a dictionary.""" + return cls(scopes=d.get("scopes", None)) - metastore_id: Optional[str] = None - """Unique identifier of metastore hosting the external location.""" - name: Optional[str] = None - """Name of the external location.""" +@dataclass +class GenerateTemporaryTableCredentialResponse: + aws_temp_credentials: Optional[AwsCredentials] = None - owner: Optional[str] = None - """The owner of the external location.""" + azure_aad: Optional[AzureActiveDirectoryToken] = None - read_only: Optional[bool] = None - """Indicates whether the external location is read-only.""" + azure_user_delegation_sas: Optional[AzureUserDelegationSas] = None - updated_at: Optional[int] = None - """Time at which external location this was last modified, in epoch milliseconds.""" + 
expiration_time: Optional[int] = None + """Server time when the credential will expire, in epoch milliseconds. The API client is advised to + cache the credential given this expiration time.""" - updated_by: Optional[str] = None - """Username of user who last modified the external location.""" + gcp_oauth_token: Optional[GcpOauthToken] = None + + r2_temp_credentials: Optional[R2Credentials] = None url: Optional[str] = None - """Path URL of the external location.""" + """The URL of the storage path accessible by the temporary credential.""" def as_dict(self) -> dict: - """Serializes the ExternalLocationInfo into a dictionary suitable for use as a JSON request body.""" + """Serializes the GenerateTemporaryTableCredentialResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.browse_only is not None: - body["browse_only"] = self.browse_only - if self.comment is not None: - body["comment"] = self.comment - if self.created_at is not None: - body["created_at"] = self.created_at - if self.created_by is not None: - body["created_by"] = self.created_by - if self.credential_id is not None: - body["credential_id"] = self.credential_id - if self.credential_name is not None: - body["credential_name"] = self.credential_name - if self.enable_file_events is not None: - body["enable_file_events"] = self.enable_file_events - if self.encryption_details: - body["encryption_details"] = self.encryption_details.as_dict() - if self.fallback is not None: - body["fallback"] = self.fallback - if self.file_event_queue: - body["file_event_queue"] = self.file_event_queue.as_dict() - if self.isolation_mode is not None: - body["isolation_mode"] = self.isolation_mode.value - if self.metastore_id is not None: - body["metastore_id"] = self.metastore_id - if self.name is not None: - body["name"] = self.name - if self.owner is not None: - body["owner"] = self.owner - if self.read_only is not None: - body["read_only"] = self.read_only - if self.updated_at is not None: 
- body["updated_at"] = self.updated_at - if self.updated_by is not None: - body["updated_by"] = self.updated_by + if self.aws_temp_credentials: + body["aws_temp_credentials"] = self.aws_temp_credentials.as_dict() + if self.azure_aad: + body["azure_aad"] = self.azure_aad.as_dict() + if self.azure_user_delegation_sas: + body["azure_user_delegation_sas"] = self.azure_user_delegation_sas.as_dict() + if self.expiration_time is not None: + body["expiration_time"] = self.expiration_time + if self.gcp_oauth_token: + body["gcp_oauth_token"] = self.gcp_oauth_token.as_dict() + if self.r2_temp_credentials: + body["r2_temp_credentials"] = self.r2_temp_credentials.as_dict() if self.url is not None: body["url"] = self.url return body def as_shallow_dict(self) -> dict: - """Serializes the ExternalLocationInfo into a shallow dictionary of its immediate attributes.""" + """Serializes the GenerateTemporaryTableCredentialResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.browse_only is not None: - body["browse_only"] = self.browse_only - if self.comment is not None: - body["comment"] = self.comment - if self.created_at is not None: - body["created_at"] = self.created_at - if self.created_by is not None: - body["created_by"] = self.created_by - if self.credential_id is not None: - body["credential_id"] = self.credential_id - if self.credential_name is not None: - body["credential_name"] = self.credential_name - if self.enable_file_events is not None: - body["enable_file_events"] = self.enable_file_events - if self.encryption_details: - body["encryption_details"] = self.encryption_details - if self.fallback is not None: - body["fallback"] = self.fallback - if self.file_event_queue: - body["file_event_queue"] = self.file_event_queue - if self.isolation_mode is not None: - body["isolation_mode"] = self.isolation_mode - if self.metastore_id is not None: - body["metastore_id"] = self.metastore_id - if self.name is not None: - body["name"] = self.name - if 
self.owner is not None: - body["owner"] = self.owner - if self.read_only is not None: - body["read_only"] = self.read_only - if self.updated_at is not None: - body["updated_at"] = self.updated_at - if self.updated_by is not None: - body["updated_by"] = self.updated_by - if self.url is not None: + if self.aws_temp_credentials: + body["aws_temp_credentials"] = self.aws_temp_credentials + if self.azure_aad: + body["azure_aad"] = self.azure_aad + if self.azure_user_delegation_sas: + body["azure_user_delegation_sas"] = self.azure_user_delegation_sas + if self.expiration_time is not None: + body["expiration_time"] = self.expiration_time + if self.gcp_oauth_token: + body["gcp_oauth_token"] = self.gcp_oauth_token + if self.r2_temp_credentials: + body["r2_temp_credentials"] = self.r2_temp_credentials + if self.url is not None: body["url"] = self.url return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ExternalLocationInfo: - """Deserializes the ExternalLocationInfo from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> GenerateTemporaryTableCredentialResponse: + """Deserializes the GenerateTemporaryTableCredentialResponse from a dictionary.""" return cls( - browse_only=d.get("browse_only", None), - comment=d.get("comment", None), - created_at=d.get("created_at", None), - created_by=d.get("created_by", None), - credential_id=d.get("credential_id", None), - credential_name=d.get("credential_name", None), - enable_file_events=d.get("enable_file_events", None), - encryption_details=_from_dict(d, "encryption_details", EncryptionDetails), - fallback=d.get("fallback", None), - file_event_queue=_from_dict(d, "file_event_queue", FileEventQueue), - isolation_mode=_enum(d, "isolation_mode", IsolationMode), - metastore_id=d.get("metastore_id", None), - name=d.get("name", None), - owner=d.get("owner", None), - read_only=d.get("read_only", None), - updated_at=d.get("updated_at", None), - updated_by=d.get("updated_by", None), + aws_temp_credentials=_from_dict(d, 
"aws_temp_credentials", AwsCredentials), + azure_aad=_from_dict(d, "azure_aad", AzureActiveDirectoryToken), + azure_user_delegation_sas=_from_dict(d, "azure_user_delegation_sas", AzureUserDelegationSas), + expiration_time=d.get("expiration_time", None), + gcp_oauth_token=_from_dict(d, "gcp_oauth_token", GcpOauthToken), + r2_temp_credentials=_from_dict(d, "r2_temp_credentials", R2Credentials), url=d.get("url", None), ) @dataclass -class ExternalMetadata: - name: str - """Name of the external metadata object.""" +class GetCatalogWorkspaceBindingsResponse: + workspaces: Optional[List[int]] = None + """A list of workspace IDs""" - system_type: SystemType - """Type of external system.""" + def as_dict(self) -> dict: + """Serializes the GetCatalogWorkspaceBindingsResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.workspaces: + body["workspaces"] = [v for v in self.workspaces] + return body - entity_type: str - """Type of entity within the external system.""" + def as_shallow_dict(self) -> dict: + """Serializes the GetCatalogWorkspaceBindingsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.workspaces: + body["workspaces"] = self.workspaces + return body - columns: Optional[List[str]] = None - """List of columns associated with the external metadata object.""" + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> GetCatalogWorkspaceBindingsResponse: + """Deserializes the GetCatalogWorkspaceBindingsResponse from a dictionary.""" + return cls(workspaces=d.get("workspaces", None)) - create_time: Optional[str] = None - """Time at which this external metadata object was created.""" + +@dataclass +class GetMetastoreSummaryResponse: + cloud: Optional[str] = None + """Cloud vendor of the metastore home shard (e.g., `aws`, `azure`, `gcp`).""" + + created_at: Optional[int] = None + """Time at which this metastore was created, in epoch milliseconds.""" created_by: Optional[str] = None - """Username 
of external metadata object creator.""" + """Username of metastore creator.""" - description: Optional[str] = None - """User-provided free-form text description.""" + default_data_access_config_id: Optional[str] = None + """Unique identifier of the metastore's (Default) Data Access Configuration.""" - id: Optional[str] = None - """Unique identifier of the external metadata object.""" + delta_sharing_organization_name: Optional[str] = None + """The organization name of a Delta Sharing entity, to be used in Databricks-to-Databricks Delta + Sharing as the official name.""" + + delta_sharing_recipient_token_lifetime_in_seconds: Optional[int] = None + """The lifetime of delta sharing recipient token in seconds.""" + + delta_sharing_scope: Optional[DeltaSharingScopeEnum] = None + """The scope of Delta Sharing enabled for the metastore.""" + + external_access_enabled: Optional[bool] = None + """Whether to allow non-DBR clients to directly access entities under the metastore.""" + + global_metastore_id: Optional[str] = None + """Globally unique metastore ID across clouds and regions, of the form `cloud:region:metastore_id`.""" metastore_id: Optional[str] = None - """Unique identifier of parent metastore.""" + """Unique identifier of metastore.""" + + name: Optional[str] = None + """The user-specified name of the metastore.""" owner: Optional[str] = None - """Owner of the external metadata object.""" + """The owner of the metastore.""" - properties: Optional[Dict[str, str]] = None - """A map of key-value properties attached to the external metadata object.""" + privilege_model_version: Optional[str] = None + """Privilege model version of the metastore, of the form `major.minor` (e.g., `1.0`).""" - update_time: Optional[str] = None - """Time at which this external metadata object was last modified.""" + region: Optional[str] = None + """Cloud region which the metastore serves (e.g., `us-west-2`, `westus`).""" - updated_by: Optional[str] = None - """Username of user who last 
modified external metadata object.""" + storage_root: Optional[str] = None + """The storage root URL for metastore""" - url: Optional[str] = None - """URL associated with the external metadata object.""" + storage_root_credential_id: Optional[str] = None + """UUID of storage credential to access the metastore storage_root.""" + + storage_root_credential_name: Optional[str] = None + """Name of the storage credential to access the metastore storage_root.""" + + updated_at: Optional[int] = None + """Time at which the metastore was last modified, in epoch milliseconds.""" + + updated_by: Optional[str] = None + """Username of user who last modified the metastore.""" def as_dict(self) -> dict: - """Serializes the ExternalMetadata into a dictionary suitable for use as a JSON request body.""" + """Serializes the GetMetastoreSummaryResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.columns: - body["columns"] = [v for v in self.columns] - if self.create_time is not None: - body["create_time"] = self.create_time + if self.cloud is not None: + body["cloud"] = self.cloud + if self.created_at is not None: + body["created_at"] = self.created_at if self.created_by is not None: body["created_by"] = self.created_by - if self.description is not None: - body["description"] = self.description - if self.entity_type is not None: - body["entity_type"] = self.entity_type - if self.id is not None: - body["id"] = self.id + if self.default_data_access_config_id is not None: + body["default_data_access_config_id"] = self.default_data_access_config_id + if self.delta_sharing_organization_name is not None: + body["delta_sharing_organization_name"] = self.delta_sharing_organization_name + if self.delta_sharing_recipient_token_lifetime_in_seconds is not None: + body["delta_sharing_recipient_token_lifetime_in_seconds"] = ( + self.delta_sharing_recipient_token_lifetime_in_seconds + ) + if self.delta_sharing_scope is not None: + body["delta_sharing_scope"] = 
self.delta_sharing_scope.value + if self.external_access_enabled is not None: + body["external_access_enabled"] = self.external_access_enabled + if self.global_metastore_id is not None: + body["global_metastore_id"] = self.global_metastore_id if self.metastore_id is not None: body["metastore_id"] = self.metastore_id if self.name is not None: body["name"] = self.name if self.owner is not None: body["owner"] = self.owner - if self.properties: - body["properties"] = self.properties - if self.system_type is not None: - body["system_type"] = self.system_type.value - if self.update_time is not None: - body["update_time"] = self.update_time + if self.privilege_model_version is not None: + body["privilege_model_version"] = self.privilege_model_version + if self.region is not None: + body["region"] = self.region + if self.storage_root is not None: + body["storage_root"] = self.storage_root + if self.storage_root_credential_id is not None: + body["storage_root_credential_id"] = self.storage_root_credential_id + if self.storage_root_credential_name is not None: + body["storage_root_credential_name"] = self.storage_root_credential_name + if self.updated_at is not None: + body["updated_at"] = self.updated_at if self.updated_by is not None: body["updated_by"] = self.updated_by - if self.url is not None: - body["url"] = self.url return body def as_shallow_dict(self) -> dict: - """Serializes the ExternalMetadata into a shallow dictionary of its immediate attributes.""" + """Serializes the GetMetastoreSummaryResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.columns: - body["columns"] = self.columns - if self.create_time is not None: - body["create_time"] = self.create_time + if self.cloud is not None: + body["cloud"] = self.cloud + if self.created_at is not None: + body["created_at"] = self.created_at if self.created_by is not None: body["created_by"] = self.created_by - if self.description is not None: - body["description"] = self.description - 
if self.entity_type is not None: - body["entity_type"] = self.entity_type - if self.id is not None: - body["id"] = self.id + if self.default_data_access_config_id is not None: + body["default_data_access_config_id"] = self.default_data_access_config_id + if self.delta_sharing_organization_name is not None: + body["delta_sharing_organization_name"] = self.delta_sharing_organization_name + if self.delta_sharing_recipient_token_lifetime_in_seconds is not None: + body["delta_sharing_recipient_token_lifetime_in_seconds"] = ( + self.delta_sharing_recipient_token_lifetime_in_seconds + ) + if self.delta_sharing_scope is not None: + body["delta_sharing_scope"] = self.delta_sharing_scope + if self.external_access_enabled is not None: + body["external_access_enabled"] = self.external_access_enabled + if self.global_metastore_id is not None: + body["global_metastore_id"] = self.global_metastore_id if self.metastore_id is not None: body["metastore_id"] = self.metastore_id if self.name is not None: body["name"] = self.name if self.owner is not None: body["owner"] = self.owner - if self.properties: - body["properties"] = self.properties - if self.system_type is not None: - body["system_type"] = self.system_type - if self.update_time is not None: - body["update_time"] = self.update_time + if self.privilege_model_version is not None: + body["privilege_model_version"] = self.privilege_model_version + if self.region is not None: + body["region"] = self.region + if self.storage_root is not None: + body["storage_root"] = self.storage_root + if self.storage_root_credential_id is not None: + body["storage_root_credential_id"] = self.storage_root_credential_id + if self.storage_root_credential_name is not None: + body["storage_root_credential_name"] = self.storage_root_credential_name + if self.updated_at is not None: + body["updated_at"] = self.updated_at if self.updated_by is not None: body["updated_by"] = self.updated_by - if self.url is not None: - body["url"] = self.url return body 
@classmethod - def from_dict(cls, d: Dict[str, Any]) -> ExternalMetadata: - """Deserializes the ExternalMetadata from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> GetMetastoreSummaryResponse: + """Deserializes the GetMetastoreSummaryResponse from a dictionary.""" return cls( - columns=d.get("columns", None), - create_time=d.get("create_time", None), + cloud=d.get("cloud", None), + created_at=d.get("created_at", None), created_by=d.get("created_by", None), - description=d.get("description", None), - entity_type=d.get("entity_type", None), - id=d.get("id", None), + default_data_access_config_id=d.get("default_data_access_config_id", None), + delta_sharing_organization_name=d.get("delta_sharing_organization_name", None), + delta_sharing_recipient_token_lifetime_in_seconds=d.get( + "delta_sharing_recipient_token_lifetime_in_seconds", None + ), + delta_sharing_scope=_enum(d, "delta_sharing_scope", DeltaSharingScopeEnum), + external_access_enabled=d.get("external_access_enabled", None), + global_metastore_id=d.get("global_metastore_id", None), metastore_id=d.get("metastore_id", None), name=d.get("name", None), owner=d.get("owner", None), - properties=d.get("properties", None), - system_type=_enum(d, "system_type", SystemType), - update_time=d.get("update_time", None), - updated_by=d.get("updated_by", None), - url=d.get("url", None), + privilege_model_version=d.get("privilege_model_version", None), + region=d.get("region", None), + storage_root=d.get("storage_root", None), + storage_root_credential_id=d.get("storage_root_credential_id", None), + storage_root_credential_name=d.get("storage_root_credential_name", None), + updated_at=d.get("updated_at", None), + updated_by=d.get("updated_by", None), ) @dataclass -class FailedStatus: - """Detailed status of an online table. 
Shown if the online table is in the OFFLINE_FAILED or the - ONLINE_PIPELINE_FAILED state.""" - - last_processed_commit_version: Optional[int] = None - """The last source table Delta version that was synced to the online table. Note that this Delta - version may only be partially synced to the online table. Only populated if the table is still - online and available for serving.""" +class GetPermissionsResponse: + next_page_token: Optional[str] = None + """Opaque token to retrieve the next page of results. Absent if there are no more pages. + __page_token__ should be set to this value for the next request (for the next page of results).""" - timestamp: Optional[str] = None - """The timestamp of the last time any data was synchronized from the source table to the online - table. Only populated if the table is still online and available for serving.""" + privilege_assignments: Optional[List[PrivilegeAssignment]] = None + """The privileges assigned to each principal""" def as_dict(self) -> dict: - """Serializes the FailedStatus into a dictionary suitable for use as a JSON request body.""" + """Serializes the GetPermissionsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.last_processed_commit_version is not None: - body["last_processed_commit_version"] = self.last_processed_commit_version - if self.timestamp is not None: - body["timestamp"] = self.timestamp + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.privilege_assignments: + body["privilege_assignments"] = [v.as_dict() for v in self.privilege_assignments] return body def as_shallow_dict(self) -> dict: - """Serializes the FailedStatus into a shallow dictionary of its immediate attributes.""" + """Serializes the GetPermissionsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.last_processed_commit_version is not None: - body["last_processed_commit_version"] = 
self.last_processed_commit_version - if self.timestamp is not None: - body["timestamp"] = self.timestamp + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.privilege_assignments: + body["privilege_assignments"] = self.privilege_assignments return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> FailedStatus: - """Deserializes the FailedStatus from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> GetPermissionsResponse: + """Deserializes the GetPermissionsResponse from a dictionary.""" return cls( - last_processed_commit_version=d.get("last_processed_commit_version", None), - timestamp=d.get("timestamp", None), + next_page_token=d.get("next_page_token", None), + privilege_assignments=_repeated_dict(d, "privilege_assignments", PrivilegeAssignment), ) @dataclass -class FileEventQueue: - managed_aqs: Optional[AzureQueueStorage] = None +class GetQuotaResponse: + quota_info: Optional[QuotaInfo] = None + """The returned QuotaInfo.""" - managed_pubsub: Optional[GcpPubsub] = None + def as_dict(self) -> dict: + """Serializes the GetQuotaResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.quota_info: + body["quota_info"] = self.quota_info.as_dict() + return body - managed_sqs: Optional[AwsSqsQueue] = None + def as_shallow_dict(self) -> dict: + """Serializes the GetQuotaResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.quota_info: + body["quota_info"] = self.quota_info + return body - provided_aqs: Optional[AzureQueueStorage] = None + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> GetQuotaResponse: + """Deserializes the GetQuotaResponse from a dictionary.""" + return cls(quota_info=_from_dict(d, "quota_info", QuotaInfo)) - provided_pubsub: Optional[GcpPubsub] = None - provided_sqs: Optional[AwsSqsQueue] = None +@dataclass +class GetWorkspaceBindingsResponse: + bindings: Optional[List[WorkspaceBinding]] = None + """List 
of workspace bindings""" + + next_page_token: Optional[str] = None + """Opaque token to retrieve the next page of results. Absent if there are no more pages. + __page_token__ should be set to this value for the next request (for the next page of results).""" def as_dict(self) -> dict: - """Serializes the FileEventQueue into a dictionary suitable for use as a JSON request body.""" + """Serializes the GetWorkspaceBindingsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.managed_aqs: - body["managed_aqs"] = self.managed_aqs.as_dict() - if self.managed_pubsub: - body["managed_pubsub"] = self.managed_pubsub.as_dict() - if self.managed_sqs: - body["managed_sqs"] = self.managed_sqs.as_dict() - if self.provided_aqs: - body["provided_aqs"] = self.provided_aqs.as_dict() - if self.provided_pubsub: - body["provided_pubsub"] = self.provided_pubsub.as_dict() - if self.provided_sqs: - body["provided_sqs"] = self.provided_sqs.as_dict() + if self.bindings: + body["bindings"] = [v.as_dict() for v in self.bindings] + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token return body def as_shallow_dict(self) -> dict: - """Serializes the FileEventQueue into a shallow dictionary of its immediate attributes.""" + """Serializes the GetWorkspaceBindingsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.managed_aqs: - body["managed_aqs"] = self.managed_aqs - if self.managed_pubsub: - body["managed_pubsub"] = self.managed_pubsub - if self.managed_sqs: - body["managed_sqs"] = self.managed_sqs - if self.provided_aqs: - body["provided_aqs"] = self.provided_aqs - if self.provided_pubsub: - body["provided_pubsub"] = self.provided_pubsub - if self.provided_sqs: - body["provided_sqs"] = self.provided_sqs + if self.bindings: + body["bindings"] = self.bindings + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token return body @classmethod - def from_dict(cls, 
d: Dict[str, Any]) -> FileEventQueue: - """Deserializes the FileEventQueue from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> GetWorkspaceBindingsResponse: + """Deserializes the GetWorkspaceBindingsResponse from a dictionary.""" return cls( - managed_aqs=_from_dict(d, "managed_aqs", AzureQueueStorage), - managed_pubsub=_from_dict(d, "managed_pubsub", GcpPubsub), - managed_sqs=_from_dict(d, "managed_sqs", AwsSqsQueue), - provided_aqs=_from_dict(d, "provided_aqs", AzureQueueStorage), - provided_pubsub=_from_dict(d, "provided_pubsub", GcpPubsub), - provided_sqs=_from_dict(d, "provided_sqs", AwsSqsQueue), + bindings=_repeated_dict(d, "bindings", WorkspaceBinding), next_page_token=d.get("next_page_token", None) ) -@dataclass -class ForeignKeyConstraint: - name: str - """The name of the constraint.""" +class IsolationMode(Enum): - child_columns: List[str] - """Column names for this constraint.""" + ISOLATION_MODE_ISOLATED = "ISOLATION_MODE_ISOLATED" + ISOLATION_MODE_OPEN = "ISOLATION_MODE_OPEN" - parent_table: str - """The full name of the parent constraint.""" - parent_columns: List[str] - """Column names for this constraint.""" +class LineageDirection(Enum): - rely: Optional[bool] = None - """True if the constraint is RELY, false or unset if NORELY.""" + DOWNSTREAM = "DOWNSTREAM" + UPSTREAM = "UPSTREAM" + + +@dataclass +class ListAccountMetastoreAssignmentsResponse: + """The list of workspaces to which the given metastore is assigned.""" + + workspace_ids: Optional[List[int]] = None def as_dict(self) -> dict: - """Serializes the ForeignKeyConstraint into a dictionary suitable for use as a JSON request body.""" + """Serializes the ListAccountMetastoreAssignmentsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.child_columns: - body["child_columns"] = [v for v in self.child_columns] - if self.name is not None: - body["name"] = self.name - if self.parent_columns: - body["parent_columns"] = [v for v in 
self.parent_columns] - if self.parent_table is not None: - body["parent_table"] = self.parent_table - if self.rely is not None: - body["rely"] = self.rely + if self.workspace_ids: + body["workspace_ids"] = [v for v in self.workspace_ids] return body def as_shallow_dict(self) -> dict: - """Serializes the ForeignKeyConstraint into a shallow dictionary of its immediate attributes.""" + """Serializes the ListAccountMetastoreAssignmentsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.child_columns: - body["child_columns"] = self.child_columns - if self.name is not None: - body["name"] = self.name - if self.parent_columns: - body["parent_columns"] = self.parent_columns - if self.parent_table is not None: - body["parent_table"] = self.parent_table - if self.rely is not None: - body["rely"] = self.rely + if self.workspace_ids: + body["workspace_ids"] = self.workspace_ids return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ForeignKeyConstraint: - """Deserializes the ForeignKeyConstraint from a dictionary.""" - return cls( - child_columns=d.get("child_columns", None), - name=d.get("name", None), - parent_columns=d.get("parent_columns", None), - parent_table=d.get("parent_table", None), - rely=d.get("rely", None), - ) + def from_dict(cls, d: Dict[str, Any]) -> ListAccountMetastoreAssignmentsResponse: + """Deserializes the ListAccountMetastoreAssignmentsResponse from a dictionary.""" + return cls(workspace_ids=d.get("workspace_ids", None)) @dataclass -class FunctionDependency: - """A function that is dependent on a SQL object.""" - - function_full_name: str - """Full name of the dependent function, in the form of - __catalog_name__.__schema_name__.__function_name__.""" +class ListAccountStorageCredentialsResponse: + storage_credentials: Optional[List[StorageCredentialInfo]] = None + """An array of metastore storage credentials.""" def as_dict(self) -> dict: - """Serializes the FunctionDependency into a dictionary suitable 
for use as a JSON request body.""" + """Serializes the ListAccountStorageCredentialsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.function_full_name is not None: - body["function_full_name"] = self.function_full_name + if self.storage_credentials: + body["storage_credentials"] = [v.as_dict() for v in self.storage_credentials] return body def as_shallow_dict(self) -> dict: - """Serializes the FunctionDependency into a shallow dictionary of its immediate attributes.""" + """Serializes the ListAccountStorageCredentialsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.function_full_name is not None: - body["function_full_name"] = self.function_full_name + if self.storage_credentials: + body["storage_credentials"] = self.storage_credentials return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> FunctionDependency: - """Deserializes the FunctionDependency from a dictionary.""" - return cls(function_full_name=d.get("function_full_name", None)) + def from_dict(cls, d: Dict[str, Any]) -> ListAccountStorageCredentialsResponse: + """Deserializes the ListAccountStorageCredentialsResponse from a dictionary.""" + return cls(storage_credentials=_repeated_dict(d, "storage_credentials", StorageCredentialInfo)) @dataclass -class FunctionInfo: - browse_only: Optional[bool] = None - """Indicates whether the principal is limited to retrieving metadata for the associated object - through the BROWSE privilege when include_browse is enabled in the request.""" - - catalog_name: Optional[str] = None - """Name of parent catalog.""" - - comment: Optional[str] = None - """User-provided free-form text description.""" +class ListCatalogsResponse: + catalogs: Optional[List[CatalogInfo]] = None + """An array of catalog information objects.""" - created_at: Optional[int] = None - """Time at which this function was created, in epoch milliseconds.""" + next_page_token: Optional[str] = None + """Opaque token 
to retrieve the next page of results. Absent if there are no more pages. + __page_token__ should be set to this value for the next request (for the next page of results).""" - created_by: Optional[str] = None - """Username of function creator.""" + def as_dict(self) -> dict: + """Serializes the ListCatalogsResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.catalogs: + body["catalogs"] = [v.as_dict() for v in self.catalogs] + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + return body - data_type: Optional[ColumnTypeName] = None - """Scalar function return data type.""" + def as_shallow_dict(self) -> dict: + """Serializes the ListCatalogsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.catalogs: + body["catalogs"] = self.catalogs + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + return body - external_language: Optional[str] = None - """External function language.""" + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ListCatalogsResponse: + """Deserializes the ListCatalogsResponse from a dictionary.""" + return cls(catalogs=_repeated_dict(d, "catalogs", CatalogInfo), next_page_token=d.get("next_page_token", None)) - external_name: Optional[str] = None - """External function name.""" - full_data_type: Optional[str] = None - """Pretty printed function data type.""" +@dataclass +class ListConnectionsResponse: + connections: Optional[List[ConnectionInfo]] = None + """An array of connection information objects.""" - full_name: Optional[str] = None - """Full name of function, in form of __catalog_name__.__schema_name__.__function__name__""" + next_page_token: Optional[str] = None + """Opaque token to retrieve the next page of results. Absent if there are no more pages. 
+ __page_token__ should be set to this value for the next request (for the next page of results).""" - function_id: Optional[str] = None - """Id of Function, relative to parent schema.""" + def as_dict(self) -> dict: + """Serializes the ListConnectionsResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.connections: + body["connections"] = [v.as_dict() for v in self.connections] + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + return body - input_params: Optional[FunctionParameterInfos] = None + def as_shallow_dict(self) -> dict: + """Serializes the ListConnectionsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.connections: + body["connections"] = self.connections + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + return body - is_deterministic: Optional[bool] = None - """Whether the function is deterministic.""" + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ListConnectionsResponse: + """Deserializes the ListConnectionsResponse from a dictionary.""" + return cls( + connections=_repeated_dict(d, "connections", ConnectionInfo), next_page_token=d.get("next_page_token", None) + ) - is_null_call: Optional[bool] = None - """Function null call.""" - metastore_id: Optional[str] = None - """Unique identifier of parent metastore.""" +@dataclass +class ListCredentialsResponse: + credentials: Optional[List[CredentialInfo]] = None - name: Optional[str] = None - """Name of function, relative to parent schema.""" + next_page_token: Optional[str] = None + """Opaque token to retrieve the next page of results. Absent if there are no more pages. 
+ __page_token__ should be set to this value for the next request (for the next page of results).""" - owner: Optional[str] = None - """Username of current owner of function.""" + def as_dict(self) -> dict: + """Serializes the ListCredentialsResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.credentials: + body["credentials"] = [v.as_dict() for v in self.credentials] + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + return body - parameter_style: Optional[FunctionInfoParameterStyle] = None - """Function parameter style. **S** is the value for SQL.""" + def as_shallow_dict(self) -> dict: + """Serializes the ListCredentialsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.credentials: + body["credentials"] = self.credentials + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + return body - properties: Optional[str] = None - """JSON-serialized key-value pair map, encoded (escaped) as a string.""" + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ListCredentialsResponse: + """Deserializes the ListCredentialsResponse from a dictionary.""" + return cls( + credentials=_repeated_dict(d, "credentials", CredentialInfo), next_page_token=d.get("next_page_token", None) + ) - return_params: Optional[FunctionParameterInfos] = None - """Table function return parameters.""" - routine_body: Optional[FunctionInfoRoutineBody] = None - """Function language. 
When **EXTERNAL** is used, the language of the routine function should be - specified in the __external_language__ field, and the __return_params__ of the function cannot - be used (as **TABLE** return type is not supported), and the __sql_data_access__ field must be - **NO_SQL**.""" +@dataclass +class ListExternalLineageRelationshipsResponse: + external_lineage_relationships: Optional[List[ExternalLineageInfo]] = None - routine_definition: Optional[str] = None - """Function body.""" + next_page_token: Optional[str] = None - routine_dependencies: Optional[DependencyList] = None - """Function dependencies.""" + def as_dict(self) -> dict: + """Serializes the ListExternalLineageRelationshipsResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.external_lineage_relationships: + body["external_lineage_relationships"] = [v.as_dict() for v in self.external_lineage_relationships] + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + return body - schema_name: Optional[str] = None - """Name of parent schema relative to its parent catalog.""" + def as_shallow_dict(self) -> dict: + """Serializes the ListExternalLineageRelationshipsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.external_lineage_relationships: + body["external_lineage_relationships"] = self.external_lineage_relationships + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + return body - security_type: Optional[FunctionInfoSecurityType] = None - """Function security type.""" + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ListExternalLineageRelationshipsResponse: + """Deserializes the ListExternalLineageRelationshipsResponse from a dictionary.""" + return cls( + external_lineage_relationships=_repeated_dict(d, "external_lineage_relationships", ExternalLineageInfo), + next_page_token=d.get("next_page_token", None), + ) - specific_name: Optional[str] = 
None - """Specific name of the function; Reserved for future use.""" - sql_data_access: Optional[FunctionInfoSqlDataAccess] = None - """Function SQL data access.""" +@dataclass +class ListExternalLocationsResponse: + external_locations: Optional[List[ExternalLocationInfo]] = None + """An array of external locations.""" - sql_path: Optional[str] = None - """List of schemes whose objects can be referenced without qualification.""" + next_page_token: Optional[str] = None + """Opaque token to retrieve the next page of results. Absent if there are no more pages. + __page_token__ should be set to this value for the next request (for the next page of results).""" - updated_at: Optional[int] = None - """Time at which this function was created, in epoch milliseconds.""" + def as_dict(self) -> dict: + """Serializes the ListExternalLocationsResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.external_locations: + body["external_locations"] = [v.as_dict() for v in self.external_locations] + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + return body - updated_by: Optional[str] = None - """Username of user who last modified function.""" + def as_shallow_dict(self) -> dict: + """Serializes the ListExternalLocationsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.external_locations: + body["external_locations"] = self.external_locations + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ListExternalLocationsResponse: + """Deserializes the ListExternalLocationsResponse from a dictionary.""" + return cls( + external_locations=_repeated_dict(d, "external_locations", ExternalLocationInfo), + next_page_token=d.get("next_page_token", None), + ) + + +@dataclass +class ListExternalMetadataResponse: + external_metadata: Optional[List[ExternalMetadata]] = None + 
+ next_page_token: Optional[str] = None def as_dict(self) -> dict: - """Serializes the FunctionInfo into a dictionary suitable for use as a JSON request body.""" + """Serializes the ListExternalMetadataResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.browse_only is not None: - body["browse_only"] = self.browse_only - if self.catalog_name is not None: - body["catalog_name"] = self.catalog_name - if self.comment is not None: - body["comment"] = self.comment - if self.created_at is not None: - body["created_at"] = self.created_at - if self.created_by is not None: - body["created_by"] = self.created_by - if self.data_type is not None: - body["data_type"] = self.data_type.value - if self.external_language is not None: - body["external_language"] = self.external_language - if self.external_name is not None: - body["external_name"] = self.external_name - if self.full_data_type is not None: - body["full_data_type"] = self.full_data_type - if self.full_name is not None: - body["full_name"] = self.full_name - if self.function_id is not None: - body["function_id"] = self.function_id - if self.input_params: - body["input_params"] = self.input_params.as_dict() - if self.is_deterministic is not None: - body["is_deterministic"] = self.is_deterministic - if self.is_null_call is not None: - body["is_null_call"] = self.is_null_call - if self.metastore_id is not None: - body["metastore_id"] = self.metastore_id - if self.name is not None: - body["name"] = self.name - if self.owner is not None: - body["owner"] = self.owner - if self.parameter_style is not None: - body["parameter_style"] = self.parameter_style.value - if self.properties is not None: - body["properties"] = self.properties - if self.return_params: - body["return_params"] = self.return_params.as_dict() - if self.routine_body is not None: - body["routine_body"] = self.routine_body.value - if self.routine_definition is not None: - body["routine_definition"] = self.routine_definition 
- if self.routine_dependencies: - body["routine_dependencies"] = self.routine_dependencies.as_dict() - if self.schema_name is not None: - body["schema_name"] = self.schema_name - if self.security_type is not None: - body["security_type"] = self.security_type.value - if self.specific_name is not None: - body["specific_name"] = self.specific_name - if self.sql_data_access is not None: - body["sql_data_access"] = self.sql_data_access.value - if self.sql_path is not None: - body["sql_path"] = self.sql_path - if self.updated_at is not None: - body["updated_at"] = self.updated_at - if self.updated_by is not None: - body["updated_by"] = self.updated_by + if self.external_metadata: + body["external_metadata"] = [v.as_dict() for v in self.external_metadata] + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token return body def as_shallow_dict(self) -> dict: - """Serializes the FunctionInfo into a shallow dictionary of its immediate attributes.""" + """Serializes the ListExternalMetadataResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.browse_only is not None: - body["browse_only"] = self.browse_only - if self.catalog_name is not None: - body["catalog_name"] = self.catalog_name - if self.comment is not None: - body["comment"] = self.comment - if self.created_at is not None: - body["created_at"] = self.created_at - if self.created_by is not None: - body["created_by"] = self.created_by - if self.data_type is not None: - body["data_type"] = self.data_type - if self.external_language is not None: - body["external_language"] = self.external_language - if self.external_name is not None: - body["external_name"] = self.external_name - if self.full_data_type is not None: - body["full_data_type"] = self.full_data_type - if self.full_name is not None: - body["full_name"] = self.full_name - if self.function_id is not None: - body["function_id"] = self.function_id - if self.input_params: - body["input_params"] = 
self.input_params - if self.is_deterministic is not None: - body["is_deterministic"] = self.is_deterministic - if self.is_null_call is not None: - body["is_null_call"] = self.is_null_call - if self.metastore_id is not None: - body["metastore_id"] = self.metastore_id - if self.name is not None: - body["name"] = self.name - if self.owner is not None: - body["owner"] = self.owner - if self.parameter_style is not None: - body["parameter_style"] = self.parameter_style - if self.properties is not None: - body["properties"] = self.properties - if self.return_params: - body["return_params"] = self.return_params - if self.routine_body is not None: - body["routine_body"] = self.routine_body - if self.routine_definition is not None: - body["routine_definition"] = self.routine_definition - if self.routine_dependencies: - body["routine_dependencies"] = self.routine_dependencies - if self.schema_name is not None: - body["schema_name"] = self.schema_name - if self.security_type is not None: - body["security_type"] = self.security_type - if self.specific_name is not None: - body["specific_name"] = self.specific_name - if self.sql_data_access is not None: - body["sql_data_access"] = self.sql_data_access - if self.sql_path is not None: - body["sql_path"] = self.sql_path - if self.updated_at is not None: - body["updated_at"] = self.updated_at - if self.updated_by is not None: - body["updated_by"] = self.updated_by + if self.external_metadata: + body["external_metadata"] = self.external_metadata + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> FunctionInfo: - """Deserializes the FunctionInfo from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> ListExternalMetadataResponse: + """Deserializes the ListExternalMetadataResponse from a dictionary.""" return cls( - browse_only=d.get("browse_only", None), - catalog_name=d.get("catalog_name", None), - comment=d.get("comment", 
None), - created_at=d.get("created_at", None), - created_by=d.get("created_by", None), - data_type=_enum(d, "data_type", ColumnTypeName), - external_language=d.get("external_language", None), - external_name=d.get("external_name", None), - full_data_type=d.get("full_data_type", None), - full_name=d.get("full_name", None), - function_id=d.get("function_id", None), - input_params=_from_dict(d, "input_params", FunctionParameterInfos), - is_deterministic=d.get("is_deterministic", None), - is_null_call=d.get("is_null_call", None), - metastore_id=d.get("metastore_id", None), - name=d.get("name", None), - owner=d.get("owner", None), - parameter_style=_enum(d, "parameter_style", FunctionInfoParameterStyle), - properties=d.get("properties", None), - return_params=_from_dict(d, "return_params", FunctionParameterInfos), - routine_body=_enum(d, "routine_body", FunctionInfoRoutineBody), - routine_definition=d.get("routine_definition", None), - routine_dependencies=_from_dict(d, "routine_dependencies", DependencyList), - schema_name=d.get("schema_name", None), - security_type=_enum(d, "security_type", FunctionInfoSecurityType), - specific_name=d.get("specific_name", None), - sql_data_access=_enum(d, "sql_data_access", FunctionInfoSqlDataAccess), - sql_path=d.get("sql_path", None), - updated_at=d.get("updated_at", None), - updated_by=d.get("updated_by", None), + external_metadata=_repeated_dict(d, "external_metadata", ExternalMetadata), + next_page_token=d.get("next_page_token", None), ) -class FunctionInfoParameterStyle(Enum): - """Function parameter style. **S** is the value for SQL.""" +@dataclass +class ListFunctionsResponse: + functions: Optional[List[FunctionInfo]] = None + """An array of function information objects.""" - S = "S" + next_page_token: Optional[str] = None + """Opaque token to retrieve the next page of results. Absent if there are no more pages. 
+ __page_token__ should be set to this value for the next request (for the next page of results).""" + def as_dict(self) -> dict: + """Serializes the ListFunctionsResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.functions: + body["functions"] = [v.as_dict() for v in self.functions] + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + return body -class FunctionInfoRoutineBody(Enum): - """Function language. When **EXTERNAL** is used, the language of the routine function should be - specified in the __external_language__ field, and the __return_params__ of the function cannot - be used (as **TABLE** return type is not supported), and the __sql_data_access__ field must be - **NO_SQL**.""" + def as_shallow_dict(self) -> dict: + """Serializes the ListFunctionsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.functions: + body["functions"] = self.functions + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + return body - EXTERNAL = "EXTERNAL" - SQL = "SQL" + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ListFunctionsResponse: + """Deserializes the ListFunctionsResponse from a dictionary.""" + return cls( + functions=_repeated_dict(d, "functions", FunctionInfo), next_page_token=d.get("next_page_token", None) + ) -class FunctionInfoSecurityType(Enum): - """The security type of the function.""" +@dataclass +class ListMetastoresResponse: + metastores: Optional[List[MetastoreInfo]] = None + """An array of metastore information objects.""" - DEFINER = "DEFINER" + next_page_token: Optional[str] = None + """Opaque token to retrieve the next page of results. Absent if there are no more pages. 
+ __page_token__ should be set to this value for the next request (for the next page of results).""" + def as_dict(self) -> dict: + """Serializes the ListMetastoresResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.metastores: + body["metastores"] = [v.as_dict() for v in self.metastores] + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + return body -class FunctionInfoSqlDataAccess(Enum): - """Function SQL data access.""" + def as_shallow_dict(self) -> dict: + """Serializes the ListMetastoresResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.metastores: + body["metastores"] = self.metastores + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + return body - CONTAINS_SQL = "CONTAINS_SQL" - NO_SQL = "NO_SQL" - READS_SQL_DATA = "READS_SQL_DATA" + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ListMetastoresResponse: + """Deserializes the ListMetastoresResponse from a dictionary.""" + return cls( + metastores=_repeated_dict(d, "metastores", MetastoreInfo), next_page_token=d.get("next_page_token", None) + ) @dataclass -class FunctionParameterInfo: - name: str - """Name of parameter.""" +class ListModelVersionsResponse: + model_versions: Optional[List[ModelVersionInfo]] = None - type_text: str - """Full data type spec, SQL/catalogString text.""" + next_page_token: Optional[str] = None + """Opaque token to retrieve the next page of results. Absent if there are no more pages. 
+ __page_token__ should be set to this value for the next request (for the next page of results).""" - type_name: ColumnTypeName + def as_dict(self) -> dict: + """Serializes the ListModelVersionsResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.model_versions: + body["model_versions"] = [v.as_dict() for v in self.model_versions] + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + return body - position: int - """Ordinal position of column (starting at position 0).""" + def as_shallow_dict(self) -> dict: + """Serializes the ListModelVersionsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.model_versions: + body["model_versions"] = self.model_versions + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + return body - comment: Optional[str] = None - """User-provided free-form text description.""" + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ListModelVersionsResponse: + """Deserializes the ListModelVersionsResponse from a dictionary.""" + return cls( + model_versions=_repeated_dict(d, "model_versions", ModelVersionInfo), + next_page_token=d.get("next_page_token", None), + ) - parameter_default: Optional[str] = None - """Default value of the parameter.""" - parameter_mode: Optional[FunctionParameterMode] = None +@dataclass +class ListQuotasResponse: + next_page_token: Optional[str] = None + """Opaque token to retrieve the next page of results. Absent if there are no more pages. 
+ __page_token__ should be set to this value for the next request.""" - parameter_type: Optional[FunctionParameterType] = None + quotas: Optional[List[QuotaInfo]] = None + """An array of returned QuotaInfos.""" - type_interval_type: Optional[str] = None - """Format of IntervalType.""" + def as_dict(self) -> dict: + """Serializes the ListQuotasResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.quotas: + body["quotas"] = [v.as_dict() for v in self.quotas] + return body - type_json: Optional[str] = None - """Full data type spec, JSON-serialized.""" + def as_shallow_dict(self) -> dict: + """Serializes the ListQuotasResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.quotas: + body["quotas"] = self.quotas + return body - type_precision: Optional[int] = None - """Digits of precision; required on Create for DecimalTypes.""" + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ListQuotasResponse: + """Deserializes the ListQuotasResponse from a dictionary.""" + return cls(next_page_token=d.get("next_page_token", None), quotas=_repeated_dict(d, "quotas", QuotaInfo)) - type_scale: Optional[int] = None - """Digits to right of decimal; Required on Create for DecimalTypes.""" - def as_dict(self) -> dict: - """Serializes the FunctionParameterInfo into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.comment is not None: - body["comment"] = self.comment - if self.name is not None: - body["name"] = self.name - if self.parameter_default is not None: - body["parameter_default"] = self.parameter_default - if self.parameter_mode is not None: - body["parameter_mode"] = self.parameter_mode.value - if self.parameter_type is not None: - body["parameter_type"] = self.parameter_type.value - if self.position 
is not None: - body["position"] = self.position - if self.type_interval_type is not None: - body["type_interval_type"] = self.type_interval_type - if self.type_json is not None: - body["type_json"] = self.type_json - if self.type_name is not None: - body["type_name"] = self.type_name.value - if self.type_precision is not None: - body["type_precision"] = self.type_precision - if self.type_scale is not None: - body["type_scale"] = self.type_scale - if self.type_text is not None: - body["type_text"] = self.type_text +@dataclass +class ListRegisteredModelsResponse: + next_page_token: Optional[str] = None + """Opaque token for pagination. Omitted if there are no more results. page_token should be set to + this value for fetching the next page.""" + + registered_models: Optional[List[RegisteredModelInfo]] = None + + def as_dict(self) -> dict: + """Serializes the ListRegisteredModelsResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.registered_models: + body["registered_models"] = [v.as_dict() for v in self.registered_models] return body def as_shallow_dict(self) -> dict: - """Serializes the FunctionParameterInfo into a shallow dictionary of its immediate attributes.""" + """Serializes the ListRegisteredModelsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.comment is not None: - body["comment"] = self.comment - if self.name is not None: - body["name"] = self.name - if self.parameter_default is not None: - body["parameter_default"] = self.parameter_default - if self.parameter_mode is not None: - body["parameter_mode"] = self.parameter_mode - if self.parameter_type is not None: - body["parameter_type"] = self.parameter_type - if self.position is not None: - body["position"] = self.position - if self.type_interval_type is not None: - body["type_interval_type"] = self.type_interval_type - if self.type_json is 
not None: - body["type_json"] = self.type_json - if self.type_name is not None: - body["type_name"] = self.type_name - if self.type_precision is not None: - body["type_precision"] = self.type_precision - if self.type_scale is not None: - body["type_scale"] = self.type_scale - if self.type_text is not None: - body["type_text"] = self.type_text + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.registered_models: + body["registered_models"] = self.registered_models return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> FunctionParameterInfo: - """Deserializes the FunctionParameterInfo from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> ListRegisteredModelsResponse: + """Deserializes the ListRegisteredModelsResponse from a dictionary.""" return cls( - comment=d.get("comment", None), - name=d.get("name", None), - parameter_default=d.get("parameter_default", None), - parameter_mode=_enum(d, "parameter_mode", FunctionParameterMode), - parameter_type=_enum(d, "parameter_type", FunctionParameterType), - position=d.get("position", None), - type_interval_type=d.get("type_interval_type", None), - type_json=d.get("type_json", None), - type_name=_enum(d, "type_name", ColumnTypeName), - type_precision=d.get("type_precision", None), - type_scale=d.get("type_scale", None), - type_text=d.get("type_text", None), + next_page_token=d.get("next_page_token", None), + registered_models=_repeated_dict(d, "registered_models", RegisteredModelInfo), ) @dataclass -class FunctionParameterInfos: - parameters: Optional[List[FunctionParameterInfo]] = None - """The array of __FunctionParameterInfo__ definitions of the function's parameters.""" +class ListSchemasResponse: + next_page_token: Optional[str] = None + """Opaque token to retrieve the next page of results. Absent if there are no more pages. 
+ __page_token__ should be set to this value for the next request (for the next page of results).""" + + schemas: Optional[List[SchemaInfo]] = None + """An array of schema information objects.""" def as_dict(self) -> dict: - """Serializes the FunctionParameterInfos into a dictionary suitable for use as a JSON request body.""" + """Serializes the ListSchemasResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.parameters: - body["parameters"] = [v.as_dict() for v in self.parameters] + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.schemas: + body["schemas"] = [v.as_dict() for v in self.schemas] return body def as_shallow_dict(self) -> dict: - """Serializes the FunctionParameterInfos into a shallow dictionary of its immediate attributes.""" + """Serializes the ListSchemasResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.parameters: - body["parameters"] = self.parameters + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.schemas: + body["schemas"] = self.schemas return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> FunctionParameterInfos: - """Deserializes the FunctionParameterInfos from a dictionary.""" - return cls(parameters=_repeated_dict(d, "parameters", FunctionParameterInfo)) - - -class FunctionParameterMode(Enum): - """The mode of the function parameter.""" - - IN = "IN" - - -class FunctionParameterType(Enum): - """The type of function parameter.""" - - COLUMN = "COLUMN" - PARAM = "PARAM" + def from_dict(cls, d: Dict[str, Any]) -> ListSchemasResponse: + """Deserializes the ListSchemasResponse from a dictionary.""" + return cls(next_page_token=d.get("next_page_token", None), schemas=_repeated_dict(d, "schemas", SchemaInfo)) @dataclass -class GcpOauthToken: - """GCP temporary credentials for API authentication. 
Read more at - https://developers.google.com/identity/protocols/oauth2/service-account""" +class ListStorageCredentialsResponse: + next_page_token: Optional[str] = None + """Opaque token to retrieve the next page of results. Absent if there are no more pages. + __page_token__ should be set to this value for the next request (for the next page of results).""" - oauth_token: Optional[str] = None + storage_credentials: Optional[List[StorageCredentialInfo]] = None def as_dict(self) -> dict: - """Serializes the GcpOauthToken into a dictionary suitable for use as a JSON request body.""" + """Serializes the ListStorageCredentialsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.oauth_token is not None: - body["oauth_token"] = self.oauth_token + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.storage_credentials: + body["storage_credentials"] = [v.as_dict() for v in self.storage_credentials] return body def as_shallow_dict(self) -> dict: - """Serializes the GcpOauthToken into a shallow dictionary of its immediate attributes.""" + """Serializes the ListStorageCredentialsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.oauth_token is not None: - body["oauth_token"] = self.oauth_token + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.storage_credentials: + body["storage_credentials"] = self.storage_credentials return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> GcpOauthToken: - """Deserializes the GcpOauthToken from a dictionary.""" - return cls(oauth_token=d.get("oauth_token", None)) + def from_dict(cls, d: Dict[str, Any]) -> ListStorageCredentialsResponse: + """Deserializes the ListStorageCredentialsResponse from a dictionary.""" + return cls( + next_page_token=d.get("next_page_token", None), + storage_credentials=_repeated_dict(d, "storage_credentials", StorageCredentialInfo), + ) 
@dataclass -class GcpPubsub: - managed_resource_id: Optional[str] = None - """Unique identifier included in the name of file events managed cloud resources.""" +class ListSystemSchemasResponse: + next_page_token: Optional[str] = None + """Opaque token to retrieve the next page of results. Absent if there are no more pages. + __page_token__ should be set to this value for the next request (for the next page of results).""" - subscription_name: Optional[str] = None - """The Pub/Sub subscription name in the format projects/{project}/subscriptions/{subscription name} - Required for provided_pubsub.""" + schemas: Optional[List[SystemSchemaInfo]] = None + """An array of system schema information objects.""" def as_dict(self) -> dict: - """Serializes the GcpPubsub into a dictionary suitable for use as a JSON request body.""" + """Serializes the ListSystemSchemasResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.managed_resource_id is not None: - body["managed_resource_id"] = self.managed_resource_id - if self.subscription_name is not None: - body["subscription_name"] = self.subscription_name + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.schemas: + body["schemas"] = [v.as_dict() for v in self.schemas] return body def as_shallow_dict(self) -> dict: - """Serializes the GcpPubsub into a shallow dictionary of its immediate attributes.""" + """Serializes the ListSystemSchemasResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.managed_resource_id is not None: - body["managed_resource_id"] = self.managed_resource_id - if self.subscription_name is not None: - body["subscription_name"] = self.subscription_name + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.schemas: + body["schemas"] = self.schemas return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> GcpPubsub: - """Deserializes the GcpPubsub 
from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> ListSystemSchemasResponse: + """Deserializes the ListSystemSchemasResponse from a dictionary.""" return cls( - managed_resource_id=d.get("managed_resource_id", None), subscription_name=d.get("subscription_name", None) + next_page_token=d.get("next_page_token", None), schemas=_repeated_dict(d, "schemas", SystemSchemaInfo) ) @dataclass -class GenerateTemporaryServiceCredentialAzureOptions: - """The Azure cloud options to customize the requested temporary credential""" +class ListTableSummariesResponse: + next_page_token: Optional[str] = None + """Opaque token to retrieve the next page of results. Absent if there are no more pages. + __page_token__ should be set to this value for the next request (for the next page of results).""" - resources: Optional[List[str]] = None - """The resources to which the temporary Azure credential should apply. These resources are the - scopes that are passed to the token provider (see - https://learn.microsoft.com/python/api/azure-core/azure.core.credentials.tokencredential?view=azure-python)""" + tables: Optional[List[TableSummary]] = None + """List of table summaries.""" def as_dict(self) -> dict: - """Serializes the GenerateTemporaryServiceCredentialAzureOptions into a dictionary suitable for use as a JSON request body.""" + """Serializes the ListTableSummariesResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.resources: - body["resources"] = [v for v in self.resources] + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.tables: + body["tables"] = [v.as_dict() for v in self.tables] return body def as_shallow_dict(self) -> dict: - """Serializes the GenerateTemporaryServiceCredentialAzureOptions into a shallow dictionary of its immediate attributes.""" + """Serializes the ListTableSummariesResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.resources: - 
body["resources"] = self.resources + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.tables: + body["tables"] = self.tables return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> GenerateTemporaryServiceCredentialAzureOptions: - """Deserializes the GenerateTemporaryServiceCredentialAzureOptions from a dictionary.""" - return cls(resources=d.get("resources", None)) + def from_dict(cls, d: Dict[str, Any]) -> ListTableSummariesResponse: + """Deserializes the ListTableSummariesResponse from a dictionary.""" + return cls(next_page_token=d.get("next_page_token", None), tables=_repeated_dict(d, "tables", TableSummary)) @dataclass -class GenerateTemporaryServiceCredentialGcpOptions: - """The GCP cloud options to customize the requested temporary credential""" +class ListTablesResponse: + next_page_token: Optional[str] = None + """Opaque token to retrieve the next page of results. Absent if there are no more pages. + __page_token__ should be set to this value for the next request (for the next page of results).""" - scopes: Optional[List[str]] = None - """The scopes to which the temporary GCP credential should apply. 
These resources are the scopes - that are passed to the token provider (see - https://google-auth.readthedocs.io/en/latest/reference/google.auth.html#google.auth.credentials.Credentials)""" + tables: Optional[List[TableInfo]] = None + """An array of table information objects.""" def as_dict(self) -> dict: - """Serializes the GenerateTemporaryServiceCredentialGcpOptions into a dictionary suitable for use as a JSON request body.""" + """Serializes the ListTablesResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.scopes: - body["scopes"] = [v for v in self.scopes] + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.tables: + body["tables"] = [v.as_dict() for v in self.tables] return body def as_shallow_dict(self) -> dict: - """Serializes the GenerateTemporaryServiceCredentialGcpOptions into a shallow dictionary of its immediate attributes.""" + """Serializes the ListTablesResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.scopes: - body["scopes"] = self.scopes + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.tables: + body["tables"] = self.tables return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> GenerateTemporaryServiceCredentialGcpOptions: - """Deserializes the GenerateTemporaryServiceCredentialGcpOptions from a dictionary.""" - return cls(scopes=d.get("scopes", None)) + def from_dict(cls, d: Dict[str, Any]) -> ListTablesResponse: + """Deserializes the ListTablesResponse from a dictionary.""" + return cls(next_page_token=d.get("next_page_token", None), tables=_repeated_dict(d, "tables", TableInfo)) @dataclass -class GenerateTemporaryServiceCredentialRequest: - credential_name: str - """The name of the service credential used to generate a temporary credential""" - - azure_options: Optional[GenerateTemporaryServiceCredentialAzureOptions] = None +class ListVolumesResponseContent: + 
next_page_token: Optional[str] = None + """Opaque token to retrieve the next page of results. Absent if there are no more pages. + __page_token__ should be set to this value for the next request to retrieve the next page of + results.""" - gcp_options: Optional[GenerateTemporaryServiceCredentialGcpOptions] = None + volumes: Optional[List[VolumeInfo]] = None def as_dict(self) -> dict: - """Serializes the GenerateTemporaryServiceCredentialRequest into a dictionary suitable for use as a JSON request body.""" + """Serializes the ListVolumesResponseContent into a dictionary suitable for use as a JSON request body.""" body = {} - if self.azure_options: - body["azure_options"] = self.azure_options.as_dict() - if self.credential_name is not None: - body["credential_name"] = self.credential_name - if self.gcp_options: - body["gcp_options"] = self.gcp_options.as_dict() + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.volumes: + body["volumes"] = [v.as_dict() for v in self.volumes] return body def as_shallow_dict(self) -> dict: - """Serializes the GenerateTemporaryServiceCredentialRequest into a shallow dictionary of its immediate attributes.""" + """Serializes the ListVolumesResponseContent into a shallow dictionary of its immediate attributes.""" body = {} - if self.azure_options: - body["azure_options"] = self.azure_options - if self.credential_name is not None: - body["credential_name"] = self.credential_name - if self.gcp_options: - body["gcp_options"] = self.gcp_options + if self.next_page_token is not None: + body["next_page_token"] = self.next_page_token + if self.volumes: + body["volumes"] = self.volumes return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> GenerateTemporaryServiceCredentialRequest: - """Deserializes the GenerateTemporaryServiceCredentialRequest from a dictionary.""" - return cls( - azure_options=_from_dict(d, "azure_options", GenerateTemporaryServiceCredentialAzureOptions), - 
credential_name=d.get("credential_name", None), - gcp_options=_from_dict(d, "gcp_options", GenerateTemporaryServiceCredentialGcpOptions), - ) - - -@dataclass -class GenerateTemporaryTableCredentialRequest: - operation: Optional[TableOperation] = None - """The operation performed against the table data, either READ or READ_WRITE. If READ_WRITE is - specified, the credentials returned will have write permissions, otherwise, it will be read - only.""" - - table_id: Optional[str] = None - """UUID of the table to read or write.""" + def from_dict(cls, d: Dict[str, Any]) -> ListVolumesResponseContent: + """Deserializes the ListVolumesResponseContent from a dictionary.""" + return cls(next_page_token=d.get("next_page_token", None), volumes=_repeated_dict(d, "volumes", VolumeInfo)) - def as_dict(self) -> dict: - """Serializes the GenerateTemporaryTableCredentialRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.operation is not None: - body["operation"] = self.operation.value - if self.table_id is not None: - body["table_id"] = self.table_id - return body - def as_shallow_dict(self) -> dict: - """Serializes the GenerateTemporaryTableCredentialRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.operation is not None: - body["operation"] = self.operation - if self.table_id is not None: - body["table_id"] = self.table_id - return body +class MatchType(Enum): + """The artifact pattern matching type""" - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> GenerateTemporaryTableCredentialRequest: - """Deserializes the GenerateTemporaryTableCredentialRequest from a dictionary.""" - return cls(operation=_enum(d, "operation", TableOperation), table_id=d.get("table_id", None)) + PREFIX_MATCH = "PREFIX_MATCH" @dataclass -class GenerateTemporaryTableCredentialResponse: - aws_temp_credentials: Optional[AwsCredentials] = None - - azure_aad: Optional[AzureActiveDirectoryToken] = None - - 
azure_user_delegation_sas: Optional[AzureUserDelegationSas] = None - - expiration_time: Optional[int] = None - """Server time when the credential will expire, in epoch milliseconds. The API client is advised to - cache the credential given this expiration time.""" - - gcp_oauth_token: Optional[GcpOauthToken] = None +class MetastoreAssignment: + workspace_id: int + """The unique ID of the Databricks workspace.""" - r2_temp_credentials: Optional[R2Credentials] = None + metastore_id: str + """The unique ID of the metastore.""" - url: Optional[str] = None - """The URL of the storage path accessible by the temporary credential.""" + default_catalog_name: Optional[str] = None + """The name of the default catalog in the metastore.""" def as_dict(self) -> dict: - """Serializes the GenerateTemporaryTableCredentialResponse into a dictionary suitable for use as a JSON request body.""" + """Serializes the MetastoreAssignment into a dictionary suitable for use as a JSON request body.""" body = {} - if self.aws_temp_credentials: - body["aws_temp_credentials"] = self.aws_temp_credentials.as_dict() - if self.azure_aad: - body["azure_aad"] = self.azure_aad.as_dict() - if self.azure_user_delegation_sas: - body["azure_user_delegation_sas"] = self.azure_user_delegation_sas.as_dict() - if self.expiration_time is not None: - body["expiration_time"] = self.expiration_time - if self.gcp_oauth_token: - body["gcp_oauth_token"] = self.gcp_oauth_token.as_dict() - if self.r2_temp_credentials: - body["r2_temp_credentials"] = self.r2_temp_credentials.as_dict() - if self.url is not None: - body["url"] = self.url + if self.default_catalog_name is not None: + body["default_catalog_name"] = self.default_catalog_name + if self.metastore_id is not None: + body["metastore_id"] = self.metastore_id + if self.workspace_id is not None: + body["workspace_id"] = self.workspace_id return body def as_shallow_dict(self) -> dict: - """Serializes the GenerateTemporaryTableCredentialResponse into a shallow 
dictionary of its immediate attributes.""" + """Serializes the MetastoreAssignment into a shallow dictionary of its immediate attributes.""" body = {} - if self.aws_temp_credentials: - body["aws_temp_credentials"] = self.aws_temp_credentials - if self.azure_aad: - body["azure_aad"] = self.azure_aad - if self.azure_user_delegation_sas: - body["azure_user_delegation_sas"] = self.azure_user_delegation_sas - if self.expiration_time is not None: - body["expiration_time"] = self.expiration_time - if self.gcp_oauth_token: - body["gcp_oauth_token"] = self.gcp_oauth_token - if self.r2_temp_credentials: - body["r2_temp_credentials"] = self.r2_temp_credentials - if self.url is not None: - body["url"] = self.url + if self.default_catalog_name is not None: + body["default_catalog_name"] = self.default_catalog_name + if self.metastore_id is not None: + body["metastore_id"] = self.metastore_id + if self.workspace_id is not None: + body["workspace_id"] = self.workspace_id return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> GenerateTemporaryTableCredentialResponse: - """Deserializes the GenerateTemporaryTableCredentialResponse from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> MetastoreAssignment: + """Deserializes the MetastoreAssignment from a dictionary.""" return cls( - aws_temp_credentials=_from_dict(d, "aws_temp_credentials", AwsCredentials), - azure_aad=_from_dict(d, "azure_aad", AzureActiveDirectoryToken), - azure_user_delegation_sas=_from_dict(d, "azure_user_delegation_sas", AzureUserDelegationSas), - expiration_time=d.get("expiration_time", None), - gcp_oauth_token=_from_dict(d, "gcp_oauth_token", GcpOauthToken), - r2_temp_credentials=_from_dict(d, "r2_temp_credentials", R2Credentials), - url=d.get("url", None), + default_catalog_name=d.get("default_catalog_name", None), + metastore_id=d.get("metastore_id", None), + workspace_id=d.get("workspace_id", None), ) @dataclass -class GetCatalogWorkspaceBindingsResponse: - workspaces: 
Optional[List[int]] = None - """A list of workspace IDs""" - - def as_dict(self) -> dict: - """Serializes the GetCatalogWorkspaceBindingsResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.workspaces: - body["workspaces"] = [v for v in self.workspaces] - return body - - def as_shallow_dict(self) -> dict: - """Serializes the GetCatalogWorkspaceBindingsResponse into a shallow dictionary of its immediate attributes.""" - body = {} - if self.workspaces: - body["workspaces"] = self.workspaces - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> GetCatalogWorkspaceBindingsResponse: - """Deserializes the GetCatalogWorkspaceBindingsResponse from a dictionary.""" - return cls(workspaces=d.get("workspaces", None)) - - -@dataclass -class GetMetastoreSummaryResponse: +class MetastoreInfo: cloud: Optional[str] = None """Cloud vendor of the metastore home shard (e.g., `aws`, `azure`, `gcp`).""" @@ -5556,7 +5524,7 @@ class GetMetastoreSummaryResponse: """Username of user who last modified the metastore.""" def as_dict(self) -> dict: - """Serializes the GetMetastoreSummaryResponse into a dictionary suitable for use as a JSON request body.""" + """Serializes the MetastoreInfo into a dictionary suitable for use as a JSON request body.""" body = {} if self.cloud is not None: body["cloud"] = self.cloud @@ -5601,7 +5569,7 @@ def as_dict(self) -> dict: return body def as_shallow_dict(self) -> dict: - """Serializes the GetMetastoreSummaryResponse into a shallow dictionary of its immediate attributes.""" + """Serializes the MetastoreInfo into a shallow dictionary of its immediate attributes.""" body = {} if self.cloud is not None: body["cloud"] = self.cloud @@ -5646,8 +5614,8 @@ def as_shallow_dict(self) -> dict: return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> GetMetastoreSummaryResponse: - """Deserializes the GetMetastoreSummaryResponse from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> 
MetastoreInfo: + """Deserializes the MetastoreInfo from a dictionary.""" return cls( cloud=d.get("cloud", None), created_at=d.get("created_at", None), @@ -5674,4439 +5642,2324 @@ def from_dict(cls, d: Dict[str, Any]) -> GetMetastoreSummaryResponse: @dataclass -class GetPermissionsResponse: - next_page_token: Optional[str] = None - """Opaque token to retrieve the next page of results. Absent if there are no more pages. - __page_token__ should be set to this value for the next request (for the next page of results).""" - - privilege_assignments: Optional[List[PrivilegeAssignment]] = None - """The privileges assigned to each principal""" +class ModelVersionInfo: + aliases: Optional[List[RegisteredModelAlias]] = None + """List of aliases associated with the model version""" - def as_dict(self) -> dict: - """Serializes the GetPermissionsResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.privilege_assignments: - body["privilege_assignments"] = [v.as_dict() for v in self.privilege_assignments] - return body + browse_only: Optional[bool] = None + """Indicates whether the principal is limited to retrieving metadata for the associated object + through the BROWSE privilege when include_browse is enabled in the request.""" - def as_shallow_dict(self) -> dict: - """Serializes the GetPermissionsResponse into a shallow dictionary of its immediate attributes.""" - body = {} - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.privilege_assignments: - body["privilege_assignments"] = self.privilege_assignments - return body + catalog_name: Optional[str] = None + """The name of the catalog containing the model version""" - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> GetPermissionsResponse: - """Deserializes the GetPermissionsResponse from a dictionary.""" - return cls( - 
next_page_token=d.get("next_page_token", None), - privilege_assignments=_repeated_dict(d, "privilege_assignments", PrivilegeAssignment), - ) + comment: Optional[str] = None + """The comment attached to the model version""" + created_at: Optional[int] = None -@dataclass -class GetQuotaResponse: - quota_info: Optional[QuotaInfo] = None - """The returned QuotaInfo.""" + created_by: Optional[str] = None + """The identifier of the user who created the model version""" - def as_dict(self) -> dict: - """Serializes the GetQuotaResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.quota_info: - body["quota_info"] = self.quota_info.as_dict() - return body + id: Optional[str] = None + """The unique identifier of the model version""" - def as_shallow_dict(self) -> dict: - """Serializes the GetQuotaResponse into a shallow dictionary of its immediate attributes.""" - body = {} - if self.quota_info: - body["quota_info"] = self.quota_info - return body + metastore_id: Optional[str] = None + """The unique identifier of the metastore containing the model version""" - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> GetQuotaResponse: - """Deserializes the GetQuotaResponse from a dictionary.""" - return cls(quota_info=_from_dict(d, "quota_info", QuotaInfo)) + model_name: Optional[str] = None + """The name of the parent registered model of the model version, relative to parent schema""" + model_version_dependencies: Optional[DependencyList] = None + """Model version dependencies, for feature-store packaged models""" -@dataclass -class GetWorkspaceBindingsResponse: - bindings: Optional[List[WorkspaceBinding]] = None - """List of workspace bindings""" + run_id: Optional[str] = None + """MLflow run ID used when creating the model version, if ``source`` was generated by an experiment + run stored in an MLflow tracking server""" - next_page_token: Optional[str] = None - """Opaque token to retrieve the next page of results. 
Absent if there are no more pages. - __page_token__ should be set to this value for the next request (for the next page of results).""" + run_workspace_id: Optional[int] = None + """ID of the Databricks workspace containing the MLflow run that generated this model version, if + applicable""" + + schema_name: Optional[str] = None + """The name of the schema containing the model version, relative to parent catalog""" + + source: Optional[str] = None + """URI indicating the location of the source artifacts (files) for the model version""" + + status: Optional[ModelVersionInfoStatus] = None + """Current status of the model version. Newly created model versions start in PENDING_REGISTRATION + status, then move to READY status once the model version files are uploaded and the model + version is finalized. Only model versions in READY status can be loaded for inference or served.""" + + storage_location: Optional[str] = None + """The storage location on the cloud under which model version data files are stored""" + + updated_at: Optional[int] = None + + updated_by: Optional[str] = None + """The identifier of the user who updated the model version last time""" + + version: Optional[int] = None + """Integer model version number, used to reference the model version in API requests.""" def as_dict(self) -> dict: - """Serializes the GetWorkspaceBindingsResponse into a dictionary suitable for use as a JSON request body.""" + """Serializes the ModelVersionInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.bindings: - body["bindings"] = [v.as_dict() for v in self.bindings] - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token + if self.aliases: + body["aliases"] = [v.as_dict() for v in self.aliases] + if self.browse_only is not None: + body["browse_only"] = self.browse_only + if self.catalog_name is not None: + body["catalog_name"] = self.catalog_name + if self.comment is not None: + body["comment"] = 
self.comment + if self.created_at is not None: + body["created_at"] = self.created_at + if self.created_by is not None: + body["created_by"] = self.created_by + if self.id is not None: + body["id"] = self.id + if self.metastore_id is not None: + body["metastore_id"] = self.metastore_id + if self.model_name is not None: + body["model_name"] = self.model_name + if self.model_version_dependencies: + body["model_version_dependencies"] = self.model_version_dependencies.as_dict() + if self.run_id is not None: + body["run_id"] = self.run_id + if self.run_workspace_id is not None: + body["run_workspace_id"] = self.run_workspace_id + if self.schema_name is not None: + body["schema_name"] = self.schema_name + if self.source is not None: + body["source"] = self.source + if self.status is not None: + body["status"] = self.status.value + if self.storage_location is not None: + body["storage_location"] = self.storage_location + if self.updated_at is not None: + body["updated_at"] = self.updated_at + if self.updated_by is not None: + body["updated_by"] = self.updated_by + if self.version is not None: + body["version"] = self.version return body def as_shallow_dict(self) -> dict: - """Serializes the GetWorkspaceBindingsResponse into a shallow dictionary of its immediate attributes.""" + """Serializes the ModelVersionInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.bindings: - body["bindings"] = self.bindings - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token + if self.aliases: + body["aliases"] = self.aliases + if self.browse_only is not None: + body["browse_only"] = self.browse_only + if self.catalog_name is not None: + body["catalog_name"] = self.catalog_name + if self.comment is not None: + body["comment"] = self.comment + if self.created_at is not None: + body["created_at"] = self.created_at + if self.created_by is not None: + body["created_by"] = self.created_by + if self.id is not None: + body["id"] = 
self.id + if self.metastore_id is not None: + body["metastore_id"] = self.metastore_id + if self.model_name is not None: + body["model_name"] = self.model_name + if self.model_version_dependencies: + body["model_version_dependencies"] = self.model_version_dependencies + if self.run_id is not None: + body["run_id"] = self.run_id + if self.run_workspace_id is not None: + body["run_workspace_id"] = self.run_workspace_id + if self.schema_name is not None: + body["schema_name"] = self.schema_name + if self.source is not None: + body["source"] = self.source + if self.status is not None: + body["status"] = self.status + if self.storage_location is not None: + body["storage_location"] = self.storage_location + if self.updated_at is not None: + body["updated_at"] = self.updated_at + if self.updated_by is not None: + body["updated_by"] = self.updated_by + if self.version is not None: + body["version"] = self.version return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> GetWorkspaceBindingsResponse: - """Deserializes the GetWorkspaceBindingsResponse from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> ModelVersionInfo: + """Deserializes the ModelVersionInfo from a dictionary.""" return cls( - bindings=_repeated_dict(d, "bindings", WorkspaceBinding), next_page_token=d.get("next_page_token", None) + aliases=_repeated_dict(d, "aliases", RegisteredModelAlias), + browse_only=d.get("browse_only", None), + catalog_name=d.get("catalog_name", None), + comment=d.get("comment", None), + created_at=d.get("created_at", None), + created_by=d.get("created_by", None), + id=d.get("id", None), + metastore_id=d.get("metastore_id", None), + model_name=d.get("model_name", None), + model_version_dependencies=_from_dict(d, "model_version_dependencies", DependencyList), + run_id=d.get("run_id", None), + run_workspace_id=d.get("run_workspace_id", None), + schema_name=d.get("schema_name", None), + source=d.get("source", None), + status=_enum(d, "status", 
ModelVersionInfoStatus), + storage_location=d.get("storage_location", None), + updated_at=d.get("updated_at", None), + updated_by=d.get("updated_by", None), + version=d.get("version", None), ) -class IsolationMode(Enum): - - ISOLATION_MODE_ISOLATED = "ISOLATION_MODE_ISOLATED" - ISOLATION_MODE_OPEN = "ISOLATION_MODE_OPEN" - - -class LineageDirection(Enum): +class ModelVersionInfoStatus(Enum): + """Current status of the model version. Newly created model versions start in PENDING_REGISTRATION + status, then move to READY status once the model version files are uploaded and the model + version is finalized. Only model versions in READY status can be loaded for inference or served.""" - DOWNSTREAM = "DOWNSTREAM" - UPSTREAM = "UPSTREAM" + FAILED_REGISTRATION = "FAILED_REGISTRATION" + PENDING_REGISTRATION = "PENDING_REGISTRATION" + READY = "READY" @dataclass -class ListAccountMetastoreAssignmentsResponse: - """The list of workspaces to which the given metastore is assigned.""" +class MonitorCronSchedule: + quartz_cron_expression: str + """The expression that determines when to run the monitor. See [examples]. 
+ + [examples]: https://www.quartz-scheduler.org/documentation/quartz-2.3.0/tutorials/crontrigger.html""" - workspace_ids: Optional[List[int]] = None + timezone_id: str + """The timezone id (e.g., ``"PST"``) in which to evaluate the quartz expression.""" + + pause_status: Optional[MonitorCronSchedulePauseStatus] = None + """Read only field that indicates whether a schedule is paused or not.""" def as_dict(self) -> dict: - """Serializes the ListAccountMetastoreAssignmentsResponse into a dictionary suitable for use as a JSON request body.""" + """Serializes the MonitorCronSchedule into a dictionary suitable for use as a JSON request body.""" body = {} - if self.workspace_ids: - body["workspace_ids"] = [v for v in self.workspace_ids] + if self.pause_status is not None: + body["pause_status"] = self.pause_status.value + if self.quartz_cron_expression is not None: + body["quartz_cron_expression"] = self.quartz_cron_expression + if self.timezone_id is not None: + body["timezone_id"] = self.timezone_id return body def as_shallow_dict(self) -> dict: - """Serializes the ListAccountMetastoreAssignmentsResponse into a shallow dictionary of its immediate attributes.""" + """Serializes the MonitorCronSchedule into a shallow dictionary of its immediate attributes.""" body = {} - if self.workspace_ids: - body["workspace_ids"] = self.workspace_ids + if self.pause_status is not None: + body["pause_status"] = self.pause_status + if self.quartz_cron_expression is not None: + body["quartz_cron_expression"] = self.quartz_cron_expression + if self.timezone_id is not None: + body["timezone_id"] = self.timezone_id return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ListAccountMetastoreAssignmentsResponse: - """Deserializes the ListAccountMetastoreAssignmentsResponse from a dictionary.""" - return cls(workspace_ids=d.get("workspace_ids", None)) + def from_dict(cls, d: Dict[str, Any]) -> MonitorCronSchedule: + """Deserializes the MonitorCronSchedule from a dictionary.""" + 
return cls( + pause_status=_enum(d, "pause_status", MonitorCronSchedulePauseStatus), + quartz_cron_expression=d.get("quartz_cron_expression", None), + timezone_id=d.get("timezone_id", None), + ) + + +class MonitorCronSchedulePauseStatus(Enum): + """Read only field that indicates whether a schedule is paused or not.""" + + PAUSED = "PAUSED" + UNPAUSED = "UNPAUSED" @dataclass -class ListAccountStorageCredentialsResponse: - storage_credentials: Optional[List[StorageCredentialInfo]] = None - """An array of metastore storage credentials.""" +class MonitorDataClassificationConfig: + enabled: Optional[bool] = None + """Whether data classification is enabled.""" def as_dict(self) -> dict: - """Serializes the ListAccountStorageCredentialsResponse into a dictionary suitable for use as a JSON request body.""" + """Serializes the MonitorDataClassificationConfig into a dictionary suitable for use as a JSON request body.""" body = {} - if self.storage_credentials: - body["storage_credentials"] = [v.as_dict() for v in self.storage_credentials] + if self.enabled is not None: + body["enabled"] = self.enabled return body def as_shallow_dict(self) -> dict: - """Serializes the ListAccountStorageCredentialsResponse into a shallow dictionary of its immediate attributes.""" + """Serializes the MonitorDataClassificationConfig into a shallow dictionary of its immediate attributes.""" body = {} - if self.storage_credentials: - body["storage_credentials"] = self.storage_credentials + if self.enabled is not None: + body["enabled"] = self.enabled return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ListAccountStorageCredentialsResponse: - """Deserializes the ListAccountStorageCredentialsResponse from a dictionary.""" - return cls(storage_credentials=_repeated_dict(d, "storage_credentials", StorageCredentialInfo)) + def from_dict(cls, d: Dict[str, Any]) -> MonitorDataClassificationConfig: + """Deserializes the MonitorDataClassificationConfig from a dictionary.""" + return 
cls(enabled=d.get("enabled", None)) @dataclass -class ListCatalogsResponse: - catalogs: Optional[List[CatalogInfo]] = None - """An array of catalog information objects.""" - - next_page_token: Optional[str] = None - """Opaque token to retrieve the next page of results. Absent if there are no more pages. - __page_token__ should be set to this value for the next request (for the next page of results).""" +class MonitorDestination: + email_addresses: Optional[List[str]] = None + """The list of email addresses to send the notification to. A maximum of 5 email addresses is + supported.""" def as_dict(self) -> dict: - """Serializes the ListCatalogsResponse into a dictionary suitable for use as a JSON request body.""" + """Serializes the MonitorDestination into a dictionary suitable for use as a JSON request body.""" body = {} - if self.catalogs: - body["catalogs"] = [v.as_dict() for v in self.catalogs] - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token + if self.email_addresses: + body["email_addresses"] = [v for v in self.email_addresses] return body def as_shallow_dict(self) -> dict: - """Serializes the ListCatalogsResponse into a shallow dictionary of its immediate attributes.""" + """Serializes the MonitorDestination into a shallow dictionary of its immediate attributes.""" body = {} - if self.catalogs: - body["catalogs"] = self.catalogs - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token + if self.email_addresses: + body["email_addresses"] = self.email_addresses return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ListCatalogsResponse: - """Deserializes the ListCatalogsResponse from a dictionary.""" - return cls(catalogs=_repeated_dict(d, "catalogs", CatalogInfo), next_page_token=d.get("next_page_token", None)) + def from_dict(cls, d: Dict[str, Any]) -> MonitorDestination: + """Deserializes the MonitorDestination from a dictionary.""" + return 
cls(email_addresses=d.get("email_addresses", None)) @dataclass -class ListConnectionsResponse: - connections: Optional[List[ConnectionInfo]] = None - """An array of connection information objects.""" - - next_page_token: Optional[str] = None - """Opaque token to retrieve the next page of results. Absent if there are no more pages. - __page_token__ should be set to this value for the next request (for the next page of results).""" +class MonitorInferenceLog: + timestamp_col: str + """Column that contains the timestamps of requests. The column must be one of the following: - A + ``TimestampType`` column - A column whose values can be converted to timestamps through the + pyspark ``to_timestamp`` [function]. + + [function]: https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.to_timestamp.html""" - def as_dict(self) -> dict: - """Serializes the ListConnectionsResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.connections: - body["connections"] = [v.as_dict() for v in self.connections] - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - return body + granularities: List[str] + """Granularities for aggregating data into time windows based on their timestamp. Currently the + following static granularities are supported: {``"5 minutes"``, ``"30 minutes"``, ``"1 hour"``, + ``"1 day"``, ``" week(s)"``, ``"1 month"``, ``"1 year"``}.""" - def as_shallow_dict(self) -> dict: - """Serializes the ListConnectionsResponse into a shallow dictionary of its immediate attributes.""" - body = {} - if self.connections: - body["connections"] = self.connections - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - return body + model_id_col: str + """Column that contains the id of the model generating the predictions. 
Metrics will be computed + per model id by default, and also across all model ids.""" - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ListConnectionsResponse: - """Deserializes the ListConnectionsResponse from a dictionary.""" - return cls( - connections=_repeated_dict(d, "connections", ConnectionInfo), next_page_token=d.get("next_page_token", None) - ) + problem_type: MonitorInferenceLogProblemType + """Problem type the model aims to solve. Determines the type of model-quality metrics that will be + computed.""" + prediction_col: str + """Column that contains the output/prediction from the model.""" -@dataclass -class ListCredentialsResponse: - credentials: Optional[List[CredentialInfo]] = None + label_col: Optional[str] = None + """Optional column that contains the ground truth for the prediction.""" - next_page_token: Optional[str] = None - """Opaque token to retrieve the next page of results. Absent if there are no more pages. - __page_token__ should be set to this value for the next request (for the next page of results).""" + prediction_proba_col: Optional[str] = None + """Optional column that contains the prediction probabilities for each class in a classification + problem type. The values in this column should be a map, mapping each class label to the + prediction probability for a given sample. 
The map should be of PySpark MapType().""" def as_dict(self) -> dict: - """Serializes the ListCredentialsResponse into a dictionary suitable for use as a JSON request body.""" + """Serializes the MonitorInferenceLog into a dictionary suitable for use as a JSON request body.""" body = {} - if self.credentials: - body["credentials"] = [v.as_dict() for v in self.credentials] - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token + if self.granularities: + body["granularities"] = [v for v in self.granularities] + if self.label_col is not None: + body["label_col"] = self.label_col + if self.model_id_col is not None: + body["model_id_col"] = self.model_id_col + if self.prediction_col is not None: + body["prediction_col"] = self.prediction_col + if self.prediction_proba_col is not None: + body["prediction_proba_col"] = self.prediction_proba_col + if self.problem_type is not None: + body["problem_type"] = self.problem_type.value + if self.timestamp_col is not None: + body["timestamp_col"] = self.timestamp_col return body def as_shallow_dict(self) -> dict: - """Serializes the ListCredentialsResponse into a shallow dictionary of its immediate attributes.""" + """Serializes the MonitorInferenceLog into a shallow dictionary of its immediate attributes.""" body = {} - if self.credentials: - body["credentials"] = self.credentials - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token + if self.granularities: + body["granularities"] = self.granularities + if self.label_col is not None: + body["label_col"] = self.label_col + if self.model_id_col is not None: + body["model_id_col"] = self.model_id_col + if self.prediction_col is not None: + body["prediction_col"] = self.prediction_col + if self.prediction_proba_col is not None: + body["prediction_proba_col"] = self.prediction_proba_col + if self.problem_type is not None: + body["problem_type"] = self.problem_type + if self.timestamp_col is not None: + 
body["timestamp_col"] = self.timestamp_col return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ListCredentialsResponse: - """Deserializes the ListCredentialsResponse from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> MonitorInferenceLog: + """Deserializes the MonitorInferenceLog from a dictionary.""" return cls( - credentials=_repeated_dict(d, "credentials", CredentialInfo), next_page_token=d.get("next_page_token", None) + granularities=d.get("granularities", None), + label_col=d.get("label_col", None), + model_id_col=d.get("model_id_col", None), + prediction_col=d.get("prediction_col", None), + prediction_proba_col=d.get("prediction_proba_col", None), + problem_type=_enum(d, "problem_type", MonitorInferenceLogProblemType), + timestamp_col=d.get("timestamp_col", None), ) -@dataclass -class ListExternalLineageRelationshipsResponse: - external_lineage_relationships: Optional[List[ExternalLineageInfo]] = None - - next_page_token: Optional[str] = None +class MonitorInferenceLogProblemType(Enum): + """Problem type the model aims to solve. 
Determines the type of model-quality metrics that will be + computed.""" - def as_dict(self) -> dict: - """Serializes the ListExternalLineageRelationshipsResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.external_lineage_relationships: - body["external_lineage_relationships"] = [v.as_dict() for v in self.external_lineage_relationships] - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - return body + PROBLEM_TYPE_CLASSIFICATION = "PROBLEM_TYPE_CLASSIFICATION" + PROBLEM_TYPE_REGRESSION = "PROBLEM_TYPE_REGRESSION" - def as_shallow_dict(self) -> dict: - """Serializes the ListExternalLineageRelationshipsResponse into a shallow dictionary of its immediate attributes.""" - body = {} - if self.external_lineage_relationships: - body["external_lineage_relationships"] = self.external_lineage_relationships - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - return body - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ListExternalLineageRelationshipsResponse: - """Deserializes the ListExternalLineageRelationshipsResponse from a dictionary.""" - return cls( - external_lineage_relationships=_repeated_dict(d, "external_lineage_relationships", ExternalLineageInfo), - next_page_token=d.get("next_page_token", None), - ) +@dataclass +class MonitorInfo: + table_name: str + """The full name of the table to monitor. Format: __catalog_name__.__schema_name__.__table_name__.""" + status: MonitorInfoStatus -@dataclass -class ListExternalLocationsResponse: - external_locations: Optional[List[ExternalLocationInfo]] = None - """An array of external locations.""" + monitor_version: str + """The version of the monitor config (e.g. 1,2,3). If negative, the monitor may be corrupted.""" - next_page_token: Optional[str] = None - """Opaque token to retrieve the next page of results. Absent if there are no more pages. 
- __page_token__ should be set to this value for the next request (for the next page of results).""" + profile_metrics_table_name: str + """The full name of the profile metrics table. Format: + __catalog_name__.__schema_name__.__table_name__.""" - def as_dict(self) -> dict: - """Serializes the ListExternalLocationsResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.external_locations: - body["external_locations"] = [v.as_dict() for v in self.external_locations] - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - return body + drift_metrics_table_name: str + """The full name of the drift metrics table. Format: + __catalog_name__.__schema_name__.__table_name__.""" - def as_shallow_dict(self) -> dict: - """Serializes the ListExternalLocationsResponse into a shallow dictionary of its immediate attributes.""" - body = {} - if self.external_locations: - body["external_locations"] = self.external_locations - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - return body + assets_dir: Optional[str] = None + """The directory to store monitoring assets (e.g. dashboard, metric tables).""" - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ListExternalLocationsResponse: - """Deserializes the ListExternalLocationsResponse from a dictionary.""" - return cls( - external_locations=_repeated_dict(d, "external_locations", ExternalLocationInfo), - next_page_token=d.get("next_page_token", None), - ) + baseline_table_name: Optional[str] = None + """Name of the baseline table from which drift metrics are computed from. Columns in the monitored + table should also be present in the baseline table.""" + custom_metrics: Optional[List[MonitorMetric]] = None + """Custom metrics to compute on the monitored table. 
These can be aggregate metrics, derived + metrics (from already computed aggregate metrics), or drift metrics (comparing metrics across + time windows).""" -@dataclass -class ListExternalMetadataResponse: - external_metadata: Optional[List[ExternalMetadata]] = None + dashboard_id: Optional[str] = None + """Id of dashboard that visualizes the computed metrics. This can be empty if the monitor is in + PENDING state.""" - next_page_token: Optional[str] = None + data_classification_config: Optional[MonitorDataClassificationConfig] = None + """The data classification config for the monitor.""" - def as_dict(self) -> dict: - """Serializes the ListExternalMetadataResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.external_metadata: - body["external_metadata"] = [v.as_dict() for v in self.external_metadata] - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - return body + inference_log: Optional[MonitorInferenceLog] = None + """Configuration for monitoring inference logs.""" - def as_shallow_dict(self) -> dict: - """Serializes the ListExternalMetadataResponse into a shallow dictionary of its immediate attributes.""" - body = {} - if self.external_metadata: - body["external_metadata"] = self.external_metadata - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - return body + latest_monitor_failure_msg: Optional[str] = None + """The latest failure message of the monitor (if any).""" - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ListExternalMetadataResponse: - """Deserializes the ListExternalMetadataResponse from a dictionary.""" - return cls( - external_metadata=_repeated_dict(d, "external_metadata", ExternalMetadata), - next_page_token=d.get("next_page_token", None), - ) + notifications: Optional[MonitorNotifications] = None + """The notification settings for the monitor.""" + output_schema_name: Optional[str] = None + """Schema where output 
metric tables are created.""" -@dataclass -class ListFunctionsResponse: - functions: Optional[List[FunctionInfo]] = None - """An array of function information objects.""" + schedule: Optional[MonitorCronSchedule] = None + """The schedule for automatically updating and refreshing metric tables.""" - next_page_token: Optional[str] = None - """Opaque token to retrieve the next page of results. Absent if there are no more pages. - __page_token__ should be set to this value for the next request (for the next page of results).""" + slicing_exprs: Optional[List[str]] = None + """List of column expressions to slice data with for targeted analysis. The data is grouped by each + expression independently, resulting in a separate slice for each predicate and its complements. + For high-cardinality columns, only the top 100 unique values by frequency will generate slices.""" + + snapshot: Optional[MonitorSnapshot] = None + """Configuration for monitoring snapshot tables.""" + + time_series: Optional[MonitorTimeSeries] = None + """Configuration for monitoring time series tables.""" def as_dict(self) -> dict: - """Serializes the ListFunctionsResponse into a dictionary suitable for use as a JSON request body.""" + """Serializes the MonitorInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.functions: - body["functions"] = [v.as_dict() for v in self.functions] - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token + if self.assets_dir is not None: + body["assets_dir"] = self.assets_dir + if self.baseline_table_name is not None: + body["baseline_table_name"] = self.baseline_table_name + if self.custom_metrics: + body["custom_metrics"] = [v.as_dict() for v in self.custom_metrics] + if self.dashboard_id is not None: + body["dashboard_id"] = self.dashboard_id + if self.data_classification_config: + body["data_classification_config"] = self.data_classification_config.as_dict() + if self.drift_metrics_table_name is not 
None: + body["drift_metrics_table_name"] = self.drift_metrics_table_name + if self.inference_log: + body["inference_log"] = self.inference_log.as_dict() + if self.latest_monitor_failure_msg is not None: + body["latest_monitor_failure_msg"] = self.latest_monitor_failure_msg + if self.monitor_version is not None: + body["monitor_version"] = self.monitor_version + if self.notifications: + body["notifications"] = self.notifications.as_dict() + if self.output_schema_name is not None: + body["output_schema_name"] = self.output_schema_name + if self.profile_metrics_table_name is not None: + body["profile_metrics_table_name"] = self.profile_metrics_table_name + if self.schedule: + body["schedule"] = self.schedule.as_dict() + if self.slicing_exprs: + body["slicing_exprs"] = [v for v in self.slicing_exprs] + if self.snapshot: + body["snapshot"] = self.snapshot.as_dict() + if self.status is not None: + body["status"] = self.status.value + if self.table_name is not None: + body["table_name"] = self.table_name + if self.time_series: + body["time_series"] = self.time_series.as_dict() return body def as_shallow_dict(self) -> dict: - """Serializes the ListFunctionsResponse into a shallow dictionary of its immediate attributes.""" + """Serializes the MonitorInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.functions: - body["functions"] = self.functions - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token + if self.assets_dir is not None: + body["assets_dir"] = self.assets_dir + if self.baseline_table_name is not None: + body["baseline_table_name"] = self.baseline_table_name + if self.custom_metrics: + body["custom_metrics"] = self.custom_metrics + if self.dashboard_id is not None: + body["dashboard_id"] = self.dashboard_id + if self.data_classification_config: + body["data_classification_config"] = self.data_classification_config + if self.drift_metrics_table_name is not None: + body["drift_metrics_table_name"] = 
self.drift_metrics_table_name + if self.inference_log: + body["inference_log"] = self.inference_log + if self.latest_monitor_failure_msg is not None: + body["latest_monitor_failure_msg"] = self.latest_monitor_failure_msg + if self.monitor_version is not None: + body["monitor_version"] = self.monitor_version + if self.notifications: + body["notifications"] = self.notifications + if self.output_schema_name is not None: + body["output_schema_name"] = self.output_schema_name + if self.profile_metrics_table_name is not None: + body["profile_metrics_table_name"] = self.profile_metrics_table_name + if self.schedule: + body["schedule"] = self.schedule + if self.slicing_exprs: + body["slicing_exprs"] = self.slicing_exprs + if self.snapshot: + body["snapshot"] = self.snapshot + if self.status is not None: + body["status"] = self.status + if self.table_name is not None: + body["table_name"] = self.table_name + if self.time_series: + body["time_series"] = self.time_series return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ListFunctionsResponse: - """Deserializes the ListFunctionsResponse from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> MonitorInfo: + """Deserializes the MonitorInfo from a dictionary.""" return cls( - functions=_repeated_dict(d, "functions", FunctionInfo), next_page_token=d.get("next_page_token", None) + assets_dir=d.get("assets_dir", None), + baseline_table_name=d.get("baseline_table_name", None), + custom_metrics=_repeated_dict(d, "custom_metrics", MonitorMetric), + dashboard_id=d.get("dashboard_id", None), + data_classification_config=_from_dict(d, "data_classification_config", MonitorDataClassificationConfig), + drift_metrics_table_name=d.get("drift_metrics_table_name", None), + inference_log=_from_dict(d, "inference_log", MonitorInferenceLog), + latest_monitor_failure_msg=d.get("latest_monitor_failure_msg", None), + monitor_version=d.get("monitor_version", None), + notifications=_from_dict(d, "notifications", 
MonitorNotifications), + output_schema_name=d.get("output_schema_name", None), + profile_metrics_table_name=d.get("profile_metrics_table_name", None), + schedule=_from_dict(d, "schedule", MonitorCronSchedule), + slicing_exprs=d.get("slicing_exprs", None), + snapshot=_from_dict(d, "snapshot", MonitorSnapshot), + status=_enum(d, "status", MonitorInfoStatus), + table_name=d.get("table_name", None), + time_series=_from_dict(d, "time_series", MonitorTimeSeries), ) +class MonitorInfoStatus(Enum): + """The status of the monitor.""" + + MONITOR_STATUS_ACTIVE = "MONITOR_STATUS_ACTIVE" + MONITOR_STATUS_DELETE_PENDING = "MONITOR_STATUS_DELETE_PENDING" + MONITOR_STATUS_ERROR = "MONITOR_STATUS_ERROR" + MONITOR_STATUS_FAILED = "MONITOR_STATUS_FAILED" + MONITOR_STATUS_PENDING = "MONITOR_STATUS_PENDING" + + @dataclass -class ListMetastoresResponse: - metastores: Optional[List[MetastoreInfo]] = None - """An array of metastore information objects.""" +class MonitorMetric: + name: str + """Name of the metric in the output tables.""" - next_page_token: Optional[str] = None - """Opaque token to retrieve the next page of results. Absent if there are no more pages. - __page_token__ should be set to this value for the next request (for the next page of results).""" + definition: str + """Jinja template for a SQL expression that specifies how to compute the metric. See [create metric + definition]. + + [create metric definition]: https://docs.databricks.com/en/lakehouse-monitoring/custom-metrics.html#create-definition""" + + input_columns: List[str] + """A list of column names in the input table the metric should be computed for. Can use + ``":table"`` to indicate that the metric needs information from multiple columns.""" + + output_data_type: str + """The output type of the custom metric.""" + + type: MonitorMetricType + """Can only be one of ``"CUSTOM_METRIC_TYPE_AGGREGATE"``, ``"CUSTOM_METRIC_TYPE_DERIVED"``, or + ``"CUSTOM_METRIC_TYPE_DRIFT"``. 
The ``"CUSTOM_METRIC_TYPE_AGGREGATE"`` and + ``"CUSTOM_METRIC_TYPE_DERIVED"`` metrics are computed on a single table, whereas the + ``"CUSTOM_METRIC_TYPE_DRIFT"`` compare metrics across baseline and input table, or across the + two consecutive time windows. - CUSTOM_METRIC_TYPE_AGGREGATE: only depend on the existing + columns in your table - CUSTOM_METRIC_TYPE_DERIVED: depend on previously computed aggregate + metrics - CUSTOM_METRIC_TYPE_DRIFT: depend on previously computed aggregate or derived metrics""" def as_dict(self) -> dict: - """Serializes the ListMetastoresResponse into a dictionary suitable for use as a JSON request body.""" + """Serializes the MonitorMetric into a dictionary suitable for use as a JSON request body.""" body = {} - if self.metastores: - body["metastores"] = [v.as_dict() for v in self.metastores] - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token + if self.definition is not None: + body["definition"] = self.definition + if self.input_columns: + body["input_columns"] = [v for v in self.input_columns] + if self.name is not None: + body["name"] = self.name + if self.output_data_type is not None: + body["output_data_type"] = self.output_data_type + if self.type is not None: + body["type"] = self.type.value return body def as_shallow_dict(self) -> dict: - """Serializes the ListMetastoresResponse into a shallow dictionary of its immediate attributes.""" + """Serializes the MonitorMetric into a shallow dictionary of its immediate attributes.""" body = {} - if self.metastores: - body["metastores"] = self.metastores - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token + if self.definition is not None: + body["definition"] = self.definition + if self.input_columns: + body["input_columns"] = self.input_columns + if self.name is not None: + body["name"] = self.name + if self.output_data_type is not None: + body["output_data_type"] = self.output_data_type + if self.type is not 
None: + body["type"] = self.type return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ListMetastoresResponse: - """Deserializes the ListMetastoresResponse from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> MonitorMetric: + """Deserializes the MonitorMetric from a dictionary.""" return cls( - metastores=_repeated_dict(d, "metastores", MetastoreInfo), next_page_token=d.get("next_page_token", None) + definition=d.get("definition", None), + input_columns=d.get("input_columns", None), + name=d.get("name", None), + output_data_type=d.get("output_data_type", None), + type=_enum(d, "type", MonitorMetricType), ) +class MonitorMetricType(Enum): + """Can only be one of ``"CUSTOM_METRIC_TYPE_AGGREGATE"``, ``"CUSTOM_METRIC_TYPE_DERIVED"``, or + ``"CUSTOM_METRIC_TYPE_DRIFT"``. The ``"CUSTOM_METRIC_TYPE_AGGREGATE"`` and + ``"CUSTOM_METRIC_TYPE_DERIVED"`` metrics are computed on a single table, whereas the + ``"CUSTOM_METRIC_TYPE_DRIFT"`` compare metrics across baseline and input table, or across the + two consecutive time windows. - CUSTOM_METRIC_TYPE_AGGREGATE: only depend on the existing + columns in your table - CUSTOM_METRIC_TYPE_DERIVED: depend on previously computed aggregate + metrics - CUSTOM_METRIC_TYPE_DRIFT: depend on previously computed aggregate or derived metrics""" + + CUSTOM_METRIC_TYPE_AGGREGATE = "CUSTOM_METRIC_TYPE_AGGREGATE" + CUSTOM_METRIC_TYPE_DERIVED = "CUSTOM_METRIC_TYPE_DERIVED" + CUSTOM_METRIC_TYPE_DRIFT = "CUSTOM_METRIC_TYPE_DRIFT" + + @dataclass -class ListModelVersionsResponse: - model_versions: Optional[List[ModelVersionInfo]] = None +class MonitorNotifications: + on_failure: Optional[MonitorDestination] = None + """Who to send notifications to on monitor failure.""" - next_page_token: Optional[str] = None - """Opaque token to retrieve the next page of results. Absent if there are no more pages. 
- __page_token__ should be set to this value for the next request (for the next page of results).""" + on_new_classification_tag_detected: Optional[MonitorDestination] = None + """Who to send notifications to when new data classification tags are detected.""" def as_dict(self) -> dict: - """Serializes the ListModelVersionsResponse into a dictionary suitable for use as a JSON request body.""" + """Serializes the MonitorNotifications into a dictionary suitable for use as a JSON request body.""" body = {} - if self.model_versions: - body["model_versions"] = [v.as_dict() for v in self.model_versions] - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token + if self.on_failure: + body["on_failure"] = self.on_failure.as_dict() + if self.on_new_classification_tag_detected: + body["on_new_classification_tag_detected"] = self.on_new_classification_tag_detected.as_dict() return body def as_shallow_dict(self) -> dict: - """Serializes the ListModelVersionsResponse into a shallow dictionary of its immediate attributes.""" + """Serializes the MonitorNotifications into a shallow dictionary of its immediate attributes.""" body = {} - if self.model_versions: - body["model_versions"] = self.model_versions - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token + if self.on_failure: + body["on_failure"] = self.on_failure + if self.on_new_classification_tag_detected: + body["on_new_classification_tag_detected"] = self.on_new_classification_tag_detected return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ListModelVersionsResponse: - """Deserializes the ListModelVersionsResponse from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> MonitorNotifications: + """Deserializes the MonitorNotifications from a dictionary.""" return cls( - model_versions=_repeated_dict(d, "model_versions", ModelVersionInfo), - next_page_token=d.get("next_page_token", None), + on_failure=_from_dict(d, "on_failure", 
MonitorDestination), + on_new_classification_tag_detected=_from_dict(d, "on_new_classification_tag_detected", MonitorDestination), ) @dataclass -class ListQuotasResponse: - next_page_token: Optional[str] = None - """Opaque token to retrieve the next page of results. Absent if there are no more pages. - __page_token__ should be set to this value for the next request.""" +class MonitorRefreshInfo: + refresh_id: int + """Unique id of the refresh operation.""" - quotas: Optional[List[QuotaInfo]] = None - """An array of returned QuotaInfos.""" - - def as_dict(self) -> dict: - """Serializes the ListQuotasResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.quotas: - body["quotas"] = [v.as_dict() for v in self.quotas] - return body - - def as_shallow_dict(self) -> dict: - """Serializes the ListQuotasResponse into a shallow dictionary of its immediate attributes.""" - body = {} - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.quotas: - body["quotas"] = self.quotas - return body + state: MonitorRefreshInfoState + """The current state of the refresh.""" - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ListQuotasResponse: - """Deserializes the ListQuotasResponse from a dictionary.""" - return cls(next_page_token=d.get("next_page_token", None), quotas=_repeated_dict(d, "quotas", QuotaInfo)) + start_time_ms: int + """Time at which refresh operation was initiated (milliseconds since 1/1/1970 UTC).""" + end_time_ms: Optional[int] = None + """Time at which refresh operation completed (milliseconds since 1/1/1970 UTC).""" -@dataclass -class ListRegisteredModelsResponse: - next_page_token: Optional[str] = None - """Opaque token for pagination. Omitted if there are no more results. 
page_token should be set to - this value for fetching the next page.""" + message: Optional[str] = None + """An optional message to give insight into the current state of the job (e.g. FAILURE messages).""" - registered_models: Optional[List[RegisteredModelInfo]] = None + trigger: Optional[MonitorRefreshInfoTrigger] = None + """The method by which the refresh was triggered.""" def as_dict(self) -> dict: - """Serializes the ListRegisteredModelsResponse into a dictionary suitable for use as a JSON request body.""" + """Serializes the MonitorRefreshInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.registered_models: - body["registered_models"] = [v.as_dict() for v in self.registered_models] + if self.end_time_ms is not None: + body["end_time_ms"] = self.end_time_ms + if self.message is not None: + body["message"] = self.message + if self.refresh_id is not None: + body["refresh_id"] = self.refresh_id + if self.start_time_ms is not None: + body["start_time_ms"] = self.start_time_ms + if self.state is not None: + body["state"] = self.state.value + if self.trigger is not None: + body["trigger"] = self.trigger.value return body def as_shallow_dict(self) -> dict: - """Serializes the ListRegisteredModelsResponse into a shallow dictionary of its immediate attributes.""" + """Serializes the MonitorRefreshInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.registered_models: - body["registered_models"] = self.registered_models + if self.end_time_ms is not None: + body["end_time_ms"] = self.end_time_ms + if self.message is not None: + body["message"] = self.message + if self.refresh_id is not None: + body["refresh_id"] = self.refresh_id + if self.start_time_ms is not None: + body["start_time_ms"] = self.start_time_ms + if self.state is not 
None: + body["state"] = self.state + if self.trigger is not None: + body["trigger"] = self.trigger return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ListRegisteredModelsResponse: - """Deserializes the ListRegisteredModelsResponse from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> MonitorRefreshInfo: + """Deserializes the MonitorRefreshInfo from a dictionary.""" return cls( - next_page_token=d.get("next_page_token", None), - registered_models=_repeated_dict(d, "registered_models", RegisteredModelInfo), + end_time_ms=d.get("end_time_ms", None), + message=d.get("message", None), + refresh_id=d.get("refresh_id", None), + start_time_ms=d.get("start_time_ms", None), + state=_enum(d, "state", MonitorRefreshInfoState), + trigger=_enum(d, "trigger", MonitorRefreshInfoTrigger), ) -@dataclass -class ListSchemasResponse: - next_page_token: Optional[str] = None - """Opaque token to retrieve the next page of results. Absent if there are no more pages. - __page_token__ should be set to this value for the next request (for the next page of results).""" +class MonitorRefreshInfoState(Enum): + """The current state of the refresh.""" - schemas: Optional[List[SchemaInfo]] = None - """An array of schema information objects.""" + CANCELED = "CANCELED" + FAILED = "FAILED" + PENDING = "PENDING" + RUNNING = "RUNNING" + SUCCESS = "SUCCESS" - def as_dict(self) -> dict: - """Serializes the ListSchemasResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.schemas: - body["schemas"] = [v.as_dict() for v in self.schemas] - return body - def as_shallow_dict(self) -> dict: - """Serializes the ListSchemasResponse into a shallow dictionary of its immediate attributes.""" - body = {} - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.schemas: - body["schemas"] = self.schemas - return body +class 
MonitorRefreshInfoTrigger(Enum): + """The method by which the refresh was triggered.""" - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ListSchemasResponse: - """Deserializes the ListSchemasResponse from a dictionary.""" - return cls(next_page_token=d.get("next_page_token", None), schemas=_repeated_dict(d, "schemas", SchemaInfo)) + MANUAL = "MANUAL" + SCHEDULE = "SCHEDULE" @dataclass -class ListStorageCredentialsResponse: - next_page_token: Optional[str] = None - """Opaque token to retrieve the next page of results. Absent if there are no more pages. - __page_token__ should be set to this value for the next request (for the next page of results).""" - - storage_credentials: Optional[List[StorageCredentialInfo]] = None +class MonitorRefreshListResponse: + refreshes: Optional[List[MonitorRefreshInfo]] = None + """List of refreshes.""" def as_dict(self) -> dict: - """Serializes the ListStorageCredentialsResponse into a dictionary suitable for use as a JSON request body.""" + """Serializes the MonitorRefreshListResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.storage_credentials: - body["storage_credentials"] = [v.as_dict() for v in self.storage_credentials] + if self.refreshes: + body["refreshes"] = [v.as_dict() for v in self.refreshes] return body def as_shallow_dict(self) -> dict: - """Serializes the ListStorageCredentialsResponse into a shallow dictionary of its immediate attributes.""" + """Serializes the MonitorRefreshListResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.storage_credentials: - body["storage_credentials"] = self.storage_credentials + if self.refreshes: + body["refreshes"] = self.refreshes return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ListStorageCredentialsResponse: 
- """Deserializes the ListStorageCredentialsResponse from a dictionary.""" - return cls( - next_page_token=d.get("next_page_token", None), - storage_credentials=_repeated_dict(d, "storage_credentials", StorageCredentialInfo), - ) + def from_dict(cls, d: Dict[str, Any]) -> MonitorRefreshListResponse: + """Deserializes the MonitorRefreshListResponse from a dictionary.""" + return cls(refreshes=_repeated_dict(d, "refreshes", MonitorRefreshInfo)) @dataclass -class ListSystemSchemasResponse: - next_page_token: Optional[str] = None - """Opaque token to retrieve the next page of results. Absent if there are no more pages. - __page_token__ should be set to this value for the next request (for the next page of results).""" - - schemas: Optional[List[SystemSchemaInfo]] = None - """An array of system schema information objects.""" - +class MonitorSnapshot: def as_dict(self) -> dict: - """Serializes the ListSystemSchemasResponse into a dictionary suitable for use as a JSON request body.""" + """Serializes the MonitorSnapshot into a dictionary suitable for use as a JSON request body.""" body = {} - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.schemas: - body["schemas"] = [v.as_dict() for v in self.schemas] return body def as_shallow_dict(self) -> dict: - """Serializes the ListSystemSchemasResponse into a shallow dictionary of its immediate attributes.""" + """Serializes the MonitorSnapshot into a shallow dictionary of its immediate attributes.""" body = {} - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.schemas: - body["schemas"] = self.schemas return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ListSystemSchemasResponse: - """Deserializes the ListSystemSchemasResponse from a dictionary.""" - return cls( - next_page_token=d.get("next_page_token", None), schemas=_repeated_dict(d, "schemas", SystemSchemaInfo) - ) + def from_dict(cls, d: Dict[str, Any]) -> 
MonitorSnapshot: + """Deserializes the MonitorSnapshot from a dictionary.""" + return cls() @dataclass -class ListTableSummariesResponse: - next_page_token: Optional[str] = None - """Opaque token to retrieve the next page of results. Absent if there are no more pages. - __page_token__ should be set to this value for the next request (for the next page of results).""" +class MonitorTimeSeries: + timestamp_col: str + """Column that contains the timestamps of requests. The column must be one of the following: - A + ``TimestampType`` column - A column whose values can be converted to timestamps through the + pyspark ``to_timestamp`` [function]. + + [function]: https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.to_timestamp.html""" - tables: Optional[List[TableSummary]] = None - """List of table summaries.""" + granularities: List[str] + """Granularities for aggregating data into time windows based on their timestamp. Currently the + following static granularities are supported: {``"5 minutes"``, ``"30 minutes"``, ``"1 hour"``, + ``"1 day"``, ``" week(s)"``, ``"1 month"``, ``"1 year"``}.""" def as_dict(self) -> dict: - """Serializes the ListTableSummariesResponse into a dictionary suitable for use as a JSON request body.""" + """Serializes the MonitorTimeSeries into a dictionary suitable for use as a JSON request body.""" body = {} - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.tables: - body["tables"] = [v.as_dict() for v in self.tables] + if self.granularities: + body["granularities"] = [v for v in self.granularities] + if self.timestamp_col is not None: + body["timestamp_col"] = self.timestamp_col return body def as_shallow_dict(self) -> dict: - """Serializes the ListTableSummariesResponse into a shallow dictionary of its immediate attributes.""" + """Serializes the MonitorTimeSeries into a shallow dictionary of its immediate attributes.""" body = {} - if 
self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.tables: - body["tables"] = self.tables + if self.granularities: + body["granularities"] = self.granularities + if self.timestamp_col is not None: + body["timestamp_col"] = self.timestamp_col return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ListTableSummariesResponse: - """Deserializes the ListTableSummariesResponse from a dictionary.""" - return cls(next_page_token=d.get("next_page_token", None), tables=_repeated_dict(d, "tables", TableSummary)) + def from_dict(cls, d: Dict[str, Any]) -> MonitorTimeSeries: + """Deserializes the MonitorTimeSeries from a dictionary.""" + return cls(granularities=d.get("granularities", None), timestamp_col=d.get("timestamp_col", None)) @dataclass -class ListTablesResponse: - next_page_token: Optional[str] = None - """Opaque token to retrieve the next page of results. Absent if there are no more pages. - __page_token__ should be set to this value for the next request (for the next page of results).""" - - tables: Optional[List[TableInfo]] = None - """An array of table information objects.""" +class NamedTableConstraint: + name: str + """The name of the constraint.""" def as_dict(self) -> dict: - """Serializes the ListTablesResponse into a dictionary suitable for use as a JSON request body.""" + """Serializes the NamedTableConstraint into a dictionary suitable for use as a JSON request body.""" body = {} - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.tables: - body["tables"] = [v.as_dict() for v in self.tables] + if self.name is not None: + body["name"] = self.name return body def as_shallow_dict(self) -> dict: - """Serializes the ListTablesResponse into a shallow dictionary of its immediate attributes.""" + """Serializes the NamedTableConstraint into a shallow dictionary of its immediate attributes.""" body = {} - if self.next_page_token is not None: - body["next_page_token"] 
= self.next_page_token - if self.tables: - body["tables"] = self.tables + if self.name is not None: + body["name"] = self.name return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ListTablesResponse: - """Deserializes the ListTablesResponse from a dictionary.""" - return cls(next_page_token=d.get("next_page_token", None), tables=_repeated_dict(d, "tables", TableInfo)) + def from_dict(cls, d: Dict[str, Any]) -> NamedTableConstraint: + """Deserializes the NamedTableConstraint from a dictionary.""" + return cls(name=d.get("name", None)) @dataclass -class ListVolumesResponseContent: - next_page_token: Optional[str] = None - """Opaque token to retrieve the next page of results. Absent if there are no more pages. - __page_token__ should be set to this value for the next request to retrieve the next page of - results.""" +class OnlineTable: + """Online Table information.""" - volumes: Optional[List[VolumeInfo]] = None + name: Optional[str] = None + """Full three-part (catalog, schema, table) name of the table.""" + + spec: Optional[OnlineTableSpec] = None + """Specification of the online table.""" + + status: Optional[OnlineTableStatus] = None + """Online Table data synchronization status""" + + table_serving_url: Optional[str] = None + """Data serving REST API URL for this table""" + + unity_catalog_provisioning_state: Optional[ProvisioningInfoState] = None + """The provisioning state of the online table entity in Unity Catalog. This is distinct from the + state of the data synchronization pipeline (i.e. 
the table may be in "ACTIVE" but the pipeline + may be in "PROVISIONING" as it runs asynchronously).""" def as_dict(self) -> dict: - """Serializes the ListVolumesResponseContent into a dictionary suitable for use as a JSON request body.""" + """Serializes the OnlineTable into a dictionary suitable for use as a JSON request body.""" body = {} - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.volumes: - body["volumes"] = [v.as_dict() for v in self.volumes] + if self.name is not None: + body["name"] = self.name + if self.spec: + body["spec"] = self.spec.as_dict() + if self.status: + body["status"] = self.status.as_dict() + if self.table_serving_url is not None: + body["table_serving_url"] = self.table_serving_url + if self.unity_catalog_provisioning_state is not None: + body["unity_catalog_provisioning_state"] = self.unity_catalog_provisioning_state.value return body def as_shallow_dict(self) -> dict: - """Serializes the ListVolumesResponseContent into a shallow dictionary of its immediate attributes.""" + """Serializes the OnlineTable into a shallow dictionary of its immediate attributes.""" body = {} - if self.next_page_token is not None: - body["next_page_token"] = self.next_page_token - if self.volumes: - body["volumes"] = self.volumes + if self.name is not None: + body["name"] = self.name + if self.spec: + body["spec"] = self.spec + if self.status: + body["status"] = self.status + if self.table_serving_url is not None: + body["table_serving_url"] = self.table_serving_url + if self.unity_catalog_provisioning_state is not None: + body["unity_catalog_provisioning_state"] = self.unity_catalog_provisioning_state return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ListVolumesResponseContent: - """Deserializes the ListVolumesResponseContent from a dictionary.""" - return cls(next_page_token=d.get("next_page_token", None), volumes=_repeated_dict(d, "volumes", VolumeInfo)) + def from_dict(cls, d: Dict[str, 
@dataclass
class OnlineTableSpec:
    """Specification of an online table."""

    perform_full_copy: Optional[bool] = None
    """Whether to create a full-copy pipeline -- a pipeline that stops after creates a full copy of the
    source table upon initialization and does not process any change data feeds (CDFs) afterwards.
    The pipeline can still be manually triggered afterwards, but it always perform a full copy of
    the source table and there are no incremental updates. This mode is useful for syncing views or
    tables without CDFs to online tables. Note that the full-copy pipeline only supports "triggered"
    scheduling policy."""

    pipeline_id: Optional[str] = None
    """ID of the associated pipeline. Generated by the server - cannot be set by the caller."""

    primary_key_columns: Optional[List[str]] = None
    """Primary Key columns to be used for data insert/update in the destination."""

    run_continuously: Optional[OnlineTableSpecContinuousSchedulingPolicy] = None
    """Pipeline runs continuously after generating the initial data."""

    run_triggered: Optional[OnlineTableSpecTriggeredSchedulingPolicy] = None
    """Pipeline stops after generating the initial data and can be triggered later (manually, through a
    cron job or through data triggers)"""

    source_table_full_name: Optional[str] = None
    """Three-part (catalog, schema, table) name of the source Delta table."""

    timeseries_key: Optional[str] = None
    """Time series key to deduplicate (tie-break) rows with the same primary key."""

    def as_dict(self) -> dict:
        """Serializes the OnlineTableSpec into a dictionary suitable for use as a JSON request body."""
        body = {}
        # Scalars use `is not None` so that falsy-but-set values (False, "") survive;
        # lists and nested messages use truthiness so empty containers are omitted.
        if self.perform_full_copy is not None:
            body["perform_full_copy"] = self.perform_full_copy
        if self.pipeline_id is not None:
            body["pipeline_id"] = self.pipeline_id
        if self.primary_key_columns:
            body["primary_key_columns"] = [v for v in self.primary_key_columns]
        if self.run_continuously:
            body["run_continuously"] = self.run_continuously.as_dict()
        if self.run_triggered:
            body["run_triggered"] = self.run_triggered.as_dict()
        if self.source_table_full_name is not None:
            body["source_table_full_name"] = self.source_table_full_name
        if self.timeseries_key is not None:
            body["timeseries_key"] = self.timeseries_key
        return body

    def as_shallow_dict(self) -> dict:
        """Serializes the OnlineTableSpec into a shallow dictionary of its immediate attributes."""
        body = {}
        if self.perform_full_copy is not None:
            body["perform_full_copy"] = self.perform_full_copy
        if self.pipeline_id is not None:
            body["pipeline_id"] = self.pipeline_id
        if self.primary_key_columns:
            body["primary_key_columns"] = self.primary_key_columns
        if self.run_continuously:
            body["run_continuously"] = self.run_continuously
        if self.run_triggered:
            body["run_triggered"] = self.run_triggered
        if self.source_table_full_name is not None:
            body["source_table_full_name"] = self.source_table_full_name
        if self.timeseries_key is not None:
            body["timeseries_key"] = self.timeseries_key
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> OnlineTableSpec:
        """Deserializes the OnlineTableSpec from a dictionary."""
        return cls(
            perform_full_copy=d.get("perform_full_copy", None),
            pipeline_id=d.get("pipeline_id", None),
            primary_key_columns=d.get("primary_key_columns", None),
            run_continuously=_from_dict(d, "run_continuously", OnlineTableSpecContinuousSchedulingPolicy),
            run_triggered=_from_dict(d, "run_triggered", OnlineTableSpecTriggeredSchedulingPolicy),
            source_table_full_name=d.get("source_table_full_name", None),
            timeseries_key=d.get("timeseries_key", None),
        )


@dataclass
class OnlineTableSpecContinuousSchedulingPolicy:
    # Empty marker message: its presence on OnlineTableSpec selects continuous scheduling.
    def as_dict(self) -> dict:
        """Serializes the OnlineTableSpecContinuousSchedulingPolicy into a dictionary suitable for use as a JSON request body."""
        body = {}
        return body

    def as_shallow_dict(self) -> dict:
        """Serializes the OnlineTableSpecContinuousSchedulingPolicy into a shallow dictionary of its immediate attributes."""
        body = {}
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> OnlineTableSpecContinuousSchedulingPolicy:
        """Deserializes the OnlineTableSpecContinuousSchedulingPolicy from a dictionary."""
        return cls()


@dataclass
class OnlineTableSpecTriggeredSchedulingPolicy:
    # Empty marker message: its presence on OnlineTableSpec selects triggered scheduling.
    def as_dict(self) -> dict:
        """Serializes the OnlineTableSpecTriggeredSchedulingPolicy into a dictionary suitable for use as a JSON request body."""
        body = {}
        return body

    def as_shallow_dict(self) -> dict:
        """Serializes the OnlineTableSpecTriggeredSchedulingPolicy into a shallow dictionary of its immediate attributes."""
        body = {}
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> OnlineTableSpecTriggeredSchedulingPolicy:
        """Deserializes the OnlineTableSpecTriggeredSchedulingPolicy from a dictionary."""
        return cls()


class OnlineTableState(Enum):
    """The state of an online table."""

    OFFLINE = "OFFLINE"
    OFFLINE_FAILED = "OFFLINE_FAILED"
    ONLINE = "ONLINE"
    ONLINE_CONTINUOUS_UPDATE = "ONLINE_CONTINUOUS_UPDATE"
    ONLINE_NO_PENDING_UPDATE = "ONLINE_NO_PENDING_UPDATE"
    ONLINE_PIPELINE_FAILED = "ONLINE_PIPELINE_FAILED"
    ONLINE_TRIGGERED_UPDATE = "ONLINE_TRIGGERED_UPDATE"
    ONLINE_UPDATING_PIPELINE_RESOURCES = "ONLINE_UPDATING_PIPELINE_RESOURCES"
    PROVISIONING = "PROVISIONING"
    PROVISIONING_INITIAL_SNAPSHOT = "PROVISIONING_INITIAL_SNAPSHOT"
    PROVISIONING_PIPELINE_RESOURCES = "PROVISIONING_PIPELINE_RESOURCES"


@dataclass
class OnlineTableStatus:
    """Status of an online table."""

    continuous_update_status: Optional[ContinuousUpdateStatus] = None

    detailed_state: Optional[OnlineTableState] = None
    """The state of the online table."""

    failed_status: Optional[FailedStatus] = None

    message: Optional[str] = None
    """A text description of the current state of the online table."""

    provisioning_status: Optional[ProvisioningStatus] = None

    triggered_update_status: Optional[TriggeredUpdateStatus] = None

    def as_dict(self) -> dict:
        """Serializes the OnlineTableStatus into a dictionary suitable for use as a JSON request body."""
        body = {}
        # Nested messages serialize recursively; the enum serializes to its string value.
        if self.continuous_update_status:
            body["continuous_update_status"] = self.continuous_update_status.as_dict()
        if self.detailed_state is not None:
            body["detailed_state"] = self.detailed_state.value
        if self.failed_status:
            body["failed_status"] = self.failed_status.as_dict()
        if self.message is not None:
            body["message"] = self.message
        if self.provisioning_status:
            body["provisioning_status"] = self.provisioning_status.as_dict()
        if self.triggered_update_status:
            body["triggered_update_status"] = self.triggered_update_status.as_dict()
        return body

    def as_shallow_dict(self) -> dict:
        """Serializes the OnlineTableStatus into a shallow dictionary of its immediate attributes."""
        body = {}
        if self.continuous_update_status:
            body["continuous_update_status"] = self.continuous_update_status
        if self.detailed_state is not None:
            body["detailed_state"] = self.detailed_state
        if self.failed_status:
            body["failed_status"] = self.failed_status
        if self.message is not None:
            body["message"] = self.message
        if self.provisioning_status:
            body["provisioning_status"] = self.provisioning_status
        if self.triggered_update_status:
            body["triggered_update_status"] = self.triggered_update_status
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> OnlineTableStatus:
        """Deserializes the OnlineTableStatus from a dictionary."""
        return cls(
            continuous_update_status=_from_dict(d, "continuous_update_status", ContinuousUpdateStatus),
            detailed_state=_enum(d, "detailed_state", OnlineTableState),
            failed_status=_from_dict(d, "failed_status", FailedStatus),
            message=d.get("message", None),
            provisioning_status=_from_dict(d, "provisioning_status", ProvisioningStatus),
            triggered_update_status=_from_dict(d, "triggered_update_status", TriggeredUpdateStatus),
        )
@dataclass
class OptionSpec:
    """Spec of an allowed option on a securable kind and its attributes. This is mostly used by UI to
    provide user friendly hints and descriptions in order to facilitate the securable creation
    process."""

    allowed_values: Optional[List[str]] = None
    """For drop down / radio button selections, UI will want to know the possible input values, it can
    also be used by other option types to limit input selections."""

    default_value: Optional[str] = None
    """The default value of the option, for example, value '443' for 'port' option."""

    description: Optional[str] = None
    """A concise user facing description of what the input value of this option should look like."""

    hint: Optional[str] = None
    """The hint is used on the UI to suggest what the input value can possibly be like, for example:
    example.com for 'host' option. Unlike default value, it will not be applied automatically
    without user input."""

    is_copiable: Optional[bool] = None
    """Indicates whether an option should be displayed with copy button on the UI."""

    is_creatable: Optional[bool] = None
    """Indicates whether an option can be provided by users in the create/update path of an entity."""

    is_hidden: Optional[bool] = None
    """Is the option value not user settable and is thus not shown on the UI."""

    is_loggable: Optional[bool] = None
    """Specifies whether this option is safe to log, i.e. no sensitive information."""

    is_required: Optional[bool] = None
    """Is the option required."""

    is_secret: Optional[bool] = None
    """Is the option value considered secret and thus redacted on the UI."""

    is_updatable: Optional[bool] = None
    """Is the option updatable by users."""

    name: Optional[str] = None
    """The unique name of the option."""

    oauth_stage: Optional[OptionSpecOauthStage] = None
    """Specifies when the option value is displayed on the UI within the OAuth flow."""

    type: Optional[OptionSpecOptionType] = None
    """The type of the option."""

    def as_dict(self) -> dict:
        """Serializes the OptionSpec into a dictionary suitable for use as a JSON request body."""
        body = {}
        # Scalars use `is not None` so that explicit False values are kept;
        # the list uses truthiness so an empty list is omitted; enums emit their string value.
        if self.allowed_values:
            body["allowed_values"] = [v for v in self.allowed_values]
        if self.default_value is not None:
            body["default_value"] = self.default_value
        if self.description is not None:
            body["description"] = self.description
        if self.hint is not None:
            body["hint"] = self.hint
        if self.is_copiable is not None:
            body["is_copiable"] = self.is_copiable
        if self.is_creatable is not None:
            body["is_creatable"] = self.is_creatable
        if self.is_hidden is not None:
            body["is_hidden"] = self.is_hidden
        if self.is_loggable is not None:
            body["is_loggable"] = self.is_loggable
        if self.is_required is not None:
            body["is_required"] = self.is_required
        if self.is_secret is not None:
            body["is_secret"] = self.is_secret
        if self.is_updatable is not None:
            body["is_updatable"] = self.is_updatable
        if self.name is not None:
            body["name"] = self.name
        if self.oauth_stage is not None:
            body["oauth_stage"] = self.oauth_stage.value
        if self.type is not None:
            body["type"] = self.type.value
        return body

    def as_shallow_dict(self) -> dict:
        """Serializes the OptionSpec into a shallow dictionary of its immediate attributes."""
        body = {}
        if self.allowed_values:
            body["allowed_values"] = self.allowed_values
        if self.default_value is not None:
            body["default_value"] = self.default_value
        if self.description is not None:
            body["description"] = self.description
        if self.hint is not None:
            body["hint"] = self.hint
        if self.is_copiable is not None:
            body["is_copiable"] = self.is_copiable
        if self.is_creatable is not None:
            body["is_creatable"] = self.is_creatable
        if self.is_hidden is not None:
            body["is_hidden"] = self.is_hidden
        if self.is_loggable is not None:
            body["is_loggable"] = self.is_loggable
        if self.is_required is not None:
            body["is_required"] = self.is_required
        if self.is_secret is not None:
            body["is_secret"] = self.is_secret
        if self.is_updatable is not None:
            body["is_updatable"] = self.is_updatable
        if self.name is not None:
            body["name"] = self.name
        if self.oauth_stage is not None:
            body["oauth_stage"] = self.oauth_stage
        if self.type is not None:
            body["type"] = self.type
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> OptionSpec:
        """Deserializes the OptionSpec from a dictionary."""
        return cls(
            allowed_values=d.get("allowed_values", None),
            default_value=d.get("default_value", None),
            description=d.get("description", None),
            hint=d.get("hint", None),
            is_copiable=d.get("is_copiable", None),
            is_creatable=d.get("is_creatable", None),
            is_hidden=d.get("is_hidden", None),
            is_loggable=d.get("is_loggable", None),
            is_required=d.get("is_required", None),
            is_secret=d.get("is_secret", None),
            is_updatable=d.get("is_updatable", None),
            name=d.get("name", None),
            oauth_stage=_enum(d, "oauth_stage", OptionSpecOauthStage),
            type=_enum(d, "type", OptionSpecOptionType),
        )


class OptionSpecOauthStage(Enum):
    """During the OAuth flow, specifies which stage the option should be displayed in the UI.
    OAUTH_STAGE_UNSPECIFIED is the default value for options unrelated to the OAuth flow.
    BEFORE_AUTHORIZATION_CODE corresponds to options necessary to initiate the OAuth process.
    BEFORE_ACCESS_TOKEN corresponds to options that are necessary to create a foreign connection,
    but that should be displayed after the authorization code has already been received."""

    BEFORE_ACCESS_TOKEN = "BEFORE_ACCESS_TOKEN"
    BEFORE_AUTHORIZATION_CODE = "BEFORE_AUTHORIZATION_CODE"


class OptionSpecOptionType(Enum):
    """Type of the option, we purposely follow JavaScript types so that the UI can map the options to
    JS types. https://www.w3schools.com/js/js_datatypes.asp Enum is a special case that it's just
    string with selections."""

    OPTION_BIGINT = "OPTION_BIGINT"
    OPTION_BOOLEAN = "OPTION_BOOLEAN"
    OPTION_ENUM = "OPTION_ENUM"
    OPTION_MULTILINE_STRING = "OPTION_MULTILINE_STRING"
    OPTION_NUMBER = "OPTION_NUMBER"
    OPTION_SERVICE_CREDENTIAL = "OPTION_SERVICE_CREDENTIAL"
    OPTION_STRING = "OPTION_STRING"
@dataclass
class PermissionsChange:
    """A set of privileges to add to and/or remove from a single principal."""

    add: Optional[List[Privilege]] = None
    """The set of privileges to add."""

    principal: Optional[str] = None
    """The principal whose privileges we are changing. Only one of principal or principal_id should be
    specified, never both at the same time."""

    remove: Optional[List[Privilege]] = None
    """The set of privileges to remove."""

    def as_dict(self) -> dict:
        """Serializes the PermissionsChange into a dictionary suitable for use as a JSON request body."""
        body = {}
        # Privilege enums serialize to their string values; empty lists are omitted.
        if self.add:
            body["add"] = [v.value for v in self.add]
        if self.principal is not None:
            body["principal"] = self.principal
        if self.remove:
            body["remove"] = [v.value for v in self.remove]
        return body

    def as_shallow_dict(self) -> dict:
        """Serializes the PermissionsChange into a shallow dictionary of its immediate attributes."""
        body = {}
        if self.add:
            body["add"] = self.add
        if self.principal is not None:
            body["principal"] = self.principal
        if self.remove:
            body["remove"] = self.remove
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> PermissionsChange:
        """Deserializes the PermissionsChange from a dictionary."""
        return cls(
            add=_repeated_enum(d, "add", Privilege),
            principal=d.get("principal", None),
            remove=_repeated_enum(d, "remove", Privilege),
        )


@dataclass
class PipelineProgress:
    """Progress information of the Online Table data synchronization pipeline."""

    estimated_completion_time_seconds: Optional[float] = None
    """The estimated time remaining to complete this update in seconds."""

    latest_version_currently_processing: Optional[int] = None
    """The source table Delta version that was last processed by the pipeline. The pipeline may not
    have completely processed this version yet."""

    sync_progress_completion: Optional[float] = None
    """The completion ratio of this update. This is a number between 0 and 1."""

    synced_row_count: Optional[int] = None
    """The number of rows that have been synced in this update."""

    total_row_count: Optional[int] = None
    """The total number of rows that need to be synced in this update. This number may be an estimate."""

    def as_dict(self) -> dict:
        """Serializes the PipelineProgress into a dictionary suitable for use as a JSON request body."""
        body = {}
        # `is not None` keeps legitimate zero values (e.g. 0 rows synced) in the payload.
        if self.estimated_completion_time_seconds is not None:
            body["estimated_completion_time_seconds"] = self.estimated_completion_time_seconds
        if self.latest_version_currently_processing is not None:
            body["latest_version_currently_processing"] = self.latest_version_currently_processing
        if self.sync_progress_completion is not None:
            body["sync_progress_completion"] = self.sync_progress_completion
        if self.synced_row_count is not None:
            body["synced_row_count"] = self.synced_row_count
        if self.total_row_count is not None:
            body["total_row_count"] = self.total_row_count
        return body

    def as_shallow_dict(self) -> dict:
        """Serializes the PipelineProgress into a shallow dictionary of its immediate attributes."""
        body = {}
        if self.estimated_completion_time_seconds is not None:
            body["estimated_completion_time_seconds"] = self.estimated_completion_time_seconds
        if self.latest_version_currently_processing is not None:
            body["latest_version_currently_processing"] = self.latest_version_currently_processing
        if self.sync_progress_completion is not None:
            body["sync_progress_completion"] = self.sync_progress_completion
        if self.synced_row_count is not None:
            body["synced_row_count"] = self.synced_row_count
        if self.total_row_count is not None:
            body["total_row_count"] = self.total_row_count
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> PipelineProgress:
        """Deserializes the PipelineProgress from a dictionary."""
        return cls(
            estimated_completion_time_seconds=d.get("estimated_completion_time_seconds", None),
            latest_version_currently_processing=d.get("latest_version_currently_processing", None),
            sync_progress_completion=d.get("sync_progress_completion", None),
            synced_row_count=d.get("synced_row_count", None),
            total_row_count=d.get("total_row_count", None),
        )
Determines the type of model-quality metrics that will be - computed.""" +class PrimaryKeyConstraint: + name: str + """The name of the constraint.""" - prediction_col: str - """Column that contains the output/prediction from the model.""" + child_columns: List[str] + """Column names for this constraint.""" - label_col: Optional[str] = None - """Optional column that contains the ground truth for the prediction.""" + rely: Optional[bool] = None + """True if the constraint is RELY, false or unset if NORELY.""" - prediction_proba_col: Optional[str] = None - """Optional column that contains the prediction probabilities for each class in a classification - problem type. The values in this column should be a map, mapping each class label to the - prediction probability for a given sample. The map should be of PySpark MapType().""" + timeseries_columns: Optional[List[str]] = None + """Column names that represent a timeseries.""" def as_dict(self) -> dict: - """Serializes the MonitorInferenceLog into a dictionary suitable for use as a JSON request body.""" + """Serializes the PrimaryKeyConstraint into a dictionary suitable for use as a JSON request body.""" body = {} - if self.granularities: - body["granularities"] = [v for v in self.granularities] - if self.label_col is not None: - body["label_col"] = self.label_col - if self.model_id_col is not None: - body["model_id_col"] = self.model_id_col - if self.prediction_col is not None: - body["prediction_col"] = self.prediction_col - if self.prediction_proba_col is not None: - body["prediction_proba_col"] = self.prediction_proba_col - if self.problem_type is not None: - body["problem_type"] = self.problem_type.value - if self.timestamp_col is not None: - body["timestamp_col"] = self.timestamp_col + if self.child_columns: + body["child_columns"] = [v for v in self.child_columns] + if self.name is not None: + body["name"] = self.name + if self.rely is not None: + body["rely"] = self.rely + if self.timeseries_columns: + 
body["timeseries_columns"] = [v for v in self.timeseries_columns] return body def as_shallow_dict(self) -> dict: - """Serializes the MonitorInferenceLog into a shallow dictionary of its immediate attributes.""" + """Serializes the PrimaryKeyConstraint into a shallow dictionary of its immediate attributes.""" body = {} - if self.granularities: - body["granularities"] = self.granularities - if self.label_col is not None: - body["label_col"] = self.label_col - if self.model_id_col is not None: - body["model_id_col"] = self.model_id_col - if self.prediction_col is not None: - body["prediction_col"] = self.prediction_col - if self.prediction_proba_col is not None: - body["prediction_proba_col"] = self.prediction_proba_col - if self.problem_type is not None: - body["problem_type"] = self.problem_type - if self.timestamp_col is not None: - body["timestamp_col"] = self.timestamp_col + if self.child_columns: + body["child_columns"] = self.child_columns + if self.name is not None: + body["name"] = self.name + if self.rely is not None: + body["rely"] = self.rely + if self.timeseries_columns: + body["timeseries_columns"] = self.timeseries_columns return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> MonitorInferenceLog: - """Deserializes the MonitorInferenceLog from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> PrimaryKeyConstraint: + """Deserializes the PrimaryKeyConstraint from a dictionary.""" return cls( - granularities=d.get("granularities", None), - label_col=d.get("label_col", None), - model_id_col=d.get("model_id_col", None), - prediction_col=d.get("prediction_col", None), - prediction_proba_col=d.get("prediction_proba_col", None), - problem_type=_enum(d, "problem_type", MonitorInferenceLogProblemType), - timestamp_col=d.get("timestamp_col", None), + child_columns=d.get("child_columns", None), + name=d.get("name", None), + rely=d.get("rely", None), + timeseries_columns=d.get("timeseries_columns", None), ) -class 
MonitorInferenceLogProblemType(Enum): - """Problem type the model aims to solve. Determines the type of model-quality metrics that will be - computed.""" - - PROBLEM_TYPE_CLASSIFICATION = "PROBLEM_TYPE_CLASSIFICATION" - PROBLEM_TYPE_REGRESSION = "PROBLEM_TYPE_REGRESSION" - - -@dataclass -class MonitorInfo: - table_name: str - """The full name of the table to monitor. Format: __catalog_name__.__schema_name__.__table_name__.""" - - status: MonitorInfoStatus - - monitor_version: str - """The version of the monitor config (e.g. 1,2,3). If negative, the monitor may be corrupted.""" - - profile_metrics_table_name: str - """The full name of the profile metrics table. Format: - __catalog_name__.__schema_name__.__table_name__.""" - - drift_metrics_table_name: str - """The full name of the drift metrics table. Format: - __catalog_name__.__schema_name__.__table_name__.""" - - assets_dir: Optional[str] = None - """The directory to store monitoring assets (e.g. dashboard, metric tables).""" - - baseline_table_name: Optional[str] = None - """Name of the baseline table from which drift metrics are computed from. Columns in the monitored - table should also be present in the baseline table.""" - - custom_metrics: Optional[List[MonitorMetric]] = None - """Custom metrics to compute on the monitored table. These can be aggregate metrics, derived - metrics (from already computed aggregate metrics), or drift metrics (comparing metrics across - time windows).""" - - dashboard_id: Optional[str] = None - """Id of dashboard that visualizes the computed metrics. 
This can be empty if the monitor is in - PENDING state.""" - - data_classification_config: Optional[MonitorDataClassificationConfig] = None - """The data classification config for the monitor.""" +class Privilege(Enum): - inference_log: Optional[MonitorInferenceLog] = None - """Configuration for monitoring inference logs.""" + ACCESS = "ACCESS" + ALL_PRIVILEGES = "ALL_PRIVILEGES" + APPLY_TAG = "APPLY_TAG" + BROWSE = "BROWSE" + CREATE = "CREATE" + CREATE_CATALOG = "CREATE_CATALOG" + CREATE_CLEAN_ROOM = "CREATE_CLEAN_ROOM" + CREATE_CONNECTION = "CREATE_CONNECTION" + CREATE_EXTERNAL_LOCATION = "CREATE_EXTERNAL_LOCATION" + CREATE_EXTERNAL_TABLE = "CREATE_EXTERNAL_TABLE" + CREATE_EXTERNAL_VOLUME = "CREATE_EXTERNAL_VOLUME" + CREATE_FOREIGN_CATALOG = "CREATE_FOREIGN_CATALOG" + CREATE_FOREIGN_SECURABLE = "CREATE_FOREIGN_SECURABLE" + CREATE_FUNCTION = "CREATE_FUNCTION" + CREATE_MANAGED_STORAGE = "CREATE_MANAGED_STORAGE" + CREATE_MATERIALIZED_VIEW = "CREATE_MATERIALIZED_VIEW" + CREATE_MODEL = "CREATE_MODEL" + CREATE_PROVIDER = "CREATE_PROVIDER" + CREATE_RECIPIENT = "CREATE_RECIPIENT" + CREATE_SCHEMA = "CREATE_SCHEMA" + CREATE_SERVICE_CREDENTIAL = "CREATE_SERVICE_CREDENTIAL" + CREATE_SHARE = "CREATE_SHARE" + CREATE_STORAGE_CREDENTIAL = "CREATE_STORAGE_CREDENTIAL" + CREATE_TABLE = "CREATE_TABLE" + CREATE_VIEW = "CREATE_VIEW" + CREATE_VOLUME = "CREATE_VOLUME" + EXECUTE = "EXECUTE" + EXECUTE_CLEAN_ROOM_TASK = "EXECUTE_CLEAN_ROOM_TASK" + MANAGE = "MANAGE" + MANAGE_ALLOWLIST = "MANAGE_ALLOWLIST" + MODIFY = "MODIFY" + MODIFY_CLEAN_ROOM = "MODIFY_CLEAN_ROOM" + READ_FILES = "READ_FILES" + READ_PRIVATE_FILES = "READ_PRIVATE_FILES" + READ_VOLUME = "READ_VOLUME" + REFRESH = "REFRESH" + SELECT = "SELECT" + SET_SHARE_PERMISSION = "SET_SHARE_PERMISSION" + USAGE = "USAGE" + USE_CATALOG = "USE_CATALOG" + USE_CONNECTION = "USE_CONNECTION" + USE_MARKETPLACE_ASSETS = "USE_MARKETPLACE_ASSETS" + USE_PROVIDER = "USE_PROVIDER" + USE_RECIPIENT = "USE_RECIPIENT" + USE_SCHEMA = "USE_SCHEMA" + 
USE_SHARE = "USE_SHARE" + WRITE_FILES = "WRITE_FILES" + WRITE_PRIVATE_FILES = "WRITE_PRIVATE_FILES" + WRITE_VOLUME = "WRITE_VOLUME" - latest_monitor_failure_msg: Optional[str] = None - """The latest failure message of the monitor (if any).""" - notifications: Optional[MonitorNotifications] = None - """The notification settings for the monitor.""" - - output_schema_name: Optional[str] = None - """Schema where output metric tables are created.""" - - schedule: Optional[MonitorCronSchedule] = None - """The schedule for automatically updating and refreshing metric tables.""" - - slicing_exprs: Optional[List[str]] = None - """List of column expressions to slice data with for targeted analysis. The data is grouped by each - expression independently, resulting in a separate slice for each predicate and its complements. - For high-cardinality columns, only the top 100 unique values by frequency will generate slices.""" - - snapshot: Optional[MonitorSnapshot] = None - """Configuration for monitoring snapshot tables.""" - - time_series: Optional[MonitorTimeSeries] = None - """Configuration for monitoring time series tables.""" - - def as_dict(self) -> dict: - """Serializes the MonitorInfo into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.assets_dir is not None: - body["assets_dir"] = self.assets_dir - if self.baseline_table_name is not None: - body["baseline_table_name"] = self.baseline_table_name - if self.custom_metrics: - body["custom_metrics"] = [v.as_dict() for v in self.custom_metrics] - if self.dashboard_id is not None: - body["dashboard_id"] = self.dashboard_id - if self.data_classification_config: - body["data_classification_config"] = self.data_classification_config.as_dict() - if self.drift_metrics_table_name is not None: - body["drift_metrics_table_name"] = self.drift_metrics_table_name - if self.inference_log: - body["inference_log"] = self.inference_log.as_dict() - if self.latest_monitor_failure_msg is not None: - 
body["latest_monitor_failure_msg"] = self.latest_monitor_failure_msg - if self.monitor_version is not None: - body["monitor_version"] = self.monitor_version - if self.notifications: - body["notifications"] = self.notifications.as_dict() - if self.output_schema_name is not None: - body["output_schema_name"] = self.output_schema_name - if self.profile_metrics_table_name is not None: - body["profile_metrics_table_name"] = self.profile_metrics_table_name - if self.schedule: - body["schedule"] = self.schedule.as_dict() - if self.slicing_exprs: - body["slicing_exprs"] = [v for v in self.slicing_exprs] - if self.snapshot: - body["snapshot"] = self.snapshot.as_dict() - if self.status is not None: - body["status"] = self.status.value - if self.table_name is not None: - body["table_name"] = self.table_name - if self.time_series: - body["time_series"] = self.time_series.as_dict() - return body - - def as_shallow_dict(self) -> dict: - """Serializes the MonitorInfo into a shallow dictionary of its immediate attributes.""" - body = {} - if self.assets_dir is not None: - body["assets_dir"] = self.assets_dir - if self.baseline_table_name is not None: - body["baseline_table_name"] = self.baseline_table_name - if self.custom_metrics: - body["custom_metrics"] = self.custom_metrics - if self.dashboard_id is not None: - body["dashboard_id"] = self.dashboard_id - if self.data_classification_config: - body["data_classification_config"] = self.data_classification_config - if self.drift_metrics_table_name is not None: - body["drift_metrics_table_name"] = self.drift_metrics_table_name - if self.inference_log: - body["inference_log"] = self.inference_log - if self.latest_monitor_failure_msg is not None: - body["latest_monitor_failure_msg"] = self.latest_monitor_failure_msg - if self.monitor_version is not None: - body["monitor_version"] = self.monitor_version - if self.notifications: - body["notifications"] = self.notifications - if self.output_schema_name is not None: - 
body["output_schema_name"] = self.output_schema_name - if self.profile_metrics_table_name is not None: - body["profile_metrics_table_name"] = self.profile_metrics_table_name - if self.schedule: - body["schedule"] = self.schedule - if self.slicing_exprs: - body["slicing_exprs"] = self.slicing_exprs - if self.snapshot: - body["snapshot"] = self.snapshot - if self.status is not None: - body["status"] = self.status - if self.table_name is not None: - body["table_name"] = self.table_name - if self.time_series: - body["time_series"] = self.time_series - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> MonitorInfo: - """Deserializes the MonitorInfo from a dictionary.""" - return cls( - assets_dir=d.get("assets_dir", None), - baseline_table_name=d.get("baseline_table_name", None), - custom_metrics=_repeated_dict(d, "custom_metrics", MonitorMetric), - dashboard_id=d.get("dashboard_id", None), - data_classification_config=_from_dict(d, "data_classification_config", MonitorDataClassificationConfig), - drift_metrics_table_name=d.get("drift_metrics_table_name", None), - inference_log=_from_dict(d, "inference_log", MonitorInferenceLog), - latest_monitor_failure_msg=d.get("latest_monitor_failure_msg", None), - monitor_version=d.get("monitor_version", None), - notifications=_from_dict(d, "notifications", MonitorNotifications), - output_schema_name=d.get("output_schema_name", None), - profile_metrics_table_name=d.get("profile_metrics_table_name", None), - schedule=_from_dict(d, "schedule", MonitorCronSchedule), - slicing_exprs=d.get("slicing_exprs", None), - snapshot=_from_dict(d, "snapshot", MonitorSnapshot), - status=_enum(d, "status", MonitorInfoStatus), - table_name=d.get("table_name", None), - time_series=_from_dict(d, "time_series", MonitorTimeSeries), - ) - - -class MonitorInfoStatus(Enum): - """The status of the monitor.""" - - MONITOR_STATUS_ACTIVE = "MONITOR_STATUS_ACTIVE" - MONITOR_STATUS_DELETE_PENDING = "MONITOR_STATUS_DELETE_PENDING" - 
MONITOR_STATUS_ERROR = "MONITOR_STATUS_ERROR" - MONITOR_STATUS_FAILED = "MONITOR_STATUS_FAILED" - MONITOR_STATUS_PENDING = "MONITOR_STATUS_PENDING" - - -@dataclass -class MonitorMetric: - name: str - """Name of the metric in the output tables.""" - - definition: str - """Jinja template for a SQL expression that specifies how to compute the metric. See [create metric - definition]. - - [create metric definition]: https://docs.databricks.com/en/lakehouse-monitoring/custom-metrics.html#create-definition""" - - input_columns: List[str] - """A list of column names in the input table the metric should be computed for. Can use - ``":table"`` to indicate that the metric needs information from multiple columns.""" - - output_data_type: str - """The output type of the custom metric.""" - - type: MonitorMetricType - """Can only be one of ``"CUSTOM_METRIC_TYPE_AGGREGATE"``, ``"CUSTOM_METRIC_TYPE_DERIVED"``, or - ``"CUSTOM_METRIC_TYPE_DRIFT"``. The ``"CUSTOM_METRIC_TYPE_AGGREGATE"`` and - ``"CUSTOM_METRIC_TYPE_DERIVED"`` metrics are computed on a single table, whereas the - ``"CUSTOM_METRIC_TYPE_DRIFT"`` compare metrics across baseline and input table, or across the - two consecutive time windows. 
- CUSTOM_METRIC_TYPE_AGGREGATE: only depend on the existing - columns in your table - CUSTOM_METRIC_TYPE_DERIVED: depend on previously computed aggregate - metrics - CUSTOM_METRIC_TYPE_DRIFT: depend on previously computed aggregate or derived metrics""" - - def as_dict(self) -> dict: - """Serializes the MonitorMetric into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.definition is not None: - body["definition"] = self.definition - if self.input_columns: - body["input_columns"] = [v for v in self.input_columns] - if self.name is not None: - body["name"] = self.name - if self.output_data_type is not None: - body["output_data_type"] = self.output_data_type - if self.type is not None: - body["type"] = self.type.value - return body - - def as_shallow_dict(self) -> dict: - """Serializes the MonitorMetric into a shallow dictionary of its immediate attributes.""" - body = {} - if self.definition is not None: - body["definition"] = self.definition - if self.input_columns: - body["input_columns"] = self.input_columns - if self.name is not None: - body["name"] = self.name - if self.output_data_type is not None: - body["output_data_type"] = self.output_data_type - if self.type is not None: - body["type"] = self.type - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> MonitorMetric: - """Deserializes the MonitorMetric from a dictionary.""" - return cls( - definition=d.get("definition", None), - input_columns=d.get("input_columns", None), - name=d.get("name", None), - output_data_type=d.get("output_data_type", None), - type=_enum(d, "type", MonitorMetricType), - ) - - -class MonitorMetricType(Enum): - """Can only be one of ``"CUSTOM_METRIC_TYPE_AGGREGATE"``, ``"CUSTOM_METRIC_TYPE_DERIVED"``, or - ``"CUSTOM_METRIC_TYPE_DRIFT"``. 
The ``"CUSTOM_METRIC_TYPE_AGGREGATE"`` and - ``"CUSTOM_METRIC_TYPE_DERIVED"`` metrics are computed on a single table, whereas the - ``"CUSTOM_METRIC_TYPE_DRIFT"`` compare metrics across baseline and input table, or across the - two consecutive time windows. - CUSTOM_METRIC_TYPE_AGGREGATE: only depend on the existing - columns in your table - CUSTOM_METRIC_TYPE_DERIVED: depend on previously computed aggregate - metrics - CUSTOM_METRIC_TYPE_DRIFT: depend on previously computed aggregate or derived metrics""" - - CUSTOM_METRIC_TYPE_AGGREGATE = "CUSTOM_METRIC_TYPE_AGGREGATE" - CUSTOM_METRIC_TYPE_DERIVED = "CUSTOM_METRIC_TYPE_DERIVED" - CUSTOM_METRIC_TYPE_DRIFT = "CUSTOM_METRIC_TYPE_DRIFT" - - -@dataclass -class MonitorNotifications: - on_failure: Optional[MonitorDestination] = None - """Who to send notifications to on monitor failure.""" - - on_new_classification_tag_detected: Optional[MonitorDestination] = None - """Who to send notifications to when new data classification tags are detected.""" - - def as_dict(self) -> dict: - """Serializes the MonitorNotifications into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.on_failure: - body["on_failure"] = self.on_failure.as_dict() - if self.on_new_classification_tag_detected: - body["on_new_classification_tag_detected"] = self.on_new_classification_tag_detected.as_dict() - return body - - def as_shallow_dict(self) -> dict: - """Serializes the MonitorNotifications into a shallow dictionary of its immediate attributes.""" - body = {} - if self.on_failure: - body["on_failure"] = self.on_failure - if self.on_new_classification_tag_detected: - body["on_new_classification_tag_detected"] = self.on_new_classification_tag_detected - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> MonitorNotifications: - """Deserializes the MonitorNotifications from a dictionary.""" - return cls( - on_failure=_from_dict(d, "on_failure", MonitorDestination), - 
on_new_classification_tag_detected=_from_dict(d, "on_new_classification_tag_detected", MonitorDestination), - ) - - -@dataclass -class MonitorRefreshInfo: - refresh_id: int - """Unique id of the refresh operation.""" - - state: MonitorRefreshInfoState - """The current state of the refresh.""" - - start_time_ms: int - """Time at which refresh operation was initiated (milliseconds since 1/1/1970 UTC).""" - - end_time_ms: Optional[int] = None - """Time at which refresh operation completed (milliseconds since 1/1/1970 UTC).""" - - message: Optional[str] = None - """An optional message to give insight into the current state of the job (e.g. FAILURE messages).""" - - trigger: Optional[MonitorRefreshInfoTrigger] = None - """The method by which the refresh was triggered.""" - - def as_dict(self) -> dict: - """Serializes the MonitorRefreshInfo into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.end_time_ms is not None: - body["end_time_ms"] = self.end_time_ms - if self.message is not None: - body["message"] = self.message - if self.refresh_id is not None: - body["refresh_id"] = self.refresh_id - if self.start_time_ms is not None: - body["start_time_ms"] = self.start_time_ms - if self.state is not None: - body["state"] = self.state.value - if self.trigger is not None: - body["trigger"] = self.trigger.value - return body - - def as_shallow_dict(self) -> dict: - """Serializes the MonitorRefreshInfo into a shallow dictionary of its immediate attributes.""" - body = {} - if self.end_time_ms is not None: - body["end_time_ms"] = self.end_time_ms - if self.message is not None: - body["message"] = self.message - if self.refresh_id is not None: - body["refresh_id"] = self.refresh_id - if self.start_time_ms is not None: - body["start_time_ms"] = self.start_time_ms - if self.state is not None: - body["state"] = self.state - if self.trigger is not None: - body["trigger"] = self.trigger - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) 
-> MonitorRefreshInfo: - """Deserializes the MonitorRefreshInfo from a dictionary.""" - return cls( - end_time_ms=d.get("end_time_ms", None), - message=d.get("message", None), - refresh_id=d.get("refresh_id", None), - start_time_ms=d.get("start_time_ms", None), - state=_enum(d, "state", MonitorRefreshInfoState), - trigger=_enum(d, "trigger", MonitorRefreshInfoTrigger), - ) - - -class MonitorRefreshInfoState(Enum): - """The current state of the refresh.""" - - CANCELED = "CANCELED" - FAILED = "FAILED" - PENDING = "PENDING" - RUNNING = "RUNNING" - SUCCESS = "SUCCESS" - - -class MonitorRefreshInfoTrigger(Enum): - """The method by which the refresh was triggered.""" - - MANUAL = "MANUAL" - SCHEDULE = "SCHEDULE" - - -@dataclass -class MonitorRefreshListResponse: - refreshes: Optional[List[MonitorRefreshInfo]] = None - """List of refreshes.""" - - def as_dict(self) -> dict: - """Serializes the MonitorRefreshListResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.refreshes: - body["refreshes"] = [v.as_dict() for v in self.refreshes] - return body - - def as_shallow_dict(self) -> dict: - """Serializes the MonitorRefreshListResponse into a shallow dictionary of its immediate attributes.""" - body = {} - if self.refreshes: - body["refreshes"] = self.refreshes - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> MonitorRefreshListResponse: - """Deserializes the MonitorRefreshListResponse from a dictionary.""" - return cls(refreshes=_repeated_dict(d, "refreshes", MonitorRefreshInfo)) - - -@dataclass -class MonitorSnapshot: - def as_dict(self) -> dict: - """Serializes the MonitorSnapshot into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the MonitorSnapshot into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> MonitorSnapshot: - 
"""Deserializes the MonitorSnapshot from a dictionary.""" - return cls() - - -@dataclass -class MonitorTimeSeries: - timestamp_col: str - """Column that contains the timestamps of requests. The column must be one of the following: - A - ``TimestampType`` column - A column whose values can be converted to timestamps through the - pyspark ``to_timestamp`` [function]. - - [function]: https://spark.apache.org/docs/latest/api/python/reference/pyspark.sql/api/pyspark.sql.functions.to_timestamp.html""" - - granularities: List[str] - """Granularities for aggregating data into time windows based on their timestamp. Currently the - following static granularities are supported: {``"5 minutes"``, ``"30 minutes"``, ``"1 hour"``, - ``"1 day"``, ``" week(s)"``, ``"1 month"``, ``"1 year"``}.""" - - def as_dict(self) -> dict: - """Serializes the MonitorTimeSeries into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.granularities: - body["granularities"] = [v for v in self.granularities] - if self.timestamp_col is not None: - body["timestamp_col"] = self.timestamp_col - return body - - def as_shallow_dict(self) -> dict: - """Serializes the MonitorTimeSeries into a shallow dictionary of its immediate attributes.""" - body = {} - if self.granularities: - body["granularities"] = self.granularities - if self.timestamp_col is not None: - body["timestamp_col"] = self.timestamp_col - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> MonitorTimeSeries: - """Deserializes the MonitorTimeSeries from a dictionary.""" - return cls(granularities=d.get("granularities", None), timestamp_col=d.get("timestamp_col", None)) - - -@dataclass -class NamedTableConstraint: - name: str - """The name of the constraint.""" - - def as_dict(self) -> dict: - """Serializes the NamedTableConstraint into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.name is not None: - body["name"] = self.name - return body - - def 
as_shallow_dict(self) -> dict: - """Serializes the NamedTableConstraint into a shallow dictionary of its immediate attributes.""" - body = {} - if self.name is not None: - body["name"] = self.name - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> NamedTableConstraint: - """Deserializes the NamedTableConstraint from a dictionary.""" - return cls(name=d.get("name", None)) - - -@dataclass -class OnlineTable: - """Online Table information.""" - - name: Optional[str] = None - """Full three-part (catalog, schema, table) name of the table.""" - - spec: Optional[OnlineTableSpec] = None - """Specification of the online table.""" - - status: Optional[OnlineTableStatus] = None - """Online Table data synchronization status""" - - table_serving_url: Optional[str] = None - """Data serving REST API URL for this table""" - - unity_catalog_provisioning_state: Optional[ProvisioningInfoState] = None - """The provisioning state of the online table entity in Unity Catalog. This is distinct from the - state of the data synchronization pipeline (i.e. 
the table may be in "ACTIVE" but the pipeline - may be in "PROVISIONING" as it runs asynchronously).""" - - def as_dict(self) -> dict: - """Serializes the OnlineTable into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.name is not None: - body["name"] = self.name - if self.spec: - body["spec"] = self.spec.as_dict() - if self.status: - body["status"] = self.status.as_dict() - if self.table_serving_url is not None: - body["table_serving_url"] = self.table_serving_url - if self.unity_catalog_provisioning_state is not None: - body["unity_catalog_provisioning_state"] = self.unity_catalog_provisioning_state.value - return body - - def as_shallow_dict(self) -> dict: - """Serializes the OnlineTable into a shallow dictionary of its immediate attributes.""" - body = {} - if self.name is not None: - body["name"] = self.name - if self.spec: - body["spec"] = self.spec - if self.status: - body["status"] = self.status - if self.table_serving_url is not None: - body["table_serving_url"] = self.table_serving_url - if self.unity_catalog_provisioning_state is not None: - body["unity_catalog_provisioning_state"] = self.unity_catalog_provisioning_state - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> OnlineTable: - """Deserializes the OnlineTable from a dictionary.""" - return cls( - name=d.get("name", None), - spec=_from_dict(d, "spec", OnlineTableSpec), - status=_from_dict(d, "status", OnlineTableStatus), - table_serving_url=d.get("table_serving_url", None), - unity_catalog_provisioning_state=_enum(d, "unity_catalog_provisioning_state", ProvisioningInfoState), - ) - - -@dataclass -class OnlineTableSpec: - """Specification of an online table.""" - - perform_full_copy: Optional[bool] = None - """Whether to create a full-copy pipeline -- a pipeline that stops after creates a full copy of the - source table upon initialization and does not process any change data feeds (CDFs) afterwards. 
- The pipeline can still be manually triggered afterwards, but it always perform a full copy of - the source table and there are no incremental updates. This mode is useful for syncing views or - tables without CDFs to online tables. Note that the full-copy pipeline only supports "triggered" - scheduling policy.""" - - pipeline_id: Optional[str] = None - """ID of the associated pipeline. Generated by the server - cannot be set by the caller.""" - - primary_key_columns: Optional[List[str]] = None - """Primary Key columns to be used for data insert/update in the destination.""" - - run_continuously: Optional[OnlineTableSpecContinuousSchedulingPolicy] = None - """Pipeline runs continuously after generating the initial data.""" - - run_triggered: Optional[OnlineTableSpecTriggeredSchedulingPolicy] = None - """Pipeline stops after generating the initial data and can be triggered later (manually, through a - cron job or through data triggers)""" - - source_table_full_name: Optional[str] = None - """Three-part (catalog, schema, table) name of the source Delta table.""" - - timeseries_key: Optional[str] = None - """Time series key to deduplicate (tie-break) rows with the same primary key.""" - - def as_dict(self) -> dict: - """Serializes the OnlineTableSpec into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.perform_full_copy is not None: - body["perform_full_copy"] = self.perform_full_copy - if self.pipeline_id is not None: - body["pipeline_id"] = self.pipeline_id - if self.primary_key_columns: - body["primary_key_columns"] = [v for v in self.primary_key_columns] - if self.run_continuously: - body["run_continuously"] = self.run_continuously.as_dict() - if self.run_triggered: - body["run_triggered"] = self.run_triggered.as_dict() - if self.source_table_full_name is not None: - body["source_table_full_name"] = self.source_table_full_name - if self.timeseries_key is not None: - body["timeseries_key"] = self.timeseries_key - return body - - def 
as_shallow_dict(self) -> dict: - """Serializes the OnlineTableSpec into a shallow dictionary of its immediate attributes.""" - body = {} - if self.perform_full_copy is not None: - body["perform_full_copy"] = self.perform_full_copy - if self.pipeline_id is not None: - body["pipeline_id"] = self.pipeline_id - if self.primary_key_columns: - body["primary_key_columns"] = self.primary_key_columns - if self.run_continuously: - body["run_continuously"] = self.run_continuously - if self.run_triggered: - body["run_triggered"] = self.run_triggered - if self.source_table_full_name is not None: - body["source_table_full_name"] = self.source_table_full_name - if self.timeseries_key is not None: - body["timeseries_key"] = self.timeseries_key - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> OnlineTableSpec: - """Deserializes the OnlineTableSpec from a dictionary.""" - return cls( - perform_full_copy=d.get("perform_full_copy", None), - pipeline_id=d.get("pipeline_id", None), - primary_key_columns=d.get("primary_key_columns", None), - run_continuously=_from_dict(d, "run_continuously", OnlineTableSpecContinuousSchedulingPolicy), - run_triggered=_from_dict(d, "run_triggered", OnlineTableSpecTriggeredSchedulingPolicy), - source_table_full_name=d.get("source_table_full_name", None), - timeseries_key=d.get("timeseries_key", None), - ) - - -@dataclass -class OnlineTableSpecContinuousSchedulingPolicy: - def as_dict(self) -> dict: - """Serializes the OnlineTableSpecContinuousSchedulingPolicy into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the OnlineTableSpecContinuousSchedulingPolicy into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> OnlineTableSpecContinuousSchedulingPolicy: - """Deserializes the OnlineTableSpecContinuousSchedulingPolicy from a dictionary.""" - return cls() - - 
-@dataclass -class OnlineTableSpecTriggeredSchedulingPolicy: - def as_dict(self) -> dict: - """Serializes the OnlineTableSpecTriggeredSchedulingPolicy into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the OnlineTableSpecTriggeredSchedulingPolicy into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> OnlineTableSpecTriggeredSchedulingPolicy: - """Deserializes the OnlineTableSpecTriggeredSchedulingPolicy from a dictionary.""" - return cls() - - -class OnlineTableState(Enum): - """The state of an online table.""" - - OFFLINE = "OFFLINE" - OFFLINE_FAILED = "OFFLINE_FAILED" - ONLINE = "ONLINE" - ONLINE_CONTINUOUS_UPDATE = "ONLINE_CONTINUOUS_UPDATE" - ONLINE_NO_PENDING_UPDATE = "ONLINE_NO_PENDING_UPDATE" - ONLINE_PIPELINE_FAILED = "ONLINE_PIPELINE_FAILED" - ONLINE_TRIGGERED_UPDATE = "ONLINE_TRIGGERED_UPDATE" - ONLINE_UPDATING_PIPELINE_RESOURCES = "ONLINE_UPDATING_PIPELINE_RESOURCES" - PROVISIONING = "PROVISIONING" - PROVISIONING_INITIAL_SNAPSHOT = "PROVISIONING_INITIAL_SNAPSHOT" - PROVISIONING_PIPELINE_RESOURCES = "PROVISIONING_PIPELINE_RESOURCES" - - -@dataclass -class OnlineTableStatus: - """Status of an online table.""" - - continuous_update_status: Optional[ContinuousUpdateStatus] = None - - detailed_state: Optional[OnlineTableState] = None - """The state of the online table.""" - - failed_status: Optional[FailedStatus] = None - - message: Optional[str] = None - """A text description of the current state of the online table.""" - - provisioning_status: Optional[ProvisioningStatus] = None - - triggered_update_status: Optional[TriggeredUpdateStatus] = None - - def as_dict(self) -> dict: - """Serializes the OnlineTableStatus into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.continuous_update_status: - body["continuous_update_status"] = 
self.continuous_update_status.as_dict() - if self.detailed_state is not None: - body["detailed_state"] = self.detailed_state.value - if self.failed_status: - body["failed_status"] = self.failed_status.as_dict() - if self.message is not None: - body["message"] = self.message - if self.provisioning_status: - body["provisioning_status"] = self.provisioning_status.as_dict() - if self.triggered_update_status: - body["triggered_update_status"] = self.triggered_update_status.as_dict() - return body - - def as_shallow_dict(self) -> dict: - """Serializes the OnlineTableStatus into a shallow dictionary of its immediate attributes.""" - body = {} - if self.continuous_update_status: - body["continuous_update_status"] = self.continuous_update_status - if self.detailed_state is not None: - body["detailed_state"] = self.detailed_state - if self.failed_status: - body["failed_status"] = self.failed_status - if self.message is not None: - body["message"] = self.message - if self.provisioning_status: - body["provisioning_status"] = self.provisioning_status - if self.triggered_update_status: - body["triggered_update_status"] = self.triggered_update_status - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> OnlineTableStatus: - """Deserializes the OnlineTableStatus from a dictionary.""" - return cls( - continuous_update_status=_from_dict(d, "continuous_update_status", ContinuousUpdateStatus), - detailed_state=_enum(d, "detailed_state", OnlineTableState), - failed_status=_from_dict(d, "failed_status", FailedStatus), - message=d.get("message", None), - provisioning_status=_from_dict(d, "provisioning_status", ProvisioningStatus), - triggered_update_status=_from_dict(d, "triggered_update_status", TriggeredUpdateStatus), - ) - - -@dataclass -class OptionSpec: - """Spec of an allowed option on a securable kind and its attributes. 
This is mostly used by UI to - provide user friendly hints and descriptions in order to facilitate the securable creation - process.""" - - allowed_values: Optional[List[str]] = None - """For drop down / radio button selections, UI will want to know the possible input values, it can - also be used by other option types to limit input selections.""" - - default_value: Optional[str] = None - """The default value of the option, for example, value '443' for 'port' option.""" - - description: Optional[str] = None - """A concise user facing description of what the input value of this option should look like.""" - - hint: Optional[str] = None - """The hint is used on the UI to suggest what the input value can possibly be like, for example: - example.com for 'host' option. Unlike default value, it will not be applied automatically - without user input.""" - - is_copiable: Optional[bool] = None - """Indicates whether an option should be displayed with copy button on the UI.""" - - is_creatable: Optional[bool] = None - """Indicates whether an option can be provided by users in the create/update path of an entity.""" - - is_hidden: Optional[bool] = None - """Is the option value not user settable and is thus not shown on the UI.""" - - is_loggable: Optional[bool] = None - """Specifies whether this option is safe to log, i.e. 
no sensitive information.""" - - is_required: Optional[bool] = None - """Is the option required.""" - - is_secret: Optional[bool] = None - """Is the option value considered secret and thus redacted on the UI.""" - - is_updatable: Optional[bool] = None - """Is the option updatable by users.""" - - name: Optional[str] = None - """The unique name of the option.""" - - oauth_stage: Optional[OptionSpecOauthStage] = None - """Specifies when the option value is displayed on the UI within the OAuth flow.""" - - type: Optional[OptionSpecOptionType] = None - """The type of the option.""" - - def as_dict(self) -> dict: - """Serializes the OptionSpec into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.allowed_values: - body["allowed_values"] = [v for v in self.allowed_values] - if self.default_value is not None: - body["default_value"] = self.default_value - if self.description is not None: - body["description"] = self.description - if self.hint is not None: - body["hint"] = self.hint - if self.is_copiable is not None: - body["is_copiable"] = self.is_copiable - if self.is_creatable is not None: - body["is_creatable"] = self.is_creatable - if self.is_hidden is not None: - body["is_hidden"] = self.is_hidden - if self.is_loggable is not None: - body["is_loggable"] = self.is_loggable - if self.is_required is not None: - body["is_required"] = self.is_required - if self.is_secret is not None: - body["is_secret"] = self.is_secret - if self.is_updatable is not None: - body["is_updatable"] = self.is_updatable - if self.name is not None: - body["name"] = self.name - if self.oauth_stage is not None: - body["oauth_stage"] = self.oauth_stage.value - if self.type is not None: - body["type"] = self.type.value - return body - - def as_shallow_dict(self) -> dict: - """Serializes the OptionSpec into a shallow dictionary of its immediate attributes.""" - body = {} - if self.allowed_values: - body["allowed_values"] = self.allowed_values - if self.default_value is 
not None: - body["default_value"] = self.default_value - if self.description is not None: - body["description"] = self.description - if self.hint is not None: - body["hint"] = self.hint - if self.is_copiable is not None: - body["is_copiable"] = self.is_copiable - if self.is_creatable is not None: - body["is_creatable"] = self.is_creatable - if self.is_hidden is not None: - body["is_hidden"] = self.is_hidden - if self.is_loggable is not None: - body["is_loggable"] = self.is_loggable - if self.is_required is not None: - body["is_required"] = self.is_required - if self.is_secret is not None: - body["is_secret"] = self.is_secret - if self.is_updatable is not None: - body["is_updatable"] = self.is_updatable - if self.name is not None: - body["name"] = self.name - if self.oauth_stage is not None: - body["oauth_stage"] = self.oauth_stage - if self.type is not None: - body["type"] = self.type - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> OptionSpec: - """Deserializes the OptionSpec from a dictionary.""" - return cls( - allowed_values=d.get("allowed_values", None), - default_value=d.get("default_value", None), - description=d.get("description", None), - hint=d.get("hint", None), - is_copiable=d.get("is_copiable", None), - is_creatable=d.get("is_creatable", None), - is_hidden=d.get("is_hidden", None), - is_loggable=d.get("is_loggable", None), - is_required=d.get("is_required", None), - is_secret=d.get("is_secret", None), - is_updatable=d.get("is_updatable", None), - name=d.get("name", None), - oauth_stage=_enum(d, "oauth_stage", OptionSpecOauthStage), - type=_enum(d, "type", OptionSpecOptionType), - ) - - -class OptionSpecOauthStage(Enum): - """During the OAuth flow, specifies which stage the option should be displayed in the UI. - OAUTH_STAGE_UNSPECIFIED is the default value for options unrelated to the OAuth flow. - BEFORE_AUTHORIZATION_CODE corresponds to options necessary to initiate the OAuth process. 
- BEFORE_ACCESS_TOKEN corresponds to options that are necessary to create a foreign connection, - but that should be displayed after the authorization code has already been received.""" - - BEFORE_ACCESS_TOKEN = "BEFORE_ACCESS_TOKEN" - BEFORE_AUTHORIZATION_CODE = "BEFORE_AUTHORIZATION_CODE" - - -class OptionSpecOptionType(Enum): - """Type of the option, we purposely follow JavaScript types so that the UI can map the options to - JS types. https://www.w3schools.com/js/js_datatypes.asp Enum is a special case that it's just - string with selections.""" - - OPTION_BIGINT = "OPTION_BIGINT" - OPTION_BOOLEAN = "OPTION_BOOLEAN" - OPTION_ENUM = "OPTION_ENUM" - OPTION_MULTILINE_STRING = "OPTION_MULTILINE_STRING" - OPTION_NUMBER = "OPTION_NUMBER" - OPTION_SERVICE_CREDENTIAL = "OPTION_SERVICE_CREDENTIAL" - OPTION_STRING = "OPTION_STRING" - - -@dataclass -class PermissionsChange: - add: Optional[List[Privilege]] = None - """The set of privileges to add.""" - - principal: Optional[str] = None - """The principal whose privileges we are changing.""" - - remove: Optional[List[Privilege]] = None - """The set of privileges to remove.""" - - def as_dict(self) -> dict: - """Serializes the PermissionsChange into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.add: - body["add"] = [v.value for v in self.add] - if self.principal is not None: - body["principal"] = self.principal - if self.remove: - body["remove"] = [v.value for v in self.remove] - return body - - def as_shallow_dict(self) -> dict: - """Serializes the PermissionsChange into a shallow dictionary of its immediate attributes.""" - body = {} - if self.add: - body["add"] = self.add - if self.principal is not None: - body["principal"] = self.principal - if self.remove: - body["remove"] = self.remove - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> PermissionsChange: - """Deserializes the PermissionsChange from a dictionary.""" - return cls( - add=_repeated_enum(d, "add", 
Privilege), - principal=d.get("principal", None), - remove=_repeated_enum(d, "remove", Privilege), - ) - - -@dataclass -class PipelineProgress: - """Progress information of the Online Table data synchronization pipeline.""" - - estimated_completion_time_seconds: Optional[float] = None - """The estimated time remaining to complete this update in seconds.""" - - latest_version_currently_processing: Optional[int] = None - """The source table Delta version that was last processed by the pipeline. The pipeline may not - have completely processed this version yet.""" - - sync_progress_completion: Optional[float] = None - """The completion ratio of this update. This is a number between 0 and 1.""" - - synced_row_count: Optional[int] = None - """The number of rows that have been synced in this update.""" - - total_row_count: Optional[int] = None - """The total number of rows that need to be synced in this update. This number may be an estimate.""" - - def as_dict(self) -> dict: - """Serializes the PipelineProgress into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.estimated_completion_time_seconds is not None: - body["estimated_completion_time_seconds"] = self.estimated_completion_time_seconds - if self.latest_version_currently_processing is not None: - body["latest_version_currently_processing"] = self.latest_version_currently_processing - if self.sync_progress_completion is not None: - body["sync_progress_completion"] = self.sync_progress_completion - if self.synced_row_count is not None: - body["synced_row_count"] = self.synced_row_count - if self.total_row_count is not None: - body["total_row_count"] = self.total_row_count - return body - - def as_shallow_dict(self) -> dict: - """Serializes the PipelineProgress into a shallow dictionary of its immediate attributes.""" - body = {} - if self.estimated_completion_time_seconds is not None: - body["estimated_completion_time_seconds"] = self.estimated_completion_time_seconds - if 
self.latest_version_currently_processing is not None: - body["latest_version_currently_processing"] = self.latest_version_currently_processing - if self.sync_progress_completion is not None: - body["sync_progress_completion"] = self.sync_progress_completion - if self.synced_row_count is not None: - body["synced_row_count"] = self.synced_row_count - if self.total_row_count is not None: - body["total_row_count"] = self.total_row_count - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> PipelineProgress: - """Deserializes the PipelineProgress from a dictionary.""" - return cls( - estimated_completion_time_seconds=d.get("estimated_completion_time_seconds", None), - latest_version_currently_processing=d.get("latest_version_currently_processing", None), - sync_progress_completion=d.get("sync_progress_completion", None), - synced_row_count=d.get("synced_row_count", None), - total_row_count=d.get("total_row_count", None), - ) - - -@dataclass -class PrimaryKeyConstraint: - name: str - """The name of the constraint.""" - - child_columns: List[str] - """Column names for this constraint.""" - - rely: Optional[bool] = None - """True if the constraint is RELY, false or unset if NORELY.""" - - timeseries_columns: Optional[List[str]] = None - """Column names that represent a timeseries.""" - - def as_dict(self) -> dict: - """Serializes the PrimaryKeyConstraint into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.child_columns: - body["child_columns"] = [v for v in self.child_columns] - if self.name is not None: - body["name"] = self.name - if self.rely is not None: - body["rely"] = self.rely - if self.timeseries_columns: - body["timeseries_columns"] = [v for v in self.timeseries_columns] - return body - - def as_shallow_dict(self) -> dict: - """Serializes the PrimaryKeyConstraint into a shallow dictionary of its immediate attributes.""" - body = {} - if self.child_columns: - body["child_columns"] = self.child_columns - if 
self.name is not None: - body["name"] = self.name - if self.rely is not None: - body["rely"] = self.rely - if self.timeseries_columns: - body["timeseries_columns"] = self.timeseries_columns - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> PrimaryKeyConstraint: - """Deserializes the PrimaryKeyConstraint from a dictionary.""" - return cls( - child_columns=d.get("child_columns", None), - name=d.get("name", None), - rely=d.get("rely", None), - timeseries_columns=d.get("timeseries_columns", None), - ) - - -class Privilege(Enum): - - ACCESS = "ACCESS" - ALL_PRIVILEGES = "ALL_PRIVILEGES" - APPLY_TAG = "APPLY_TAG" - BROWSE = "BROWSE" - CREATE = "CREATE" - CREATE_CATALOG = "CREATE_CATALOG" - CREATE_CLEAN_ROOM = "CREATE_CLEAN_ROOM" - CREATE_CONNECTION = "CREATE_CONNECTION" - CREATE_EXTERNAL_LOCATION = "CREATE_EXTERNAL_LOCATION" - CREATE_EXTERNAL_TABLE = "CREATE_EXTERNAL_TABLE" - CREATE_EXTERNAL_VOLUME = "CREATE_EXTERNAL_VOLUME" - CREATE_FOREIGN_CATALOG = "CREATE_FOREIGN_CATALOG" - CREATE_FOREIGN_SECURABLE = "CREATE_FOREIGN_SECURABLE" - CREATE_FUNCTION = "CREATE_FUNCTION" - CREATE_MANAGED_STORAGE = "CREATE_MANAGED_STORAGE" - CREATE_MATERIALIZED_VIEW = "CREATE_MATERIALIZED_VIEW" - CREATE_MODEL = "CREATE_MODEL" - CREATE_PROVIDER = "CREATE_PROVIDER" - CREATE_RECIPIENT = "CREATE_RECIPIENT" - CREATE_SCHEMA = "CREATE_SCHEMA" - CREATE_SERVICE_CREDENTIAL = "CREATE_SERVICE_CREDENTIAL" - CREATE_SHARE = "CREATE_SHARE" - CREATE_STORAGE_CREDENTIAL = "CREATE_STORAGE_CREDENTIAL" - CREATE_TABLE = "CREATE_TABLE" - CREATE_VIEW = "CREATE_VIEW" - CREATE_VOLUME = "CREATE_VOLUME" - EXECUTE = "EXECUTE" - EXECUTE_CLEAN_ROOM_TASK = "EXECUTE_CLEAN_ROOM_TASK" - MANAGE = "MANAGE" - MANAGE_ALLOWLIST = "MANAGE_ALLOWLIST" - MODIFY = "MODIFY" - MODIFY_CLEAN_ROOM = "MODIFY_CLEAN_ROOM" - READ_FILES = "READ_FILES" - READ_PRIVATE_FILES = "READ_PRIVATE_FILES" - READ_VOLUME = "READ_VOLUME" - REFRESH = "REFRESH" - SELECT = "SELECT" - SET_SHARE_PERMISSION = "SET_SHARE_PERMISSION" - USAGE = 
"USAGE" - USE_CATALOG = "USE_CATALOG" - USE_CONNECTION = "USE_CONNECTION" - USE_MARKETPLACE_ASSETS = "USE_MARKETPLACE_ASSETS" - USE_PROVIDER = "USE_PROVIDER" - USE_RECIPIENT = "USE_RECIPIENT" - USE_SCHEMA = "USE_SCHEMA" - USE_SHARE = "USE_SHARE" - WRITE_FILES = "WRITE_FILES" - WRITE_PRIVATE_FILES = "WRITE_PRIVATE_FILES" - WRITE_VOLUME = "WRITE_VOLUME" - - -@dataclass -class PrivilegeAssignment: - principal: Optional[str] = None - """The principal (user email address or group name). For deleted principals, `principal` is empty - while `principal_id` is populated.""" - - privileges: Optional[List[Privilege]] = None - """The privileges assigned to the principal.""" - - def as_dict(self) -> dict: - """Serializes the PrivilegeAssignment into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.principal is not None: - body["principal"] = self.principal - if self.privileges: - body["privileges"] = [v.value for v in self.privileges] - return body - - def as_shallow_dict(self) -> dict: - """Serializes the PrivilegeAssignment into a shallow dictionary of its immediate attributes.""" - body = {} - if self.principal is not None: - body["principal"] = self.principal - if self.privileges: - body["privileges"] = self.privileges - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> PrivilegeAssignment: - """Deserializes the PrivilegeAssignment from a dictionary.""" - return cls(principal=d.get("principal", None), privileges=_repeated_enum(d, "privileges", Privilege)) - - -@dataclass -class ProvisioningInfo: - """Status of an asynchronously provisioned resource.""" - - state: Optional[ProvisioningInfoState] = None - """The provisioning state of the resource.""" - - def as_dict(self) -> dict: - """Serializes the ProvisioningInfo into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.state is not None: - body["state"] = self.state.value - return body - - def as_shallow_dict(self) -> dict: - """Serializes the 
ProvisioningInfo into a shallow dictionary of its immediate attributes.""" - body = {} - if self.state is not None: - body["state"] = self.state - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ProvisioningInfo: - """Deserializes the ProvisioningInfo from a dictionary.""" - return cls(state=_enum(d, "state", ProvisioningInfoState)) - - -class ProvisioningInfoState(Enum): - - ACTIVE = "ACTIVE" - DEGRADED = "DEGRADED" - DELETING = "DELETING" - FAILED = "FAILED" - PROVISIONING = "PROVISIONING" - UPDATING = "UPDATING" - - -@dataclass -class ProvisioningStatus: - """Detailed status of an online table. Shown if the online table is in the - PROVISIONING_PIPELINE_RESOURCES or the PROVISIONING_INITIAL_SNAPSHOT state.""" - - initial_pipeline_sync_progress: Optional[PipelineProgress] = None - """Details about initial data synchronization. Only populated when in the - PROVISIONING_INITIAL_SNAPSHOT state.""" - - def as_dict(self) -> dict: - """Serializes the ProvisioningStatus into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.initial_pipeline_sync_progress: - body["initial_pipeline_sync_progress"] = self.initial_pipeline_sync_progress.as_dict() - return body - - def as_shallow_dict(self) -> dict: - """Serializes the ProvisioningStatus into a shallow dictionary of its immediate attributes.""" - body = {} - if self.initial_pipeline_sync_progress: - body["initial_pipeline_sync_progress"] = self.initial_pipeline_sync_progress - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ProvisioningStatus: - """Deserializes the ProvisioningStatus from a dictionary.""" - return cls(initial_pipeline_sync_progress=_from_dict(d, "initial_pipeline_sync_progress", PipelineProgress)) - - -@dataclass -class QuotaInfo: - last_refreshed_at: Optional[int] = None - """The timestamp that indicates when the quota count was last updated.""" - - parent_full_name: Optional[str] = None - """Name of the parent resource. 
Returns metastore ID if the parent is a metastore.""" - - parent_securable_type: Optional[SecurableType] = None - """The quota parent securable type.""" - - quota_count: Optional[int] = None - """The current usage of the resource quota.""" - - quota_limit: Optional[int] = None - """The current limit of the resource quota.""" - - quota_name: Optional[str] = None - """The name of the quota.""" - - def as_dict(self) -> dict: - """Serializes the QuotaInfo into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.last_refreshed_at is not None: - body["last_refreshed_at"] = self.last_refreshed_at - if self.parent_full_name is not None: - body["parent_full_name"] = self.parent_full_name - if self.parent_securable_type is not None: - body["parent_securable_type"] = self.parent_securable_type.value - if self.quota_count is not None: - body["quota_count"] = self.quota_count - if self.quota_limit is not None: - body["quota_limit"] = self.quota_limit - if self.quota_name is not None: - body["quota_name"] = self.quota_name - return body - - def as_shallow_dict(self) -> dict: - """Serializes the QuotaInfo into a shallow dictionary of its immediate attributes.""" - body = {} - if self.last_refreshed_at is not None: - body["last_refreshed_at"] = self.last_refreshed_at - if self.parent_full_name is not None: - body["parent_full_name"] = self.parent_full_name - if self.parent_securable_type is not None: - body["parent_securable_type"] = self.parent_securable_type - if self.quota_count is not None: - body["quota_count"] = self.quota_count - if self.quota_limit is not None: - body["quota_limit"] = self.quota_limit - if self.quota_name is not None: - body["quota_name"] = self.quota_name - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> QuotaInfo: - """Deserializes the QuotaInfo from a dictionary.""" - return cls( - last_refreshed_at=d.get("last_refreshed_at", None), - parent_full_name=d.get("parent_full_name", None), - 
parent_securable_type=_enum(d, "parent_securable_type", SecurableType), - quota_count=d.get("quota_count", None), - quota_limit=d.get("quota_limit", None), - quota_name=d.get("quota_name", None), - ) - - -@dataclass -class R2Credentials: - """R2 temporary credentials for API authentication. Read more at - https://developers.cloudflare.com/r2/api/s3/tokens/.""" - - access_key_id: Optional[str] = None - """The access key ID that identifies the temporary credentials.""" - - secret_access_key: Optional[str] = None - """The secret access key associated with the access key.""" - - session_token: Optional[str] = None - """The generated JWT that users must pass to use the temporary credentials.""" - - def as_dict(self) -> dict: - """Serializes the R2Credentials into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.access_key_id is not None: - body["access_key_id"] = self.access_key_id - if self.secret_access_key is not None: - body["secret_access_key"] = self.secret_access_key - if self.session_token is not None: - body["session_token"] = self.session_token - return body - - def as_shallow_dict(self) -> dict: - """Serializes the R2Credentials into a shallow dictionary of its immediate attributes.""" - body = {} - if self.access_key_id is not None: - body["access_key_id"] = self.access_key_id - if self.secret_access_key is not None: - body["secret_access_key"] = self.secret_access_key - if self.session_token is not None: - body["session_token"] = self.session_token - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> R2Credentials: - """Deserializes the R2Credentials from a dictionary.""" - return cls( - access_key_id=d.get("access_key_id", None), - secret_access_key=d.get("secret_access_key", None), - session_token=d.get("session_token", None), - ) - - -@dataclass -class RegenerateDashboardRequest: - table_name: Optional[str] = None - """Full name of the table.""" - - warehouse_id: Optional[str] = None - """Optional 
argument to specify the warehouse for dashboard regeneration. If not specified, the - first running warehouse will be used.""" - - def as_dict(self) -> dict: - """Serializes the RegenerateDashboardRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.table_name is not None: - body["table_name"] = self.table_name - if self.warehouse_id is not None: - body["warehouse_id"] = self.warehouse_id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the RegenerateDashboardRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.table_name is not None: - body["table_name"] = self.table_name - if self.warehouse_id is not None: - body["warehouse_id"] = self.warehouse_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> RegenerateDashboardRequest: - """Deserializes the RegenerateDashboardRequest from a dictionary.""" - return cls(table_name=d.get("table_name", None), warehouse_id=d.get("warehouse_id", None)) - - -@dataclass -class RegenerateDashboardResponse: - dashboard_id: Optional[str] = None - """Id of the regenerated monitoring dashboard.""" - - parent_folder: Optional[str] = None - """The directory where the regenerated dashboard is stored.""" - - def as_dict(self) -> dict: - """Serializes the RegenerateDashboardResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.dashboard_id is not None: - body["dashboard_id"] = self.dashboard_id - if self.parent_folder is not None: - body["parent_folder"] = self.parent_folder - return body - - def as_shallow_dict(self) -> dict: - """Serializes the RegenerateDashboardResponse into a shallow dictionary of its immediate attributes.""" - body = {} - if self.dashboard_id is not None: - body["dashboard_id"] = self.dashboard_id - if self.parent_folder is not None: - body["parent_folder"] = self.parent_folder - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> 
RegenerateDashboardResponse: - """Deserializes the RegenerateDashboardResponse from a dictionary.""" - return cls(dashboard_id=d.get("dashboard_id", None), parent_folder=d.get("parent_folder", None)) - - -@dataclass -class RegisteredModelAlias: - """Registered model alias.""" - - alias_name: Optional[str] = None - """Name of the alias, e.g. 'champion' or 'latest_stable'""" - - version_num: Optional[int] = None - """Integer version number of the model version to which this alias points.""" - - def as_dict(self) -> dict: - """Serializes the RegisteredModelAlias into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.alias_name is not None: - body["alias_name"] = self.alias_name - if self.version_num is not None: - body["version_num"] = self.version_num - return body - - def as_shallow_dict(self) -> dict: - """Serializes the RegisteredModelAlias into a shallow dictionary of its immediate attributes.""" - body = {} - if self.alias_name is not None: - body["alias_name"] = self.alias_name - if self.version_num is not None: - body["version_num"] = self.version_num - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> RegisteredModelAlias: - """Deserializes the RegisteredModelAlias from a dictionary.""" - return cls(alias_name=d.get("alias_name", None), version_num=d.get("version_num", None)) - - -@dataclass -class RegisteredModelInfo: - aliases: Optional[List[RegisteredModelAlias]] = None - """List of aliases associated with the registered model""" - - browse_only: Optional[bool] = None - """Indicates whether the principal is limited to retrieving metadata for the associated object - through the BROWSE privilege when include_browse is enabled in the request.""" - - catalog_name: Optional[str] = None - """The name of the catalog where the schema and the registered model reside""" - - comment: Optional[str] = None - """The comment attached to the registered model""" - - created_at: Optional[int] = None - """Creation timestamp 
of the registered model in milliseconds since the Unix epoch""" - - created_by: Optional[str] = None - """The identifier of the user who created the registered model""" - - full_name: Optional[str] = None - """The three-level (fully qualified) name of the registered model""" - - metastore_id: Optional[str] = None - """The unique identifier of the metastore""" - - name: Optional[str] = None - """The name of the registered model""" - - owner: Optional[str] = None - """The identifier of the user who owns the registered model""" - - schema_name: Optional[str] = None - """The name of the schema where the registered model resides""" - - storage_location: Optional[str] = None - """The storage location on the cloud under which model version data files are stored""" - - updated_at: Optional[int] = None - """Last-update timestamp of the registered model in milliseconds since the Unix epoch""" - - updated_by: Optional[str] = None - """The identifier of the user who updated the registered model last time""" - - def as_dict(self) -> dict: - """Serializes the RegisteredModelInfo into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.aliases: - body["aliases"] = [v.as_dict() for v in self.aliases] - if self.browse_only is not None: - body["browse_only"] = self.browse_only - if self.catalog_name is not None: - body["catalog_name"] = self.catalog_name - if self.comment is not None: - body["comment"] = self.comment - if self.created_at is not None: - body["created_at"] = self.created_at - if self.created_by is not None: - body["created_by"] = self.created_by - if self.full_name is not None: - body["full_name"] = self.full_name - if self.metastore_id is not None: - body["metastore_id"] = self.metastore_id - if self.name is not None: - body["name"] = self.name - if self.owner is not None: - body["owner"] = self.owner - if self.schema_name is not None: - body["schema_name"] = self.schema_name - if self.storage_location is not None: - 
body["storage_location"] = self.storage_location - if self.updated_at is not None: - body["updated_at"] = self.updated_at - if self.updated_by is not None: - body["updated_by"] = self.updated_by - return body - - def as_shallow_dict(self) -> dict: - """Serializes the RegisteredModelInfo into a shallow dictionary of its immediate attributes.""" - body = {} - if self.aliases: - body["aliases"] = self.aliases - if self.browse_only is not None: - body["browse_only"] = self.browse_only - if self.catalog_name is not None: - body["catalog_name"] = self.catalog_name - if self.comment is not None: - body["comment"] = self.comment - if self.created_at is not None: - body["created_at"] = self.created_at - if self.created_by is not None: - body["created_by"] = self.created_by - if self.full_name is not None: - body["full_name"] = self.full_name - if self.metastore_id is not None: - body["metastore_id"] = self.metastore_id - if self.name is not None: - body["name"] = self.name - if self.owner is not None: - body["owner"] = self.owner - if self.schema_name is not None: - body["schema_name"] = self.schema_name - if self.storage_location is not None: - body["storage_location"] = self.storage_location - if self.updated_at is not None: - body["updated_at"] = self.updated_at - if self.updated_by is not None: - body["updated_by"] = self.updated_by - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> RegisteredModelInfo: - """Deserializes the RegisteredModelInfo from a dictionary.""" - return cls( - aliases=_repeated_dict(d, "aliases", RegisteredModelAlias), - browse_only=d.get("browse_only", None), - catalog_name=d.get("catalog_name", None), - comment=d.get("comment", None), - created_at=d.get("created_at", None), - created_by=d.get("created_by", None), - full_name=d.get("full_name", None), - metastore_id=d.get("metastore_id", None), - name=d.get("name", None), - owner=d.get("owner", None), - schema_name=d.get("schema_name", None), - 
            # NOTE(review): continuation of RegisteredModelInfo.from_dict — the
            # method (and its class) open above this chunk.
            storage_location=d.get("storage_location", None),
            updated_at=d.get("updated_at", None),
            updated_by=d.get("updated_by", None),
        )


@dataclass
class SchemaInfo:
    """A schema (sometimes called a database) in Unity Catalog, scoped to a parent catalog.

    Next ID: 40"""

    browse_only: Optional[bool] = None
    """Indicates whether the principal is limited to retrieving metadata for the associated object
    through the BROWSE privilege when include_browse is enabled in the request."""

    catalog_name: Optional[str] = None
    """Name of parent catalog."""

    catalog_type: Optional[CatalogType] = None
    """The type of the parent catalog."""

    comment: Optional[str] = None
    """User-provided free-form text description."""

    created_at: Optional[int] = None
    """Time at which this schema was created, in epoch milliseconds."""

    created_by: Optional[str] = None
    """Username of schema creator."""

    effective_predictive_optimization_flag: Optional[EffectivePredictiveOptimizationFlag] = None

    enable_predictive_optimization: Optional[EnablePredictiveOptimization] = None
    """Whether predictive optimization should be enabled for this object and objects under it."""

    full_name: Optional[str] = None
    """Full name of schema, in form of __catalog_name__.__schema_name__."""

    metastore_id: Optional[str] = None
    """Unique identifier of parent metastore."""

    name: Optional[str] = None
    """Name of schema, relative to parent catalog."""

    owner: Optional[str] = None
    """Username of current owner of schema."""

    properties: Optional[Dict[str, str]] = None
    """A map of key-value properties attached to the securable."""

    schema_id: Optional[str] = None
    """The unique identifier of the schema."""

    storage_location: Optional[str] = None
    """Storage location for managed tables within schema."""

    storage_root: Optional[str] = None
    """Storage root URL for managed tables within schema."""

    updated_at: Optional[int] = None
    """Time at which this schema was last modified, in epoch milliseconds."""

    updated_by: Optional[str] = None
    """Username of user who last modified schema."""

    def as_dict(self) -> dict:
        """Serializes the SchemaInfo into a dictionary suitable for use as a JSON request body."""
        body = {}
        if self.browse_only is not None:
            body["browse_only"] = self.browse_only
        if self.catalog_name is not None:
            body["catalog_name"] = self.catalog_name
        if self.catalog_type is not None:
            body["catalog_type"] = self.catalog_type.value
        if self.comment is not None:
            body["comment"] = self.comment
        if self.created_at is not None:
            body["created_at"] = self.created_at
        if self.created_by is not None:
            body["created_by"] = self.created_by
        if self.effective_predictive_optimization_flag:
            body["effective_predictive_optimization_flag"] = self.effective_predictive_optimization_flag.as_dict()
        if self.enable_predictive_optimization is not None:
            body["enable_predictive_optimization"] = self.enable_predictive_optimization.value
        if self.full_name is not None:
            body["full_name"] = self.full_name
        if self.metastore_id is not None:
            body["metastore_id"] = self.metastore_id
        if self.name is not None:
            body["name"] = self.name
        if self.owner is not None:
            body["owner"] = self.owner
        if self.properties:
            body["properties"] = self.properties
        if self.schema_id is not None:
            body["schema_id"] = self.schema_id
        if self.storage_location is not None:
            body["storage_location"] = self.storage_location
        if self.storage_root is not None:
            body["storage_root"] = self.storage_root
        if self.updated_at is not None:
            body["updated_at"] = self.updated_at
        if self.updated_by is not None:
            body["updated_by"] = self.updated_by
        return body

    def as_shallow_dict(self) -> dict:
        """Serializes the SchemaInfo into a shallow dictionary of its immediate attributes."""
        # Unlike as_dict, nested messages/enums are kept as objects, not converted.
        body = {}
        if self.browse_only is not None:
            body["browse_only"] = self.browse_only
        if self.catalog_name is not None:
            body["catalog_name"] = self.catalog_name
        if self.catalog_type is not None:
            body["catalog_type"] = self.catalog_type
        if self.comment is not None:
            body["comment"] = self.comment
        if self.created_at is not None:
            body["created_at"] = self.created_at
        if self.created_by is not None:
            body["created_by"] = self.created_by
        if self.effective_predictive_optimization_flag:
            body["effective_predictive_optimization_flag"] = self.effective_predictive_optimization_flag
        if self.enable_predictive_optimization is not None:
            body["enable_predictive_optimization"] = self.enable_predictive_optimization
        if self.full_name is not None:
            body["full_name"] = self.full_name
        if self.metastore_id is not None:
            body["metastore_id"] = self.metastore_id
        if self.name is not None:
            body["name"] = self.name
        if self.owner is not None:
            body["owner"] = self.owner
        if self.properties:
            body["properties"] = self.properties
        if self.schema_id is not None:
            body["schema_id"] = self.schema_id
        if self.storage_location is not None:
            body["storage_location"] = self.storage_location
        if self.storage_root is not None:
            body["storage_root"] = self.storage_root
        if self.updated_at is not None:
            body["updated_at"] = self.updated_at
        if self.updated_by is not None:
            body["updated_by"] = self.updated_by
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> SchemaInfo:
        """Deserializes the SchemaInfo from a dictionary."""
        return cls(
            browse_only=d.get("browse_only", None),
            catalog_name=d.get("catalog_name", None),
            catalog_type=_enum(d, "catalog_type", CatalogType),
            comment=d.get("comment", None),
            created_at=d.get("created_at", None),
            created_by=d.get("created_by", None),
            effective_predictive_optimization_flag=_from_dict(
                d, "effective_predictive_optimization_flag", EffectivePredictiveOptimizationFlag
            ),
            enable_predictive_optimization=_enum(d, "enable_predictive_optimization", EnablePredictiveOptimization),
            full_name=d.get("full_name", None),
            metastore_id=d.get("metastore_id", None),
            name=d.get("name", None),
            owner=d.get("owner", None),
            properties=d.get("properties", None),
            schema_id=d.get("schema_id", None),
            storage_location=d.get("storage_location", None),
            storage_root=d.get("storage_root", None),
            updated_at=d.get("updated_at", None),
            updated_by=d.get("updated_by", None),
        )


class SecurableKind(Enum):
    """Latest kind: TABLE_DELTA_ICEBERG_DELTASHARING = 252; Next id:253"""

    TABLE_DB_STORAGE = "TABLE_DB_STORAGE"
    TABLE_DELTA = "TABLE_DELTA"
    TABLE_DELTASHARING = "TABLE_DELTASHARING"
    TABLE_DELTASHARING_MUTABLE = "TABLE_DELTASHARING_MUTABLE"
    TABLE_DELTA_EXTERNAL = "TABLE_DELTA_EXTERNAL"
    TABLE_DELTA_ICEBERG_DELTASHARING = "TABLE_DELTA_ICEBERG_DELTASHARING"
    TABLE_DELTA_ICEBERG_MANAGED = "TABLE_DELTA_ICEBERG_MANAGED"
    TABLE_DELTA_UNIFORM_HUDI_EXTERNAL = "TABLE_DELTA_UNIFORM_HUDI_EXTERNAL"
    TABLE_DELTA_UNIFORM_ICEBERG_EXTERNAL = "TABLE_DELTA_UNIFORM_ICEBERG_EXTERNAL"
    TABLE_DELTA_UNIFORM_ICEBERG_FOREIGN_HIVE_METASTORE_EXTERNAL = (
        "TABLE_DELTA_UNIFORM_ICEBERG_FOREIGN_HIVE_METASTORE_EXTERNAL"
    )
    TABLE_DELTA_UNIFORM_ICEBERG_FOREIGN_HIVE_METASTORE_MANAGED = (
        "TABLE_DELTA_UNIFORM_ICEBERG_FOREIGN_HIVE_METASTORE_MANAGED"
    )
    TABLE_DELTA_UNIFORM_ICEBERG_FOREIGN_SNOWFLAKE = "TABLE_DELTA_UNIFORM_ICEBERG_FOREIGN_SNOWFLAKE"
    TABLE_EXTERNAL = "TABLE_EXTERNAL"
    TABLE_FEATURE_STORE = "TABLE_FEATURE_STORE"
    TABLE_FEATURE_STORE_EXTERNAL = "TABLE_FEATURE_STORE_EXTERNAL"
    TABLE_FOREIGN_BIGQUERY = "TABLE_FOREIGN_BIGQUERY"
    TABLE_FOREIGN_DATABRICKS = "TABLE_FOREIGN_DATABRICKS"
    TABLE_FOREIGN_DELTASHARING = "TABLE_FOREIGN_DELTASHARING"
    TABLE_FOREIGN_HIVE_METASTORE = "TABLE_FOREIGN_HIVE_METASTORE"
    TABLE_FOREIGN_HIVE_METASTORE_DBFS_EXTERNAL = "TABLE_FOREIGN_HIVE_METASTORE_DBFS_EXTERNAL"
    TABLE_FOREIGN_HIVE_METASTORE_DBFS_MANAGED = "TABLE_FOREIGN_HIVE_METASTORE_DBFS_MANAGED"
    TABLE_FOREIGN_HIVE_METASTORE_DBFS_SHALLOW_CLONE_EXTERNAL = (
        "TABLE_FOREIGN_HIVE_METASTORE_DBFS_SHALLOW_CLONE_EXTERNAL"
    )
    TABLE_FOREIGN_HIVE_METASTORE_DBFS_SHALLOW_CLONE_MANAGED = "TABLE_FOREIGN_HIVE_METASTORE_DBFS_SHALLOW_CLONE_MANAGED"
    TABLE_FOREIGN_HIVE_METASTORE_DBFS_VIEW = "TABLE_FOREIGN_HIVE_METASTORE_DBFS_VIEW"
    TABLE_FOREIGN_HIVE_METASTORE_EXTERNAL = "TABLE_FOREIGN_HIVE_METASTORE_EXTERNAL"
    TABLE_FOREIGN_HIVE_METASTORE_MANAGED = "TABLE_FOREIGN_HIVE_METASTORE_MANAGED"
    TABLE_FOREIGN_HIVE_METASTORE_SHALLOW_CLONE_EXTERNAL = "TABLE_FOREIGN_HIVE_METASTORE_SHALLOW_CLONE_EXTERNAL"
    TABLE_FOREIGN_HIVE_METASTORE_SHALLOW_CLONE_MANAGED = "TABLE_FOREIGN_HIVE_METASTORE_SHALLOW_CLONE_MANAGED"
    TABLE_FOREIGN_HIVE_METASTORE_VIEW = "TABLE_FOREIGN_HIVE_METASTORE_VIEW"
    TABLE_FOREIGN_MONGODB = "TABLE_FOREIGN_MONGODB"
    TABLE_FOREIGN_MYSQL = "TABLE_FOREIGN_MYSQL"
    TABLE_FOREIGN_NETSUITE = "TABLE_FOREIGN_NETSUITE"
    TABLE_FOREIGN_ORACLE = "TABLE_FOREIGN_ORACLE"
    TABLE_FOREIGN_POSTGRESQL = "TABLE_FOREIGN_POSTGRESQL"
    TABLE_FOREIGN_REDSHIFT = "TABLE_FOREIGN_REDSHIFT"
    TABLE_FOREIGN_SALESFORCE = "TABLE_FOREIGN_SALESFORCE"
    TABLE_FOREIGN_SALESFORCE_DATA_CLOUD = "TABLE_FOREIGN_SALESFORCE_DATA_CLOUD"
    TABLE_FOREIGN_SALESFORCE_DATA_CLOUD_FILE_SHARING = "TABLE_FOREIGN_SALESFORCE_DATA_CLOUD_FILE_SHARING"
    TABLE_FOREIGN_SALESFORCE_DATA_CLOUD_FILE_SHARING_VIEW = "TABLE_FOREIGN_SALESFORCE_DATA_CLOUD_FILE_SHARING_VIEW"
    TABLE_FOREIGN_SNOWFLAKE = "TABLE_FOREIGN_SNOWFLAKE"
    TABLE_FOREIGN_SQLDW = "TABLE_FOREIGN_SQLDW"
    TABLE_FOREIGN_SQLSERVER = "TABLE_FOREIGN_SQLSERVER"
    TABLE_FOREIGN_TERADATA = "TABLE_FOREIGN_TERADATA"
    TABLE_FOREIGN_WORKDAY_RAAS = "TABLE_FOREIGN_WORKDAY_RAAS"
    TABLE_ICEBERG_UNIFORM_MANAGED = "TABLE_ICEBERG_UNIFORM_MANAGED"
    TABLE_INTERNAL = "TABLE_INTERNAL"
    TABLE_MANAGED_POSTGRESQL = "TABLE_MANAGED_POSTGRESQL"
    TABLE_MATERIALIZED_VIEW = "TABLE_MATERIALIZED_VIEW"
    TABLE_MATERIALIZED_VIEW_DELTASHARING = "TABLE_MATERIALIZED_VIEW_DELTASHARING"
    TABLE_METRIC_VIEW = "TABLE_METRIC_VIEW"
    TABLE_ONLINE_VECTOR_INDEX_DIRECT = "TABLE_ONLINE_VECTOR_INDEX_DIRECT"
    TABLE_ONLINE_VECTOR_INDEX_REPLICA = "TABLE_ONLINE_VECTOR_INDEX_REPLICA"
    TABLE_ONLINE_VIEW = "TABLE_ONLINE_VIEW"
    TABLE_STANDARD = "TABLE_STANDARD"
    TABLE_STREAMING_LIVE_TABLE = "TABLE_STREAMING_LIVE_TABLE"
    TABLE_STREAMING_LIVE_TABLE_DELTASHARING = "TABLE_STREAMING_LIVE_TABLE_DELTASHARING"
    TABLE_SYSTEM = "TABLE_SYSTEM"
    TABLE_SYSTEM_DELTASHARING = "TABLE_SYSTEM_DELTASHARING"
    TABLE_VIEW = "TABLE_VIEW"
    TABLE_VIEW_DELTASHARING = "TABLE_VIEW_DELTASHARING"


@dataclass
class SecurableKindManifest:
    """Manifest of a specific securable kind."""

    assignable_privileges: Optional[List[str]] = None
    """Privileges that can be assigned to the securable."""

    capabilities: Optional[List[str]] = None
    """A list of capabilities in the securable kind."""

    options: Optional[List[OptionSpec]] = None
    """Detailed specs of allowed options."""

    securable_kind: Optional[SecurableKind] = None
    """Securable kind to get manifest of."""

    securable_type: Optional[SecurableType] = None
    """Securable Type of the kind."""

    def as_dict(self) -> dict:
        """Serializes the SecurableKindManifest into a dictionary suitable for use as a JSON request body."""
        body = {}
        if self.assignable_privileges:
            body["assignable_privileges"] = [v for v in self.assignable_privileges]
        if self.capabilities:
            body["capabilities"] = [v for v in self.capabilities]
        if self.options:
            body["options"] = [v.as_dict() for v in self.options]
        if self.securable_kind is not None:
            body["securable_kind"] = self.securable_kind.value
        if self.securable_type is not None:
            body["securable_type"] = self.securable_type.value
        return body

    def as_shallow_dict(self) -> dict:
        """Serializes the SecurableKindManifest into a shallow dictionary of its immediate attributes."""
        body = {}
        if self.assignable_privileges:
            body["assignable_privileges"] = self.assignable_privileges
        if self.capabilities:
            body["capabilities"] = self.capabilities
        if self.options:
            body["options"] = self.options
        if self.securable_kind is not None:
            body["securable_kind"] = self.securable_kind
        if self.securable_type is not None:
            body["securable_type"] = self.securable_type
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> SecurableKindManifest:
        """Deserializes the SecurableKindManifest from a dictionary."""
        return cls(
            assignable_privileges=d.get("assignable_privileges", None),
            capabilities=d.get("capabilities", None),
            options=_repeated_dict(d, "options", OptionSpec),
            securable_kind=_enum(d, "securable_kind", SecurableKind),
            securable_type=_enum(d, "securable_type", SecurableType),
        )


class SecurableType(Enum):
    """The type of Unity Catalog securable."""

    CATALOG = "CATALOG"
    CLEAN_ROOM = "CLEAN_ROOM"
    CONNECTION = "CONNECTION"
    CREDENTIAL = "CREDENTIAL"
    EXTERNAL_LOCATION = "EXTERNAL_LOCATION"
    EXTERNAL_METADATA = "EXTERNAL_METADATA"
    FUNCTION = "FUNCTION"
    METASTORE = "METASTORE"
    PIPELINE = "PIPELINE"
    PROVIDER = "PROVIDER"
    RECIPIENT = "RECIPIENT"
    SCHEMA = "SCHEMA"
    SHARE = "SHARE"
    STAGING_TABLE = "STAGING_TABLE"
    STORAGE_CREDENTIAL = "STORAGE_CREDENTIAL"
    TABLE = "TABLE"
    VOLUME = "VOLUME"


@dataclass
class SetArtifactAllowlist:
    """An artifact allowlist: the match patterns permitted for a given artifact type, plus audit
    metadata about when and by whom the allowlist was set."""

    artifact_matchers: List[ArtifactMatcher]
    """A list of allowed artifact match patterns."""

    artifact_type: Optional[ArtifactType] = None
    """The artifact type of the allowlist."""

    created_at: Optional[int] = None
    """Time at which this artifact allowlist was set, in epoch milliseconds."""

    created_by: Optional[str] = None
    """Username of the user who set the artifact allowlist."""

    metastore_id: Optional[str] = None
    """Unique identifier of parent metastore."""

    def as_dict(self) -> dict:
        """Serializes the SetArtifactAllowlist into a dictionary suitable for use as a JSON request body."""
        body = {}
        if self.artifact_matchers:
            body["artifact_matchers"] = [v.as_dict() for v in self.artifact_matchers]
        if self.artifact_type is not None:
            body["artifact_type"] = self.artifact_type.value
        if self.created_at is not None:
            body["created_at"] = self.created_at
        if self.created_by is not None:
            body["created_by"] = self.created_by
        if self.metastore_id is not None:
            body["metastore_id"] = self.metastore_id
        return body

    def as_shallow_dict(self) -> dict:
        """Serializes the SetArtifactAllowlist into a shallow dictionary of its immediate attributes."""
        body = {}
        if self.artifact_matchers:
            body["artifact_matchers"] = self.artifact_matchers
        if self.artifact_type is not None:
            body["artifact_type"] = self.artifact_type
        if self.created_at is not None:
            body["created_at"] = self.created_at
        if self.created_by is not None:
            body["created_by"] = self.created_by
        if self.metastore_id is not None:
            body["metastore_id"] = self.metastore_id
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> SetArtifactAllowlist:
        """Deserializes the SetArtifactAllowlist from a dictionary."""
        return cls(
            artifact_matchers=_repeated_dict(d, "artifact_matchers", ArtifactMatcher),
            artifact_type=_enum(d, "artifact_type", ArtifactType),
            created_at=d.get("created_at", None),
            created_by=d.get("created_by", None),
            metastore_id=d.get("metastore_id", None),
        )


@dataclass
class SetRegisteredModelAliasRequest:
    """Request payload for pointing a registered-model alias at a specific model version."""

    full_name: str
    """Full name of the registered model"""

    alias: str
    """The name of the alias"""

    version_num: int
    """The version number of the model version to which the alias points"""

    def as_dict(self) -> dict:
        """Serializes the SetRegisteredModelAliasRequest into a dictionary suitable for use as a JSON request body."""
        body = {}
        if self.alias is not None:
            body["alias"] = self.alias
        if self.full_name is not None:
            body["full_name"] = self.full_name
        if self.version_num is not None:
            body["version_num"] = self.version_num
        return body

    def as_shallow_dict(self) -> dict:
        """Serializes the SetRegisteredModelAliasRequest into a shallow dictionary of its immediate attributes."""
        body = {}
        if self.alias is not None:
            body["alias"] = self.alias
        if self.full_name is not None:
            body["full_name"] = self.full_name
        if self.version_num is not None:
            body["version_num"] = self.version_num
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> SetRegisteredModelAliasRequest:
        """Deserializes the SetRegisteredModelAliasRequest from a dictionary."""
        return cls(
            alias=d.get("alias", None), full_name=d.get("full_name", None), version_num=d.get("version_num", None)
        )


@dataclass
class SseEncryptionDetails:
    """Server-Side Encryption properties for clients communicating with AWS s3."""

    algorithm: Optional[SseEncryptionDetailsAlgorithm] = None
    """Sets the value of the 'x-amz-server-side-encryption' header in S3 request."""

    aws_kms_key_arn: Optional[str] = None
    """Optional. The ARN of the SSE-KMS key used with the S3 location, when algorithm = "SSE-KMS". Sets
    the value of the 'x-amz-server-side-encryption-aws-kms-key-id' header."""

    def as_dict(self) -> dict:
        """Serializes the SseEncryptionDetails into a dictionary suitable for use as a JSON request body."""
        body = {}
        if self.algorithm is not None:
            body["algorithm"] = self.algorithm.value
        if self.aws_kms_key_arn is not None:
            body["aws_kms_key_arn"] = self.aws_kms_key_arn
        return body

    def as_shallow_dict(self) -> dict:
        """Serializes the SseEncryptionDetails into a shallow dictionary of its immediate attributes."""
        body = {}
        if self.algorithm is not None:
            body["algorithm"] = self.algorithm
        if self.aws_kms_key_arn is not None:
            body["aws_kms_key_arn"] = self.aws_kms_key_arn
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> SseEncryptionDetails:
        """Deserializes the SseEncryptionDetails from a dictionary."""
        return cls(
            algorithm=_enum(d, "algorithm", SseEncryptionDetailsAlgorithm),
            aws_kms_key_arn=d.get("aws_kms_key_arn", None),
        )

class SseEncryptionDetailsAlgorithm(Enum):
    """Server-side encryption algorithm values used in the 'x-amz-server-side-encryption' S3 header."""

    AWS_SSE_KMS = "AWS_SSE_KMS"
    AWS_SSE_S3 = "AWS_SSE_S3"


@dataclass
class StorageCredentialInfo:
    """A storage credential, holding one cloud-provider credential configuration (AWS IAM role, Azure
    managed identity or service principal, Cloudflare API token, or Databricks-managed GCP service
    account) together with its ownership and audit metadata."""

    aws_iam_role: Optional[AwsIamRoleResponse] = None
    """The AWS IAM role configuration."""

    azure_managed_identity: Optional[AzureManagedIdentityResponse] = None
    """The Azure managed identity configuration."""

    azure_service_principal: Optional[AzureServicePrincipal] = None
    """The Azure service principal configuration."""

    cloudflare_api_token: Optional[CloudflareApiToken] = None
    """The Cloudflare API token configuration."""

    comment: Optional[str] = None
    """Comment associated with the credential."""

    created_at: Optional[int] = None
    """Time at which this credential was created, in epoch milliseconds."""

    created_by: Optional[str] = None
    """Username of credential creator."""

    databricks_gcp_service_account: Optional[DatabricksGcpServiceAccountResponse] = None
    """The Databricks managed GCP service account configuration."""

    full_name: Optional[str] = None
    """The full name of the credential."""

    id: Optional[str] = None
    """The unique identifier of the credential."""

    isolation_mode: Optional[IsolationMode] = None
    """Whether the current securable is accessible from all workspaces or a specific set of workspaces."""

    metastore_id: Optional[str] = None
    """Unique identifier of the parent metastore."""

    name: Optional[str] = None
    """The credential name. The name must be unique among storage and service credentials within the
    metastore."""

    owner: Optional[str] = None
    """Username of current owner of credential."""

    read_only: Optional[bool] = None
    """Whether the credential is usable only for read operations. Only applicable when purpose is
    **STORAGE**."""

    updated_at: Optional[int] = None
    """Time at which this credential was last modified, in epoch milliseconds."""

    updated_by: Optional[str] = None
    """Username of user who last modified the credential."""

    used_for_managed_storage: Optional[bool] = None
    """Whether this credential is the current metastore's root storage credential. Only applicable when
    purpose is **STORAGE**."""

    def as_dict(self) -> dict:
        """Serializes the StorageCredentialInfo into a dictionary suitable for use as a JSON request body."""
        body = {}
        if self.aws_iam_role:
            body["aws_iam_role"] = self.aws_iam_role.as_dict()
        if self.azure_managed_identity:
            body["azure_managed_identity"] = self.azure_managed_identity.as_dict()
        if self.azure_service_principal:
            body["azure_service_principal"] = self.azure_service_principal.as_dict()
        if self.cloudflare_api_token:
            body["cloudflare_api_token"] = self.cloudflare_api_token.as_dict()
        if self.comment is not None:
            body["comment"] = self.comment
        if self.created_at is not None:
            body["created_at"] = self.created_at
        if self.created_by is not None:
            body["created_by"] = self.created_by
        if self.databricks_gcp_service_account:
            body["databricks_gcp_service_account"] = self.databricks_gcp_service_account.as_dict()
        if self.full_name is not None:
            body["full_name"] = self.full_name
        if self.id is not None:
            body["id"] = self.id
        if self.isolation_mode is not None:
            body["isolation_mode"] = self.isolation_mode.value
        if self.metastore_id is not None:
            body["metastore_id"] = self.metastore_id
        if self.name is not None:
            body["name"] = self.name
        if self.owner is not None:
            body["owner"] = self.owner
        if self.read_only is not None:
            body["read_only"] = self.read_only
        if self.updated_at is not None:
            body["updated_at"] = self.updated_at
        if self.updated_by is not None:
            body["updated_by"] = self.updated_by
        if self.used_for_managed_storage is not None:
            body["used_for_managed_storage"] = self.used_for_managed_storage
        return body

    def as_shallow_dict(self) -> dict:
        """Serializes the StorageCredentialInfo into a shallow dictionary of its immediate attributes."""
        # Unlike as_dict, nested messages/enums are kept as objects, not converted.
        body = {}
        if self.aws_iam_role:
            body["aws_iam_role"] = self.aws_iam_role
        if self.azure_managed_identity:
            body["azure_managed_identity"] = self.azure_managed_identity
        if self.azure_service_principal:
            body["azure_service_principal"] = self.azure_service_principal
        if self.cloudflare_api_token:
            body["cloudflare_api_token"] = self.cloudflare_api_token
        if self.comment is not None:
            body["comment"] = self.comment
        if self.created_at is not None:
            body["created_at"] = self.created_at
        if self.created_by is not None:
            body["created_by"] = self.created_by
        if self.databricks_gcp_service_account:
            body["databricks_gcp_service_account"] = self.databricks_gcp_service_account
        if self.full_name is not None:
            body["full_name"] = self.full_name
        if self.id is not None:
            body["id"] = self.id
        if self.isolation_mode is not None:
            body["isolation_mode"] = self.isolation_mode
        if self.metastore_id is not None:
            body["metastore_id"] = self.metastore_id
        if self.name is not None:
            body["name"] = self.name
        if self.owner is not None:
            body["owner"] = self.owner
        if self.read_only is not None:
            body["read_only"] = self.read_only
        if self.updated_at is not None:
            body["updated_at"] = self.updated_at
        if self.updated_by is not None:
            body["updated_by"] = self.updated_by
        if self.used_for_managed_storage is not None:
            body["used_for_managed_storage"] = self.used_for_managed_storage
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> StorageCredentialInfo:
        """Deserializes the StorageCredentialInfo from a dictionary."""
        return cls(
            aws_iam_role=_from_dict(d, "aws_iam_role", AwsIamRoleResponse),
            azure_managed_identity=_from_dict(d, "azure_managed_identity", AzureManagedIdentityResponse),
            azure_service_principal=_from_dict(d, "azure_service_principal", AzureServicePrincipal),
            cloudflare_api_token=_from_dict(d, "cloudflare_api_token", CloudflareApiToken),
            comment=d.get("comment", None),
            created_at=d.get("created_at", None),
            created_by=d.get("created_by", None),
            databricks_gcp_service_account=_from_dict(
                d, "databricks_gcp_service_account", DatabricksGcpServiceAccountResponse
            ),
            full_name=d.get("full_name", None),
            id=d.get("id", None),
            isolation_mode=_enum(d, "isolation_mode", IsolationMode),
            metastore_id=d.get("metastore_id", None),
            name=d.get("name", None),
            owner=d.get("owner", None),
            read_only=d.get("read_only", None),
            updated_at=d.get("updated_at", None),
            updated_by=d.get("updated_by", None),
            used_for_managed_storage=d.get("used_for_managed_storage", None),
        )


@dataclass
class SystemSchemaInfo:
    """A system schema and its current enablement state."""

    schema: str
    """Name of the system schema."""

    state: str
    """The current state of enablement for the system schema. An empty string means the system schema
    is available and ready for opt-in. Possible values: AVAILABLE | ENABLE_INITIALIZED |
    ENABLE_COMPLETED | DISABLE_INITIALIZED | UNAVAILABLE"""

    def as_dict(self) -> dict:
        """Serializes the SystemSchemaInfo into a dictionary suitable for use as a JSON request body."""
        body = {}
        if self.schema is not None:
            body["schema"] = self.schema
        if self.state is not None:
            body["state"] = self.state
        return body

    def as_shallow_dict(self) -> dict:
        """Serializes the SystemSchemaInfo into a shallow dictionary of its immediate attributes."""
        body = {}
        if self.schema is not None:
            body["schema"] = self.schema
        if self.state is not None:
            body["state"] = self.state
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> SystemSchemaInfo:
        """Deserializes the SystemSchemaInfo from a dictionary."""
        return cls(schema=d.get("schema", None), state=d.get("state", None))


class SystemType(Enum):

    AMAZON_REDSHIFT = "AMAZON_REDSHIFT"
    AZURE_SYNAPSE = "AZURE_SYNAPSE"
    CONFLUENT = "CONFLUENT"
    GOOGLE_BIGQUERY = "GOOGLE_BIGQUERY"
    KAFKA = "KAFKA"
    LOOKER = "LOOKER"
    MICROSOFT_FABRIC = "MICROSOFT_FABRIC"
    MICROSOFT_SQL_SERVER = "MICROSOFT_SQL_SERVER"
    MONGODB = "MONGODB"
    MYSQL = "MYSQL"
    ORACLE = "ORACLE"
    OTHER = "OTHER"
    POSTGRESQL = "POSTGRESQL"
    POWER_BI = "POWER_BI"
    SALESFORCE = "SALESFORCE"
    SAP = "SAP"
    SERVICENOW = "SERVICENOW"
    SNOWFLAKE = "SNOWFLAKE"
    TABLEAU = "TABLEAU"
    TERADATA = "TERADATA"
    WORKDAY = "WORKDAY"


@dataclass
class TableConstraint:
    """A table constraint, as defined by *one* of the following fields being set:
    __primary_key_constraint__, __foreign_key_constraint__, __named_table_constraint__."""

    foreign_key_constraint: Optional[ForeignKeyConstraint] = None

    named_table_constraint: Optional[NamedTableConstraint] = None

    primary_key_constraint: Optional[PrimaryKeyConstraint] = None

    def as_dict(self) -> dict:
        """Serializes the TableConstraint into a dictionary suitable for use as a JSON request body."""
        body = {}
        if self.foreign_key_constraint:
            body["foreign_key_constraint"] = self.foreign_key_constraint.as_dict()
        if self.named_table_constraint:
            body["named_table_constraint"] = self.named_table_constraint.as_dict()
        if self.primary_key_constraint:
            body["primary_key_constraint"] = self.primary_key_constraint.as_dict()
        return body

    def as_shallow_dict(self) -> dict:
        """Serializes the TableConstraint into a shallow dictionary of its immediate attributes."""
        body = {}
        if self.foreign_key_constraint:
            body["foreign_key_constraint"] = self.foreign_key_constraint
        if self.named_table_constraint:
            body["named_table_constraint"] = self.named_table_constraint
        if self.primary_key_constraint:
            body["primary_key_constraint"] = self.primary_key_constraint
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> TableConstraint:
        """Deserializes the TableConstraint from a dictionary."""
        return cls(
            foreign_key_constraint=_from_dict(d, "foreign_key_constraint", ForeignKeyConstraint),
            named_table_constraint=_from_dict(d, "named_table_constraint", NamedTableConstraint),
            primary_key_constraint=_from_dict(d, "primary_key_constraint", PrimaryKeyConstraint),
        )


@dataclass
class TableDependency:
    """A table that is dependent on a SQL object."""

    table_full_name: str
    """Full name of the dependent table, in the form of
    __catalog_name__.__schema_name__.__table_name__."""

    def as_dict(self) -> dict:
        """Serializes the TableDependency into a dictionary suitable for use as a JSON request body."""
        body = {}
        if self.table_full_name is not None:
            body["table_full_name"] = self.table_full_name
        return body

    def as_shallow_dict(self) -> dict:
        """Serializes the TableDependency into a shallow dictionary of its immediate attributes."""
        body = {}
        if self.table_full_name is not None:
            body["table_full_name"] = self.table_full_name
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> TableDependency:
        """Deserializes the TableDependency from a dictionary."""
        return cls(table_full_name=d.get("table_full_name", None))


@dataclass
class TableExistsResponse:
    table_exists: Optional[bool] = None
    """Whether the table exists or not."""

    def as_dict(self) -> dict:
        """Serializes the TableExistsResponse into a dictionary suitable for use as a JSON request body."""
        body = {}
        if self.table_exists is not None:
            body["table_exists"] = self.table_exists
        return body

    def as_shallow_dict(self) -> dict:
        """Serializes the TableExistsResponse into a shallow dictionary of its immediate attributes."""
        body = {}
        if self.table_exists is not None:
            body["table_exists"] = self.table_exists
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> TableExistsResponse:
        """Deserializes the TableExistsResponse from a dictionary."""
        return cls(table_exists=d.get("table_exists", None))


@dataclass
class TableInfo:
    # NOTE(review): TableInfo continues below this chunk (remaining fields and
    # serializer methods follow after deleted_at).
    access_point: Optional[str] = None
    """The AWS access point to use when accessing s3 for this external location."""

    browse_only: Optional[bool] = None
    """Indicates whether the principal is limited to retrieving metadata for the associated object
    through the BROWSE privilege when include_browse is enabled in the request."""

    catalog_name: Optional[str] = None
    """Name of parent catalog."""

    columns: Optional[List[ColumnInfo]] = None
    """The array of __ColumnInfo__ definitions of the table's columns."""

    comment: Optional[str] = None
    """User-provided free-form text description."""

    created_at: Optional[int] = None
    """Time at which this table was created, in epoch milliseconds."""

    created_by: Optional[str] = None
    """Username of table creator."""

    data_access_configuration_id: Optional[str] = None
    """Unique ID of the Data Access Configuration to use with the table data."""

    data_source_format: Optional[DataSourceFormat] = None

    deleted_at: Optional[int] = None
"""Time at which this table was deleted, in epoch milliseconds. Field is omitted if table is not - deleted.""" - - delta_runtime_properties_kvpairs: Optional[DeltaRuntimePropertiesKvPairs] = None - """Information pertaining to current state of the delta table.""" - - effective_predictive_optimization_flag: Optional[EffectivePredictiveOptimizationFlag] = None - - enable_predictive_optimization: Optional[EnablePredictiveOptimization] = None - - encryption_details: Optional[EncryptionDetails] = None - - full_name: Optional[str] = None - """Full name of table, in form of __catalog_name__.__schema_name__.__table_name__""" - - metastore_id: Optional[str] = None - """Unique identifier of parent metastore.""" - - name: Optional[str] = None - """Name of table, relative to parent schema.""" - - owner: Optional[str] = None - """Username of current owner of table.""" - - pipeline_id: Optional[str] = None - """The pipeline ID of the table. Applicable for tables created by pipelines (Materialized View, - Streaming Table, etc.).""" - - properties: Optional[Dict[str, str]] = None - """A map of key-value properties attached to the securable.""" - - row_filter: Optional[TableRowFilter] = None - - schema_name: Optional[str] = None - """Name of parent schema relative to its parent catalog.""" - - securable_kind_manifest: Optional[SecurableKindManifest] = None - """SecurableKindManifest of table, including capabilities the table has.""" - - sql_path: Optional[str] = None - """List of schemes whose objects can be referenced without qualification.""" - - storage_credential_name: Optional[str] = None - """Name of the storage credential, when a storage credential is configured for use with this table.""" - - storage_location: Optional[str] = None - """Storage root URL for table (for **MANAGED**, **EXTERNAL** tables).""" - - table_constraints: Optional[List[TableConstraint]] = None - """List of table constraints. 
Note: this field is not set in the output of the __listTables__ API.""" - - table_id: Optional[str] = None - """The unique identifier of the table.""" - - table_type: Optional[TableType] = None - - updated_at: Optional[int] = None - """Time at which this table was last modified, in epoch milliseconds.""" - - updated_by: Optional[str] = None - """Username of user who last modified the table.""" - - view_definition: Optional[str] = None - """View definition SQL (when __table_type__ is **VIEW**, **MATERIALIZED_VIEW**, or - **STREAMING_TABLE**)""" - - view_dependencies: Optional[DependencyList] = None - """View dependencies (when table_type == **VIEW** or **MATERIALIZED_VIEW**, **STREAMING_TABLE**) - - when DependencyList is None, the dependency is not provided; - when DependencyList is an empty - list, the dependency is provided but is empty; - when DependencyList is not an empty list, - dependencies are provided and recorded.""" - - def as_dict(self) -> dict: - """Serializes the TableInfo into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.access_point is not None: - body["access_point"] = self.access_point - if self.browse_only is not None: - body["browse_only"] = self.browse_only - if self.catalog_name is not None: - body["catalog_name"] = self.catalog_name - if self.columns: - body["columns"] = [v.as_dict() for v in self.columns] - if self.comment is not None: - body["comment"] = self.comment - if self.created_at is not None: - body["created_at"] = self.created_at - if self.created_by is not None: - body["created_by"] = self.created_by - if self.data_access_configuration_id is not None: - body["data_access_configuration_id"] = self.data_access_configuration_id - if self.data_source_format is not None: - body["data_source_format"] = self.data_source_format.value - if self.deleted_at is not None: - body["deleted_at"] = self.deleted_at - if self.delta_runtime_properties_kvpairs: - body["delta_runtime_properties_kvpairs"] = 
self.delta_runtime_properties_kvpairs.as_dict() - if self.effective_predictive_optimization_flag: - body["effective_predictive_optimization_flag"] = self.effective_predictive_optimization_flag.as_dict() - if self.enable_predictive_optimization is not None: - body["enable_predictive_optimization"] = self.enable_predictive_optimization.value - if self.encryption_details: - body["encryption_details"] = self.encryption_details.as_dict() - if self.full_name is not None: - body["full_name"] = self.full_name - if self.metastore_id is not None: - body["metastore_id"] = self.metastore_id - if self.name is not None: - body["name"] = self.name - if self.owner is not None: - body["owner"] = self.owner - if self.pipeline_id is not None: - body["pipeline_id"] = self.pipeline_id - if self.properties: - body["properties"] = self.properties - if self.row_filter: - body["row_filter"] = self.row_filter.as_dict() - if self.schema_name is not None: - body["schema_name"] = self.schema_name - if self.securable_kind_manifest: - body["securable_kind_manifest"] = self.securable_kind_manifest.as_dict() - if self.sql_path is not None: - body["sql_path"] = self.sql_path - if self.storage_credential_name is not None: - body["storage_credential_name"] = self.storage_credential_name - if self.storage_location is not None: - body["storage_location"] = self.storage_location - if self.table_constraints: - body["table_constraints"] = [v.as_dict() for v in self.table_constraints] - if self.table_id is not None: - body["table_id"] = self.table_id - if self.table_type is not None: - body["table_type"] = self.table_type.value - if self.updated_at is not None: - body["updated_at"] = self.updated_at - if self.updated_by is not None: - body["updated_by"] = self.updated_by - if self.view_definition is not None: - body["view_definition"] = self.view_definition - if self.view_dependencies: - body["view_dependencies"] = self.view_dependencies.as_dict() - return body - - def as_shallow_dict(self) -> dict: - 
"""Serializes the TableInfo into a shallow dictionary of its immediate attributes.""" - body = {} - if self.access_point is not None: - body["access_point"] = self.access_point - if self.browse_only is not None: - body["browse_only"] = self.browse_only - if self.catalog_name is not None: - body["catalog_name"] = self.catalog_name - if self.columns: - body["columns"] = self.columns - if self.comment is not None: - body["comment"] = self.comment - if self.created_at is not None: - body["created_at"] = self.created_at - if self.created_by is not None: - body["created_by"] = self.created_by - if self.data_access_configuration_id is not None: - body["data_access_configuration_id"] = self.data_access_configuration_id - if self.data_source_format is not None: - body["data_source_format"] = self.data_source_format - if self.deleted_at is not None: - body["deleted_at"] = self.deleted_at - if self.delta_runtime_properties_kvpairs: - body["delta_runtime_properties_kvpairs"] = self.delta_runtime_properties_kvpairs - if self.effective_predictive_optimization_flag: - body["effective_predictive_optimization_flag"] = self.effective_predictive_optimization_flag - if self.enable_predictive_optimization is not None: - body["enable_predictive_optimization"] = self.enable_predictive_optimization - if self.encryption_details: - body["encryption_details"] = self.encryption_details - if self.full_name is not None: - body["full_name"] = self.full_name - if self.metastore_id is not None: - body["metastore_id"] = self.metastore_id - if self.name is not None: - body["name"] = self.name - if self.owner is not None: - body["owner"] = self.owner - if self.pipeline_id is not None: - body["pipeline_id"] = self.pipeline_id - if self.properties: - body["properties"] = self.properties - if self.row_filter: - body["row_filter"] = self.row_filter - if self.schema_name is not None: - body["schema_name"] = self.schema_name - if self.securable_kind_manifest: - body["securable_kind_manifest"] = 
self.securable_kind_manifest - if self.sql_path is not None: - body["sql_path"] = self.sql_path - if self.storage_credential_name is not None: - body["storage_credential_name"] = self.storage_credential_name - if self.storage_location is not None: - body["storage_location"] = self.storage_location - if self.table_constraints: - body["table_constraints"] = self.table_constraints - if self.table_id is not None: - body["table_id"] = self.table_id - if self.table_type is not None: - body["table_type"] = self.table_type - if self.updated_at is not None: - body["updated_at"] = self.updated_at - if self.updated_by is not None: - body["updated_by"] = self.updated_by - if self.view_definition is not None: - body["view_definition"] = self.view_definition - if self.view_dependencies: - body["view_dependencies"] = self.view_dependencies +@dataclass +class PrivilegeAssignment: + principal: Optional[str] = None + """The principal (user email address or group name). For deleted principals, `principal` is empty + while `principal_id` is populated.""" + + privileges: Optional[List[Privilege]] = None + """The privileges assigned to the principal.""" + + def as_dict(self) -> dict: + """Serializes the PrivilegeAssignment into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.principal is not None: + body["principal"] = self.principal + if self.privileges: + body["privileges"] = [v.value for v in self.privileges] + return body + + def as_shallow_dict(self) -> dict: + """Serializes the PrivilegeAssignment into a shallow dictionary of its immediate attributes.""" + body = {} + if self.principal is not None: + body["principal"] = self.principal + if self.privileges: + body["privileges"] = self.privileges return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> TableInfo: - """Deserializes the TableInfo from a dictionary.""" - return cls( - access_point=d.get("access_point", None), - browse_only=d.get("browse_only", None), - 
catalog_name=d.get("catalog_name", None), - columns=_repeated_dict(d, "columns", ColumnInfo), - comment=d.get("comment", None), - created_at=d.get("created_at", None), - created_by=d.get("created_by", None), - data_access_configuration_id=d.get("data_access_configuration_id", None), - data_source_format=_enum(d, "data_source_format", DataSourceFormat), - deleted_at=d.get("deleted_at", None), - delta_runtime_properties_kvpairs=_from_dict( - d, "delta_runtime_properties_kvpairs", DeltaRuntimePropertiesKvPairs - ), - effective_predictive_optimization_flag=_from_dict( - d, "effective_predictive_optimization_flag", EffectivePredictiveOptimizationFlag - ), - enable_predictive_optimization=_enum(d, "enable_predictive_optimization", EnablePredictiveOptimization), - encryption_details=_from_dict(d, "encryption_details", EncryptionDetails), - full_name=d.get("full_name", None), - metastore_id=d.get("metastore_id", None), - name=d.get("name", None), - owner=d.get("owner", None), - pipeline_id=d.get("pipeline_id", None), - properties=d.get("properties", None), - row_filter=_from_dict(d, "row_filter", TableRowFilter), - schema_name=d.get("schema_name", None), - securable_kind_manifest=_from_dict(d, "securable_kind_manifest", SecurableKindManifest), - sql_path=d.get("sql_path", None), - storage_credential_name=d.get("storage_credential_name", None), - storage_location=d.get("storage_location", None), - table_constraints=_repeated_dict(d, "table_constraints", TableConstraint), - table_id=d.get("table_id", None), - table_type=_enum(d, "table_type", TableType), - updated_at=d.get("updated_at", None), - updated_by=d.get("updated_by", None), - view_definition=d.get("view_definition", None), - view_dependencies=_from_dict(d, "view_dependencies", DependencyList), - ) + def from_dict(cls, d: Dict[str, Any]) -> PrivilegeAssignment: + """Deserializes the PrivilegeAssignment from a dictionary.""" + return cls(principal=d.get("principal", None), privileges=_repeated_enum(d, "privileges", 
Privilege)) -class TableOperation(Enum): +@dataclass +class ProvisioningInfo: + """Status of an asynchronously provisioned resource.""" - READ = "READ" - READ_WRITE = "READ_WRITE" + state: Optional[ProvisioningInfoState] = None + """The provisioning state of the resource.""" + + def as_dict(self) -> dict: + """Serializes the ProvisioningInfo into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.state is not None: + body["state"] = self.state.value + return body + + def as_shallow_dict(self) -> dict: + """Serializes the ProvisioningInfo into a shallow dictionary of its immediate attributes.""" + body = {} + if self.state is not None: + body["state"] = self.state + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> ProvisioningInfo: + """Deserializes the ProvisioningInfo from a dictionary.""" + return cls(state=_enum(d, "state", ProvisioningInfoState)) + + +class ProvisioningInfoState(Enum): + + ACTIVE = "ACTIVE" + DEGRADED = "DEGRADED" + DELETING = "DELETING" + FAILED = "FAILED" + PROVISIONING = "PROVISIONING" + UPDATING = "UPDATING" @dataclass -class TableRowFilter: - function_name: str - """The full name of the row filter SQL UDF.""" +class ProvisioningStatus: + """Detailed status of an online table. Shown if the online table is in the + PROVISIONING_PIPELINE_RESOURCES or the PROVISIONING_INITIAL_SNAPSHOT state.""" - input_column_names: List[str] - """The list of table columns to be passed as input to the row filter function. The column types - should match the types of the filter function arguments.""" + initial_pipeline_sync_progress: Optional[PipelineProgress] = None + """Details about initial data synchronization. 
Only populated when in the + PROVISIONING_INITIAL_SNAPSHOT state.""" def as_dict(self) -> dict: - """Serializes the TableRowFilter into a dictionary suitable for use as a JSON request body.""" + """Serializes the ProvisioningStatus into a dictionary suitable for use as a JSON request body.""" body = {} - if self.function_name is not None: - body["function_name"] = self.function_name - if self.input_column_names: - body["input_column_names"] = [v for v in self.input_column_names] + if self.initial_pipeline_sync_progress: + body["initial_pipeline_sync_progress"] = self.initial_pipeline_sync_progress.as_dict() return body def as_shallow_dict(self) -> dict: - """Serializes the TableRowFilter into a shallow dictionary of its immediate attributes.""" + """Serializes the ProvisioningStatus into a shallow dictionary of its immediate attributes.""" body = {} - if self.function_name is not None: - body["function_name"] = self.function_name - if self.input_column_names: - body["input_column_names"] = self.input_column_names + if self.initial_pipeline_sync_progress: + body["initial_pipeline_sync_progress"] = self.initial_pipeline_sync_progress return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> TableRowFilter: - """Deserializes the TableRowFilter from a dictionary.""" - return cls(function_name=d.get("function_name", None), input_column_names=d.get("input_column_names", None)) + def from_dict(cls, d: Dict[str, Any]) -> ProvisioningStatus: + """Deserializes the ProvisioningStatus from a dictionary.""" + return cls(initial_pipeline_sync_progress=_from_dict(d, "initial_pipeline_sync_progress", PipelineProgress)) @dataclass -class TableSummary: - full_name: Optional[str] = None - """The full name of the table.""" +class QuotaInfo: + last_refreshed_at: Optional[int] = None + """The timestamp that indicates when the quota count was last updated.""" - securable_kind_manifest: Optional[SecurableKindManifest] = None - """SecurableKindManifest of table, including 
capabilities the table has.""" + parent_full_name: Optional[str] = None + """Name of the parent resource. Returns metastore ID if the parent is a metastore.""" - table_type: Optional[TableType] = None + parent_securable_type: Optional[SecurableType] = None + """The quota parent securable type.""" + + quota_count: Optional[int] = None + """The current usage of the resource quota.""" + + quota_limit: Optional[int] = None + """The current limit of the resource quota.""" + + quota_name: Optional[str] = None + """The name of the quota.""" def as_dict(self) -> dict: - """Serializes the TableSummary into a dictionary suitable for use as a JSON request body.""" + """Serializes the QuotaInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.full_name is not None: - body["full_name"] = self.full_name - if self.securable_kind_manifest: - body["securable_kind_manifest"] = self.securable_kind_manifest.as_dict() - if self.table_type is not None: - body["table_type"] = self.table_type.value + if self.last_refreshed_at is not None: + body["last_refreshed_at"] = self.last_refreshed_at + if self.parent_full_name is not None: + body["parent_full_name"] = self.parent_full_name + if self.parent_securable_type is not None: + body["parent_securable_type"] = self.parent_securable_type.value + if self.quota_count is not None: + body["quota_count"] = self.quota_count + if self.quota_limit is not None: + body["quota_limit"] = self.quota_limit + if self.quota_name is not None: + body["quota_name"] = self.quota_name return body def as_shallow_dict(self) -> dict: - """Serializes the TableSummary into a shallow dictionary of its immediate attributes.""" + """Serializes the QuotaInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.full_name is not None: - body["full_name"] = self.full_name - if self.securable_kind_manifest: - body["securable_kind_manifest"] = self.securable_kind_manifest - if self.table_type is not None: - 
body["table_type"] = self.table_type + if self.last_refreshed_at is not None: + body["last_refreshed_at"] = self.last_refreshed_at + if self.parent_full_name is not None: + body["parent_full_name"] = self.parent_full_name + if self.parent_securable_type is not None: + body["parent_securable_type"] = self.parent_securable_type + if self.quota_count is not None: + body["quota_count"] = self.quota_count + if self.quota_limit is not None: + body["quota_limit"] = self.quota_limit + if self.quota_name is not None: + body["quota_name"] = self.quota_name return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> TableSummary: - """Deserializes the TableSummary from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> QuotaInfo: + """Deserializes the QuotaInfo from a dictionary.""" return cls( - full_name=d.get("full_name", None), - securable_kind_manifest=_from_dict(d, "securable_kind_manifest", SecurableKindManifest), - table_type=_enum(d, "table_type", TableType), + last_refreshed_at=d.get("last_refreshed_at", None), + parent_full_name=d.get("parent_full_name", None), + parent_securable_type=_enum(d, "parent_securable_type", SecurableType), + quota_count=d.get("quota_count", None), + quota_limit=d.get("quota_limit", None), + quota_name=d.get("quota_name", None), ) -class TableType(Enum): +@dataclass +class R2Credentials: + """R2 temporary credentials for API authentication. 
Read more at + https://developers.cloudflare.com/r2/api/s3/tokens/.""" - EXTERNAL = "EXTERNAL" - EXTERNAL_SHALLOW_CLONE = "EXTERNAL_SHALLOW_CLONE" - FOREIGN = "FOREIGN" - MANAGED = "MANAGED" - MANAGED_SHALLOW_CLONE = "MANAGED_SHALLOW_CLONE" - MATERIALIZED_VIEW = "MATERIALIZED_VIEW" - METRIC_VIEW = "METRIC_VIEW" - STREAMING_TABLE = "STREAMING_TABLE" - VIEW = "VIEW" + access_key_id: Optional[str] = None + """The access key ID that identifies the temporary credentials.""" + + secret_access_key: Optional[str] = None + """The secret access key associated with the access key.""" + + session_token: Optional[str] = None + """The generated JWT that users must pass to use the temporary credentials.""" + + def as_dict(self) -> dict: + """Serializes the R2Credentials into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.access_key_id is not None: + body["access_key_id"] = self.access_key_id + if self.secret_access_key is not None: + body["secret_access_key"] = self.secret_access_key + if self.session_token is not None: + body["session_token"] = self.session_token + return body + + def as_shallow_dict(self) -> dict: + """Serializes the R2Credentials into a shallow dictionary of its immediate attributes.""" + body = {} + if self.access_key_id is not None: + body["access_key_id"] = self.access_key_id + if self.secret_access_key is not None: + body["secret_access_key"] = self.secret_access_key + if self.session_token is not None: + body["session_token"] = self.session_token + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> R2Credentials: + """Deserializes the R2Credentials from a dictionary.""" + return cls( + access_key_id=d.get("access_key_id", None), + secret_access_key=d.get("secret_access_key", None), + session_token=d.get("session_token", None), + ) @dataclass -class TagKeyValue: - key: Optional[str] = None - """name of the tag""" +class RegenerateDashboardResponse: + dashboard_id: Optional[str] = None + """Id of the 
regenerated monitoring dashboard.""" - value: Optional[str] = None - """value of the tag associated with the key, could be optional""" + parent_folder: Optional[str] = None + """The directory where the regenerated dashboard is stored.""" def as_dict(self) -> dict: - """Serializes the TagKeyValue into a dictionary suitable for use as a JSON request body.""" + """Serializes the RegenerateDashboardResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.key is not None: - body["key"] = self.key - if self.value is not None: - body["value"] = self.value + if self.dashboard_id is not None: + body["dashboard_id"] = self.dashboard_id + if self.parent_folder is not None: + body["parent_folder"] = self.parent_folder return body def as_shallow_dict(self) -> dict: - """Serializes the TagKeyValue into a shallow dictionary of its immediate attributes.""" + """Serializes the RegenerateDashboardResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.key is not None: - body["key"] = self.key - if self.value is not None: - body["value"] = self.value + if self.dashboard_id is not None: + body["dashboard_id"] = self.dashboard_id + if self.parent_folder is not None: + body["parent_folder"] = self.parent_folder return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> TagKeyValue: - """Deserializes the TagKeyValue from a dictionary.""" - return cls(key=d.get("key", None), value=d.get("value", None)) + def from_dict(cls, d: Dict[str, Any]) -> RegenerateDashboardResponse: + """Deserializes the RegenerateDashboardResponse from a dictionary.""" + return cls(dashboard_id=d.get("dashboard_id", None), parent_folder=d.get("parent_folder", None)) @dataclass -class TemporaryCredentials: - aws_temp_credentials: Optional[AwsCredentials] = None - - azure_aad: Optional[AzureActiveDirectoryToken] = None +class RegisteredModelAlias: + """Registered model alias.""" - expiration_time: Optional[int] = None - """Server time when the 
credential will expire, in epoch milliseconds. The API client is advised to - cache the credential given this expiration time.""" + alias_name: Optional[str] = None + """Name of the alias, e.g. 'champion' or 'latest_stable'""" - gcp_oauth_token: Optional[GcpOauthToken] = None + version_num: Optional[int] = None + """Integer version number of the model version to which this alias points.""" def as_dict(self) -> dict: - """Serializes the TemporaryCredentials into a dictionary suitable for use as a JSON request body.""" + """Serializes the RegisteredModelAlias into a dictionary suitable for use as a JSON request body.""" body = {} - if self.aws_temp_credentials: - body["aws_temp_credentials"] = self.aws_temp_credentials.as_dict() - if self.azure_aad: - body["azure_aad"] = self.azure_aad.as_dict() - if self.expiration_time is not None: - body["expiration_time"] = self.expiration_time - if self.gcp_oauth_token: - body["gcp_oauth_token"] = self.gcp_oauth_token.as_dict() + if self.alias_name is not None: + body["alias_name"] = self.alias_name + if self.version_num is not None: + body["version_num"] = self.version_num return body def as_shallow_dict(self) -> dict: - """Serializes the TemporaryCredentials into a shallow dictionary of its immediate attributes.""" + """Serializes the RegisteredModelAlias into a shallow dictionary of its immediate attributes.""" body = {} - if self.aws_temp_credentials: - body["aws_temp_credentials"] = self.aws_temp_credentials - if self.azure_aad: - body["azure_aad"] = self.azure_aad - if self.expiration_time is not None: - body["expiration_time"] = self.expiration_time - if self.gcp_oauth_token: - body["gcp_oauth_token"] = self.gcp_oauth_token + if self.alias_name is not None: + body["alias_name"] = self.alias_name + if self.version_num is not None: + body["version_num"] = self.version_num return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> TemporaryCredentials: - """Deserializes the TemporaryCredentials from a dictionary.""" 
- return cls( - aws_temp_credentials=_from_dict(d, "aws_temp_credentials", AwsCredentials), - azure_aad=_from_dict(d, "azure_aad", AzureActiveDirectoryToken), - expiration_time=d.get("expiration_time", None), - gcp_oauth_token=_from_dict(d, "gcp_oauth_token", GcpOauthToken), - ) + def from_dict(cls, d: Dict[str, Any]) -> RegisteredModelAlias: + """Deserializes the RegisteredModelAlias from a dictionary.""" + return cls(alias_name=d.get("alias_name", None), version_num=d.get("version_num", None)) @dataclass -class TriggeredUpdateStatus: - """Detailed status of an online table. Shown if the online table is in the ONLINE_TRIGGERED_UPDATE - or the ONLINE_NO_PENDING_UPDATE state.""" +class RegisteredModelInfo: + aliases: Optional[List[RegisteredModelAlias]] = None + """List of aliases associated with the registered model""" - last_processed_commit_version: Optional[int] = None - """The last source table Delta version that was synced to the online table. Note that this Delta - version may not be completely synced to the online table yet.""" + browse_only: Optional[bool] = None + """Indicates whether the principal is limited to retrieving metadata for the associated object + through the BROWSE privilege when include_browse is enabled in the request.""" - timestamp: Optional[str] = None - """The timestamp of the last time any data was synchronized from the source table to the online - table.""" + catalog_name: Optional[str] = None + """The name of the catalog where the schema and the registered model reside""" - triggered_update_progress: Optional[PipelineProgress] = None - """Progress of the active data synchronization pipeline.""" + comment: Optional[str] = None + """The comment attached to the registered model""" - def as_dict(self) -> dict: - """Serializes the TriggeredUpdateStatus into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.last_processed_commit_version is not None: - body["last_processed_commit_version"] = 
self.last_processed_commit_version - if self.timestamp is not None: - body["timestamp"] = self.timestamp - if self.triggered_update_progress: - body["triggered_update_progress"] = self.triggered_update_progress.as_dict() - return body + created_at: Optional[int] = None + """Creation timestamp of the registered model in milliseconds since the Unix epoch""" - def as_shallow_dict(self) -> dict: - """Serializes the TriggeredUpdateStatus into a shallow dictionary of its immediate attributes.""" - body = {} - if self.last_processed_commit_version is not None: - body["last_processed_commit_version"] = self.last_processed_commit_version - if self.timestamp is not None: - body["timestamp"] = self.timestamp - if self.triggered_update_progress: - body["triggered_update_progress"] = self.triggered_update_progress - return body + created_by: Optional[str] = None + """The identifier of the user who created the registered model""" - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> TriggeredUpdateStatus: - """Deserializes the TriggeredUpdateStatus from a dictionary.""" - return cls( - last_processed_commit_version=d.get("last_processed_commit_version", None), - timestamp=d.get("timestamp", None), - triggered_update_progress=_from_dict(d, "triggered_update_progress", PipelineProgress), - ) + full_name: Optional[str] = None + """The three-level (fully qualified) name of the registered model""" + metastore_id: Optional[str] = None + """The unique identifier of the metastore""" + + name: Optional[str] = None + """The name of the registered model""" + + owner: Optional[str] = None + """The identifier of the user who owns the registered model""" + + schema_name: Optional[str] = None + """The name of the schema where the registered model resides""" + + storage_location: Optional[str] = None + """The storage location on the cloud under which model version data files are stored""" + + updated_at: Optional[int] = None + """Last-update timestamp of the registered model in milliseconds 
since the Unix epoch""" + + updated_by: Optional[str] = None + """The identifier of the user who updated the registered model last time""" -@dataclass -class UnassignResponse: def as_dict(self) -> dict: - """Serializes the UnassignResponse into a dictionary suitable for use as a JSON request body.""" + """Serializes the RegisteredModelInfo into a dictionary suitable for use as a JSON request body.""" body = {} + if self.aliases: + body["aliases"] = [v.as_dict() for v in self.aliases] + if self.browse_only is not None: + body["browse_only"] = self.browse_only + if self.catalog_name is not None: + body["catalog_name"] = self.catalog_name + if self.comment is not None: + body["comment"] = self.comment + if self.created_at is not None: + body["created_at"] = self.created_at + if self.created_by is not None: + body["created_by"] = self.created_by + if self.full_name is not None: + body["full_name"] = self.full_name + if self.metastore_id is not None: + body["metastore_id"] = self.metastore_id + if self.name is not None: + body["name"] = self.name + if self.owner is not None: + body["owner"] = self.owner + if self.schema_name is not None: + body["schema_name"] = self.schema_name + if self.storage_location is not None: + body["storage_location"] = self.storage_location + if self.updated_at is not None: + body["updated_at"] = self.updated_at + if self.updated_by is not None: + body["updated_by"] = self.updated_by return body def as_shallow_dict(self) -> dict: - """Serializes the UnassignResponse into a shallow dictionary of its immediate attributes.""" + """Serializes the RegisteredModelInfo into a shallow dictionary of its immediate attributes.""" body = {} + if self.aliases: + body["aliases"] = self.aliases + if self.browse_only is not None: + body["browse_only"] = self.browse_only + if self.catalog_name is not None: + body["catalog_name"] = self.catalog_name + if self.comment is not None: + body["comment"] = self.comment + if self.created_at is not None: + 
body["created_at"] = self.created_at + if self.created_by is not None: + body["created_by"] = self.created_by + if self.full_name is not None: + body["full_name"] = self.full_name + if self.metastore_id is not None: + body["metastore_id"] = self.metastore_id + if self.name is not None: + body["name"] = self.name + if self.owner is not None: + body["owner"] = self.owner + if self.schema_name is not None: + body["schema_name"] = self.schema_name + if self.storage_location is not None: + body["storage_location"] = self.storage_location + if self.updated_at is not None: + body["updated_at"] = self.updated_at + if self.updated_by is not None: + body["updated_by"] = self.updated_by return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UnassignResponse: - """Deserializes the UnassignResponse from a dictionary.""" - return cls() + def from_dict(cls, d: Dict[str, Any]) -> RegisteredModelInfo: + """Deserializes the RegisteredModelInfo from a dictionary.""" + return cls( + aliases=_repeated_dict(d, "aliases", RegisteredModelAlias), + browse_only=d.get("browse_only", None), + catalog_name=d.get("catalog_name", None), + comment=d.get("comment", None), + created_at=d.get("created_at", None), + created_by=d.get("created_by", None), + full_name=d.get("full_name", None), + metastore_id=d.get("metastore_id", None), + name=d.get("name", None), + owner=d.get("owner", None), + schema_name=d.get("schema_name", None), + storage_location=d.get("storage_location", None), + updated_at=d.get("updated_at", None), + updated_by=d.get("updated_by", None), + ) @dataclass -class UpdateAssignmentResponse: - def as_dict(self) -> dict: - """Serializes the UpdateAssignmentResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body +class SchemaInfo: + """Next ID: 40""" - def as_shallow_dict(self) -> dict: - """Serializes the UpdateAssignmentResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body + browse_only: 
Optional[bool] = None + """Indicates whether the principal is limited to retrieving metadata for the associated object + through the BROWSE privilege when include_browse is enabled in the request.""" - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateAssignmentResponse: - """Deserializes the UpdateAssignmentResponse from a dictionary.""" - return cls() + catalog_name: Optional[str] = None + """Name of parent catalog.""" + catalog_type: Optional[CatalogType] = None + """The type of the parent catalog.""" -@dataclass -class UpdateCatalog: comment: Optional[str] = None """User-provided free-form text description.""" + created_at: Optional[int] = None + """Time at which this schema was created, in epoch milliseconds.""" + + created_by: Optional[str] = None + """Username of schema creator.""" + + effective_predictive_optimization_flag: Optional[EffectivePredictiveOptimizationFlag] = None + enable_predictive_optimization: Optional[EnablePredictiveOptimization] = None """Whether predictive optimization should be enabled for this object and objects under it.""" - isolation_mode: Optional[CatalogIsolationMode] = None - """Whether the current securable is accessible from all workspaces or a specific set of workspaces.""" - - name: Optional[str] = None - """The name of the catalog.""" + full_name: Optional[str] = None + """Full name of schema, in form of __catalog_name__.__schema_name__.""" - new_name: Optional[str] = None - """New name for the catalog.""" + metastore_id: Optional[str] = None + """Unique identifier of parent metastore.""" - options: Optional[Dict[str, str]] = None - """A map of key-value properties attached to the securable.""" + name: Optional[str] = None + """Name of schema, relative to parent catalog.""" owner: Optional[str] = None - """Username of current owner of catalog.""" + """Username of current owner of schema.""" properties: Optional[Dict[str, str]] = None """A map of key-value properties attached to the securable.""" + schema_id: 
Optional[str] = None + """The unique identifier of the schema.""" + + storage_location: Optional[str] = None + """Storage location for managed tables within schema.""" + + storage_root: Optional[str] = None + """Storage root URL for managed tables within schema.""" + + updated_at: Optional[int] = None + """Time at which this schema was created, in epoch milliseconds.""" + + updated_by: Optional[str] = None + """Username of user who last modified schema.""" + def as_dict(self) -> dict: - """Serializes the UpdateCatalog into a dictionary suitable for use as a JSON request body.""" + """Serializes the SchemaInfo into a dictionary suitable for use as a JSON request body.""" body = {} + if self.browse_only is not None: + body["browse_only"] = self.browse_only + if self.catalog_name is not None: + body["catalog_name"] = self.catalog_name + if self.catalog_type is not None: + body["catalog_type"] = self.catalog_type.value if self.comment is not None: body["comment"] = self.comment + if self.created_at is not None: + body["created_at"] = self.created_at + if self.created_by is not None: + body["created_by"] = self.created_by + if self.effective_predictive_optimization_flag: + body["effective_predictive_optimization_flag"] = self.effective_predictive_optimization_flag.as_dict() if self.enable_predictive_optimization is not None: body["enable_predictive_optimization"] = self.enable_predictive_optimization.value - if self.isolation_mode is not None: - body["isolation_mode"] = self.isolation_mode.value + if self.full_name is not None: + body["full_name"] = self.full_name + if self.metastore_id is not None: + body["metastore_id"] = self.metastore_id if self.name is not None: body["name"] = self.name - if self.new_name is not None: - body["new_name"] = self.new_name - if self.options: - body["options"] = self.options if self.owner is not None: body["owner"] = self.owner if self.properties: body["properties"] = self.properties + if self.schema_id is not None: + body["schema_id"] 
= self.schema_id + if self.storage_location is not None: + body["storage_location"] = self.storage_location + if self.storage_root is not None: + body["storage_root"] = self.storage_root + if self.updated_at is not None: + body["updated_at"] = self.updated_at + if self.updated_by is not None: + body["updated_by"] = self.updated_by return body def as_shallow_dict(self) -> dict: - """Serializes the UpdateCatalog into a shallow dictionary of its immediate attributes.""" + """Serializes the SchemaInfo into a shallow dictionary of its immediate attributes.""" body = {} + if self.browse_only is not None: + body["browse_only"] = self.browse_only + if self.catalog_name is not None: + body["catalog_name"] = self.catalog_name + if self.catalog_type is not None: + body["catalog_type"] = self.catalog_type if self.comment is not None: body["comment"] = self.comment + if self.created_at is not None: + body["created_at"] = self.created_at + if self.created_by is not None: + body["created_by"] = self.created_by + if self.effective_predictive_optimization_flag: + body["effective_predictive_optimization_flag"] = self.effective_predictive_optimization_flag if self.enable_predictive_optimization is not None: body["enable_predictive_optimization"] = self.enable_predictive_optimization - if self.isolation_mode is not None: - body["isolation_mode"] = self.isolation_mode + if self.full_name is not None: + body["full_name"] = self.full_name + if self.metastore_id is not None: + body["metastore_id"] = self.metastore_id if self.name is not None: body["name"] = self.name - if self.new_name is not None: - body["new_name"] = self.new_name - if self.options: - body["options"] = self.options if self.owner is not None: body["owner"] = self.owner if self.properties: body["properties"] = self.properties + if self.schema_id is not None: + body["schema_id"] = self.schema_id + if self.storage_location is not None: + body["storage_location"] = self.storage_location + if self.storage_root is not None: + 
body["storage_root"] = self.storage_root + if self.updated_at is not None: + body["updated_at"] = self.updated_at + if self.updated_by is not None: + body["updated_by"] = self.updated_by return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateCatalog: - """Deserializes the UpdateCatalog from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> SchemaInfo: + """Deserializes the SchemaInfo from a dictionary.""" return cls( + browse_only=d.get("browse_only", None), + catalog_name=d.get("catalog_name", None), + catalog_type=_enum(d, "catalog_type", CatalogType), comment=d.get("comment", None), + created_at=d.get("created_at", None), + created_by=d.get("created_by", None), + effective_predictive_optimization_flag=_from_dict( + d, "effective_predictive_optimization_flag", EffectivePredictiveOptimizationFlag + ), enable_predictive_optimization=_enum(d, "enable_predictive_optimization", EnablePredictiveOptimization), - isolation_mode=_enum(d, "isolation_mode", CatalogIsolationMode), + full_name=d.get("full_name", None), + metastore_id=d.get("metastore_id", None), name=d.get("name", None), - new_name=d.get("new_name", None), - options=d.get("options", None), owner=d.get("owner", None), properties=d.get("properties", None), + schema_id=d.get("schema_id", None), + storage_location=d.get("storage_location", None), + storage_root=d.get("storage_root", None), + updated_at=d.get("updated_at", None), + updated_by=d.get("updated_by", None), ) +class SecurableKind(Enum): + + TABLE_DB_STORAGE = "TABLE_DB_STORAGE" + TABLE_DELTA = "TABLE_DELTA" + TABLE_DELTASHARING = "TABLE_DELTASHARING" + TABLE_DELTASHARING_MUTABLE = "TABLE_DELTASHARING_MUTABLE" + TABLE_DELTA_EXTERNAL = "TABLE_DELTA_EXTERNAL" + TABLE_DELTA_ICEBERG_DELTASHARING = "TABLE_DELTA_ICEBERG_DELTASHARING" + TABLE_DELTA_ICEBERG_MANAGED = "TABLE_DELTA_ICEBERG_MANAGED" + TABLE_DELTA_UNIFORM_HUDI_EXTERNAL = "TABLE_DELTA_UNIFORM_HUDI_EXTERNAL" + TABLE_DELTA_UNIFORM_ICEBERG_EXTERNAL = 
"TABLE_DELTA_UNIFORM_ICEBERG_EXTERNAL" + TABLE_DELTA_UNIFORM_ICEBERG_FOREIGN_HIVE_METASTORE_EXTERNAL = ( + "TABLE_DELTA_UNIFORM_ICEBERG_FOREIGN_HIVE_METASTORE_EXTERNAL" + ) + TABLE_DELTA_UNIFORM_ICEBERG_FOREIGN_HIVE_METASTORE_MANAGED = ( + "TABLE_DELTA_UNIFORM_ICEBERG_FOREIGN_HIVE_METASTORE_MANAGED" + ) + TABLE_DELTA_UNIFORM_ICEBERG_FOREIGN_SNOWFLAKE = "TABLE_DELTA_UNIFORM_ICEBERG_FOREIGN_SNOWFLAKE" + TABLE_EXTERNAL = "TABLE_EXTERNAL" + TABLE_FEATURE_STORE = "TABLE_FEATURE_STORE" + TABLE_FEATURE_STORE_EXTERNAL = "TABLE_FEATURE_STORE_EXTERNAL" + TABLE_FOREIGN_BIGQUERY = "TABLE_FOREIGN_BIGQUERY" + TABLE_FOREIGN_DATABRICKS = "TABLE_FOREIGN_DATABRICKS" + TABLE_FOREIGN_DELTASHARING = "TABLE_FOREIGN_DELTASHARING" + TABLE_FOREIGN_HIVE_METASTORE = "TABLE_FOREIGN_HIVE_METASTORE" + TABLE_FOREIGN_HIVE_METASTORE_DBFS_EXTERNAL = "TABLE_FOREIGN_HIVE_METASTORE_DBFS_EXTERNAL" + TABLE_FOREIGN_HIVE_METASTORE_DBFS_MANAGED = "TABLE_FOREIGN_HIVE_METASTORE_DBFS_MANAGED" + TABLE_FOREIGN_HIVE_METASTORE_DBFS_SHALLOW_CLONE_EXTERNAL = ( + "TABLE_FOREIGN_HIVE_METASTORE_DBFS_SHALLOW_CLONE_EXTERNAL" + ) + TABLE_FOREIGN_HIVE_METASTORE_DBFS_SHALLOW_CLONE_MANAGED = "TABLE_FOREIGN_HIVE_METASTORE_DBFS_SHALLOW_CLONE_MANAGED" + TABLE_FOREIGN_HIVE_METASTORE_DBFS_VIEW = "TABLE_FOREIGN_HIVE_METASTORE_DBFS_VIEW" + TABLE_FOREIGN_HIVE_METASTORE_EXTERNAL = "TABLE_FOREIGN_HIVE_METASTORE_EXTERNAL" + TABLE_FOREIGN_HIVE_METASTORE_MANAGED = "TABLE_FOREIGN_HIVE_METASTORE_MANAGED" + TABLE_FOREIGN_HIVE_METASTORE_SHALLOW_CLONE_EXTERNAL = "TABLE_FOREIGN_HIVE_METASTORE_SHALLOW_CLONE_EXTERNAL" + TABLE_FOREIGN_HIVE_METASTORE_SHALLOW_CLONE_MANAGED = "TABLE_FOREIGN_HIVE_METASTORE_SHALLOW_CLONE_MANAGED" + TABLE_FOREIGN_HIVE_METASTORE_VIEW = "TABLE_FOREIGN_HIVE_METASTORE_VIEW" + TABLE_FOREIGN_MONGODB = "TABLE_FOREIGN_MONGODB" + TABLE_FOREIGN_MYSQL = "TABLE_FOREIGN_MYSQL" + TABLE_FOREIGN_NETSUITE = "TABLE_FOREIGN_NETSUITE" + TABLE_FOREIGN_ORACLE = "TABLE_FOREIGN_ORACLE" + TABLE_FOREIGN_POSTGRESQL = "TABLE_FOREIGN_POSTGRESQL" 
+ TABLE_FOREIGN_REDSHIFT = "TABLE_FOREIGN_REDSHIFT" + TABLE_FOREIGN_SALESFORCE = "TABLE_FOREIGN_SALESFORCE" + TABLE_FOREIGN_SALESFORCE_DATA_CLOUD = "TABLE_FOREIGN_SALESFORCE_DATA_CLOUD" + TABLE_FOREIGN_SALESFORCE_DATA_CLOUD_FILE_SHARING = "TABLE_FOREIGN_SALESFORCE_DATA_CLOUD_FILE_SHARING" + TABLE_FOREIGN_SALESFORCE_DATA_CLOUD_FILE_SHARING_VIEW = "TABLE_FOREIGN_SALESFORCE_DATA_CLOUD_FILE_SHARING_VIEW" + TABLE_FOREIGN_SNOWFLAKE = "TABLE_FOREIGN_SNOWFLAKE" + TABLE_FOREIGN_SQLDW = "TABLE_FOREIGN_SQLDW" + TABLE_FOREIGN_SQLSERVER = "TABLE_FOREIGN_SQLSERVER" + TABLE_FOREIGN_TERADATA = "TABLE_FOREIGN_TERADATA" + TABLE_FOREIGN_WORKDAY_RAAS = "TABLE_FOREIGN_WORKDAY_RAAS" + TABLE_ICEBERG_UNIFORM_MANAGED = "TABLE_ICEBERG_UNIFORM_MANAGED" + TABLE_INTERNAL = "TABLE_INTERNAL" + TABLE_MANAGED_POSTGRESQL = "TABLE_MANAGED_POSTGRESQL" + TABLE_MATERIALIZED_VIEW = "TABLE_MATERIALIZED_VIEW" + TABLE_MATERIALIZED_VIEW_DELTASHARING = "TABLE_MATERIALIZED_VIEW_DELTASHARING" + TABLE_METRIC_VIEW = "TABLE_METRIC_VIEW" + TABLE_ONLINE_VECTOR_INDEX_DIRECT = "TABLE_ONLINE_VECTOR_INDEX_DIRECT" + TABLE_ONLINE_VECTOR_INDEX_REPLICA = "TABLE_ONLINE_VECTOR_INDEX_REPLICA" + TABLE_ONLINE_VIEW = "TABLE_ONLINE_VIEW" + TABLE_STANDARD = "TABLE_STANDARD" + TABLE_STREAMING_LIVE_TABLE = "TABLE_STREAMING_LIVE_TABLE" + TABLE_STREAMING_LIVE_TABLE_DELTASHARING = "TABLE_STREAMING_LIVE_TABLE_DELTASHARING" + TABLE_SYSTEM = "TABLE_SYSTEM" + TABLE_SYSTEM_DELTASHARING = "TABLE_SYSTEM_DELTASHARING" + TABLE_VIEW = "TABLE_VIEW" + TABLE_VIEW_DELTASHARING = "TABLE_VIEW_DELTASHARING" + + @dataclass -class UpdateCatalogWorkspaceBindingsResponse: - workspaces: Optional[List[int]] = None - """A list of workspace IDs""" +class SecurableKindManifest: + """Manifest of a specific securable kind.""" + + assignable_privileges: Optional[List[str]] = None + """Privileges that can be assigned to the securable.""" + + capabilities: Optional[List[str]] = None + """A list of capabilities in the securable kind.""" + + options: 
Optional[List[OptionSpec]] = None + """Detailed specs of allowed options.""" + + securable_kind: Optional[SecurableKind] = None + """Securable kind to get manifest of.""" + + securable_type: Optional[SecurableType] = None + """Securable Type of the kind.""" def as_dict(self) -> dict: - """Serializes the UpdateCatalogWorkspaceBindingsResponse into a dictionary suitable for use as a JSON request body.""" + """Serializes the SecurableKindManifest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.workspaces: - body["workspaces"] = [v for v in self.workspaces] + if self.assignable_privileges: + body["assignable_privileges"] = [v for v in self.assignable_privileges] + if self.capabilities: + body["capabilities"] = [v for v in self.capabilities] + if self.options: + body["options"] = [v.as_dict() for v in self.options] + if self.securable_kind is not None: + body["securable_kind"] = self.securable_kind.value + if self.securable_type is not None: + body["securable_type"] = self.securable_type.value return body def as_shallow_dict(self) -> dict: - """Serializes the UpdateCatalogWorkspaceBindingsResponse into a shallow dictionary of its immediate attributes.""" + """Serializes the SecurableKindManifest into a shallow dictionary of its immediate attributes.""" body = {} - if self.workspaces: - body["workspaces"] = self.workspaces + if self.assignable_privileges: + body["assignable_privileges"] = self.assignable_privileges + if self.capabilities: + body["capabilities"] = self.capabilities + if self.options: + body["options"] = self.options + if self.securable_kind is not None: + body["securable_kind"] = self.securable_kind + if self.securable_type is not None: + body["securable_type"] = self.securable_type return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateCatalogWorkspaceBindingsResponse: - """Deserializes the UpdateCatalogWorkspaceBindingsResponse from a dictionary.""" - return cls(workspaces=d.get("workspaces", None)) + 
def from_dict(cls, d: Dict[str, Any]) -> SecurableKindManifest: + """Deserializes the SecurableKindManifest from a dictionary.""" + return cls( + assignable_privileges=d.get("assignable_privileges", None), + capabilities=d.get("capabilities", None), + options=_repeated_dict(d, "options", OptionSpec), + securable_kind=_enum(d, "securable_kind", SecurableKind), + securable_type=_enum(d, "securable_type", SecurableType), + ) -@dataclass -class UpdateConnection: - options: Dict[str, str] - """A map of key-value properties attached to the securable.""" +class SecurableType(Enum): + """The type of Unity Catalog securable.""" - name: Optional[str] = None - """Name of the connection.""" + CATALOG = "CATALOG" + CLEAN_ROOM = "CLEAN_ROOM" + CONNECTION = "CONNECTION" + CREDENTIAL = "CREDENTIAL" + EXTERNAL_LOCATION = "EXTERNAL_LOCATION" + EXTERNAL_METADATA = "EXTERNAL_METADATA" + FUNCTION = "FUNCTION" + METASTORE = "METASTORE" + PIPELINE = "PIPELINE" + PROVIDER = "PROVIDER" + RECIPIENT = "RECIPIENT" + SCHEMA = "SCHEMA" + SHARE = "SHARE" + STAGING_TABLE = "STAGING_TABLE" + STORAGE_CREDENTIAL = "STORAGE_CREDENTIAL" + TABLE = "TABLE" + VOLUME = "VOLUME" - new_name: Optional[str] = None - """New name for the connection.""" - owner: Optional[str] = None - """Username of current owner of the connection.""" +@dataclass +class SseEncryptionDetails: + """Server-Side Encryption properties for clients communicating with AWS s3.""" + + algorithm: Optional[SseEncryptionDetailsAlgorithm] = None + """Sets the value of the 'x-amz-server-side-encryption' header in S3 request.""" + + aws_kms_key_arn: Optional[str] = None + """Optional. The ARN of the SSE-KMS key used with the S3 location, when algorithm = "SSE-KMS". 
Sets + the value of the 'x-amz-server-side-encryption-aws-kms-key-id' header.""" def as_dict(self) -> dict: - """Serializes the UpdateConnection into a dictionary suitable for use as a JSON request body.""" + """Serializes the SseEncryptionDetails into a dictionary suitable for use as a JSON request body.""" body = {} - if self.name is not None: - body["name"] = self.name - if self.new_name is not None: - body["new_name"] = self.new_name - if self.options: - body["options"] = self.options - if self.owner is not None: - body["owner"] = self.owner + if self.algorithm is not None: + body["algorithm"] = self.algorithm.value + if self.aws_kms_key_arn is not None: + body["aws_kms_key_arn"] = self.aws_kms_key_arn return body def as_shallow_dict(self) -> dict: - """Serializes the UpdateConnection into a shallow dictionary of its immediate attributes.""" + """Serializes the SseEncryptionDetails into a shallow dictionary of its immediate attributes.""" body = {} - if self.name is not None: - body["name"] = self.name - if self.new_name is not None: - body["new_name"] = self.new_name - if self.options: - body["options"] = self.options - if self.owner is not None: - body["owner"] = self.owner + if self.algorithm is not None: + body["algorithm"] = self.algorithm + if self.aws_kms_key_arn is not None: + body["aws_kms_key_arn"] = self.aws_kms_key_arn return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateConnection: - """Deserializes the UpdateConnection from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> SseEncryptionDetails: + """Deserializes the SseEncryptionDetails from a dictionary.""" return cls( - name=d.get("name", None), - new_name=d.get("new_name", None), - options=d.get("options", None), - owner=d.get("owner", None), + algorithm=_enum(d, "algorithm", SseEncryptionDetailsAlgorithm), + aws_kms_key_arn=d.get("aws_kms_key_arn", None), ) +class SseEncryptionDetailsAlgorithm(Enum): + + AWS_SSE_KMS = "AWS_SSE_KMS" + AWS_SSE_S3 = "AWS_SSE_S3" + + 
@dataclass -class UpdateCredentialRequest: - aws_iam_role: Optional[AwsIamRole] = None +class StorageCredentialInfo: + aws_iam_role: Optional[AwsIamRoleResponse] = None """The AWS IAM role configuration.""" - azure_managed_identity: Optional[AzureManagedIdentity] = None + azure_managed_identity: Optional[AzureManagedIdentityResponse] = None """The Azure managed identity configuration.""" azure_service_principal: Optional[AzureServicePrincipal] = None """The Azure service principal configuration.""" + cloudflare_api_token: Optional[CloudflareApiToken] = None + """The Cloudflare API token configuration.""" + comment: Optional[str] = None """Comment associated with the credential.""" - databricks_gcp_service_account: Optional[DatabricksGcpServiceAccount] = None + created_at: Optional[int] = None + """Time at which this credential was created, in epoch milliseconds.""" + + created_by: Optional[str] = None + """Username of credential creator.""" + + databricks_gcp_service_account: Optional[DatabricksGcpServiceAccountResponse] = None """The Databricks managed GCP service account configuration.""" - force: Optional[bool] = None - """Force an update even if there are dependent services (when purpose is **SERVICE**) or dependent - external locations and external tables (when purpose is **STORAGE**).""" + full_name: Optional[str] = None + """The full name of the credential.""" + + id: Optional[str] = None + """The unique identifier of the credential.""" isolation_mode: Optional[IsolationMode] = None """Whether the current securable is accessible from all workspaces or a specific set of workspaces.""" - name_arg: Optional[str] = None - """Name of the credential.""" + metastore_id: Optional[str] = None + """Unique identifier of the parent metastore.""" - new_name: Optional[str] = None - """New name of credential.""" + name: Optional[str] = None + """The credential name. 
The name must be unique among storage and service credentials within the + metastore.""" owner: Optional[str] = None """Username of current owner of credential.""" @@ -10115,11 +7968,18 @@ class UpdateCredentialRequest: """Whether the credential is usable only for read operations. Only applicable when purpose is **STORAGE**.""" - skip_validation: Optional[bool] = None - """Supply true to this argument to skip validation of the updated credential.""" + updated_at: Optional[int] = None + """Time at which this credential was last modified, in epoch milliseconds.""" + + updated_by: Optional[str] = None + """Username of user who last modified the credential.""" + + used_for_managed_storage: Optional[bool] = None + """Whether this credential is the current metastore's root storage credential. Only applicable when + purpose is **STORAGE**.""" def as_dict(self) -> dict: - """Serializes the UpdateCredentialRequest into a dictionary suitable for use as a JSON request body.""" + """Serializes the StorageCredentialInfo into a dictionary suitable for use as a JSON request body.""" body = {} if self.aws_iam_role: body["aws_iam_role"] = self.aws_iam_role.as_dict() @@ -10127,1210 +7987,1223 @@ def as_dict(self) -> dict: body["azure_managed_identity"] = self.azure_managed_identity.as_dict() if self.azure_service_principal: body["azure_service_principal"] = self.azure_service_principal.as_dict() + if self.cloudflare_api_token: + body["cloudflare_api_token"] = self.cloudflare_api_token.as_dict() if self.comment is not None: body["comment"] = self.comment - if self.databricks_gcp_service_account: - body["databricks_gcp_service_account"] = self.databricks_gcp_service_account.as_dict() - if self.force is not None: - body["force"] = self.force - if self.isolation_mode is not None: - body["isolation_mode"] = self.isolation_mode.value - if self.name_arg is not None: - body["name_arg"] = self.name_arg - if self.new_name is not None: - body["new_name"] = self.new_name - if self.owner is not 
None: - body["owner"] = self.owner - if self.read_only is not None: - body["read_only"] = self.read_only - if self.skip_validation is not None: - body["skip_validation"] = self.skip_validation - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateCredentialRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.aws_iam_role: - body["aws_iam_role"] = self.aws_iam_role - if self.azure_managed_identity: - body["azure_managed_identity"] = self.azure_managed_identity - if self.azure_service_principal: - body["azure_service_principal"] = self.azure_service_principal - if self.comment is not None: - body["comment"] = self.comment - if self.databricks_gcp_service_account: - body["databricks_gcp_service_account"] = self.databricks_gcp_service_account - if self.force is not None: - body["force"] = self.force - if self.isolation_mode is not None: - body["isolation_mode"] = self.isolation_mode - if self.name_arg is not None: - body["name_arg"] = self.name_arg - if self.new_name is not None: - body["new_name"] = self.new_name - if self.owner is not None: - body["owner"] = self.owner - if self.read_only is not None: - body["read_only"] = self.read_only - if self.skip_validation is not None: - body["skip_validation"] = self.skip_validation - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateCredentialRequest: - """Deserializes the UpdateCredentialRequest from a dictionary.""" - return cls( - aws_iam_role=_from_dict(d, "aws_iam_role", AwsIamRole), - azure_managed_identity=_from_dict(d, "azure_managed_identity", AzureManagedIdentity), - azure_service_principal=_from_dict(d, "azure_service_principal", AzureServicePrincipal), - comment=d.get("comment", None), - databricks_gcp_service_account=_from_dict(d, "databricks_gcp_service_account", DatabricksGcpServiceAccount), - force=d.get("force", None), - isolation_mode=_enum(d, "isolation_mode", IsolationMode), - name_arg=d.get("name_arg", None), - 
new_name=d.get("new_name", None), - owner=d.get("owner", None), - read_only=d.get("read_only", None), - skip_validation=d.get("skip_validation", None), - ) - - -@dataclass -class UpdateExternalLocation: - comment: Optional[str] = None - """User-provided free-form text description.""" - - credential_name: Optional[str] = None - """Name of the storage credential used with this location.""" - - enable_file_events: Optional[bool] = None - """Whether to enable file events on this external location.""" - - encryption_details: Optional[EncryptionDetails] = None - - fallback: Optional[bool] = None - """Indicates whether fallback mode is enabled for this external location. When fallback mode is - enabled, the access to the location falls back to cluster credentials if UC credentials are not - sufficient.""" - - file_event_queue: Optional[FileEventQueue] = None - """File event queue settings.""" - - force: Optional[bool] = None - """Force update even if changing url invalidates dependent external tables or mounts.""" - - isolation_mode: Optional[IsolationMode] = None - - name: Optional[str] = None - """Name of the external location.""" - - new_name: Optional[str] = None - """New name for the external location.""" - - owner: Optional[str] = None - """The owner of the external location.""" - - read_only: Optional[bool] = None - """Indicates whether the external location is read-only.""" - - skip_validation: Optional[bool] = None - """Skips validation of the storage credential associated with the external location.""" - - url: Optional[str] = None - """Path URL of the external location.""" - - def as_dict(self) -> dict: - """Serializes the UpdateExternalLocation into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.comment is not None: - body["comment"] = self.comment - if self.credential_name is not None: - body["credential_name"] = self.credential_name - if self.enable_file_events is not None: - body["enable_file_events"] = 
self.enable_file_events - if self.encryption_details: - body["encryption_details"] = self.encryption_details.as_dict() - if self.fallback is not None: - body["fallback"] = self.fallback - if self.file_event_queue: - body["file_event_queue"] = self.file_event_queue.as_dict() - if self.force is not None: - body["force"] = self.force + if self.created_at is not None: + body["created_at"] = self.created_at + if self.created_by is not None: + body["created_by"] = self.created_by + if self.databricks_gcp_service_account: + body["databricks_gcp_service_account"] = self.databricks_gcp_service_account.as_dict() + if self.full_name is not None: + body["full_name"] = self.full_name + if self.id is not None: + body["id"] = self.id if self.isolation_mode is not None: body["isolation_mode"] = self.isolation_mode.value + if self.metastore_id is not None: + body["metastore_id"] = self.metastore_id if self.name is not None: body["name"] = self.name - if self.new_name is not None: - body["new_name"] = self.new_name if self.owner is not None: body["owner"] = self.owner if self.read_only is not None: body["read_only"] = self.read_only - if self.skip_validation is not None: - body["skip_validation"] = self.skip_validation - if self.url is not None: - body["url"] = self.url + if self.updated_at is not None: + body["updated_at"] = self.updated_at + if self.updated_by is not None: + body["updated_by"] = self.updated_by + if self.used_for_managed_storage is not None: + body["used_for_managed_storage"] = self.used_for_managed_storage return body def as_shallow_dict(self) -> dict: - """Serializes the UpdateExternalLocation into a shallow dictionary of its immediate attributes.""" + """Serializes the StorageCredentialInfo into a shallow dictionary of its immediate attributes.""" body = {} + if self.aws_iam_role: + body["aws_iam_role"] = self.aws_iam_role + if self.azure_managed_identity: + body["azure_managed_identity"] = self.azure_managed_identity + if self.azure_service_principal: + 
body["azure_service_principal"] = self.azure_service_principal + if self.cloudflare_api_token: + body["cloudflare_api_token"] = self.cloudflare_api_token if self.comment is not None: body["comment"] = self.comment - if self.credential_name is not None: - body["credential_name"] = self.credential_name - if self.enable_file_events is not None: - body["enable_file_events"] = self.enable_file_events - if self.encryption_details: - body["encryption_details"] = self.encryption_details - if self.fallback is not None: - body["fallback"] = self.fallback - if self.file_event_queue: - body["file_event_queue"] = self.file_event_queue - if self.force is not None: - body["force"] = self.force + if self.created_at is not None: + body["created_at"] = self.created_at + if self.created_by is not None: + body["created_by"] = self.created_by + if self.databricks_gcp_service_account: + body["databricks_gcp_service_account"] = self.databricks_gcp_service_account + if self.full_name is not None: + body["full_name"] = self.full_name + if self.id is not None: + body["id"] = self.id if self.isolation_mode is not None: body["isolation_mode"] = self.isolation_mode + if self.metastore_id is not None: + body["metastore_id"] = self.metastore_id if self.name is not None: body["name"] = self.name - if self.new_name is not None: - body["new_name"] = self.new_name if self.owner is not None: body["owner"] = self.owner if self.read_only is not None: body["read_only"] = self.read_only - if self.skip_validation is not None: - body["skip_validation"] = self.skip_validation - if self.url is not None: - body["url"] = self.url + if self.updated_at is not None: + body["updated_at"] = self.updated_at + if self.updated_by is not None: + body["updated_by"] = self.updated_by + if self.used_for_managed_storage is not None: + body["used_for_managed_storage"] = self.used_for_managed_storage return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateExternalLocation: - """Deserializes the 
UpdateExternalLocation from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> StorageCredentialInfo: + """Deserializes the StorageCredentialInfo from a dictionary.""" return cls( + aws_iam_role=_from_dict(d, "aws_iam_role", AwsIamRoleResponse), + azure_managed_identity=_from_dict(d, "azure_managed_identity", AzureManagedIdentityResponse), + azure_service_principal=_from_dict(d, "azure_service_principal", AzureServicePrincipal), + cloudflare_api_token=_from_dict(d, "cloudflare_api_token", CloudflareApiToken), comment=d.get("comment", None), - credential_name=d.get("credential_name", None), - enable_file_events=d.get("enable_file_events", None), - encryption_details=_from_dict(d, "encryption_details", EncryptionDetails), - fallback=d.get("fallback", None), - file_event_queue=_from_dict(d, "file_event_queue", FileEventQueue), - force=d.get("force", None), + created_at=d.get("created_at", None), + created_by=d.get("created_by", None), + databricks_gcp_service_account=_from_dict( + d, "databricks_gcp_service_account", DatabricksGcpServiceAccountResponse + ), + full_name=d.get("full_name", None), + id=d.get("id", None), isolation_mode=_enum(d, "isolation_mode", IsolationMode), + metastore_id=d.get("metastore_id", None), name=d.get("name", None), - new_name=d.get("new_name", None), owner=d.get("owner", None), read_only=d.get("read_only", None), - skip_validation=d.get("skip_validation", None), - url=d.get("url", None), + updated_at=d.get("updated_at", None), + updated_by=d.get("updated_by", None), + used_for_managed_storage=d.get("used_for_managed_storage", None), ) @dataclass -class UpdateFunction: - name: Optional[str] = None - """The fully-qualified name of the function (of the form - __catalog_name__.__schema_name__.__function__name__).""" +class SystemSchemaInfo: + schema: str + """Name of the system schema.""" - owner: Optional[str] = None - """Username of current owner of function.""" + state: str + """The current state of enablement for the system 
schema. An empty string means the system schema + is available and ready for opt-in. Possible values: AVAILABLE | ENABLE_INITIALIZED | + ENABLE_COMPLETED | DISABLE_INITIALIZED | UNAVAILABLE""" def as_dict(self) -> dict: - """Serializes the UpdateFunction into a dictionary suitable for use as a JSON request body.""" + """Serializes the SystemSchemaInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.name is not None: - body["name"] = self.name - if self.owner is not None: - body["owner"] = self.owner + if self.schema is not None: + body["schema"] = self.schema + if self.state is not None: + body["state"] = self.state return body def as_shallow_dict(self) -> dict: - """Serializes the UpdateFunction into a shallow dictionary of its immediate attributes.""" + """Serializes the SystemSchemaInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.name is not None: - body["name"] = self.name - if self.owner is not None: - body["owner"] = self.owner + if self.schema is not None: + body["schema"] = self.schema + if self.state is not None: + body["state"] = self.state return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateFunction: - """Deserializes the UpdateFunction from a dictionary.""" - return cls(name=d.get("name", None), owner=d.get("owner", None)) - + def from_dict(cls, d: Dict[str, Any]) -> SystemSchemaInfo: + """Deserializes the SystemSchemaInfo from a dictionary.""" + return cls(schema=d.get("schema", None), state=d.get("state", None)) -@dataclass -class UpdateMetastore: - delta_sharing_organization_name: Optional[str] = None - """The organization name of a Delta Sharing entity, to be used in Databricks-to-Databricks Delta - Sharing as the official name.""" - delta_sharing_recipient_token_lifetime_in_seconds: Optional[int] = None - """The lifetime of delta sharing recipient token in seconds.""" +class SystemType(Enum): - delta_sharing_scope: Optional[DeltaSharingScopeEnum] = None - 
"""The scope of Delta Sharing enabled for the metastore.""" + AMAZON_REDSHIFT = "AMAZON_REDSHIFT" + AZURE_SYNAPSE = "AZURE_SYNAPSE" + CONFLUENT = "CONFLUENT" + DATABRICKS = "DATABRICKS" + GOOGLE_BIGQUERY = "GOOGLE_BIGQUERY" + KAFKA = "KAFKA" + LOOKER = "LOOKER" + MICROSOFT_FABRIC = "MICROSOFT_FABRIC" + MICROSOFT_SQL_SERVER = "MICROSOFT_SQL_SERVER" + MONGODB = "MONGODB" + MYSQL = "MYSQL" + ORACLE = "ORACLE" + OTHER = "OTHER" + POSTGRESQL = "POSTGRESQL" + POWER_BI = "POWER_BI" + SALESFORCE = "SALESFORCE" + SAP = "SAP" + SERVICENOW = "SERVICENOW" + SNOWFLAKE = "SNOWFLAKE" + TABLEAU = "TABLEAU" + TERADATA = "TERADATA" + WORKDAY = "WORKDAY" - id: Optional[str] = None - """Unique ID of the metastore.""" - new_name: Optional[str] = None - """New name for the metastore.""" +@dataclass +class TableConstraint: + """A table constraint, as defined by *one* of the following fields being set: + __primary_key_constraint__, __foreign_key_constraint__, __named_table_constraint__.""" - owner: Optional[str] = None - """The owner of the metastore.""" + foreign_key_constraint: Optional[ForeignKeyConstraint] = None - privilege_model_version: Optional[str] = None - """Privilege model version of the metastore, of the form `major.minor` (e.g., `1.0`).""" + named_table_constraint: Optional[NamedTableConstraint] = None - storage_root_credential_id: Optional[str] = None - """UUID of storage credential to access the metastore storage_root.""" + primary_key_constraint: Optional[PrimaryKeyConstraint] = None def as_dict(self) -> dict: - """Serializes the UpdateMetastore into a dictionary suitable for use as a JSON request body.""" + """Serializes the TableConstraint into a dictionary suitable for use as a JSON request body.""" body = {} - if self.delta_sharing_organization_name is not None: - body["delta_sharing_organization_name"] = self.delta_sharing_organization_name - if self.delta_sharing_recipient_token_lifetime_in_seconds is not None: - 
body["delta_sharing_recipient_token_lifetime_in_seconds"] = ( - self.delta_sharing_recipient_token_lifetime_in_seconds - ) - if self.delta_sharing_scope is not None: - body["delta_sharing_scope"] = self.delta_sharing_scope.value - if self.id is not None: - body["id"] = self.id - if self.new_name is not None: - body["new_name"] = self.new_name - if self.owner is not None: - body["owner"] = self.owner - if self.privilege_model_version is not None: - body["privilege_model_version"] = self.privilege_model_version - if self.storage_root_credential_id is not None: - body["storage_root_credential_id"] = self.storage_root_credential_id + if self.foreign_key_constraint: + body["foreign_key_constraint"] = self.foreign_key_constraint.as_dict() + if self.named_table_constraint: + body["named_table_constraint"] = self.named_table_constraint.as_dict() + if self.primary_key_constraint: + body["primary_key_constraint"] = self.primary_key_constraint.as_dict() return body def as_shallow_dict(self) -> dict: - """Serializes the UpdateMetastore into a shallow dictionary of its immediate attributes.""" + """Serializes the TableConstraint into a shallow dictionary of its immediate attributes.""" body = {} - if self.delta_sharing_organization_name is not None: - body["delta_sharing_organization_name"] = self.delta_sharing_organization_name - if self.delta_sharing_recipient_token_lifetime_in_seconds is not None: - body["delta_sharing_recipient_token_lifetime_in_seconds"] = ( - self.delta_sharing_recipient_token_lifetime_in_seconds - ) - if self.delta_sharing_scope is not None: - body["delta_sharing_scope"] = self.delta_sharing_scope - if self.id is not None: - body["id"] = self.id - if self.new_name is not None: - body["new_name"] = self.new_name - if self.owner is not None: - body["owner"] = self.owner - if self.privilege_model_version is not None: - body["privilege_model_version"] = self.privilege_model_version - if self.storage_root_credential_id is not None: - 
body["storage_root_credential_id"] = self.storage_root_credential_id + if self.foreign_key_constraint: + body["foreign_key_constraint"] = self.foreign_key_constraint + if self.named_table_constraint: + body["named_table_constraint"] = self.named_table_constraint + if self.primary_key_constraint: + body["primary_key_constraint"] = self.primary_key_constraint return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateMetastore: - """Deserializes the UpdateMetastore from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> TableConstraint: + """Deserializes the TableConstraint from a dictionary.""" return cls( - delta_sharing_organization_name=d.get("delta_sharing_organization_name", None), - delta_sharing_recipient_token_lifetime_in_seconds=d.get( - "delta_sharing_recipient_token_lifetime_in_seconds", None - ), - delta_sharing_scope=_enum(d, "delta_sharing_scope", DeltaSharingScopeEnum), - id=d.get("id", None), - new_name=d.get("new_name", None), - owner=d.get("owner", None), - privilege_model_version=d.get("privilege_model_version", None), - storage_root_credential_id=d.get("storage_root_credential_id", None), + foreign_key_constraint=_from_dict(d, "foreign_key_constraint", ForeignKeyConstraint), + named_table_constraint=_from_dict(d, "named_table_constraint", NamedTableConstraint), + primary_key_constraint=_from_dict(d, "primary_key_constraint", PrimaryKeyConstraint), ) @dataclass -class UpdateMetastoreAssignment: - default_catalog_name: Optional[str] = None - """The name of the default catalog in the metastore. This field is deprecated. 
Please use "Default - Namespace API" to configure the default catalog for a Databricks workspace.""" - - metastore_id: Optional[str] = None - """The unique ID of the metastore.""" - - workspace_id: Optional[int] = None - """A workspace ID.""" +class TableDependency: + """A table that is dependent on a SQL object.""" + + table_full_name: str + """Full name of the dependent table, in the form of + __catalog_name__.__schema_name__.__table_name__.""" def as_dict(self) -> dict: - """Serializes the UpdateMetastoreAssignment into a dictionary suitable for use as a JSON request body.""" + """Serializes the TableDependency into a dictionary suitable for use as a JSON request body.""" body = {} - if self.default_catalog_name is not None: - body["default_catalog_name"] = self.default_catalog_name - if self.metastore_id is not None: - body["metastore_id"] = self.metastore_id - if self.workspace_id is not None: - body["workspace_id"] = self.workspace_id + if self.table_full_name is not None: + body["table_full_name"] = self.table_full_name return body def as_shallow_dict(self) -> dict: - """Serializes the UpdateMetastoreAssignment into a shallow dictionary of its immediate attributes.""" + """Serializes the TableDependency into a shallow dictionary of its immediate attributes.""" body = {} - if self.default_catalog_name is not None: - body["default_catalog_name"] = self.default_catalog_name - if self.metastore_id is not None: - body["metastore_id"] = self.metastore_id - if self.workspace_id is not None: - body["workspace_id"] = self.workspace_id + if self.table_full_name is not None: + body["table_full_name"] = self.table_full_name return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateMetastoreAssignment: - """Deserializes the UpdateMetastoreAssignment from a dictionary.""" - return cls( - default_catalog_name=d.get("default_catalog_name", None), - metastore_id=d.get("metastore_id", None), - workspace_id=d.get("workspace_id", None), - ) + def from_dict(cls, 
d: Dict[str, Any]) -> TableDependency: + """Deserializes the TableDependency from a dictionary.""" + return cls(table_full_name=d.get("table_full_name", None)) @dataclass -class UpdateModelVersionRequest: - comment: Optional[str] = None - """The comment attached to the model version""" - - full_name: Optional[str] = None - """The three-level (fully qualified) name of the model version""" - - version: Optional[int] = None - """The integer version number of the model version""" +class TableExistsResponse: + table_exists: Optional[bool] = None + """Whether the table exists or not.""" def as_dict(self) -> dict: - """Serializes the UpdateModelVersionRequest into a dictionary suitable for use as a JSON request body.""" + """Serializes the TableExistsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.comment is not None: - body["comment"] = self.comment - if self.full_name is not None: - body["full_name"] = self.full_name - if self.version is not None: - body["version"] = self.version + if self.table_exists is not None: + body["table_exists"] = self.table_exists return body def as_shallow_dict(self) -> dict: - """Serializes the UpdateModelVersionRequest into a shallow dictionary of its immediate attributes.""" + """Serializes the TableExistsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.comment is not None: - body["comment"] = self.comment - if self.full_name is not None: - body["full_name"] = self.full_name - if self.version is not None: - body["version"] = self.version + if self.table_exists is not None: + body["table_exists"] = self.table_exists return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateModelVersionRequest: - """Deserializes the UpdateModelVersionRequest from a dictionary.""" - return cls(comment=d.get("comment", None), full_name=d.get("full_name", None), version=d.get("version", None)) + def from_dict(cls, d: Dict[str, Any]) -> TableExistsResponse: + 
"""Deserializes the TableExistsResponse from a dictionary.""" + return cls(table_exists=d.get("table_exists", None)) @dataclass -class UpdateMonitor: - output_schema_name: str - """Schema where output metric tables are created.""" +class TableInfo: + access_point: Optional[str] = None + """The AWS access point to use when accesing s3 for this external location.""" - baseline_table_name: Optional[str] = None - """Name of the baseline table from which drift metrics are computed from. Columns in the monitored - table should also be present in the baseline table.""" + browse_only: Optional[bool] = None + """Indicates whether the principal is limited to retrieving metadata for the associated object + through the BROWSE privilege when include_browse is enabled in the request.""" - custom_metrics: Optional[List[MonitorMetric]] = None - """Custom metrics to compute on the monitored table. These can be aggregate metrics, derived - metrics (from already computed aggregate metrics), or drift metrics (comparing metrics across - time windows).""" + catalog_name: Optional[str] = None + """Name of parent catalog.""" - dashboard_id: Optional[str] = None - """Id of dashboard that visualizes the computed metrics. 
This can be empty if the monitor is in - PENDING state.""" + columns: Optional[List[ColumnInfo]] = None + """The array of __ColumnInfo__ definitions of the table's columns.""" - data_classification_config: Optional[MonitorDataClassificationConfig] = None - """The data classification config for the monitor.""" + comment: Optional[str] = None + """User-provided free-form text description.""" - inference_log: Optional[MonitorInferenceLog] = None - """Configuration for monitoring inference logs.""" + created_at: Optional[int] = None + """Time at which this table was created, in epoch milliseconds.""" - notifications: Optional[MonitorNotifications] = None - """The notification settings for the monitor.""" + created_by: Optional[str] = None + """Username of table creator.""" - schedule: Optional[MonitorCronSchedule] = None - """The schedule for automatically updating and refreshing metric tables.""" + data_access_configuration_id: Optional[str] = None + """Unique ID of the Data Access Configuration to use with the table data.""" - slicing_exprs: Optional[List[str]] = None - """List of column expressions to slice data with for targeted analysis. The data is grouped by each - expression independently, resulting in a separate slice for each predicate and its complements. - For high-cardinality columns, only the top 100 unique values by frequency will generate slices.""" + data_source_format: Optional[DataSourceFormat] = None - snapshot: Optional[MonitorSnapshot] = None - """Configuration for monitoring snapshot tables.""" + deleted_at: Optional[int] = None + """Time at which this table was deleted, in epoch milliseconds. 
Field is omitted if table is not + deleted.""" - table_name: Optional[str] = None - """Full name of the table.""" + delta_runtime_properties_kvpairs: Optional[DeltaRuntimePropertiesKvPairs] = None + """Information pertaining to current state of the delta table.""" - time_series: Optional[MonitorTimeSeries] = None - """Configuration for monitoring time series tables.""" + effective_predictive_optimization_flag: Optional[EffectivePredictiveOptimizationFlag] = None - def as_dict(self) -> dict: - """Serializes the UpdateMonitor into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.baseline_table_name is not None: - body["baseline_table_name"] = self.baseline_table_name - if self.custom_metrics: - body["custom_metrics"] = [v.as_dict() for v in self.custom_metrics] - if self.dashboard_id is not None: - body["dashboard_id"] = self.dashboard_id - if self.data_classification_config: - body["data_classification_config"] = self.data_classification_config.as_dict() - if self.inference_log: - body["inference_log"] = self.inference_log.as_dict() - if self.notifications: - body["notifications"] = self.notifications.as_dict() - if self.output_schema_name is not None: - body["output_schema_name"] = self.output_schema_name - if self.schedule: - body["schedule"] = self.schedule.as_dict() - if self.slicing_exprs: - body["slicing_exprs"] = [v for v in self.slicing_exprs] - if self.snapshot: - body["snapshot"] = self.snapshot.as_dict() - if self.table_name is not None: - body["table_name"] = self.table_name - if self.time_series: - body["time_series"] = self.time_series.as_dict() - return body + enable_predictive_optimization: Optional[EnablePredictiveOptimization] = None - def as_shallow_dict(self) -> dict: - """Serializes the UpdateMonitor into a shallow dictionary of its immediate attributes.""" - body = {} - if self.baseline_table_name is not None: - body["baseline_table_name"] = self.baseline_table_name - if self.custom_metrics: - 
body["custom_metrics"] = self.custom_metrics - if self.dashboard_id is not None: - body["dashboard_id"] = self.dashboard_id - if self.data_classification_config: - body["data_classification_config"] = self.data_classification_config - if self.inference_log: - body["inference_log"] = self.inference_log - if self.notifications: - body["notifications"] = self.notifications - if self.output_schema_name is not None: - body["output_schema_name"] = self.output_schema_name - if self.schedule: - body["schedule"] = self.schedule - if self.slicing_exprs: - body["slicing_exprs"] = self.slicing_exprs - if self.snapshot: - body["snapshot"] = self.snapshot - if self.table_name is not None: - body["table_name"] = self.table_name - if self.time_series: - body["time_series"] = self.time_series - return body + encryption_details: Optional[EncryptionDetails] = None - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateMonitor: - """Deserializes the UpdateMonitor from a dictionary.""" - return cls( - baseline_table_name=d.get("baseline_table_name", None), - custom_metrics=_repeated_dict(d, "custom_metrics", MonitorMetric), - dashboard_id=d.get("dashboard_id", None), - data_classification_config=_from_dict(d, "data_classification_config", MonitorDataClassificationConfig), - inference_log=_from_dict(d, "inference_log", MonitorInferenceLog), - notifications=_from_dict(d, "notifications", MonitorNotifications), - output_schema_name=d.get("output_schema_name", None), - schedule=_from_dict(d, "schedule", MonitorCronSchedule), - slicing_exprs=d.get("slicing_exprs", None), - snapshot=_from_dict(d, "snapshot", MonitorSnapshot), - table_name=d.get("table_name", None), - time_series=_from_dict(d, "time_series", MonitorTimeSeries), - ) + full_name: Optional[str] = None + """Full name of table, in form of __catalog_name__.__schema_name__.__table_name__""" + metastore_id: Optional[str] = None + """Unique identifier of parent metastore.""" -@dataclass -class UpdatePermissions: - changes: 
Optional[List[PermissionsChange]] = None - """Array of permissions change objects.""" + name: Optional[str] = None + """Name of table, relative to parent schema.""" - full_name: Optional[str] = None - """Full name of securable.""" + owner: Optional[str] = None + """Username of current owner of table.""" - securable_type: Optional[str] = None - """Type of securable.""" + pipeline_id: Optional[str] = None + """The pipeline ID of the table. Applicable for tables created by pipelines (Materialized View, + Streaming Table, etc.).""" - def as_dict(self) -> dict: - """Serializes the UpdatePermissions into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.changes: - body["changes"] = [v.as_dict() for v in self.changes] - if self.full_name is not None: - body["full_name"] = self.full_name - if self.securable_type is not None: - body["securable_type"] = self.securable_type - return body + properties: Optional[Dict[str, str]] = None + """A map of key-value properties attached to the securable.""" - def as_shallow_dict(self) -> dict: - """Serializes the UpdatePermissions into a shallow dictionary of its immediate attributes.""" - body = {} - if self.changes: - body["changes"] = self.changes - if self.full_name is not None: - body["full_name"] = self.full_name - if self.securable_type is not None: - body["securable_type"] = self.securable_type - return body + row_filter: Optional[TableRowFilter] = None - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdatePermissions: - """Deserializes the UpdatePermissions from a dictionary.""" - return cls( - changes=_repeated_dict(d, "changes", PermissionsChange), - full_name=d.get("full_name", None), - securable_type=d.get("securable_type", None), - ) + schema_name: Optional[str] = None + """Name of parent schema relative to its parent catalog.""" + securable_kind_manifest: Optional[SecurableKindManifest] = None + """SecurableKindManifest of table, including capabilities the table has.""" -@dataclass 
-class UpdatePermissionsResponse: - privilege_assignments: Optional[List[PrivilegeAssignment]] = None - """The privileges assigned to each principal""" + sql_path: Optional[str] = None + """List of schemes whose objects can be referenced without qualification.""" - def as_dict(self) -> dict: - """Serializes the UpdatePermissionsResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.privilege_assignments: - body["privilege_assignments"] = [v.as_dict() for v in self.privilege_assignments] - return body + storage_credential_name: Optional[str] = None + """Name of the storage credential, when a storage credential is configured for use with this table.""" - def as_shallow_dict(self) -> dict: - """Serializes the UpdatePermissionsResponse into a shallow dictionary of its immediate attributes.""" - body = {} - if self.privilege_assignments: - body["privilege_assignments"] = self.privilege_assignments - return body + storage_location: Optional[str] = None + """Storage root URL for table (for **MANAGED**, **EXTERNAL** tables).""" - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdatePermissionsResponse: - """Deserializes the UpdatePermissionsResponse from a dictionary.""" - return cls(privilege_assignments=_repeated_dict(d, "privilege_assignments", PrivilegeAssignment)) + table_constraints: Optional[List[TableConstraint]] = None + """List of table constraints. 
Note: this field is not set in the output of the __listTables__ API.""" + + table_id: Optional[str] = None + """The unique identifier of the table.""" + table_type: Optional[TableType] = None -@dataclass -class UpdateRegisteredModelRequest: - comment: Optional[str] = None - """The comment attached to the registered model""" + updated_at: Optional[int] = None + """Time at which this table was last modified, in epoch milliseconds.""" - full_name: Optional[str] = None - """The three-level (fully qualified) name of the registered model""" + updated_by: Optional[str] = None + """Username of user who last modified the table.""" - new_name: Optional[str] = None - """New name for the registered model.""" + view_definition: Optional[str] = None + """View definition SQL (when __table_type__ is **VIEW**, **MATERIALIZED_VIEW**, or + **STREAMING_TABLE**)""" - owner: Optional[str] = None - """The identifier of the user who owns the registered model""" + view_dependencies: Optional[DependencyList] = None + """View dependencies (when table_type == **VIEW** or **MATERIALIZED_VIEW**, **STREAMING_TABLE**) - + when DependencyList is None, the dependency is not provided; - when DependencyList is an empty + list, the dependency is provided but is empty; - when DependencyList is not an empty list, + dependencies are provided and recorded.""" def as_dict(self) -> dict: - """Serializes the UpdateRegisteredModelRequest into a dictionary suitable for use as a JSON request body.""" + """Serializes the TableInfo into a dictionary suitable for use as a JSON request body.""" body = {} + if self.access_point is not None: + body["access_point"] = self.access_point + if self.browse_only is not None: + body["browse_only"] = self.browse_only + if self.catalog_name is not None: + body["catalog_name"] = self.catalog_name + if self.columns: + body["columns"] = [v.as_dict() for v in self.columns] if self.comment is not None: body["comment"] = self.comment + if self.created_at is not None: + 
body["created_at"] = self.created_at + if self.created_by is not None: + body["created_by"] = self.created_by + if self.data_access_configuration_id is not None: + body["data_access_configuration_id"] = self.data_access_configuration_id + if self.data_source_format is not None: + body["data_source_format"] = self.data_source_format.value + if self.deleted_at is not None: + body["deleted_at"] = self.deleted_at + if self.delta_runtime_properties_kvpairs: + body["delta_runtime_properties_kvpairs"] = self.delta_runtime_properties_kvpairs.as_dict() + if self.effective_predictive_optimization_flag: + body["effective_predictive_optimization_flag"] = self.effective_predictive_optimization_flag.as_dict() + if self.enable_predictive_optimization is not None: + body["enable_predictive_optimization"] = self.enable_predictive_optimization.value + if self.encryption_details: + body["encryption_details"] = self.encryption_details.as_dict() if self.full_name is not None: body["full_name"] = self.full_name - if self.new_name is not None: - body["new_name"] = self.new_name + if self.metastore_id is not None: + body["metastore_id"] = self.metastore_id + if self.name is not None: + body["name"] = self.name if self.owner is not None: body["owner"] = self.owner + if self.pipeline_id is not None: + body["pipeline_id"] = self.pipeline_id + if self.properties: + body["properties"] = self.properties + if self.row_filter: + body["row_filter"] = self.row_filter.as_dict() + if self.schema_name is not None: + body["schema_name"] = self.schema_name + if self.securable_kind_manifest: + body["securable_kind_manifest"] = self.securable_kind_manifest.as_dict() + if self.sql_path is not None: + body["sql_path"] = self.sql_path + if self.storage_credential_name is not None: + body["storage_credential_name"] = self.storage_credential_name + if self.storage_location is not None: + body["storage_location"] = self.storage_location + if self.table_constraints: + body["table_constraints"] = [v.as_dict() for 
v in self.table_constraints] + if self.table_id is not None: + body["table_id"] = self.table_id + if self.table_type is not None: + body["table_type"] = self.table_type.value + if self.updated_at is not None: + body["updated_at"] = self.updated_at + if self.updated_by is not None: + body["updated_by"] = self.updated_by + if self.view_definition is not None: + body["view_definition"] = self.view_definition + if self.view_dependencies: + body["view_dependencies"] = self.view_dependencies.as_dict() return body def as_shallow_dict(self) -> dict: - """Serializes the UpdateRegisteredModelRequest into a shallow dictionary of its immediate attributes.""" + """Serializes the TableInfo into a shallow dictionary of its immediate attributes.""" body = {} + if self.access_point is not None: + body["access_point"] = self.access_point + if self.browse_only is not None: + body["browse_only"] = self.browse_only + if self.catalog_name is not None: + body["catalog_name"] = self.catalog_name + if self.columns: + body["columns"] = self.columns if self.comment is not None: body["comment"] = self.comment + if self.created_at is not None: + body["created_at"] = self.created_at + if self.created_by is not None: + body["created_by"] = self.created_by + if self.data_access_configuration_id is not None: + body["data_access_configuration_id"] = self.data_access_configuration_id + if self.data_source_format is not None: + body["data_source_format"] = self.data_source_format + if self.deleted_at is not None: + body["deleted_at"] = self.deleted_at + if self.delta_runtime_properties_kvpairs: + body["delta_runtime_properties_kvpairs"] = self.delta_runtime_properties_kvpairs + if self.effective_predictive_optimization_flag: + body["effective_predictive_optimization_flag"] = self.effective_predictive_optimization_flag + if self.enable_predictive_optimization is not None: + body["enable_predictive_optimization"] = self.enable_predictive_optimization + if self.encryption_details: + 
body["encryption_details"] = self.encryption_details if self.full_name is not None: body["full_name"] = self.full_name - if self.new_name is not None: - body["new_name"] = self.new_name + if self.metastore_id is not None: + body["metastore_id"] = self.metastore_id + if self.name is not None: + body["name"] = self.name if self.owner is not None: body["owner"] = self.owner + if self.pipeline_id is not None: + body["pipeline_id"] = self.pipeline_id + if self.properties: + body["properties"] = self.properties + if self.row_filter: + body["row_filter"] = self.row_filter + if self.schema_name is not None: + body["schema_name"] = self.schema_name + if self.securable_kind_manifest: + body["securable_kind_manifest"] = self.securable_kind_manifest + if self.sql_path is not None: + body["sql_path"] = self.sql_path + if self.storage_credential_name is not None: + body["storage_credential_name"] = self.storage_credential_name + if self.storage_location is not None: + body["storage_location"] = self.storage_location + if self.table_constraints: + body["table_constraints"] = self.table_constraints + if self.table_id is not None: + body["table_id"] = self.table_id + if self.table_type is not None: + body["table_type"] = self.table_type + if self.updated_at is not None: + body["updated_at"] = self.updated_at + if self.updated_by is not None: + body["updated_by"] = self.updated_by + if self.view_definition is not None: + body["view_definition"] = self.view_definition + if self.view_dependencies: + body["view_dependencies"] = self.view_dependencies return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateRegisteredModelRequest: - """Deserializes the UpdateRegisteredModelRequest from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> TableInfo: + """Deserializes the TableInfo from a dictionary.""" return cls( + access_point=d.get("access_point", None), + browse_only=d.get("browse_only", None), + catalog_name=d.get("catalog_name", None), + 
columns=_repeated_dict(d, "columns", ColumnInfo), comment=d.get("comment", None), + created_at=d.get("created_at", None), + created_by=d.get("created_by", None), + data_access_configuration_id=d.get("data_access_configuration_id", None), + data_source_format=_enum(d, "data_source_format", DataSourceFormat), + deleted_at=d.get("deleted_at", None), + delta_runtime_properties_kvpairs=_from_dict( + d, "delta_runtime_properties_kvpairs", DeltaRuntimePropertiesKvPairs + ), + effective_predictive_optimization_flag=_from_dict( + d, "effective_predictive_optimization_flag", EffectivePredictiveOptimizationFlag + ), + enable_predictive_optimization=_enum(d, "enable_predictive_optimization", EnablePredictiveOptimization), + encryption_details=_from_dict(d, "encryption_details", EncryptionDetails), full_name=d.get("full_name", None), - new_name=d.get("new_name", None), + metastore_id=d.get("metastore_id", None), + name=d.get("name", None), owner=d.get("owner", None), + pipeline_id=d.get("pipeline_id", None), + properties=d.get("properties", None), + row_filter=_from_dict(d, "row_filter", TableRowFilter), + schema_name=d.get("schema_name", None), + securable_kind_manifest=_from_dict(d, "securable_kind_manifest", SecurableKindManifest), + sql_path=d.get("sql_path", None), + storage_credential_name=d.get("storage_credential_name", None), + storage_location=d.get("storage_location", None), + table_constraints=_repeated_dict(d, "table_constraints", TableConstraint), + table_id=d.get("table_id", None), + table_type=_enum(d, "table_type", TableType), + updated_at=d.get("updated_at", None), + updated_by=d.get("updated_by", None), + view_definition=d.get("view_definition", None), + view_dependencies=_from_dict(d, "view_dependencies", DependencyList), ) +class TableOperation(Enum): + + READ = "READ" + READ_WRITE = "READ_WRITE" + + @dataclass -class UpdateRequestExternalLineage: - source: ExternalLineageObject - """Source object of the external lineage relationship.""" +class 
TableRowFilter: + function_name: str + """The full name of the row filter SQL UDF.""" - target: ExternalLineageObject - """Target object of the external lineage relationship.""" + input_column_names: List[str] + """The list of table columns to be passed as input to the row filter function. The column types + should match the types of the filter function arguments.""" - columns: Optional[List[ColumnRelationship]] = None - """List of column relationships between source and target objects.""" + def as_dict(self) -> dict: + """Serializes the TableRowFilter into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.function_name is not None: + body["function_name"] = self.function_name + if self.input_column_names: + body["input_column_names"] = [v for v in self.input_column_names] + return body - id: Optional[str] = None - """Unique identifier of the external lineage relationship.""" + def as_shallow_dict(self) -> dict: + """Serializes the TableRowFilter into a shallow dictionary of its immediate attributes.""" + body = {} + if self.function_name is not None: + body["function_name"] = self.function_name + if self.input_column_names: + body["input_column_names"] = self.input_column_names + return body - properties: Optional[Dict[str, str]] = None - """Key-value properties associated with the external lineage relationship.""" + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> TableRowFilter: + """Deserializes the TableRowFilter from a dictionary.""" + return cls(function_name=d.get("function_name", None), input_column_names=d.get("input_column_names", None)) + + +@dataclass +class TableSummary: + full_name: Optional[str] = None + """The full name of the table.""" + + securable_kind_manifest: Optional[SecurableKindManifest] = None + """SecurableKindManifest of table, including capabilities the table has.""" + + table_type: Optional[TableType] = None def as_dict(self) -> dict: - """Serializes the UpdateRequestExternalLineage into a 
dictionary suitable for use as a JSON request body.""" + """Serializes the TableSummary into a dictionary suitable for use as a JSON request body.""" body = {} - if self.columns: - body["columns"] = [v.as_dict() for v in self.columns] - if self.id is not None: - body["id"] = self.id - if self.properties: - body["properties"] = self.properties - if self.source: - body["source"] = self.source.as_dict() - if self.target: - body["target"] = self.target.as_dict() + if self.full_name is not None: + body["full_name"] = self.full_name + if self.securable_kind_manifest: + body["securable_kind_manifest"] = self.securable_kind_manifest.as_dict() + if self.table_type is not None: + body["table_type"] = self.table_type.value return body def as_shallow_dict(self) -> dict: - """Serializes the UpdateRequestExternalLineage into a shallow dictionary of its immediate attributes.""" + """Serializes the TableSummary into a shallow dictionary of its immediate attributes.""" body = {} - if self.columns: - body["columns"] = self.columns - if self.id is not None: - body["id"] = self.id - if self.properties: - body["properties"] = self.properties - if self.source: - body["source"] = self.source - if self.target: - body["target"] = self.target + if self.full_name is not None: + body["full_name"] = self.full_name + if self.securable_kind_manifest: + body["securable_kind_manifest"] = self.securable_kind_manifest + if self.table_type is not None: + body["table_type"] = self.table_type return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateRequestExternalLineage: - """Deserializes the UpdateRequestExternalLineage from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> TableSummary: + """Deserializes the TableSummary from a dictionary.""" return cls( - columns=_repeated_dict(d, "columns", ColumnRelationship), - id=d.get("id", None), - properties=d.get("properties", None), - source=_from_dict(d, "source", ExternalLineageObject), - target=_from_dict(d, "target", 
ExternalLineageObject), + full_name=d.get("full_name", None), + securable_kind_manifest=_from_dict(d, "securable_kind_manifest", SecurableKindManifest), + table_type=_enum(d, "table_type", TableType), ) +class TableType(Enum): + + EXTERNAL = "EXTERNAL" + EXTERNAL_SHALLOW_CLONE = "EXTERNAL_SHALLOW_CLONE" + FOREIGN = "FOREIGN" + MANAGED = "MANAGED" + MANAGED_SHALLOW_CLONE = "MANAGED_SHALLOW_CLONE" + MATERIALIZED_VIEW = "MATERIALIZED_VIEW" + METRIC_VIEW = "METRIC_VIEW" + STREAMING_TABLE = "STREAMING_TABLE" + VIEW = "VIEW" + + @dataclass -class UpdateResponse: +class TagKeyValue: + key: Optional[str] = None + """name of the tag""" + + value: Optional[str] = None + """value of the tag associated with the key, could be optional""" + def as_dict(self) -> dict: - """Serializes the UpdateResponse into a dictionary suitable for use as a JSON request body.""" + """Serializes the TagKeyValue into a dictionary suitable for use as a JSON request body.""" body = {} + if self.key is not None: + body["key"] = self.key + if self.value is not None: + body["value"] = self.value return body def as_shallow_dict(self) -> dict: - """Serializes the UpdateResponse into a shallow dictionary of its immediate attributes.""" + """Serializes the TagKeyValue into a shallow dictionary of its immediate attributes.""" body = {} + if self.key is not None: + body["key"] = self.key + if self.value is not None: + body["value"] = self.value return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateResponse: - """Deserializes the UpdateResponse from a dictionary.""" - return cls() + def from_dict(cls, d: Dict[str, Any]) -> TagKeyValue: + """Deserializes the TagKeyValue from a dictionary.""" + return cls(key=d.get("key", None), value=d.get("value", None)) @dataclass -class UpdateSchema: - comment: Optional[str] = None - """User-provided free-form text description.""" - - enable_predictive_optimization: Optional[EnablePredictiveOptimization] = None - """Whether predictive optimization should 
be enabled for this object and objects under it.""" - - full_name: Optional[str] = None - """Full name of the schema.""" +class TemporaryCredentials: + aws_temp_credentials: Optional[AwsCredentials] = None - new_name: Optional[str] = None - """New name for the schema.""" + azure_aad: Optional[AzureActiveDirectoryToken] = None - owner: Optional[str] = None - """Username of current owner of schema.""" + expiration_time: Optional[int] = None + """Server time when the credential will expire, in epoch milliseconds. The API client is advised to + cache the credential given this expiration time.""" - properties: Optional[Dict[str, str]] = None - """A map of key-value properties attached to the securable.""" + gcp_oauth_token: Optional[GcpOauthToken] = None def as_dict(self) -> dict: - """Serializes the UpdateSchema into a dictionary suitable for use as a JSON request body.""" + """Serializes the TemporaryCredentials into a dictionary suitable for use as a JSON request body.""" body = {} - if self.comment is not None: - body["comment"] = self.comment - if self.enable_predictive_optimization is not None: - body["enable_predictive_optimization"] = self.enable_predictive_optimization.value - if self.full_name is not None: - body["full_name"] = self.full_name - if self.new_name is not None: - body["new_name"] = self.new_name - if self.owner is not None: - body["owner"] = self.owner - if self.properties: - body["properties"] = self.properties + if self.aws_temp_credentials: + body["aws_temp_credentials"] = self.aws_temp_credentials.as_dict() + if self.azure_aad: + body["azure_aad"] = self.azure_aad.as_dict() + if self.expiration_time is not None: + body["expiration_time"] = self.expiration_time + if self.gcp_oauth_token: + body["gcp_oauth_token"] = self.gcp_oauth_token.as_dict() return body def as_shallow_dict(self) -> dict: - """Serializes the UpdateSchema into a shallow dictionary of its immediate attributes.""" + """Serializes the TemporaryCredentials into a shallow 
dictionary of its immediate attributes.""" body = {} - if self.comment is not None: - body["comment"] = self.comment - if self.enable_predictive_optimization is not None: - body["enable_predictive_optimization"] = self.enable_predictive_optimization - if self.full_name is not None: - body["full_name"] = self.full_name - if self.new_name is not None: - body["new_name"] = self.new_name - if self.owner is not None: - body["owner"] = self.owner - if self.properties: - body["properties"] = self.properties + if self.aws_temp_credentials: + body["aws_temp_credentials"] = self.aws_temp_credentials + if self.azure_aad: + body["azure_aad"] = self.azure_aad + if self.expiration_time is not None: + body["expiration_time"] = self.expiration_time + if self.gcp_oauth_token: + body["gcp_oauth_token"] = self.gcp_oauth_token return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateSchema: - """Deserializes the UpdateSchema from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> TemporaryCredentials: + """Deserializes the TemporaryCredentials from a dictionary.""" return cls( - comment=d.get("comment", None), - enable_predictive_optimization=_enum(d, "enable_predictive_optimization", EnablePredictiveOptimization), - full_name=d.get("full_name", None), - new_name=d.get("new_name", None), - owner=d.get("owner", None), - properties=d.get("properties", None), + aws_temp_credentials=_from_dict(d, "aws_temp_credentials", AwsCredentials), + azure_aad=_from_dict(d, "azure_aad", AzureActiveDirectoryToken), + expiration_time=d.get("expiration_time", None), + gcp_oauth_token=_from_dict(d, "gcp_oauth_token", GcpOauthToken), ) @dataclass -class UpdateStorageCredential: - aws_iam_role: Optional[AwsIamRoleRequest] = None - """The AWS IAM role configuration.""" - - azure_managed_identity: Optional[AzureManagedIdentityResponse] = None - """The Azure managed identity configuration.""" +class TriggeredUpdateStatus: + """Detailed status of an online table. 
Shown if the online table is in the ONLINE_TRIGGERED_UPDATE + or the ONLINE_NO_PENDING_UPDATE state.""" - azure_service_principal: Optional[AzureServicePrincipal] = None - """The Azure service principal configuration.""" + last_processed_commit_version: Optional[int] = None + """The last source table Delta version that was synced to the online table. Note that this Delta + version may not be completely synced to the online table yet.""" - cloudflare_api_token: Optional[CloudflareApiToken] = None - """The Cloudflare API token configuration.""" + timestamp: Optional[str] = None + """The timestamp of the last time any data was synchronized from the source table to the online + table.""" - comment: Optional[str] = None - """Comment associated with the credential.""" + triggered_update_progress: Optional[PipelineProgress] = None + """Progress of the active data synchronization pipeline.""" - databricks_gcp_service_account: Optional[DatabricksGcpServiceAccountRequest] = None - """The Databricks managed GCP service account configuration.""" + def as_dict(self) -> dict: + """Serializes the TriggeredUpdateStatus into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.last_processed_commit_version is not None: + body["last_processed_commit_version"] = self.last_processed_commit_version + if self.timestamp is not None: + body["timestamp"] = self.timestamp + if self.triggered_update_progress: + body["triggered_update_progress"] = self.triggered_update_progress.as_dict() + return body - force: Optional[bool] = None - """Force update even if there are dependent external locations or external tables.""" + def as_shallow_dict(self) -> dict: + """Serializes the TriggeredUpdateStatus into a shallow dictionary of its immediate attributes.""" + body = {} + if self.last_processed_commit_version is not None: + body["last_processed_commit_version"] = self.last_processed_commit_version + if self.timestamp is not None: + body["timestamp"] = self.timestamp + if 
self.triggered_update_progress: + body["triggered_update_progress"] = self.triggered_update_progress + return body - isolation_mode: Optional[IsolationMode] = None - """Whether the current securable is accessible from all workspaces or a specific set of workspaces.""" + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> TriggeredUpdateStatus: + """Deserializes the TriggeredUpdateStatus from a dictionary.""" + return cls( + last_processed_commit_version=d.get("last_processed_commit_version", None), + timestamp=d.get("timestamp", None), + triggered_update_progress=_from_dict(d, "triggered_update_progress", PipelineProgress), + ) - name: Optional[str] = None - """Name of the storage credential.""" - new_name: Optional[str] = None - """New name for the storage credential.""" +@dataclass +class UnassignResponse: + def as_dict(self) -> dict: + """Serializes the UnassignResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + return body - owner: Optional[str] = None - """Username of current owner of credential.""" + def as_shallow_dict(self) -> dict: + """Serializes the UnassignResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body - read_only: Optional[bool] = None - """Whether the credential is usable only for read operations. 
Only applicable when purpose is - **STORAGE**.""" + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> UnassignResponse: + """Deserializes the UnassignResponse from a dictionary.""" + return cls() - skip_validation: Optional[bool] = None - """Supplying true to this argument skips validation of the updated credential.""" +@dataclass +class UpdateAssignmentResponse: def as_dict(self) -> dict: - """Serializes the UpdateStorageCredential into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.aws_iam_role: - body["aws_iam_role"] = self.aws_iam_role.as_dict() - if self.azure_managed_identity: - body["azure_managed_identity"] = self.azure_managed_identity.as_dict() - if self.azure_service_principal: - body["azure_service_principal"] = self.azure_service_principal.as_dict() - if self.cloudflare_api_token: - body["cloudflare_api_token"] = self.cloudflare_api_token.as_dict() - if self.comment is not None: - body["comment"] = self.comment - if self.databricks_gcp_service_account: - body["databricks_gcp_service_account"] = self.databricks_gcp_service_account.as_dict() - if self.force is not None: - body["force"] = self.force - if self.isolation_mode is not None: - body["isolation_mode"] = self.isolation_mode.value - if self.name is not None: - body["name"] = self.name - if self.new_name is not None: - body["new_name"] = self.new_name - if self.owner is not None: - body["owner"] = self.owner - if self.read_only is not None: - body["read_only"] = self.read_only - if self.skip_validation is not None: - body["skip_validation"] = self.skip_validation + """Serializes the UpdateAssignmentResponse into a dictionary suitable for use as a JSON request body.""" + body = {} return body def as_shallow_dict(self) -> dict: - """Serializes the UpdateStorageCredential into a shallow dictionary of its immediate attributes.""" + """Serializes the UpdateAssignmentResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.aws_iam_role: 
- body["aws_iam_role"] = self.aws_iam_role - if self.azure_managed_identity: - body["azure_managed_identity"] = self.azure_managed_identity - if self.azure_service_principal: - body["azure_service_principal"] = self.azure_service_principal - if self.cloudflare_api_token: - body["cloudflare_api_token"] = self.cloudflare_api_token - if self.comment is not None: - body["comment"] = self.comment - if self.databricks_gcp_service_account: - body["databricks_gcp_service_account"] = self.databricks_gcp_service_account - if self.force is not None: - body["force"] = self.force - if self.isolation_mode is not None: - body["isolation_mode"] = self.isolation_mode - if self.name is not None: - body["name"] = self.name - if self.new_name is not None: - body["new_name"] = self.new_name - if self.owner is not None: - body["owner"] = self.owner - if self.read_only is not None: - body["read_only"] = self.read_only - if self.skip_validation is not None: - body["skip_validation"] = self.skip_validation return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateStorageCredential: - """Deserializes the UpdateStorageCredential from a dictionary.""" - return cls( - aws_iam_role=_from_dict(d, "aws_iam_role", AwsIamRoleRequest), - azure_managed_identity=_from_dict(d, "azure_managed_identity", AzureManagedIdentityResponse), - azure_service_principal=_from_dict(d, "azure_service_principal", AzureServicePrincipal), - cloudflare_api_token=_from_dict(d, "cloudflare_api_token", CloudflareApiToken), - comment=d.get("comment", None), - databricks_gcp_service_account=_from_dict( - d, "databricks_gcp_service_account", DatabricksGcpServiceAccountRequest - ), - force=d.get("force", None), - isolation_mode=_enum(d, "isolation_mode", IsolationMode), - name=d.get("name", None), - new_name=d.get("new_name", None), - owner=d.get("owner", None), - read_only=d.get("read_only", None), - skip_validation=d.get("skip_validation", None), - ) + def from_dict(cls, d: Dict[str, Any]) -> 
UpdateAssignmentResponse: + """Deserializes the UpdateAssignmentResponse from a dictionary.""" + return cls() @dataclass -class UpdateTableRequest: - full_name: Optional[str] = None - """Full name of the table.""" - - owner: Optional[str] = None - """Username of current owner of table.""" +class UpdateCatalogWorkspaceBindingsResponse: + workspaces: Optional[List[int]] = None + """A list of workspace IDs""" def as_dict(self) -> dict: - """Serializes the UpdateTableRequest into a dictionary suitable for use as a JSON request body.""" + """Serializes the UpdateCatalogWorkspaceBindingsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.full_name is not None: - body["full_name"] = self.full_name - if self.owner is not None: - body["owner"] = self.owner + if self.workspaces: + body["workspaces"] = [v for v in self.workspaces] return body def as_shallow_dict(self) -> dict: - """Serializes the UpdateTableRequest into a shallow dictionary of its immediate attributes.""" + """Serializes the UpdateCatalogWorkspaceBindingsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.full_name is not None: - body["full_name"] = self.full_name - if self.owner is not None: - body["owner"] = self.owner + if self.workspaces: + body["workspaces"] = self.workspaces return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateTableRequest: - """Deserializes the UpdateTableRequest from a dictionary.""" - return cls(full_name=d.get("full_name", None), owner=d.get("owner", None)) + def from_dict(cls, d: Dict[str, Any]) -> UpdateCatalogWorkspaceBindingsResponse: + """Deserializes the UpdateCatalogWorkspaceBindingsResponse from a dictionary.""" + return cls(workspaces=d.get("workspaces", None)) @dataclass -class UpdateVolumeRequestContent: - comment: Optional[str] = None - """The comment attached to the volume""" +class UpdateMetastore: + delta_sharing_organization_name: Optional[str] = None + """The organization 
name of a Delta Sharing entity, to be used in Databricks-to-Databricks Delta + Sharing as the official name.""" - name: Optional[str] = None - """The three-level (fully qualified) name of the volume""" + delta_sharing_recipient_token_lifetime_in_seconds: Optional[int] = None + """The lifetime of delta sharing recipient token in seconds.""" + + delta_sharing_scope: Optional[DeltaSharingScopeEnum] = None + """The scope of Delta Sharing enabled for the metastore.""" + + id: Optional[str] = None + """Unique ID of the metastore.""" new_name: Optional[str] = None - """New name for the volume.""" + """New name for the metastore.""" owner: Optional[str] = None - """The identifier of the user who owns the volume""" + """The owner of the metastore.""" + + privilege_model_version: Optional[str] = None + """Privilege model version of the metastore, of the form `major.minor` (e.g., `1.0`).""" + + storage_root_credential_id: Optional[str] = None + """UUID of storage credential to access the metastore storage_root.""" def as_dict(self) -> dict: - """Serializes the UpdateVolumeRequestContent into a dictionary suitable for use as a JSON request body.""" + """Serializes the UpdateMetastore into a dictionary suitable for use as a JSON request body.""" body = {} - if self.comment is not None: - body["comment"] = self.comment - if self.name is not None: - body["name"] = self.name + if self.delta_sharing_organization_name is not None: + body["delta_sharing_organization_name"] = self.delta_sharing_organization_name + if self.delta_sharing_recipient_token_lifetime_in_seconds is not None: + body["delta_sharing_recipient_token_lifetime_in_seconds"] = ( + self.delta_sharing_recipient_token_lifetime_in_seconds + ) + if self.delta_sharing_scope is not None: + body["delta_sharing_scope"] = self.delta_sharing_scope.value + if self.id is not None: + body["id"] = self.id if self.new_name is not None: body["new_name"] = self.new_name if self.owner is not None: body["owner"] = self.owner + if 
self.privilege_model_version is not None: + body["privilege_model_version"] = self.privilege_model_version + if self.storage_root_credential_id is not None: + body["storage_root_credential_id"] = self.storage_root_credential_id return body def as_shallow_dict(self) -> dict: - """Serializes the UpdateVolumeRequestContent into a shallow dictionary of its immediate attributes.""" + """Serializes the UpdateMetastore into a shallow dictionary of its immediate attributes.""" body = {} - if self.comment is not None: - body["comment"] = self.comment - if self.name is not None: - body["name"] = self.name + if self.delta_sharing_organization_name is not None: + body["delta_sharing_organization_name"] = self.delta_sharing_organization_name + if self.delta_sharing_recipient_token_lifetime_in_seconds is not None: + body["delta_sharing_recipient_token_lifetime_in_seconds"] = ( + self.delta_sharing_recipient_token_lifetime_in_seconds + ) + if self.delta_sharing_scope is not None: + body["delta_sharing_scope"] = self.delta_sharing_scope + if self.id is not None: + body["id"] = self.id if self.new_name is not None: body["new_name"] = self.new_name if self.owner is not None: body["owner"] = self.owner + if self.privilege_model_version is not None: + body["privilege_model_version"] = self.privilege_model_version + if self.storage_root_credential_id is not None: + body["storage_root_credential_id"] = self.storage_root_credential_id return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateVolumeRequestContent: - """Deserializes the UpdateVolumeRequestContent from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> UpdateMetastore: + """Deserializes the UpdateMetastore from a dictionary.""" return cls( - comment=d.get("comment", None), - name=d.get("name", None), + delta_sharing_organization_name=d.get("delta_sharing_organization_name", None), + delta_sharing_recipient_token_lifetime_in_seconds=d.get( + "delta_sharing_recipient_token_lifetime_in_seconds", None + 
), + delta_sharing_scope=_enum(d, "delta_sharing_scope", DeltaSharingScopeEnum), + id=d.get("id", None), new_name=d.get("new_name", None), owner=d.get("owner", None), + privilege_model_version=d.get("privilege_model_version", None), + storage_root_credential_id=d.get("storage_root_credential_id", None), ) @dataclass -class UpdateWorkspaceBindings: - assign_workspaces: Optional[List[int]] = None - """A list of workspace IDs.""" +class UpdateMetastoreAssignment: + default_catalog_name: Optional[str] = None + """The name of the default catalog in the metastore. This field is deprecated. Please use "Default + Namespace API" to configure the default catalog for a Databricks workspace.""" - name: Optional[str] = None - """The name of the catalog.""" + metastore_id: Optional[str] = None + """The unique ID of the metastore.""" - unassign_workspaces: Optional[List[int]] = None - """A list of workspace IDs.""" + workspace_id: Optional[int] = None + """A workspace ID.""" def as_dict(self) -> dict: - """Serializes the UpdateWorkspaceBindings into a dictionary suitable for use as a JSON request body.""" + """Serializes the UpdateMetastoreAssignment into a dictionary suitable for use as a JSON request body.""" body = {} - if self.assign_workspaces: - body["assign_workspaces"] = [v for v in self.assign_workspaces] - if self.name is not None: - body["name"] = self.name - if self.unassign_workspaces: - body["unassign_workspaces"] = [v for v in self.unassign_workspaces] + if self.default_catalog_name is not None: + body["default_catalog_name"] = self.default_catalog_name + if self.metastore_id is not None: + body["metastore_id"] = self.metastore_id + if self.workspace_id is not None: + body["workspace_id"] = self.workspace_id return body def as_shallow_dict(self) -> dict: - """Serializes the UpdateWorkspaceBindings into a shallow dictionary of its immediate attributes.""" + """Serializes the UpdateMetastoreAssignment into a shallow dictionary of its immediate attributes.""" body = 
{} - if self.assign_workspaces: - body["assign_workspaces"] = self.assign_workspaces - if self.name is not None: - body["name"] = self.name - if self.unassign_workspaces: - body["unassign_workspaces"] = self.unassign_workspaces + if self.default_catalog_name is not None: + body["default_catalog_name"] = self.default_catalog_name + if self.metastore_id is not None: + body["metastore_id"] = self.metastore_id + if self.workspace_id is not None: + body["workspace_id"] = self.workspace_id return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateWorkspaceBindings: - """Deserializes the UpdateWorkspaceBindings from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> UpdateMetastoreAssignment: + """Deserializes the UpdateMetastoreAssignment from a dictionary.""" return cls( - assign_workspaces=d.get("assign_workspaces", None), - name=d.get("name", None), - unassign_workspaces=d.get("unassign_workspaces", None), + default_catalog_name=d.get("default_catalog_name", None), + metastore_id=d.get("metastore_id", None), + workspace_id=d.get("workspace_id", None), ) @dataclass -class UpdateWorkspaceBindingsParameters: - add: Optional[List[WorkspaceBinding]] = None - """List of workspace bindings.""" +class UpdatePermissionsResponse: + privilege_assignments: Optional[List[PrivilegeAssignment]] = None + """The privileges assigned to each principal""" - remove: Optional[List[WorkspaceBinding]] = None - """List of workspace bindings.""" + def as_dict(self) -> dict: + """Serializes the UpdatePermissionsResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.privilege_assignments: + body["privilege_assignments"] = [v.as_dict() for v in self.privilege_assignments] + return body - securable_name: Optional[str] = None - """The name of the securable.""" + def as_shallow_dict(self) -> dict: + """Serializes the UpdatePermissionsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if 
self.privilege_assignments: + body["privilege_assignments"] = self.privilege_assignments + return body - securable_type: Optional[str] = None - """The type of the securable to bind to a workspace (catalog, storage_credential, credential, or - external_location).""" + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> UpdatePermissionsResponse: + """Deserializes the UpdatePermissionsResponse from a dictionary.""" + return cls(privilege_assignments=_repeated_dict(d, "privilege_assignments", PrivilegeAssignment)) + + +@dataclass +class UpdateRequestExternalLineage: + source: ExternalLineageObject + """Source object of the external lineage relationship.""" + + target: ExternalLineageObject + """Target object of the external lineage relationship.""" + + columns: Optional[List[ColumnRelationship]] = None + """List of column relationships between source and target objects.""" + + id: Optional[str] = None + """Unique identifier of the external lineage relationship.""" + + properties: Optional[Dict[str, str]] = None + """Key-value properties associated with the external lineage relationship.""" def as_dict(self) -> dict: - """Serializes the UpdateWorkspaceBindingsParameters into a dictionary suitable for use as a JSON request body.""" + """Serializes the UpdateRequestExternalLineage into a dictionary suitable for use as a JSON request body.""" body = {} - if self.add: - body["add"] = [v.as_dict() for v in self.add] - if self.remove: - body["remove"] = [v.as_dict() for v in self.remove] - if self.securable_name is not None: - body["securable_name"] = self.securable_name - if self.securable_type is not None: - body["securable_type"] = self.securable_type + if self.columns: + body["columns"] = [v.as_dict() for v in self.columns] + if self.id is not None: + body["id"] = self.id + if self.properties: + body["properties"] = self.properties + if self.source: + body["source"] = self.source.as_dict() + if self.target: + body["target"] = self.target.as_dict() return body def 
as_shallow_dict(self) -> dict: - """Serializes the UpdateWorkspaceBindingsParameters into a shallow dictionary of its immediate attributes.""" + """Serializes the UpdateRequestExternalLineage into a shallow dictionary of its immediate attributes.""" body = {} - if self.add: - body["add"] = self.add - if self.remove: - body["remove"] = self.remove - if self.securable_name is not None: - body["securable_name"] = self.securable_name - if self.securable_type is not None: - body["securable_type"] = self.securable_type + if self.columns: + body["columns"] = self.columns + if self.id is not None: + body["id"] = self.id + if self.properties: + body["properties"] = self.properties + if self.source: + body["source"] = self.source + if self.target: + body["target"] = self.target return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateWorkspaceBindingsParameters: - """Deserializes the UpdateWorkspaceBindingsParameters from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> UpdateRequestExternalLineage: + """Deserializes the UpdateRequestExternalLineage from a dictionary.""" return cls( - add=_repeated_dict(d, "add", WorkspaceBinding), - remove=_repeated_dict(d, "remove", WorkspaceBinding), - securable_name=d.get("securable_name", None), - securable_type=d.get("securable_type", None), + columns=_repeated_dict(d, "columns", ColumnRelationship), + id=d.get("id", None), + properties=d.get("properties", None), + source=_from_dict(d, "source", ExternalLineageObject), + target=_from_dict(d, "target", ExternalLineageObject), ) @dataclass -class UpdateWorkspaceBindingsResponse: - """A list of workspace IDs that are bound to the securable""" - - bindings: Optional[List[WorkspaceBinding]] = None - """List of workspace bindings.""" - +class UpdateResponse: def as_dict(self) -> dict: - """Serializes the UpdateWorkspaceBindingsResponse into a dictionary suitable for use as a JSON request body.""" + """Serializes the UpdateResponse into a dictionary suitable for 
use as a JSON request body.""" body = {} - if self.bindings: - body["bindings"] = [v.as_dict() for v in self.bindings] return body def as_shallow_dict(self) -> dict: - """Serializes the UpdateWorkspaceBindingsResponse into a shallow dictionary of its immediate attributes.""" + """Serializes the UpdateResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.bindings: - body["bindings"] = self.bindings return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateWorkspaceBindingsResponse: - """Deserializes the UpdateWorkspaceBindingsResponse from a dictionary.""" - return cls(bindings=_repeated_dict(d, "bindings", WorkspaceBinding)) + def from_dict(cls, d: Dict[str, Any]) -> UpdateResponse: + """Deserializes the UpdateResponse from a dictionary.""" + return cls() @dataclass -class ValidateCredentialRequest: - """Next ID: 17""" +class UpdateStorageCredential: + aws_iam_role: Optional[AwsIamRoleRequest] = None + """The AWS IAM role configuration.""" - aws_iam_role: Optional[AwsIamRole] = None + azure_managed_identity: Optional[AzureManagedIdentityResponse] = None + """The Azure managed identity configuration.""" - azure_managed_identity: Optional[AzureManagedIdentity] = None + azure_service_principal: Optional[AzureServicePrincipal] = None + """The Azure service principal configuration.""" - credential_name: Optional[str] = None - """Required. 
The name of an existing credential or long-lived cloud credential to validate.""" + cloudflare_api_token: Optional[CloudflareApiToken] = None + """The Cloudflare API token configuration.""" - databricks_gcp_service_account: Optional[DatabricksGcpServiceAccount] = None + comment: Optional[str] = None + """Comment associated with the credential.""" + + databricks_gcp_service_account: Optional[DatabricksGcpServiceAccountRequest] = None + """The Databricks managed GCP service account configuration.""" - external_location_name: Optional[str] = None - """The name of an existing external location to validate. Only applicable for storage credentials - (purpose is **STORAGE**.)""" + force: Optional[bool] = None + """Force update even if there are dependent external locations or external tables.""" - purpose: Optional[CredentialPurpose] = None - """The purpose of the credential. This should only be used when the credential is specified.""" + isolation_mode: Optional[IsolationMode] = None + """Whether the current securable is accessible from all workspaces or a specific set of workspaces.""" + + name: Optional[str] = None + """Name of the storage credential.""" + + new_name: Optional[str] = None + """New name for the storage credential.""" + + owner: Optional[str] = None + """Username of current owner of credential.""" read_only: Optional[bool] = None - """Whether the credential is only usable for read operations. Only applicable for storage - credentials (purpose is **STORAGE**.)""" + """Whether the credential is usable only for read operations. Only applicable when purpose is + **STORAGE**.""" - url: Optional[str] = None - """The external location url to validate. 
Only applicable when purpose is **STORAGE**.""" + skip_validation: Optional[bool] = None + """Supplying true to this argument skips validation of the updated credential.""" def as_dict(self) -> dict: - """Serializes the ValidateCredentialRequest into a dictionary suitable for use as a JSON request body.""" + """Serializes the UpdateStorageCredential into a dictionary suitable for use as a JSON request body.""" body = {} if self.aws_iam_role: body["aws_iam_role"] = self.aws_iam_role.as_dict() if self.azure_managed_identity: body["azure_managed_identity"] = self.azure_managed_identity.as_dict() - if self.credential_name is not None: - body["credential_name"] = self.credential_name + if self.azure_service_principal: + body["azure_service_principal"] = self.azure_service_principal.as_dict() + if self.cloudflare_api_token: + body["cloudflare_api_token"] = self.cloudflare_api_token.as_dict() + if self.comment is not None: + body["comment"] = self.comment if self.databricks_gcp_service_account: body["databricks_gcp_service_account"] = self.databricks_gcp_service_account.as_dict() - if self.external_location_name is not None: - body["external_location_name"] = self.external_location_name - if self.purpose is not None: - body["purpose"] = self.purpose.value + if self.force is not None: + body["force"] = self.force + if self.isolation_mode is not None: + body["isolation_mode"] = self.isolation_mode.value + if self.name is not None: + body["name"] = self.name + if self.new_name is not None: + body["new_name"] = self.new_name + if self.owner is not None: + body["owner"] = self.owner if self.read_only is not None: body["read_only"] = self.read_only - if self.url is not None: - body["url"] = self.url + if self.skip_validation is not None: + body["skip_validation"] = self.skip_validation return body def as_shallow_dict(self) -> dict: - """Serializes the ValidateCredentialRequest into a shallow dictionary of its immediate attributes.""" + """Serializes the UpdateStorageCredential 
into a shallow dictionary of its immediate attributes.""" body = {} if self.aws_iam_role: body["aws_iam_role"] = self.aws_iam_role if self.azure_managed_identity: body["azure_managed_identity"] = self.azure_managed_identity - if self.credential_name is not None: - body["credential_name"] = self.credential_name + if self.azure_service_principal: + body["azure_service_principal"] = self.azure_service_principal + if self.cloudflare_api_token: + body["cloudflare_api_token"] = self.cloudflare_api_token + if self.comment is not None: + body["comment"] = self.comment if self.databricks_gcp_service_account: body["databricks_gcp_service_account"] = self.databricks_gcp_service_account - if self.external_location_name is not None: - body["external_location_name"] = self.external_location_name - if self.purpose is not None: - body["purpose"] = self.purpose + if self.force is not None: + body["force"] = self.force + if self.isolation_mode is not None: + body["isolation_mode"] = self.isolation_mode + if self.name is not None: + body["name"] = self.name + if self.new_name is not None: + body["new_name"] = self.new_name + if self.owner is not None: + body["owner"] = self.owner if self.read_only is not None: body["read_only"] = self.read_only - if self.url is not None: - body["url"] = self.url + if self.skip_validation is not None: + body["skip_validation"] = self.skip_validation return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ValidateCredentialRequest: - """Deserializes the ValidateCredentialRequest from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> UpdateStorageCredential: + """Deserializes the UpdateStorageCredential from a dictionary.""" return cls( - aws_iam_role=_from_dict(d, "aws_iam_role", AwsIamRole), - azure_managed_identity=_from_dict(d, "azure_managed_identity", AzureManagedIdentity), - credential_name=d.get("credential_name", None), - databricks_gcp_service_account=_from_dict(d, "databricks_gcp_service_account", 
DatabricksGcpServiceAccount), - external_location_name=d.get("external_location_name", None), - purpose=_enum(d, "purpose", CredentialPurpose), + aws_iam_role=_from_dict(d, "aws_iam_role", AwsIamRoleRequest), + azure_managed_identity=_from_dict(d, "azure_managed_identity", AzureManagedIdentityResponse), + azure_service_principal=_from_dict(d, "azure_service_principal", AzureServicePrincipal), + cloudflare_api_token=_from_dict(d, "cloudflare_api_token", CloudflareApiToken), + comment=d.get("comment", None), + databricks_gcp_service_account=_from_dict( + d, "databricks_gcp_service_account", DatabricksGcpServiceAccountRequest + ), + force=d.get("force", None), + isolation_mode=_enum(d, "isolation_mode", IsolationMode), + name=d.get("name", None), + new_name=d.get("new_name", None), + owner=d.get("owner", None), read_only=d.get("read_only", None), - url=d.get("url", None), + skip_validation=d.get("skip_validation", None), ) +@dataclass +class UpdateWorkspaceBindingsResponse: + """A list of workspace IDs that are bound to the securable""" + + bindings: Optional[List[WorkspaceBinding]] = None + """List of workspace bindings.""" + + def as_dict(self) -> dict: + """Serializes the UpdateWorkspaceBindingsResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.bindings: + body["bindings"] = [v.as_dict() for v in self.bindings] + return body + + def as_shallow_dict(self) -> dict: + """Serializes the UpdateWorkspaceBindingsResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.bindings: + body["bindings"] = self.bindings + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> UpdateWorkspaceBindingsResponse: + """Deserializes the UpdateWorkspaceBindingsResponse from a dictionary.""" + return cls(bindings=_repeated_dict(d, "bindings", WorkspaceBinding)) + + @dataclass class ValidateCredentialResponse: is_dir: Optional[bool] = None @@ -11372,99 +9245,6 @@ class ValidateCredentialResult(Enum): 
SKIP = "SKIP" -@dataclass -class ValidateStorageCredential: - aws_iam_role: Optional[AwsIamRoleRequest] = None - """The AWS IAM role configuration.""" - - azure_managed_identity: Optional[AzureManagedIdentityRequest] = None - """The Azure managed identity configuration.""" - - azure_service_principal: Optional[AzureServicePrincipal] = None - """The Azure service principal configuration.""" - - cloudflare_api_token: Optional[CloudflareApiToken] = None - """The Cloudflare API token configuration.""" - - databricks_gcp_service_account: Optional[DatabricksGcpServiceAccountRequest] = None - """The Databricks created GCP service account configuration.""" - - external_location_name: Optional[str] = None - """The name of an existing external location to validate.""" - - read_only: Optional[bool] = None - """Whether the storage credential is only usable for read operations.""" - - storage_credential_name: Optional[str] = None - """Required. The name of an existing credential or long-lived cloud credential to validate.""" - - url: Optional[str] = None - """The external location url to validate.""" - - def as_dict(self) -> dict: - """Serializes the ValidateStorageCredential into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.aws_iam_role: - body["aws_iam_role"] = self.aws_iam_role.as_dict() - if self.azure_managed_identity: - body["azure_managed_identity"] = self.azure_managed_identity.as_dict() - if self.azure_service_principal: - body["azure_service_principal"] = self.azure_service_principal.as_dict() - if self.cloudflare_api_token: - body["cloudflare_api_token"] = self.cloudflare_api_token.as_dict() - if self.databricks_gcp_service_account: - body["databricks_gcp_service_account"] = self.databricks_gcp_service_account.as_dict() - if self.external_location_name is not None: - body["external_location_name"] = self.external_location_name - if self.read_only is not None: - body["read_only"] = self.read_only - if self.storage_credential_name is 
not None: - body["storage_credential_name"] = self.storage_credential_name - if self.url is not None: - body["url"] = self.url - return body - - def as_shallow_dict(self) -> dict: - """Serializes the ValidateStorageCredential into a shallow dictionary of its immediate attributes.""" - body = {} - if self.aws_iam_role: - body["aws_iam_role"] = self.aws_iam_role - if self.azure_managed_identity: - body["azure_managed_identity"] = self.azure_managed_identity - if self.azure_service_principal: - body["azure_service_principal"] = self.azure_service_principal - if self.cloudflare_api_token: - body["cloudflare_api_token"] = self.cloudflare_api_token - if self.databricks_gcp_service_account: - body["databricks_gcp_service_account"] = self.databricks_gcp_service_account - if self.external_location_name is not None: - body["external_location_name"] = self.external_location_name - if self.read_only is not None: - body["read_only"] = self.read_only - if self.storage_credential_name is not None: - body["storage_credential_name"] = self.storage_credential_name - if self.url is not None: - body["url"] = self.url - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ValidateStorageCredential: - """Deserializes the ValidateStorageCredential from a dictionary.""" - return cls( - aws_iam_role=_from_dict(d, "aws_iam_role", AwsIamRoleRequest), - azure_managed_identity=_from_dict(d, "azure_managed_identity", AzureManagedIdentityRequest), - azure_service_principal=_from_dict(d, "azure_service_principal", AzureServicePrincipal), - cloudflare_api_token=_from_dict(d, "cloudflare_api_token", CloudflareApiToken), - databricks_gcp_service_account=_from_dict( - d, "databricks_gcp_service_account", DatabricksGcpServiceAccountRequest - ), - external_location_name=d.get("external_location_name", None), - read_only=d.get("read_only", None), - storage_credential_name=d.get("storage_credential_name", None), - url=d.get("url", None), - ) - - @dataclass class 
ValidateStorageCredentialResponse: is_dir: Optional[bool] = None @@ -12453,6 +10233,7 @@ def create( options: Dict[str, str], *, comment: Optional[str] = None, + environment_settings: Optional[EnvironmentSettings] = None, properties: Optional[Dict[str, str]] = None, read_only: Optional[bool] = None, ) -> ConnectionInfo: @@ -12469,6 +10250,8 @@ def create( A map of key-value properties attached to the securable. :param comment: str (optional) User-provided free-form text description. + :param environment_settings: :class:`EnvironmentSettings` (optional) + [Create,Update:OPT] Connection environment settings as EnvironmentSettings object. :param properties: Dict[str,str] (optional) A map of key-value properties attached to the securable. :param read_only: bool (optional) @@ -12481,6 +10264,8 @@ def create( body["comment"] = comment if connection_type is not None: body["connection_type"] = connection_type.value + if environment_settings is not None: + body["environment_settings"] = environment_settings.as_dict() if name is not None: body["name"] = name if options is not None: @@ -12561,7 +10346,13 @@ def list(self, *, max_results: Optional[int] = None, page_token: Optional[str] = query["page_token"] = json["next_page_token"] def update( - self, name: str, options: Dict[str, str], *, new_name: Optional[str] = None, owner: Optional[str] = None + self, + name: str, + options: Dict[str, str], + *, + environment_settings: Optional[EnvironmentSettings] = None, + new_name: Optional[str] = None, + owner: Optional[str] = None, ) -> ConnectionInfo: """Updates the connection that matches the supplied name. @@ -12569,6 +10360,8 @@ def update( Name of the connection. :param options: Dict[str,str] A map of key-value properties attached to the securable. + :param environment_settings: :class:`EnvironmentSettings` (optional) + [Create,Update:OPT] Connection environment settings as EnvironmentSettings object. :param new_name: str (optional) New name for the connection. 
:param owner: str (optional) @@ -12577,6 +10370,8 @@ def update( :returns: :class:`ConnectionInfo` """ body = {} + if environment_settings is not None: + body["environment_settings"] = environment_settings.as_dict() if new_name is not None: body["new_name"] = new_name if options is not None: @@ -13001,11 +10796,16 @@ def list_external_lineage_relationships( direction. :param object_info: :class:`ExternalLineageObject` - The object to query external lineage relationship on. + The object to query external lineage relationships for. Since this field is a query parameter, + please flatten the nested fields. For example, if the object is a table, the query parameter should + look like: `object_info.table.name=main.sales.customers` :param lineage_direction: :class:`LineageDirection` The lineage direction to filter on. :param page_size: int (optional) + Specifies the maximum number of external lineage relationships to return in a single response. The + value must be less than or equal to 1000. :param page_token: str (optional) + Opaque pagination token to go to next page based on previous query. :returns: Iterator over :class:`ExternalLineageInfo` """ @@ -13400,7 +11200,10 @@ def list_external_metadata( the array. :param page_size: int (optional) + Specifies the maximum number of external metadata objects to return in a single response. The value + must be less than or equal to 1000. :param page_token: str (optional) + Opaque pagination token to go to next page based on previous query. 
:returns: Iterator over :class:`ExternalMetadata` """ diff --git a/databricks/sdk/service/cleanrooms.py b/databricks/sdk/service/cleanrooms.py index 6c8b9525a..5d6ea9ce6 100755 --- a/databricks/sdk/service/cleanrooms.py +++ b/databricks/sdk/service/cleanrooms.py @@ -1164,37 +1164,6 @@ def from_dict(cls, d: Dict[str, Any]) -> ListCleanRoomsResponse: ) -@dataclass -class UpdateCleanRoomRequest: - clean_room: Optional[CleanRoom] = None - - name: Optional[str] = None - """Name of the clean room.""" - - def as_dict(self) -> dict: - """Serializes the UpdateCleanRoomRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.clean_room: - body["clean_room"] = self.clean_room.as_dict() - if self.name is not None: - body["name"] = self.name - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateCleanRoomRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.clean_room: - body["clean_room"] = self.clean_room - if self.name is not None: - body["name"] = self.name - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateCleanRoomRequest: - """Deserializes the UpdateCleanRoomRequest from a dictionary.""" - return cls(clean_room=_from_dict(d, "clean_room", CleanRoom), name=d.get("name", None)) - - class CleanRoomAssetsAPI: """Clean room assets are data and code objects — Tables, volumes, and notebooks that are shared with the clean room.""" @@ -1209,7 +1178,8 @@ def create(self, clean_room_name: str, asset: CleanRoomAsset) -> CleanRoomAsset: access the asset. Typically, you should use a group as the clean room owner. :param clean_room_name: str - Name of the clean room. + The name of the clean room this asset belongs to. This is an output-only field to ensure proper + resource identification. 
:param asset: :class:`CleanRoomAsset` :returns: :class:`CleanRoomAsset` diff --git a/databricks/sdk/service/compute.py b/databricks/sdk/service/compute.py index def14aa81..071ac89e0 100755 --- a/databricks/sdk/service/compute.py +++ b/databricks/sdk/service/compute.py @@ -19,70 +19,6 @@ # all definitions in this file are in alphabetical order -@dataclass -class AddInstanceProfile: - instance_profile_arn: str - """The AWS ARN of the instance profile to register with Databricks. This field is required.""" - - iam_role_arn: Optional[str] = None - """The AWS IAM role ARN of the role associated with the instance profile. This field is required if - your role name and instance profile name do not match and you want to use the instance profile - with [Databricks SQL Serverless]. - - Otherwise, this field is optional. - - [Databricks SQL Serverless]: https://docs.databricks.com/sql/admin/serverless.html""" - - is_meta_instance_profile: Optional[bool] = None - """Boolean flag indicating whether the instance profile should only be used in credential - passthrough scenarios. If true, it means the instance profile contains an meta IAM role which - could assume a wide range of roles. Therefore it should always be used with authorization. This - field is optional, the default value is `false`.""" - - skip_validation: Optional[bool] = None - """By default, Databricks validates that it has sufficient permissions to launch instances with the - instance profile. This validation uses AWS dry-run mode for the RunInstances API. If validation - fails with an error message that does not indicate an IAM related permission issue, (e.g. 
- “Your requested instance type is not supported in your requested availability zone”), you - can pass this flag to skip the validation and forcibly add the instance profile.""" - - def as_dict(self) -> dict: - """Serializes the AddInstanceProfile into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.iam_role_arn is not None: - body["iam_role_arn"] = self.iam_role_arn - if self.instance_profile_arn is not None: - body["instance_profile_arn"] = self.instance_profile_arn - if self.is_meta_instance_profile is not None: - body["is_meta_instance_profile"] = self.is_meta_instance_profile - if self.skip_validation is not None: - body["skip_validation"] = self.skip_validation - return body - - def as_shallow_dict(self) -> dict: - """Serializes the AddInstanceProfile into a shallow dictionary of its immediate attributes.""" - body = {} - if self.iam_role_arn is not None: - body["iam_role_arn"] = self.iam_role_arn - if self.instance_profile_arn is not None: - body["instance_profile_arn"] = self.instance_profile_arn - if self.is_meta_instance_profile is not None: - body["is_meta_instance_profile"] = self.is_meta_instance_profile - if self.skip_validation is not None: - body["skip_validation"] = self.skip_validation - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> AddInstanceProfile: - """Deserializes the AddInstanceProfile from a dictionary.""" - return cls( - iam_role_arn=d.get("iam_role_arn", None), - instance_profile_arn=d.get("instance_profile_arn", None), - is_meta_instance_profile=d.get("is_meta_instance_profile", None), - skip_validation=d.get("skip_validation", None), - ) - - @dataclass class AddResponse: def as_dict(self) -> dict: @@ -389,46 +325,6 @@ class AzureAvailability(Enum): SPOT_WITH_FALLBACK_AZURE = "SPOT_WITH_FALLBACK_AZURE" -@dataclass -class CancelCommand: - cluster_id: Optional[str] = None - - command_id: Optional[str] = None - - context_id: Optional[str] = None - - def as_dict(self) -> dict: - 
"""Serializes the CancelCommand into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.cluster_id is not None: - body["clusterId"] = self.cluster_id - if self.command_id is not None: - body["commandId"] = self.command_id - if self.context_id is not None: - body["contextId"] = self.context_id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CancelCommand into a shallow dictionary of its immediate attributes.""" - body = {} - if self.cluster_id is not None: - body["clusterId"] = self.cluster_id - if self.command_id is not None: - body["commandId"] = self.command_id - if self.context_id is not None: - body["contextId"] = self.context_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CancelCommand: - """Deserializes the CancelCommand from a dictionary.""" - return cls( - cluster_id=d.get("clusterId", None), - command_id=d.get("commandId", None), - context_id=d.get("contextId", None), - ) - - @dataclass class CancelResponse: def as_dict(self) -> dict: @@ -447,37 +343,6 @@ def from_dict(cls, d: Dict[str, Any]) -> CancelResponse: return cls() -@dataclass -class ChangeClusterOwner: - cluster_id: str - - owner_username: str - """New owner of the cluster_id after this RPC.""" - - def as_dict(self) -> dict: - """Serializes the ChangeClusterOwner into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.cluster_id is not None: - body["cluster_id"] = self.cluster_id - if self.owner_username is not None: - body["owner_username"] = self.owner_username - return body - - def as_shallow_dict(self) -> dict: - """Serializes the ChangeClusterOwner into a shallow dictionary of its immediate attributes.""" - body = {} - if self.cluster_id is not None: - body["cluster_id"] = self.cluster_id - if self.owner_username is not None: - body["owner_username"] = self.owner_username - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ChangeClusterOwner: - """Deserializes 
the ChangeClusterOwner from a dictionary.""" - return cls(cluster_id=d.get("cluster_id", None), owner_username=d.get("owner_username", None)) - - @dataclass class ChangeClusterOwnerResponse: def as_dict(self) -> dict: @@ -1820,40 +1685,6 @@ def from_dict(cls, d: Dict[str, Any]) -> ClusterPermissionsDescription: ) -@dataclass -class ClusterPermissionsRequest: - access_control_list: Optional[List[ClusterAccessControlRequest]] = None - - cluster_id: Optional[str] = None - """The cluster for which to get or manage permissions.""" - - def as_dict(self) -> dict: - """Serializes the ClusterPermissionsRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.access_control_list: - body["access_control_list"] = [v.as_dict() for v in self.access_control_list] - if self.cluster_id is not None: - body["cluster_id"] = self.cluster_id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the ClusterPermissionsRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.access_control_list: - body["access_control_list"] = self.access_control_list - if self.cluster_id is not None: - body["cluster_id"] = self.cluster_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ClusterPermissionsRequest: - """Deserializes the ClusterPermissionsRequest from a dictionary.""" - return cls( - access_control_list=_repeated_dict(d, "access_control_list", ClusterAccessControlRequest), - cluster_id=d.get("cluster_id", None), - ) - - @dataclass class ClusterPolicyAccessControlRequest: group_name: Optional[str] = None @@ -2082,40 +1913,6 @@ def from_dict(cls, d: Dict[str, Any]) -> ClusterPolicyPermissionsDescription: ) -@dataclass -class ClusterPolicyPermissionsRequest: - access_control_list: Optional[List[ClusterPolicyAccessControlRequest]] = None - - cluster_policy_id: Optional[str] = None - """The cluster policy for which to get or manage permissions.""" - - def as_dict(self) -> dict: - 
"""Serializes the ClusterPolicyPermissionsRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.access_control_list: - body["access_control_list"] = [v.as_dict() for v in self.access_control_list] - if self.cluster_policy_id is not None: - body["cluster_policy_id"] = self.cluster_policy_id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the ClusterPolicyPermissionsRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.access_control_list: - body["access_control_list"] = self.access_control_list - if self.cluster_policy_id is not None: - body["cluster_policy_id"] = self.cluster_policy_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ClusterPolicyPermissionsRequest: - """Deserializes the ClusterPolicyPermissionsRequest from a dictionary.""" - return cls( - access_control_list=_repeated_dict(d, "access_control_list", ClusterPolicyAccessControlRequest), - cluster_policy_id=d.get("cluster_policy_id", None), - ) - - @dataclass class ClusterSettingsChange: """Represents a change to the cluster settings required for the cluster to become compliant with @@ -2561,56 +2358,6 @@ def from_dict(cls, d: Dict[str, Any]) -> ClusterSpec: ) -@dataclass -class Command: - cluster_id: Optional[str] = None - """Running cluster id""" - - command: Optional[str] = None - """Executable code""" - - context_id: Optional[str] = None - """Running context id""" - - language: Optional[Language] = None - - def as_dict(self) -> dict: - """Serializes the Command into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.cluster_id is not None: - body["clusterId"] = self.cluster_id - if self.command is not None: - body["command"] = self.command - if self.context_id is not None: - body["contextId"] = self.context_id - if self.language is not None: - body["language"] = self.language.value - return body - - def as_shallow_dict(self) -> dict: - """Serializes the 
Command into a shallow dictionary of its immediate attributes.""" - body = {} - if self.cluster_id is not None: - body["clusterId"] = self.cluster_id - if self.command is not None: - body["command"] = self.command - if self.context_id is not None: - body["contextId"] = self.context_id - if self.language is not None: - body["language"] = self.language - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> Command: - """Deserializes the Command from a dictionary.""" - return cls( - cluster_id=d.get("clusterId", None), - command=d.get("command", None), - context_id=d.get("contextId", None), - language=_enum(d, "language", Language), - ) - - class CommandStatus(Enum): CANCELLED = "Cancelled" @@ -2697,1896 +2444,605 @@ def from_dict(cls, d: Dict[str, Any]) -> ContextStatusResponse: @dataclass -class CreateCluster: - spark_version: str - """The Spark version of the cluster, e.g. `3.3.x-scala2.11`. A list of available Spark versions can - be retrieved by using the :method:clusters/sparkVersions API call.""" +class CreateClusterResponse: + cluster_id: Optional[str] = None - apply_policy_default_values: Optional[bool] = None - """When set to true, fixed and default values from the policy will be used for fields that are - omitted. When set to false, only fixed values from the policy will be applied.""" + def as_dict(self) -> dict: + """Serializes the CreateClusterResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.cluster_id is not None: + body["cluster_id"] = self.cluster_id + return body - autoscale: Optional[AutoScale] = None - """Parameters needed in order to automatically scale clusters up and down based on load. 
Note: - autoscaling works best with DB runtime versions 3.0 or later.""" + def as_shallow_dict(self) -> dict: + """Serializes the CreateClusterResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.cluster_id is not None: + body["cluster_id"] = self.cluster_id + return body - autotermination_minutes: Optional[int] = None - """Automatically terminates the cluster after it is inactive for this time in minutes. If not set, - this cluster will not be automatically terminated. If specified, the threshold must be between - 10 and 10000 minutes. Users can also set this value to 0 to explicitly disable automatic - termination.""" + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> CreateClusterResponse: + """Deserializes the CreateClusterResponse from a dictionary.""" + return cls(cluster_id=d.get("cluster_id", None)) - aws_attributes: Optional[AwsAttributes] = None - """Attributes related to clusters running on Amazon Web Services. If not specified at cluster - creation, a set of default values will be used.""" - azure_attributes: Optional[AzureAttributes] = None - """Attributes related to clusters running on Microsoft Azure. If not specified at cluster creation, - a set of default values will be used.""" +@dataclass +class CreateInstancePoolResponse: + instance_pool_id: Optional[str] = None + """The ID of the created instance pool.""" - clone_from: Optional[CloneCluster] = None - """When specified, this clones libraries from a source cluster during the creation of a new - cluster.""" + def as_dict(self) -> dict: + """Serializes the CreateInstancePoolResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.instance_pool_id is not None: + body["instance_pool_id"] = self.instance_pool_id + return body - cluster_log_conf: Optional[ClusterLogConf] = None - """The configuration for delivering spark logs to a long-term storage destination. 
Three kinds of - destinations (DBFS, S3 and Unity Catalog volumes) are supported. Only one destination can be - specified for one cluster. If the conf is given, the logs will be delivered to the destination - every `5 mins`. The destination of driver logs is `$destination/$clusterId/driver`, while the - destination of executor logs is `$destination/$clusterId/executor`.""" + def as_shallow_dict(self) -> dict: + """Serializes the CreateInstancePoolResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.instance_pool_id is not None: + body["instance_pool_id"] = self.instance_pool_id + return body - cluster_name: Optional[str] = None - """Cluster name requested by the user. This doesn't have to be unique. If not specified at - creation, the cluster name will be an empty string. For job clusters, the cluster name is - automatically set based on the job and job run IDs.""" + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> CreateInstancePoolResponse: + """Deserializes the CreateInstancePoolResponse from a dictionary.""" + return cls(instance_pool_id=d.get("instance_pool_id", None)) - custom_tags: Optional[Dict[str, str]] = None - """Additional tags for cluster resources. Databricks will tag all cluster resources (e.g., AWS - instances and EBS volumes) with these tags in addition to `default_tags`. 
Notes: - - - Currently, Databricks allows at most 45 custom tags - - - Clusters can only reuse cloud resources if the resources' tags are a subset of the cluster - tags""" - data_security_mode: Optional[DataSecurityMode] = None +@dataclass +class CreatePolicyResponse: + policy_id: Optional[str] = None + """Canonical unique identifier for the cluster policy.""" - docker_image: Optional[DockerImage] = None - """Custom docker image BYOC""" + def as_dict(self) -> dict: + """Serializes the CreatePolicyResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.policy_id is not None: + body["policy_id"] = self.policy_id + return body - driver_instance_pool_id: Optional[str] = None - """The optional ID of the instance pool for the driver of the cluster belongs. The pool cluster - uses the instance pool with id (instance_pool_id) if the driver pool is not assigned.""" + def as_shallow_dict(self) -> dict: + """Serializes the CreatePolicyResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.policy_id is not None: + body["policy_id"] = self.policy_id + return body - driver_node_type_id: Optional[str] = None - """The node type of the Spark driver. Note that this field is optional; if unset, the driver node - type will be set as the same value as `node_type_id` defined above. - - This field, along with node_type_id, should not be set if virtual_cluster_size is set. If both - driver_node_type_id, node_type_id, and virtual_cluster_size are specified, driver_node_type_id - and node_type_id take precedence.""" + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> CreatePolicyResponse: + """Deserializes the CreatePolicyResponse from a dictionary.""" + return cls(policy_id=d.get("policy_id", None)) - enable_elastic_disk: Optional[bool] = None - """Autoscaling Local Storage: when enabled, this cluster will dynamically acquire additional disk - space when its Spark workers are running low on disk space. 
This feature requires specific AWS - permissions to function correctly - refer to the User Guide for more details.""" - enable_local_disk_encryption: Optional[bool] = None - """Whether to enable LUKS on cluster VMs' local disks""" +@dataclass +class CreateResponse: + script_id: Optional[str] = None + """The global init script ID.""" - gcp_attributes: Optional[GcpAttributes] = None - """Attributes related to clusters running on Google Cloud Platform. If not specified at cluster - creation, a set of default values will be used.""" + def as_dict(self) -> dict: + """Serializes the CreateResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.script_id is not None: + body["script_id"] = self.script_id + return body - init_scripts: Optional[List[InitScriptInfo]] = None - """The configuration for storing init scripts. Any number of destinations can be specified. The - scripts are executed sequentially in the order provided. If `cluster_log_conf` is specified, - init script logs are sent to `//init_scripts`.""" + def as_shallow_dict(self) -> dict: + """Serializes the CreateResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.script_id is not None: + body["script_id"] = self.script_id + return body - instance_pool_id: Optional[str] = None - """The optional ID of the instance pool to which the cluster belongs.""" + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> CreateResponse: + """Deserializes the CreateResponse from a dictionary.""" + return cls(script_id=d.get("script_id", None)) - is_single_node: Optional[bool] = None - """This field can only be used when `kind = CLASSIC_PREVIEW`. 
- - When set to true, Databricks will automatically set single node related `custom_tags`, - `spark_conf`, and `num_workers`""" - kind: Optional[Kind] = None +@dataclass +class Created: + id: Optional[str] = None - node_type_id: Optional[str] = None - """This field encodes, through a single value, the resources available to each of the Spark nodes - in this cluster. For example, the Spark nodes can be provisioned and optimized for memory or - compute intensive workloads. A list of available node types can be retrieved by using the - :method:clusters/listNodeTypes API call.""" + def as_dict(self) -> dict: + """Serializes the Created into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.id is not None: + body["id"] = self.id + return body - num_workers: Optional[int] = None - """Number of worker nodes that this cluster should have. A cluster has one Spark Driver and - `num_workers` Executors for a total of `num_workers` + 1 Spark nodes. - - Note: When reading the properties of a cluster, this field reflects the desired number of - workers rather than the actual current number of workers. For instance, if a cluster is resized - from 5 to 10 workers, this field will immediately be updated to reflect the target size of 10 - workers, whereas the workers listed in `spark_info` will gradually increase from 5 to 10 as the - new nodes are provisioned.""" + def as_shallow_dict(self) -> dict: + """Serializes the Created into a shallow dictionary of its immediate attributes.""" + body = {} + if self.id is not None: + body["id"] = self.id + return body - policy_id: Optional[str] = None - """The ID of the cluster policy used to create the cluster if applicable.""" + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> Created: + """Deserializes the Created from a dictionary.""" + return cls(id=d.get("id", None)) - remote_disk_throughput: Optional[int] = None - """If set, what the configurable throughput (in Mb/s) for the remote disk is. 
Currently only - supported for GCP HYPERDISK_BALANCED disks.""" - runtime_engine: Optional[RuntimeEngine] = None - """Determines the cluster's runtime engine, either standard or Photon. - - This field is not compatible with legacy `spark_version` values that contain `-photon-`. Remove - `-photon-` from the `spark_version` and set `runtime_engine` to `PHOTON`. - - If left unspecified, the runtime engine defaults to standard unless the spark_version contains - -photon-, in which case Photon will be used.""" +@dataclass +class CustomPolicyTag: + key: str + """The key of the tag. - Must be unique among all custom tags of the same policy - Cannot be + “budget-policy-name”, “budget-policy-id” or "budget-policy-resolution-result" - these + tags are preserved.""" - single_user_name: Optional[str] = None - """Single user name if data_security_mode is `SINGLE_USER`""" + value: Optional[str] = None + """The value of the tag.""" - spark_conf: Optional[Dict[str, str]] = None - """An object containing a set of optional, user-specified Spark configuration key-value pairs. - Users can also pass in a string of extra JVM options to the driver and the executors via - `spark.driver.extraJavaOptions` and `spark.executor.extraJavaOptions` respectively.""" + def as_dict(self) -> dict: + """Serializes the CustomPolicyTag into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.key is not None: + body["key"] = self.key + if self.value is not None: + body["value"] = self.value + return body - spark_env_vars: Optional[Dict[str, str]] = None - """An object containing a set of optional, user-specified environment variable key-value pairs. - Please note that key-value pair of the form (X,Y) will be exported as is (i.e., `export X='Y'`) - while launching the driver and workers. - - In order to specify an additional set of `SPARK_DAEMON_JAVA_OPTS`, we recommend appending them - to `$SPARK_DAEMON_JAVA_OPTS` as shown in the example below. 
This ensures that all default - databricks managed environmental variables are included as well. - - Example Spark environment variables: `{"SPARK_WORKER_MEMORY": "28000m", "SPARK_LOCAL_DIRS": - "/local_disk0"}` or `{"SPARK_DAEMON_JAVA_OPTS": "$SPARK_DAEMON_JAVA_OPTS - -Dspark.shuffle.service.enabled=true"}`""" + def as_shallow_dict(self) -> dict: + """Serializes the CustomPolicyTag into a shallow dictionary of its immediate attributes.""" + body = {} + if self.key is not None: + body["key"] = self.key + if self.value is not None: + body["value"] = self.value + return body - ssh_public_keys: Optional[List[str]] = None - """SSH public key contents that will be added to each Spark node in this cluster. The corresponding - private keys can be used to login with the user name `ubuntu` on port `2200`. Up to 10 keys can - be specified.""" + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> CustomPolicyTag: + """Deserializes the CustomPolicyTag from a dictionary.""" + return cls(key=d.get("key", None), value=d.get("value", None)) - total_initial_remote_disk_size: Optional[int] = None - """If set, what the total initial volume size (in GB) of the remote disks should be. Currently only - supported for GCP HYPERDISK_BALANCED disks.""" - use_ml_runtime: Optional[bool] = None - """This field can only be used when `kind = CLASSIC_PREVIEW`. 
- - `effective_spark_version` is determined by `spark_version` (DBR release), this field - `use_ml_runtime`, and whether `node_type_id` is gpu node or not.""" +@dataclass +class DataPlaneEventDetails: + event_type: Optional[DataPlaneEventDetailsEventType] = None - workload_type: Optional[WorkloadType] = None + executor_failures: Optional[int] = None + + host_id: Optional[str] = None + + timestamp: Optional[int] = None def as_dict(self) -> dict: - """Serializes the CreateCluster into a dictionary suitable for use as a JSON request body.""" + """Serializes the DataPlaneEventDetails into a dictionary suitable for use as a JSON request body.""" body = {} - if self.apply_policy_default_values is not None: - body["apply_policy_default_values"] = self.apply_policy_default_values - if self.autoscale: - body["autoscale"] = self.autoscale.as_dict() - if self.autotermination_minutes is not None: - body["autotermination_minutes"] = self.autotermination_minutes - if self.aws_attributes: - body["aws_attributes"] = self.aws_attributes.as_dict() - if self.azure_attributes: - body["azure_attributes"] = self.azure_attributes.as_dict() - if self.clone_from: - body["clone_from"] = self.clone_from.as_dict() - if self.cluster_log_conf: - body["cluster_log_conf"] = self.cluster_log_conf.as_dict() - if self.cluster_name is not None: - body["cluster_name"] = self.cluster_name - if self.custom_tags: - body["custom_tags"] = self.custom_tags - if self.data_security_mode is not None: - body["data_security_mode"] = self.data_security_mode.value - if self.docker_image: - body["docker_image"] = self.docker_image.as_dict() - if self.driver_instance_pool_id is not None: - body["driver_instance_pool_id"] = self.driver_instance_pool_id - if self.driver_node_type_id is not None: - body["driver_node_type_id"] = self.driver_node_type_id - if self.enable_elastic_disk is not None: - body["enable_elastic_disk"] = self.enable_elastic_disk - if self.enable_local_disk_encryption is not None: - 
body["enable_local_disk_encryption"] = self.enable_local_disk_encryption - if self.gcp_attributes: - body["gcp_attributes"] = self.gcp_attributes.as_dict() - if self.init_scripts: - body["init_scripts"] = [v.as_dict() for v in self.init_scripts] - if self.instance_pool_id is not None: - body["instance_pool_id"] = self.instance_pool_id - if self.is_single_node is not None: - body["is_single_node"] = self.is_single_node - if self.kind is not None: - body["kind"] = self.kind.value - if self.node_type_id is not None: - body["node_type_id"] = self.node_type_id - if self.num_workers is not None: - body["num_workers"] = self.num_workers - if self.policy_id is not None: - body["policy_id"] = self.policy_id - if self.remote_disk_throughput is not None: - body["remote_disk_throughput"] = self.remote_disk_throughput - if self.runtime_engine is not None: - body["runtime_engine"] = self.runtime_engine.value - if self.single_user_name is not None: - body["single_user_name"] = self.single_user_name - if self.spark_conf: - body["spark_conf"] = self.spark_conf - if self.spark_env_vars: - body["spark_env_vars"] = self.spark_env_vars - if self.spark_version is not None: - body["spark_version"] = self.spark_version - if self.ssh_public_keys: - body["ssh_public_keys"] = [v for v in self.ssh_public_keys] - if self.total_initial_remote_disk_size is not None: - body["total_initial_remote_disk_size"] = self.total_initial_remote_disk_size - if self.use_ml_runtime is not None: - body["use_ml_runtime"] = self.use_ml_runtime - if self.workload_type: - body["workload_type"] = self.workload_type.as_dict() + if self.event_type is not None: + body["event_type"] = self.event_type.value + if self.executor_failures is not None: + body["executor_failures"] = self.executor_failures + if self.host_id is not None: + body["host_id"] = self.host_id + if self.timestamp is not None: + body["timestamp"] = self.timestamp return body def as_shallow_dict(self) -> dict: - """Serializes the CreateCluster into a 
shallow dictionary of its immediate attributes.""" + """Serializes the DataPlaneEventDetails into a shallow dictionary of its immediate attributes.""" body = {} - if self.apply_policy_default_values is not None: - body["apply_policy_default_values"] = self.apply_policy_default_values - if self.autoscale: - body["autoscale"] = self.autoscale - if self.autotermination_minutes is not None: - body["autotermination_minutes"] = self.autotermination_minutes - if self.aws_attributes: - body["aws_attributes"] = self.aws_attributes - if self.azure_attributes: - body["azure_attributes"] = self.azure_attributes - if self.clone_from: - body["clone_from"] = self.clone_from - if self.cluster_log_conf: - body["cluster_log_conf"] = self.cluster_log_conf - if self.cluster_name is not None: - body["cluster_name"] = self.cluster_name - if self.custom_tags: - body["custom_tags"] = self.custom_tags - if self.data_security_mode is not None: - body["data_security_mode"] = self.data_security_mode - if self.docker_image: - body["docker_image"] = self.docker_image - if self.driver_instance_pool_id is not None: - body["driver_instance_pool_id"] = self.driver_instance_pool_id - if self.driver_node_type_id is not None: - body["driver_node_type_id"] = self.driver_node_type_id - if self.enable_elastic_disk is not None: - body["enable_elastic_disk"] = self.enable_elastic_disk - if self.enable_local_disk_encryption is not None: - body["enable_local_disk_encryption"] = self.enable_local_disk_encryption - if self.gcp_attributes: - body["gcp_attributes"] = self.gcp_attributes - if self.init_scripts: - body["init_scripts"] = self.init_scripts - if self.instance_pool_id is not None: - body["instance_pool_id"] = self.instance_pool_id - if self.is_single_node is not None: - body["is_single_node"] = self.is_single_node - if self.kind is not None: - body["kind"] = self.kind - if self.node_type_id is not None: - body["node_type_id"] = self.node_type_id - if self.num_workers is not None: - body["num_workers"] 
= self.num_workers - if self.policy_id is not None: - body["policy_id"] = self.policy_id - if self.remote_disk_throughput is not None: - body["remote_disk_throughput"] = self.remote_disk_throughput - if self.runtime_engine is not None: - body["runtime_engine"] = self.runtime_engine - if self.single_user_name is not None: - body["single_user_name"] = self.single_user_name - if self.spark_conf: - body["spark_conf"] = self.spark_conf - if self.spark_env_vars: - body["spark_env_vars"] = self.spark_env_vars - if self.spark_version is not None: - body["spark_version"] = self.spark_version - if self.ssh_public_keys: - body["ssh_public_keys"] = self.ssh_public_keys - if self.total_initial_remote_disk_size is not None: - body["total_initial_remote_disk_size"] = self.total_initial_remote_disk_size - if self.use_ml_runtime is not None: - body["use_ml_runtime"] = self.use_ml_runtime - if self.workload_type: - body["workload_type"] = self.workload_type + if self.event_type is not None: + body["event_type"] = self.event_type + if self.executor_failures is not None: + body["executor_failures"] = self.executor_failures + if self.host_id is not None: + body["host_id"] = self.host_id + if self.timestamp is not None: + body["timestamp"] = self.timestamp return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateCluster: - """Deserializes the CreateCluster from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> DataPlaneEventDetails: + """Deserializes the DataPlaneEventDetails from a dictionary.""" return cls( - apply_policy_default_values=d.get("apply_policy_default_values", None), - autoscale=_from_dict(d, "autoscale", AutoScale), - autotermination_minutes=d.get("autotermination_minutes", None), - aws_attributes=_from_dict(d, "aws_attributes", AwsAttributes), - azure_attributes=_from_dict(d, "azure_attributes", AzureAttributes), - clone_from=_from_dict(d, "clone_from", CloneCluster), - cluster_log_conf=_from_dict(d, "cluster_log_conf", ClusterLogConf), - 
cluster_name=d.get("cluster_name", None), - custom_tags=d.get("custom_tags", None), - data_security_mode=_enum(d, "data_security_mode", DataSecurityMode), - docker_image=_from_dict(d, "docker_image", DockerImage), - driver_instance_pool_id=d.get("driver_instance_pool_id", None), - driver_node_type_id=d.get("driver_node_type_id", None), - enable_elastic_disk=d.get("enable_elastic_disk", None), - enable_local_disk_encryption=d.get("enable_local_disk_encryption", None), - gcp_attributes=_from_dict(d, "gcp_attributes", GcpAttributes), - init_scripts=_repeated_dict(d, "init_scripts", InitScriptInfo), - instance_pool_id=d.get("instance_pool_id", None), - is_single_node=d.get("is_single_node", None), - kind=_enum(d, "kind", Kind), - node_type_id=d.get("node_type_id", None), - num_workers=d.get("num_workers", None), - policy_id=d.get("policy_id", None), - remote_disk_throughput=d.get("remote_disk_throughput", None), - runtime_engine=_enum(d, "runtime_engine", RuntimeEngine), - single_user_name=d.get("single_user_name", None), - spark_conf=d.get("spark_conf", None), - spark_env_vars=d.get("spark_env_vars", None), - spark_version=d.get("spark_version", None), - ssh_public_keys=d.get("ssh_public_keys", None), - total_initial_remote_disk_size=d.get("total_initial_remote_disk_size", None), - use_ml_runtime=d.get("use_ml_runtime", None), - workload_type=_from_dict(d, "workload_type", WorkloadType), + event_type=_enum(d, "event_type", DataPlaneEventDetailsEventType), + executor_failures=d.get("executor_failures", None), + host_id=d.get("host_id", None), + timestamp=d.get("timestamp", None), ) +class DataPlaneEventDetailsEventType(Enum): + + NODE_BLACKLISTED = "NODE_BLACKLISTED" + NODE_EXCLUDED_DECOMMISSIONED = "NODE_EXCLUDED_DECOMMISSIONED" + + +class DataSecurityMode(Enum): + """Data security mode decides what data governance model to use when accessing data from a cluster. + + The following modes can only be used when `kind = CLASSIC_PREVIEW`. 
* `DATA_SECURITY_MODE_AUTO`: + Databricks will choose the most appropriate access mode depending on your compute configuration. + * `DATA_SECURITY_MODE_STANDARD`: Alias for `USER_ISOLATION`. * `DATA_SECURITY_MODE_DEDICATED`: + Alias for `SINGLE_USER`. + + The following modes can be used regardless of `kind`. * `NONE`: No security isolation for + multiple users sharing the cluster. Data governance features are not available in this mode. * + `SINGLE_USER`: A secure cluster that can only be exclusively used by a single user specified in + `single_user_name`. Most programming languages, cluster features and data governance features + are available in this mode. * `USER_ISOLATION`: A secure cluster that can be shared by multiple + users. Cluster users are fully isolated so that they cannot see each other's data and + credentials. Most data governance features are supported in this mode. But programming languages + and cluster features might be limited. + + The following modes are deprecated starting with Databricks Runtime 15.0 and will be removed for + future Databricks Runtime versions: + + * `LEGACY_TABLE_ACL`: This mode is for users migrating from legacy Table ACL clusters. * + `LEGACY_PASSTHROUGH`: This mode is for users migrating from legacy Passthrough on high + concurrency clusters. * `LEGACY_SINGLE_USER`: This mode is for users migrating from legacy + Passthrough on standard clusters. 
* `LEGACY_SINGLE_USER_STANDARD`: This mode provides a way that + doesn’t have UC nor passthrough enabled.""" + + DATA_SECURITY_MODE_AUTO = "DATA_SECURITY_MODE_AUTO" + DATA_SECURITY_MODE_DEDICATED = "DATA_SECURITY_MODE_DEDICATED" + DATA_SECURITY_MODE_STANDARD = "DATA_SECURITY_MODE_STANDARD" + LEGACY_PASSTHROUGH = "LEGACY_PASSTHROUGH" + LEGACY_SINGLE_USER = "LEGACY_SINGLE_USER" + LEGACY_SINGLE_USER_STANDARD = "LEGACY_SINGLE_USER_STANDARD" + LEGACY_TABLE_ACL = "LEGACY_TABLE_ACL" + NONE = "NONE" + SINGLE_USER = "SINGLE_USER" + USER_ISOLATION = "USER_ISOLATION" + + @dataclass -class CreateClusterResponse: - cluster_id: Optional[str] = None +class DbfsStorageInfo: + """A storage location in DBFS""" + + destination: str + """dbfs destination, e.g. `dbfs:/my/path`""" def as_dict(self) -> dict: - """Serializes the CreateClusterResponse into a dictionary suitable for use as a JSON request body.""" + """Serializes the DbfsStorageInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.cluster_id is not None: - body["cluster_id"] = self.cluster_id + if self.destination is not None: + body["destination"] = self.destination return body def as_shallow_dict(self) -> dict: - """Serializes the CreateClusterResponse into a shallow dictionary of its immediate attributes.""" + """Serializes the DbfsStorageInfo into a shallow dictionary of its immediate attributes.""" body = {} - if self.cluster_id is not None: - body["cluster_id"] = self.cluster_id + if self.destination is not None: + body["destination"] = self.destination return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateClusterResponse: - """Deserializes the CreateClusterResponse from a dictionary.""" - return cls(cluster_id=d.get("cluster_id", None)) + def from_dict(cls, d: Dict[str, Any]) -> DbfsStorageInfo: + """Deserializes the DbfsStorageInfo from a dictionary.""" + return cls(destination=d.get("destination", None)) @dataclass -class CreateContext: - cluster_id: Optional[str] 
= None - """Running cluster id""" - - language: Optional[Language] = None - +class DeleteClusterResponse: def as_dict(self) -> dict: - """Serializes the CreateContext into a dictionary suitable for use as a JSON request body.""" + """Serializes the DeleteClusterResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.cluster_id is not None: - body["clusterId"] = self.cluster_id - if self.language is not None: - body["language"] = self.language.value return body def as_shallow_dict(self) -> dict: - """Serializes the CreateContext into a shallow dictionary of its immediate attributes.""" + """Serializes the DeleteClusterResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.cluster_id is not None: - body["clusterId"] = self.cluster_id - if self.language is not None: - body["language"] = self.language return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateContext: - """Deserializes the CreateContext from a dictionary.""" - return cls(cluster_id=d.get("clusterId", None), language=_enum(d, "language", Language)) + def from_dict(cls, d: Dict[str, Any]) -> DeleteClusterResponse: + """Deserializes the DeleteClusterResponse from a dictionary.""" + return cls() @dataclass -class CreateInstancePool: - instance_pool_name: str - """Pool name requested by the user. Pool name must be unique. Length must be between 1 and 100 - characters.""" +class DeleteInstancePoolResponse: + def as_dict(self) -> dict: + """Serializes the DeleteInstancePoolResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + return body - node_type_id: str - """This field encodes, through a single value, the resources available to each of the Spark nodes - in this cluster. For example, the Spark nodes can be provisioned and optimized for memory or - compute intensive workloads. 
A list of available node types can be retrieved by using the - :method:clusters/listNodeTypes API call.""" + def as_shallow_dict(self) -> dict: + """Serializes the DeleteInstancePoolResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body - aws_attributes: Optional[InstancePoolAwsAttributes] = None - """Attributes related to instance pools running on Amazon Web Services. If not specified at pool - creation, a set of default values will be used.""" + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> DeleteInstancePoolResponse: + """Deserializes the DeleteInstancePoolResponse from a dictionary.""" + return cls() - azure_attributes: Optional[InstancePoolAzureAttributes] = None - """Attributes related to instance pools running on Azure. If not specified at pool creation, a set - of default values will be used.""" - custom_tags: Optional[Dict[str, str]] = None - """Additional tags for pool resources. Databricks will tag all pool resources (e.g., AWS instances - and EBS volumes) with these tags in addition to `default_tags`. Notes: - - - Currently, Databricks allows at most 45 custom tags""" +@dataclass +class DeletePolicyResponse: + def as_dict(self) -> dict: + """Serializes the DeletePolicyResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + return body - disk_spec: Optional[DiskSpec] = None - """Defines the specification of the disks that will be attached to all spark containers.""" + def as_shallow_dict(self) -> dict: + """Serializes the DeletePolicyResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body - enable_elastic_disk: Optional[bool] = None - """Autoscaling Local Storage: when enabled, this instances in this pool will dynamically acquire - additional disk space when its Spark workers are running low on disk space. 
In AWS, this feature - requires specific AWS permissions to function correctly - refer to the User Guide for more - details.""" + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> DeletePolicyResponse: + """Deserializes the DeletePolicyResponse from a dictionary.""" + return cls() - gcp_attributes: Optional[InstancePoolGcpAttributes] = None - """Attributes related to instance pools running on Google Cloud Platform. If not specified at pool - creation, a set of default values will be used.""" - idle_instance_autotermination_minutes: Optional[int] = None - """Automatically terminates the extra instances in the pool cache after they are inactive for this - time in minutes if min_idle_instances requirement is already met. If not set, the extra pool - instances will be automatically terminated after a default timeout. If specified, the threshold - must be between 0 and 10000 minutes. Users can also set this value to 0 to instantly remove idle - instances from the cache if min cache size could still hold.""" +@dataclass +class DeleteResponse: + def as_dict(self) -> dict: + """Serializes the DeleteResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + return body - max_capacity: Optional[int] = None - """Maximum number of outstanding instances to keep in the pool, including both instances used by - clusters and idle instances. Clusters that require further instance provisioning will fail - during upsize requests.""" + def as_shallow_dict(self) -> dict: + """Serializes the DeleteResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body - min_idle_instances: Optional[int] = None - """Minimum number of idle instances to keep in the instance pool""" - - preloaded_docker_images: Optional[List[DockerImage]] = None - """Custom Docker Image BYOC""" - - preloaded_spark_versions: Optional[List[str]] = None - """A list containing at most one preloaded Spark image version for the pool. 
Pool-backed clusters - started with the preloaded Spark version will start faster. A list of available Spark versions - can be retrieved by using the :method:clusters/sparkVersions API call.""" - - remote_disk_throughput: Optional[int] = None - """If set, what the configurable throughput (in Mb/s) for the remote disk is. Currently only - supported for GCP HYPERDISK_BALANCED types.""" - - total_initial_remote_disk_size: Optional[int] = None - """If set, what the total initial volume size (in GB) of the remote disks should be. Currently only - supported for GCP HYPERDISK_BALANCED types.""" - - def as_dict(self) -> dict: - """Serializes the CreateInstancePool into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.aws_attributes: - body["aws_attributes"] = self.aws_attributes.as_dict() - if self.azure_attributes: - body["azure_attributes"] = self.azure_attributes.as_dict() - if self.custom_tags: - body["custom_tags"] = self.custom_tags - if self.disk_spec: - body["disk_spec"] = self.disk_spec.as_dict() - if self.enable_elastic_disk is not None: - body["enable_elastic_disk"] = self.enable_elastic_disk - if self.gcp_attributes: - body["gcp_attributes"] = self.gcp_attributes.as_dict() - if self.idle_instance_autotermination_minutes is not None: - body["idle_instance_autotermination_minutes"] = self.idle_instance_autotermination_minutes - if self.instance_pool_name is not None: - body["instance_pool_name"] = self.instance_pool_name - if self.max_capacity is not None: - body["max_capacity"] = self.max_capacity - if self.min_idle_instances is not None: - body["min_idle_instances"] = self.min_idle_instances - if self.node_type_id is not None: - body["node_type_id"] = self.node_type_id - if self.preloaded_docker_images: - body["preloaded_docker_images"] = [v.as_dict() for v in self.preloaded_docker_images] - if self.preloaded_spark_versions: - body["preloaded_spark_versions"] = [v for v in self.preloaded_spark_versions] - if 
self.remote_disk_throughput is not None: - body["remote_disk_throughput"] = self.remote_disk_throughput - if self.total_initial_remote_disk_size is not None: - body["total_initial_remote_disk_size"] = self.total_initial_remote_disk_size - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreateInstancePool into a shallow dictionary of its immediate attributes.""" - body = {} - if self.aws_attributes: - body["aws_attributes"] = self.aws_attributes - if self.azure_attributes: - body["azure_attributes"] = self.azure_attributes - if self.custom_tags: - body["custom_tags"] = self.custom_tags - if self.disk_spec: - body["disk_spec"] = self.disk_spec - if self.enable_elastic_disk is not None: - body["enable_elastic_disk"] = self.enable_elastic_disk - if self.gcp_attributes: - body["gcp_attributes"] = self.gcp_attributes - if self.idle_instance_autotermination_minutes is not None: - body["idle_instance_autotermination_minutes"] = self.idle_instance_autotermination_minutes - if self.instance_pool_name is not None: - body["instance_pool_name"] = self.instance_pool_name - if self.max_capacity is not None: - body["max_capacity"] = self.max_capacity - if self.min_idle_instances is not None: - body["min_idle_instances"] = self.min_idle_instances - if self.node_type_id is not None: - body["node_type_id"] = self.node_type_id - if self.preloaded_docker_images: - body["preloaded_docker_images"] = self.preloaded_docker_images - if self.preloaded_spark_versions: - body["preloaded_spark_versions"] = self.preloaded_spark_versions - if self.remote_disk_throughput is not None: - body["remote_disk_throughput"] = self.remote_disk_throughput - if self.total_initial_remote_disk_size is not None: - body["total_initial_remote_disk_size"] = self.total_initial_remote_disk_size - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateInstancePool: - """Deserializes the CreateInstancePool from a dictionary.""" - return cls( - aws_attributes=_from_dict(d, 
"aws_attributes", InstancePoolAwsAttributes), - azure_attributes=_from_dict(d, "azure_attributes", InstancePoolAzureAttributes), - custom_tags=d.get("custom_tags", None), - disk_spec=_from_dict(d, "disk_spec", DiskSpec), - enable_elastic_disk=d.get("enable_elastic_disk", None), - gcp_attributes=_from_dict(d, "gcp_attributes", InstancePoolGcpAttributes), - idle_instance_autotermination_minutes=d.get("idle_instance_autotermination_minutes", None), - instance_pool_name=d.get("instance_pool_name", None), - max_capacity=d.get("max_capacity", None), - min_idle_instances=d.get("min_idle_instances", None), - node_type_id=d.get("node_type_id", None), - preloaded_docker_images=_repeated_dict(d, "preloaded_docker_images", DockerImage), - preloaded_spark_versions=d.get("preloaded_spark_versions", None), - remote_disk_throughput=d.get("remote_disk_throughput", None), - total_initial_remote_disk_size=d.get("total_initial_remote_disk_size", None), - ) + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> DeleteResponse: + """Deserializes the DeleteResponse from a dictionary.""" + return cls() @dataclass -class CreateInstancePoolResponse: - instance_pool_id: Optional[str] = None - """The ID of the created instance pool.""" - +class DestroyResponse: def as_dict(self) -> dict: - """Serializes the CreateInstancePoolResponse into a dictionary suitable for use as a JSON request body.""" + """Serializes the DestroyResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.instance_pool_id is not None: - body["instance_pool_id"] = self.instance_pool_id return body def as_shallow_dict(self) -> dict: - """Serializes the CreateInstancePoolResponse into a shallow dictionary of its immediate attributes.""" + """Serializes the DestroyResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.instance_pool_id is not None: - body["instance_pool_id"] = self.instance_pool_id return body @classmethod - def from_dict(cls, d: Dict[str, 
Any]) -> CreateInstancePoolResponse: - """Deserializes the CreateInstancePoolResponse from a dictionary.""" - return cls(instance_pool_id=d.get("instance_pool_id", None)) + def from_dict(cls, d: Dict[str, Any]) -> DestroyResponse: + """Deserializes the DestroyResponse from a dictionary.""" + return cls() @dataclass -class CreatePolicy: - definition: Optional[str] = None - """Policy definition document expressed in [Databricks Cluster Policy Definition Language]. - - [Databricks Cluster Policy Definition Language]: https://docs.databricks.com/administration-guide/clusters/policy-definition.html""" - - description: Optional[str] = None - """Additional human-readable description of the cluster policy.""" - - libraries: Optional[List[Library]] = None - """A list of libraries to be installed on the next cluster restart that uses this policy. The - maximum number of libraries is 500.""" - - max_clusters_per_user: Optional[int] = None - """Max number of clusters per user that can be active using this policy. If not present, there is - no max limit.""" - - name: Optional[str] = None - """Cluster Policy name requested by the user. This has to be unique. Length must be between 1 and - 100 characters.""" +class DiskSpec: + """Describes the disks that are launched for each instance in the spark cluster. For example, if + the cluster has 3 instances, each instance is configured to launch 2 disks, 100 GiB each, then + Databricks will launch a total of 6 disks, 100 GiB each, for this cluster.""" - policy_family_definition_overrides: Optional[str] = None - """Policy definition JSON document expressed in [Databricks Policy Definition Language]. The JSON - document must be passed as a string and cannot be embedded in the requests. + disk_count: Optional[int] = None + """The number of disks launched for each instance: - This feature is only enabled for supported + node types. - Users can choose up to the limit of the disks supported by the node type. 
- For + node types with no OS disk, at least one disk must be specified; otherwise, cluster creation + will fail. - You can use this to customize the policy definition inherited from the policy family. Policy - rules specified here are merged into the inherited policy definition. + If disks are attached, Databricks will configure Spark to use only the disks for scratch + storage, because heterogenously sized scratch devices can lead to inefficient disk utilization. + If no disks are attached, Databricks will configure Spark to use instance store disks. - [Databricks Policy Definition Language]: https://docs.databricks.com/administration-guide/clusters/policy-definition.html""" - - policy_family_id: Optional[str] = None - """ID of the policy family. The cluster policy's policy definition inherits the policy family's - policy definition. + Note: If disks are specified, then the Spark configuration `spark.local.dir` will be overridden. - Cannot be used with `definition`. Use `policy_family_definition_overrides` instead to customize - the policy definition.""" - - def as_dict(self) -> dict: - """Serializes the CreatePolicy into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.definition is not None: - body["definition"] = self.definition - if self.description is not None: - body["description"] = self.description - if self.libraries: - body["libraries"] = [v.as_dict() for v in self.libraries] - if self.max_clusters_per_user is not None: - body["max_clusters_per_user"] = self.max_clusters_per_user - if self.name is not None: - body["name"] = self.name - if self.policy_family_definition_overrides is not None: - body["policy_family_definition_overrides"] = self.policy_family_definition_overrides - if self.policy_family_id is not None: - body["policy_family_id"] = self.policy_family_id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreatePolicy into a shallow dictionary of its immediate attributes.""" - body = {} - if 
self.definition is not None: - body["definition"] = self.definition - if self.description is not None: - body["description"] = self.description - if self.libraries: - body["libraries"] = self.libraries - if self.max_clusters_per_user is not None: - body["max_clusters_per_user"] = self.max_clusters_per_user - if self.name is not None: - body["name"] = self.name - if self.policy_family_definition_overrides is not None: - body["policy_family_definition_overrides"] = self.policy_family_definition_overrides - if self.policy_family_id is not None: - body["policy_family_id"] = self.policy_family_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreatePolicy: - """Deserializes the CreatePolicy from a dictionary.""" - return cls( - definition=d.get("definition", None), - description=d.get("description", None), - libraries=_repeated_dict(d, "libraries", Library), - max_clusters_per_user=d.get("max_clusters_per_user", None), - name=d.get("name", None), - policy_family_definition_overrides=d.get("policy_family_definition_overrides", None), - policy_family_id=d.get("policy_family_id", None), - ) - - -@dataclass -class CreatePolicyResponse: - policy_id: Optional[str] = None - """Canonical unique identifier for the cluster policy.""" - - def as_dict(self) -> dict: - """Serializes the CreatePolicyResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.policy_id is not None: - body["policy_id"] = self.policy_id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreatePolicyResponse into a shallow dictionary of its immediate attributes.""" - body = {} - if self.policy_id is not None: - body["policy_id"] = self.policy_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreatePolicyResponse: - """Deserializes the CreatePolicyResponse from a dictionary.""" - return cls(policy_id=d.get("policy_id", None)) - - -@dataclass -class CreateResponse: - script_id: Optional[str] = None 
- """The global init script ID.""" - - def as_dict(self) -> dict: - """Serializes the CreateResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.script_id is not None: - body["script_id"] = self.script_id - return body + Disks will be mounted at: - For AWS: `/ebs0`, `/ebs1`, and etc. - For Azure: `/remote_volume0`, + `/remote_volume1`, and etc.""" - def as_shallow_dict(self) -> dict: - """Serializes the CreateResponse into a shallow dictionary of its immediate attributes.""" - body = {} - if self.script_id is not None: - body["script_id"] = self.script_id - return body + disk_iops: Optional[int] = None - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateResponse: - """Deserializes the CreateResponse from a dictionary.""" - return cls(script_id=d.get("script_id", None)) - - -@dataclass -class Created: - id: Optional[str] = None - - def as_dict(self) -> dict: - """Serializes the Created into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.id is not None: - body["id"] = self.id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the Created into a shallow dictionary of its immediate attributes.""" - body = {} - if self.id is not None: - body["id"] = self.id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> Created: - """Deserializes the Created from a dictionary.""" - return cls(id=d.get("id", None)) - - -@dataclass -class CustomPolicyTag: - key: str - """The key of the tag. 
- Must be unique among all custom tags of the same policy - Cannot be - “budget-policy-name”, “budget-policy-id” or "budget-policy-resolution-result" - these - tags are preserved.""" - - value: Optional[str] = None - """The value of the tag.""" - - def as_dict(self) -> dict: - """Serializes the CustomPolicyTag into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.key is not None: - body["key"] = self.key - if self.value is not None: - body["value"] = self.value - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CustomPolicyTag into a shallow dictionary of its immediate attributes.""" - body = {} - if self.key is not None: - body["key"] = self.key - if self.value is not None: - body["value"] = self.value - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CustomPolicyTag: - """Deserializes the CustomPolicyTag from a dictionary.""" - return cls(key=d.get("key", None), value=d.get("value", None)) - - -@dataclass -class DataPlaneEventDetails: - event_type: Optional[DataPlaneEventDetailsEventType] = None - - executor_failures: Optional[int] = None - - host_id: Optional[str] = None - - timestamp: Optional[int] = None - - def as_dict(self) -> dict: - """Serializes the DataPlaneEventDetails into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.event_type is not None: - body["event_type"] = self.event_type.value - if self.executor_failures is not None: - body["executor_failures"] = self.executor_failures - if self.host_id is not None: - body["host_id"] = self.host_id - if self.timestamp is not None: - body["timestamp"] = self.timestamp - return body - - def as_shallow_dict(self) -> dict: - """Serializes the DataPlaneEventDetails into a shallow dictionary of its immediate attributes.""" - body = {} - if self.event_type is not None: - body["event_type"] = self.event_type - if self.executor_failures is not None: - body["executor_failures"] = self.executor_failures - if 
self.host_id is not None: - body["host_id"] = self.host_id - if self.timestamp is not None: - body["timestamp"] = self.timestamp - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DataPlaneEventDetails: - """Deserializes the DataPlaneEventDetails from a dictionary.""" - return cls( - event_type=_enum(d, "event_type", DataPlaneEventDetailsEventType), - executor_failures=d.get("executor_failures", None), - host_id=d.get("host_id", None), - timestamp=d.get("timestamp", None), - ) - - -class DataPlaneEventDetailsEventType(Enum): - - NODE_BLACKLISTED = "NODE_BLACKLISTED" - NODE_EXCLUDED_DECOMMISSIONED = "NODE_EXCLUDED_DECOMMISSIONED" - - -class DataSecurityMode(Enum): - """Data security mode decides what data governance model to use when accessing data from a cluster. - - The following modes can only be used when `kind = CLASSIC_PREVIEW`. * `DATA_SECURITY_MODE_AUTO`: - Databricks will choose the most appropriate access mode depending on your compute configuration. - * `DATA_SECURITY_MODE_STANDARD`: Alias for `USER_ISOLATION`. * `DATA_SECURITY_MODE_DEDICATED`: - Alias for `SINGLE_USER`. - - The following modes can be used regardless of `kind`. * `NONE`: No security isolation for - multiple users sharing the cluster. Data governance features are not available in this mode. * - `SINGLE_USER`: A secure cluster that can only be exclusively used by a single user specified in - `single_user_name`. Most programming languages, cluster features and data governance features - are available in this mode. * `USER_ISOLATION`: A secure cluster that can be shared by multiple - users. Cluster users are fully isolated so that they cannot see each other's data and - credentials. Most data governance features are supported in this mode. But programming languages - and cluster features might be limited. 
- - The following modes are deprecated starting with Databricks Runtime 15.0 and will be removed for - future Databricks Runtime versions: - - * `LEGACY_TABLE_ACL`: This mode is for users migrating from legacy Table ACL clusters. * - `LEGACY_PASSTHROUGH`: This mode is for users migrating from legacy Passthrough on high - concurrency clusters. * `LEGACY_SINGLE_USER`: This mode is for users migrating from legacy - Passthrough on standard clusters. * `LEGACY_SINGLE_USER_STANDARD`: This mode provides a way that - doesn’t have UC nor passthrough enabled.""" - - DATA_SECURITY_MODE_AUTO = "DATA_SECURITY_MODE_AUTO" - DATA_SECURITY_MODE_DEDICATED = "DATA_SECURITY_MODE_DEDICATED" - DATA_SECURITY_MODE_STANDARD = "DATA_SECURITY_MODE_STANDARD" - LEGACY_PASSTHROUGH = "LEGACY_PASSTHROUGH" - LEGACY_SINGLE_USER = "LEGACY_SINGLE_USER" - LEGACY_SINGLE_USER_STANDARD = "LEGACY_SINGLE_USER_STANDARD" - LEGACY_TABLE_ACL = "LEGACY_TABLE_ACL" - NONE = "NONE" - SINGLE_USER = "SINGLE_USER" - USER_ISOLATION = "USER_ISOLATION" - - -@dataclass -class DbfsStorageInfo: - """A storage location in DBFS""" - - destination: str - """dbfs destination, e.g. 
`dbfs:/my/path`""" - - def as_dict(self) -> dict: - """Serializes the DbfsStorageInfo into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.destination is not None: - body["destination"] = self.destination - return body - - def as_shallow_dict(self) -> dict: - """Serializes the DbfsStorageInfo into a shallow dictionary of its immediate attributes.""" - body = {} - if self.destination is not None: - body["destination"] = self.destination - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DbfsStorageInfo: - """Deserializes the DbfsStorageInfo from a dictionary.""" - return cls(destination=d.get("destination", None)) - - -@dataclass -class DeleteCluster: - cluster_id: str - """The cluster to be terminated.""" - - def as_dict(self) -> dict: - """Serializes the DeleteCluster into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.cluster_id is not None: - body["cluster_id"] = self.cluster_id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the DeleteCluster into a shallow dictionary of its immediate attributes.""" - body = {} - if self.cluster_id is not None: - body["cluster_id"] = self.cluster_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DeleteCluster: - """Deserializes the DeleteCluster from a dictionary.""" - return cls(cluster_id=d.get("cluster_id", None)) - - -@dataclass -class DeleteClusterResponse: - def as_dict(self) -> dict: - """Serializes the DeleteClusterResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the DeleteClusterResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DeleteClusterResponse: - """Deserializes the DeleteClusterResponse from a dictionary.""" - return cls() - - -@dataclass -class DeleteInstancePool: - 
instance_pool_id: str - """The instance pool to be terminated.""" - - def as_dict(self) -> dict: - """Serializes the DeleteInstancePool into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.instance_pool_id is not None: - body["instance_pool_id"] = self.instance_pool_id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the DeleteInstancePool into a shallow dictionary of its immediate attributes.""" - body = {} - if self.instance_pool_id is not None: - body["instance_pool_id"] = self.instance_pool_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DeleteInstancePool: - """Deserializes the DeleteInstancePool from a dictionary.""" - return cls(instance_pool_id=d.get("instance_pool_id", None)) - - -@dataclass -class DeleteInstancePoolResponse: - def as_dict(self) -> dict: - """Serializes the DeleteInstancePoolResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the DeleteInstancePoolResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DeleteInstancePoolResponse: - """Deserializes the DeleteInstancePoolResponse from a dictionary.""" - return cls() - - -@dataclass -class DeletePolicy: - policy_id: str - """The ID of the policy to delete.""" - - def as_dict(self) -> dict: - """Serializes the DeletePolicy into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.policy_id is not None: - body["policy_id"] = self.policy_id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the DeletePolicy into a shallow dictionary of its immediate attributes.""" - body = {} - if self.policy_id is not None: - body["policy_id"] = self.policy_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DeletePolicy: - """Deserializes the DeletePolicy from a 
dictionary.""" - return cls(policy_id=d.get("policy_id", None)) - - -@dataclass -class DeletePolicyResponse: - def as_dict(self) -> dict: - """Serializes the DeletePolicyResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the DeletePolicyResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DeletePolicyResponse: - """Deserializes the DeletePolicyResponse from a dictionary.""" - return cls() - - -@dataclass -class DeleteResponse: - def as_dict(self) -> dict: - """Serializes the DeleteResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the DeleteResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DeleteResponse: - """Deserializes the DeleteResponse from a dictionary.""" - return cls() - - -@dataclass -class DestroyContext: - cluster_id: str - - context_id: str - - def as_dict(self) -> dict: - """Serializes the DestroyContext into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.cluster_id is not None: - body["clusterId"] = self.cluster_id - if self.context_id is not None: - body["contextId"] = self.context_id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the DestroyContext into a shallow dictionary of its immediate attributes.""" - body = {} - if self.cluster_id is not None: - body["clusterId"] = self.cluster_id - if self.context_id is not None: - body["contextId"] = self.context_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DestroyContext: - """Deserializes the DestroyContext from a dictionary.""" - return cls(cluster_id=d.get("clusterId", None), context_id=d.get("contextId", None)) - - 
-@dataclass -class DestroyResponse: - def as_dict(self) -> dict: - """Serializes the DestroyResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the DestroyResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DestroyResponse: - """Deserializes the DestroyResponse from a dictionary.""" - return cls() - - -@dataclass -class DiskSpec: - """Describes the disks that are launched for each instance in the spark cluster. For example, if - the cluster has 3 instances, each instance is configured to launch 2 disks, 100 GiB each, then - Databricks will launch a total of 6 disks, 100 GiB each, for this cluster.""" - - disk_count: Optional[int] = None - """The number of disks launched for each instance: - This feature is only enabled for supported - node types. - Users can choose up to the limit of the disks supported by the node type. - For - node types with no OS disk, at least one disk must be specified; otherwise, cluster creation - will fail. - - If disks are attached, Databricks will configure Spark to use only the disks for scratch - storage, because heterogenously sized scratch devices can lead to inefficient disk utilization. - If no disks are attached, Databricks will configure Spark to use instance store disks. - - Note: If disks are specified, then the Spark configuration `spark.local.dir` will be overridden. - - Disks will be mounted at: - For AWS: `/ebs0`, `/ebs1`, and etc. - For Azure: `/remote_volume0`, - `/remote_volume1`, and etc.""" - - disk_iops: Optional[int] = None - - disk_size: Optional[int] = None - """The size of each disk (in GiB) launched for each instance. Values must fall into the supported - range for a particular instance type. 
- - For AWS: - General Purpose SSD: 100 - 4096 GiB - Throughput Optimized HDD: 500 - 4096 GiB - - For Azure: - Premium LRS (SSD): 1 - 1023 GiB - Standard LRS (HDD): 1- 1023 GiB""" - - disk_throughput: Optional[int] = None - - disk_type: Optional[DiskType] = None - """The type of disks that will be launched with this cluster.""" - - def as_dict(self) -> dict: - """Serializes the DiskSpec into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.disk_count is not None: - body["disk_count"] = self.disk_count - if self.disk_iops is not None: - body["disk_iops"] = self.disk_iops - if self.disk_size is not None: - body["disk_size"] = self.disk_size - if self.disk_throughput is not None: - body["disk_throughput"] = self.disk_throughput - if self.disk_type: - body["disk_type"] = self.disk_type.as_dict() - return body - - def as_shallow_dict(self) -> dict: - """Serializes the DiskSpec into a shallow dictionary of its immediate attributes.""" - body = {} - if self.disk_count is not None: - body["disk_count"] = self.disk_count - if self.disk_iops is not None: - body["disk_iops"] = self.disk_iops - if self.disk_size is not None: - body["disk_size"] = self.disk_size - if self.disk_throughput is not None: - body["disk_throughput"] = self.disk_throughput - if self.disk_type: - body["disk_type"] = self.disk_type - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DiskSpec: - """Deserializes the DiskSpec from a dictionary.""" - return cls( - disk_count=d.get("disk_count", None), - disk_iops=d.get("disk_iops", None), - disk_size=d.get("disk_size", None), - disk_throughput=d.get("disk_throughput", None), - disk_type=_from_dict(d, "disk_type", DiskType), - ) - - -@dataclass -class DiskType: - """Describes the disk type.""" - - azure_disk_volume_type: Optional[DiskTypeAzureDiskVolumeType] = None - - ebs_volume_type: Optional[DiskTypeEbsVolumeType] = None - - def as_dict(self) -> dict: - """Serializes the DiskType into a dictionary 
suitable for use as a JSON request body.""" - body = {} - if self.azure_disk_volume_type is not None: - body["azure_disk_volume_type"] = self.azure_disk_volume_type.value - if self.ebs_volume_type is not None: - body["ebs_volume_type"] = self.ebs_volume_type.value - return body - - def as_shallow_dict(self) -> dict: - """Serializes the DiskType into a shallow dictionary of its immediate attributes.""" - body = {} - if self.azure_disk_volume_type is not None: - body["azure_disk_volume_type"] = self.azure_disk_volume_type - if self.ebs_volume_type is not None: - body["ebs_volume_type"] = self.ebs_volume_type - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DiskType: - """Deserializes the DiskType from a dictionary.""" - return cls( - azure_disk_volume_type=_enum(d, "azure_disk_volume_type", DiskTypeAzureDiskVolumeType), - ebs_volume_type=_enum(d, "ebs_volume_type", DiskTypeEbsVolumeType), - ) - - -class DiskTypeAzureDiskVolumeType(Enum): - """All Azure Disk types that Databricks supports. See - https://docs.microsoft.com/en-us/azure/storage/storage-about-disks-and-vhds-linux#types-of-disks""" - - PREMIUM_LRS = "PREMIUM_LRS" - STANDARD_LRS = "STANDARD_LRS" - - -class DiskTypeEbsVolumeType(Enum): - """All EBS volume types that Databricks supports. 
See https://aws.amazon.com/ebs/details/ for - details.""" - - GENERAL_PURPOSE_SSD = "GENERAL_PURPOSE_SSD" - THROUGHPUT_OPTIMIZED_HDD = "THROUGHPUT_OPTIMIZED_HDD" - - -@dataclass -class DockerBasicAuth: - password: Optional[str] = None - """Password of the user""" - - username: Optional[str] = None - """Name of the user""" - - def as_dict(self) -> dict: - """Serializes the DockerBasicAuth into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.password is not None: - body["password"] = self.password - if self.username is not None: - body["username"] = self.username - return body - - def as_shallow_dict(self) -> dict: - """Serializes the DockerBasicAuth into a shallow dictionary of its immediate attributes.""" - body = {} - if self.password is not None: - body["password"] = self.password - if self.username is not None: - body["username"] = self.username - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DockerBasicAuth: - """Deserializes the DockerBasicAuth from a dictionary.""" - return cls(password=d.get("password", None), username=d.get("username", None)) - - -@dataclass -class DockerImage: - basic_auth: Optional[DockerBasicAuth] = None - """Basic auth with username and password""" - - url: Optional[str] = None - """URL of the docker image.""" - - def as_dict(self) -> dict: - """Serializes the DockerImage into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.basic_auth: - body["basic_auth"] = self.basic_auth.as_dict() - if self.url is not None: - body["url"] = self.url - return body - - def as_shallow_dict(self) -> dict: - """Serializes the DockerImage into a shallow dictionary of its immediate attributes.""" - body = {} - if self.basic_auth: - body["basic_auth"] = self.basic_auth - if self.url is not None: - body["url"] = self.url - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DockerImage: - """Deserializes the DockerImage from a dictionary.""" - return 
cls(basic_auth=_from_dict(d, "basic_auth", DockerBasicAuth), url=d.get("url", None)) - - -class EbsVolumeType(Enum): - """All EBS volume types that Databricks supports. See https://aws.amazon.com/ebs/details/ for - details.""" - - GENERAL_PURPOSE_SSD = "GENERAL_PURPOSE_SSD" - THROUGHPUT_OPTIMIZED_HDD = "THROUGHPUT_OPTIMIZED_HDD" - - -@dataclass -class EditCluster: - cluster_id: str - """ID of the cluster""" - - spark_version: str - """The Spark version of the cluster, e.g. `3.3.x-scala2.11`. A list of available Spark versions can - be retrieved by using the :method:clusters/sparkVersions API call.""" - - apply_policy_default_values: Optional[bool] = None - """When set to true, fixed and default values from the policy will be used for fields that are - omitted. When set to false, only fixed values from the policy will be applied.""" - - autoscale: Optional[AutoScale] = None - """Parameters needed in order to automatically scale clusters up and down based on load. Note: - autoscaling works best with DB runtime versions 3.0 or later.""" - - autotermination_minutes: Optional[int] = None - """Automatically terminates the cluster after it is inactive for this time in minutes. If not set, - this cluster will not be automatically terminated. If specified, the threshold must be between - 10 and 10000 minutes. Users can also set this value to 0 to explicitly disable automatic - termination.""" - - aws_attributes: Optional[AwsAttributes] = None - """Attributes related to clusters running on Amazon Web Services. If not specified at cluster - creation, a set of default values will be used.""" - - azure_attributes: Optional[AzureAttributes] = None - """Attributes related to clusters running on Microsoft Azure. If not specified at cluster creation, - a set of default values will be used.""" - - cluster_log_conf: Optional[ClusterLogConf] = None - """The configuration for delivering spark logs to a long-term storage destination. 
Three kinds of - destinations (DBFS, S3 and Unity Catalog volumes) are supported. Only one destination can be - specified for one cluster. If the conf is given, the logs will be delivered to the destination - every `5 mins`. The destination of driver logs is `$destination/$clusterId/driver`, while the - destination of executor logs is `$destination/$clusterId/executor`.""" - - cluster_name: Optional[str] = None - """Cluster name requested by the user. This doesn't have to be unique. If not specified at - creation, the cluster name will be an empty string. For job clusters, the cluster name is - automatically set based on the job and job run IDs.""" - - custom_tags: Optional[Dict[str, str]] = None - """Additional tags for cluster resources. Databricks will tag all cluster resources (e.g., AWS - instances and EBS volumes) with these tags in addition to `default_tags`. Notes: - - - Currently, Databricks allows at most 45 custom tags - - - Clusters can only reuse cloud resources if the resources' tags are a subset of the cluster - tags""" - - data_security_mode: Optional[DataSecurityMode] = None - - docker_image: Optional[DockerImage] = None - """Custom docker image BYOC""" - - driver_instance_pool_id: Optional[str] = None - """The optional ID of the instance pool for the driver of the cluster belongs. The pool cluster - uses the instance pool with id (instance_pool_id) if the driver pool is not assigned.""" - - driver_node_type_id: Optional[str] = None - """The node type of the Spark driver. Note that this field is optional; if unset, the driver node - type will be set as the same value as `node_type_id` defined above. - - This field, along with node_type_id, should not be set if virtual_cluster_size is set. 
If both - driver_node_type_id, node_type_id, and virtual_cluster_size are specified, driver_node_type_id - and node_type_id take precedence.""" - - enable_elastic_disk: Optional[bool] = None - """Autoscaling Local Storage: when enabled, this cluster will dynamically acquire additional disk - space when its Spark workers are running low on disk space. This feature requires specific AWS - permissions to function correctly - refer to the User Guide for more details.""" - - enable_local_disk_encryption: Optional[bool] = None - """Whether to enable LUKS on cluster VMs' local disks""" - - gcp_attributes: Optional[GcpAttributes] = None - """Attributes related to clusters running on Google Cloud Platform. If not specified at cluster - creation, a set of default values will be used.""" - - init_scripts: Optional[List[InitScriptInfo]] = None - """The configuration for storing init scripts. Any number of destinations can be specified. The - scripts are executed sequentially in the order provided. If `cluster_log_conf` is specified, - init script logs are sent to `//init_scripts`.""" - - instance_pool_id: Optional[str] = None - """The optional ID of the instance pool to which the cluster belongs.""" - - is_single_node: Optional[bool] = None - """This field can only be used when `kind = CLASSIC_PREVIEW`. - - When set to true, Databricks will automatically set single node related `custom_tags`, - `spark_conf`, and `num_workers`""" - - kind: Optional[Kind] = None - - node_type_id: Optional[str] = None - """This field encodes, through a single value, the resources available to each of the Spark nodes - in this cluster. For example, the Spark nodes can be provisioned and optimized for memory or - compute intensive workloads. A list of available node types can be retrieved by using the - :method:clusters/listNodeTypes API call.""" - - num_workers: Optional[int] = None - """Number of worker nodes that this cluster should have. 
A cluster has one Spark Driver and - `num_workers` Executors for a total of `num_workers` + 1 Spark nodes. - - Note: When reading the properties of a cluster, this field reflects the desired number of - workers rather than the actual current number of workers. For instance, if a cluster is resized - from 5 to 10 workers, this field will immediately be updated to reflect the target size of 10 - workers, whereas the workers listed in `spark_info` will gradually increase from 5 to 10 as the - new nodes are provisioned.""" - - policy_id: Optional[str] = None - """The ID of the cluster policy used to create the cluster if applicable.""" - - remote_disk_throughput: Optional[int] = None - """If set, what the configurable throughput (in Mb/s) for the remote disk is. Currently only - supported for GCP HYPERDISK_BALANCED disks.""" - - runtime_engine: Optional[RuntimeEngine] = None - """Determines the cluster's runtime engine, either standard or Photon. - - This field is not compatible with legacy `spark_version` values that contain `-photon-`. Remove - `-photon-` from the `spark_version` and set `runtime_engine` to `PHOTON`. - - If left unspecified, the runtime engine defaults to standard unless the spark_version contains - -photon-, in which case Photon will be used.""" - - single_user_name: Optional[str] = None - """Single user name if data_security_mode is `SINGLE_USER`""" - - spark_conf: Optional[Dict[str, str]] = None - """An object containing a set of optional, user-specified Spark configuration key-value pairs. - Users can also pass in a string of extra JVM options to the driver and the executors via - `spark.driver.extraJavaOptions` and `spark.executor.extraJavaOptions` respectively.""" - - spark_env_vars: Optional[Dict[str, str]] = None - """An object containing a set of optional, user-specified environment variable key-value pairs. 
- Please note that key-value pair of the form (X,Y) will be exported as is (i.e., `export X='Y'`) - while launching the driver and workers. - - In order to specify an additional set of `SPARK_DAEMON_JAVA_OPTS`, we recommend appending them - to `$SPARK_DAEMON_JAVA_OPTS` as shown in the example below. This ensures that all default - databricks managed environmental variables are included as well. - - Example Spark environment variables: `{"SPARK_WORKER_MEMORY": "28000m", "SPARK_LOCAL_DIRS": - "/local_disk0"}` or `{"SPARK_DAEMON_JAVA_OPTS": "$SPARK_DAEMON_JAVA_OPTS - -Dspark.shuffle.service.enabled=true"}`""" - - ssh_public_keys: Optional[List[str]] = None - """SSH public key contents that will be added to each Spark node in this cluster. The corresponding - private keys can be used to login with the user name `ubuntu` on port `2200`. Up to 10 keys can - be specified.""" - - total_initial_remote_disk_size: Optional[int] = None - """If set, what the total initial volume size (in GB) of the remote disks should be. Currently only - supported for GCP HYPERDISK_BALANCED disks.""" - - use_ml_runtime: Optional[bool] = None - """This field can only be used when `kind = CLASSIC_PREVIEW`. 
- - `effective_spark_version` is determined by `spark_version` (DBR release), this field - `use_ml_runtime`, and whether `node_type_id` is gpu node or not.""" - - workload_type: Optional[WorkloadType] = None - - def as_dict(self) -> dict: - """Serializes the EditCluster into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.apply_policy_default_values is not None: - body["apply_policy_default_values"] = self.apply_policy_default_values - if self.autoscale: - body["autoscale"] = self.autoscale.as_dict() - if self.autotermination_minutes is not None: - body["autotermination_minutes"] = self.autotermination_minutes - if self.aws_attributes: - body["aws_attributes"] = self.aws_attributes.as_dict() - if self.azure_attributes: - body["azure_attributes"] = self.azure_attributes.as_dict() - if self.cluster_id is not None: - body["cluster_id"] = self.cluster_id - if self.cluster_log_conf: - body["cluster_log_conf"] = self.cluster_log_conf.as_dict() - if self.cluster_name is not None: - body["cluster_name"] = self.cluster_name - if self.custom_tags: - body["custom_tags"] = self.custom_tags - if self.data_security_mode is not None: - body["data_security_mode"] = self.data_security_mode.value - if self.docker_image: - body["docker_image"] = self.docker_image.as_dict() - if self.driver_instance_pool_id is not None: - body["driver_instance_pool_id"] = self.driver_instance_pool_id - if self.driver_node_type_id is not None: - body["driver_node_type_id"] = self.driver_node_type_id - if self.enable_elastic_disk is not None: - body["enable_elastic_disk"] = self.enable_elastic_disk - if self.enable_local_disk_encryption is not None: - body["enable_local_disk_encryption"] = self.enable_local_disk_encryption - if self.gcp_attributes: - body["gcp_attributes"] = self.gcp_attributes.as_dict() - if self.init_scripts: - body["init_scripts"] = [v.as_dict() for v in self.init_scripts] - if self.instance_pool_id is not None: - body["instance_pool_id"] = 
self.instance_pool_id - if self.is_single_node is not None: - body["is_single_node"] = self.is_single_node - if self.kind is not None: - body["kind"] = self.kind.value - if self.node_type_id is not None: - body["node_type_id"] = self.node_type_id - if self.num_workers is not None: - body["num_workers"] = self.num_workers - if self.policy_id is not None: - body["policy_id"] = self.policy_id - if self.remote_disk_throughput is not None: - body["remote_disk_throughput"] = self.remote_disk_throughput - if self.runtime_engine is not None: - body["runtime_engine"] = self.runtime_engine.value - if self.single_user_name is not None: - body["single_user_name"] = self.single_user_name - if self.spark_conf: - body["spark_conf"] = self.spark_conf - if self.spark_env_vars: - body["spark_env_vars"] = self.spark_env_vars - if self.spark_version is not None: - body["spark_version"] = self.spark_version - if self.ssh_public_keys: - body["ssh_public_keys"] = [v for v in self.ssh_public_keys] - if self.total_initial_remote_disk_size is not None: - body["total_initial_remote_disk_size"] = self.total_initial_remote_disk_size - if self.use_ml_runtime is not None: - body["use_ml_runtime"] = self.use_ml_runtime - if self.workload_type: - body["workload_type"] = self.workload_type.as_dict() - return body - - def as_shallow_dict(self) -> dict: - """Serializes the EditCluster into a shallow dictionary of its immediate attributes.""" - body = {} - if self.apply_policy_default_values is not None: - body["apply_policy_default_values"] = self.apply_policy_default_values - if self.autoscale: - body["autoscale"] = self.autoscale - if self.autotermination_minutes is not None: - body["autotermination_minutes"] = self.autotermination_minutes - if self.aws_attributes: - body["aws_attributes"] = self.aws_attributes - if self.azure_attributes: - body["azure_attributes"] = self.azure_attributes - if self.cluster_id is not None: - body["cluster_id"] = self.cluster_id - if self.cluster_log_conf: - 
body["cluster_log_conf"] = self.cluster_log_conf - if self.cluster_name is not None: - body["cluster_name"] = self.cluster_name - if self.custom_tags: - body["custom_tags"] = self.custom_tags - if self.data_security_mode is not None: - body["data_security_mode"] = self.data_security_mode - if self.docker_image: - body["docker_image"] = self.docker_image - if self.driver_instance_pool_id is not None: - body["driver_instance_pool_id"] = self.driver_instance_pool_id - if self.driver_node_type_id is not None: - body["driver_node_type_id"] = self.driver_node_type_id - if self.enable_elastic_disk is not None: - body["enable_elastic_disk"] = self.enable_elastic_disk - if self.enable_local_disk_encryption is not None: - body["enable_local_disk_encryption"] = self.enable_local_disk_encryption - if self.gcp_attributes: - body["gcp_attributes"] = self.gcp_attributes - if self.init_scripts: - body["init_scripts"] = self.init_scripts - if self.instance_pool_id is not None: - body["instance_pool_id"] = self.instance_pool_id - if self.is_single_node is not None: - body["is_single_node"] = self.is_single_node - if self.kind is not None: - body["kind"] = self.kind - if self.node_type_id is not None: - body["node_type_id"] = self.node_type_id - if self.num_workers is not None: - body["num_workers"] = self.num_workers - if self.policy_id is not None: - body["policy_id"] = self.policy_id - if self.remote_disk_throughput is not None: - body["remote_disk_throughput"] = self.remote_disk_throughput - if self.runtime_engine is not None: - body["runtime_engine"] = self.runtime_engine - if self.single_user_name is not None: - body["single_user_name"] = self.single_user_name - if self.spark_conf: - body["spark_conf"] = self.spark_conf - if self.spark_env_vars: - body["spark_env_vars"] = self.spark_env_vars - if self.spark_version is not None: - body["spark_version"] = self.spark_version - if self.ssh_public_keys: - body["ssh_public_keys"] = self.ssh_public_keys - if 
self.total_initial_remote_disk_size is not None: - body["total_initial_remote_disk_size"] = self.total_initial_remote_disk_size - if self.use_ml_runtime is not None: - body["use_ml_runtime"] = self.use_ml_runtime - if self.workload_type: - body["workload_type"] = self.workload_type + disk_size: Optional[int] = None + """The size of each disk (in GiB) launched for each instance. Values must fall into the supported + range for a particular instance type. + + For AWS: - General Purpose SSD: 100 - 4096 GiB - Throughput Optimized HDD: 500 - 4096 GiB + + For Azure: - Premium LRS (SSD): 1 - 1023 GiB - Standard LRS (HDD): 1- 1023 GiB""" + + disk_throughput: Optional[int] = None + + disk_type: Optional[DiskType] = None + """The type of disks that will be launched with this cluster.""" + + def as_dict(self) -> dict: + """Serializes the DiskSpec into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.disk_count is not None: + body["disk_count"] = self.disk_count + if self.disk_iops is not None: + body["disk_iops"] = self.disk_iops + if self.disk_size is not None: + body["disk_size"] = self.disk_size + if self.disk_throughput is not None: + body["disk_throughput"] = self.disk_throughput + if self.disk_type: + body["disk_type"] = self.disk_type.as_dict() + return body + + def as_shallow_dict(self) -> dict: + """Serializes the DiskSpec into a shallow dictionary of its immediate attributes.""" + body = {} + if self.disk_count is not None: + body["disk_count"] = self.disk_count + if self.disk_iops is not None: + body["disk_iops"] = self.disk_iops + if self.disk_size is not None: + body["disk_size"] = self.disk_size + if self.disk_throughput is not None: + body["disk_throughput"] = self.disk_throughput + if self.disk_type: + body["disk_type"] = self.disk_type return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> EditCluster: - """Deserializes the EditCluster from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> DiskSpec: + 
"""Deserializes the DiskSpec from a dictionary.""" return cls( - apply_policy_default_values=d.get("apply_policy_default_values", None), - autoscale=_from_dict(d, "autoscale", AutoScale), - autotermination_minutes=d.get("autotermination_minutes", None), - aws_attributes=_from_dict(d, "aws_attributes", AwsAttributes), - azure_attributes=_from_dict(d, "azure_attributes", AzureAttributes), - cluster_id=d.get("cluster_id", None), - cluster_log_conf=_from_dict(d, "cluster_log_conf", ClusterLogConf), - cluster_name=d.get("cluster_name", None), - custom_tags=d.get("custom_tags", None), - data_security_mode=_enum(d, "data_security_mode", DataSecurityMode), - docker_image=_from_dict(d, "docker_image", DockerImage), - driver_instance_pool_id=d.get("driver_instance_pool_id", None), - driver_node_type_id=d.get("driver_node_type_id", None), - enable_elastic_disk=d.get("enable_elastic_disk", None), - enable_local_disk_encryption=d.get("enable_local_disk_encryption", None), - gcp_attributes=_from_dict(d, "gcp_attributes", GcpAttributes), - init_scripts=_repeated_dict(d, "init_scripts", InitScriptInfo), - instance_pool_id=d.get("instance_pool_id", None), - is_single_node=d.get("is_single_node", None), - kind=_enum(d, "kind", Kind), - node_type_id=d.get("node_type_id", None), - num_workers=d.get("num_workers", None), - policy_id=d.get("policy_id", None), - remote_disk_throughput=d.get("remote_disk_throughput", None), - runtime_engine=_enum(d, "runtime_engine", RuntimeEngine), - single_user_name=d.get("single_user_name", None), - spark_conf=d.get("spark_conf", None), - spark_env_vars=d.get("spark_env_vars", None), - spark_version=d.get("spark_version", None), - ssh_public_keys=d.get("ssh_public_keys", None), - total_initial_remote_disk_size=d.get("total_initial_remote_disk_size", None), - use_ml_runtime=d.get("use_ml_runtime", None), - workload_type=_from_dict(d, "workload_type", WorkloadType), + disk_count=d.get("disk_count", None), + disk_iops=d.get("disk_iops", None), + 
disk_size=d.get("disk_size", None), + disk_throughput=d.get("disk_throughput", None), + disk_type=_from_dict(d, "disk_type", DiskType), ) @dataclass -class EditClusterResponse: +class DiskType: + """Describes the disk type.""" + + azure_disk_volume_type: Optional[DiskTypeAzureDiskVolumeType] = None + + ebs_volume_type: Optional[DiskTypeEbsVolumeType] = None + def as_dict(self) -> dict: - """Serializes the EditClusterResponse into a dictionary suitable for use as a JSON request body.""" + """Serializes the DiskType into a dictionary suitable for use as a JSON request body.""" body = {} + if self.azure_disk_volume_type is not None: + body["azure_disk_volume_type"] = self.azure_disk_volume_type.value + if self.ebs_volume_type is not None: + body["ebs_volume_type"] = self.ebs_volume_type.value return body def as_shallow_dict(self) -> dict: - """Serializes the EditClusterResponse into a shallow dictionary of its immediate attributes.""" + """Serializes the DiskType into a shallow dictionary of its immediate attributes.""" body = {} + if self.azure_disk_volume_type is not None: + body["azure_disk_volume_type"] = self.azure_disk_volume_type + if self.ebs_volume_type is not None: + body["ebs_volume_type"] = self.ebs_volume_type return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> EditClusterResponse: - """Deserializes the EditClusterResponse from a dictionary.""" - return cls() + def from_dict(cls, d: Dict[str, Any]) -> DiskType: + """Deserializes the DiskType from a dictionary.""" + return cls( + azure_disk_volume_type=_enum(d, "azure_disk_volume_type", DiskTypeAzureDiskVolumeType), + ebs_volume_type=_enum(d, "ebs_volume_type", DiskTypeEbsVolumeType), + ) -@dataclass -class EditInstancePool: - instance_pool_id: str - """Instance pool ID""" +class DiskTypeAzureDiskVolumeType(Enum): + """All Azure Disk types that Databricks supports. 
See + https://docs.microsoft.com/en-us/azure/storage/storage-about-disks-and-vhds-linux#types-of-disks""" - instance_pool_name: str - """Pool name requested by the user. Pool name must be unique. Length must be between 1 and 100 - characters.""" + PREMIUM_LRS = "PREMIUM_LRS" + STANDARD_LRS = "STANDARD_LRS" - node_type_id: str - """This field encodes, through a single value, the resources available to each of the Spark nodes - in this cluster. For example, the Spark nodes can be provisioned and optimized for memory or - compute intensive workloads. A list of available node types can be retrieved by using the - :method:clusters/listNodeTypes API call.""" - custom_tags: Optional[Dict[str, str]] = None - """Additional tags for pool resources. Databricks will tag all pool resources (e.g., AWS instances - and EBS volumes) with these tags in addition to `default_tags`. Notes: - - - Currently, Databricks allows at most 45 custom tags""" +class DiskTypeEbsVolumeType(Enum): + """All EBS volume types that Databricks supports. See https://aws.amazon.com/ebs/details/ for + details.""" - idle_instance_autotermination_minutes: Optional[int] = None - """Automatically terminates the extra instances in the pool cache after they are inactive for this - time in minutes if min_idle_instances requirement is already met. If not set, the extra pool - instances will be automatically terminated after a default timeout. If specified, the threshold - must be between 0 and 10000 minutes. Users can also set this value to 0 to instantly remove idle - instances from the cache if min cache size could still hold.""" + GENERAL_PURPOSE_SSD = "GENERAL_PURPOSE_SSD" + THROUGHPUT_OPTIMIZED_HDD = "THROUGHPUT_OPTIMIZED_HDD" - max_capacity: Optional[int] = None - """Maximum number of outstanding instances to keep in the pool, including both instances used by - clusters and idle instances. 
Clusters that require further instance provisioning will fail - during upsize requests.""" - min_idle_instances: Optional[int] = None - """Minimum number of idle instances to keep in the instance pool""" +@dataclass +class DockerBasicAuth: + password: Optional[str] = None + """Password of the user""" - remote_disk_throughput: Optional[int] = None - """If set, what the configurable throughput (in Mb/s) for the remote disk is. Currently only - supported for GCP HYPERDISK_BALANCED types.""" + username: Optional[str] = None + """Name of the user""" - total_initial_remote_disk_size: Optional[int] = None - """If set, what the total initial volume size (in GB) of the remote disks should be. Currently only - supported for GCP HYPERDISK_BALANCED types.""" + def as_dict(self) -> dict: + """Serializes the DockerBasicAuth into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.password is not None: + body["password"] = self.password + if self.username is not None: + body["username"] = self.username + return body + + def as_shallow_dict(self) -> dict: + """Serializes the DockerBasicAuth into a shallow dictionary of its immediate attributes.""" + body = {} + if self.password is not None: + body["password"] = self.password + if self.username is not None: + body["username"] = self.username + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> DockerBasicAuth: + """Deserializes the DockerBasicAuth from a dictionary.""" + return cls(password=d.get("password", None), username=d.get("username", None)) + + +@dataclass +class DockerImage: + basic_auth: Optional[DockerBasicAuth] = None + """Basic auth with username and password""" + + url: Optional[str] = None + """URL of the docker image.""" def as_dict(self) -> dict: - """Serializes the EditInstancePool into a dictionary suitable for use as a JSON request body.""" + """Serializes the DockerImage into a dictionary suitable for use as a JSON request body.""" body = {} - if self.custom_tags: 
- body["custom_tags"] = self.custom_tags - if self.idle_instance_autotermination_minutes is not None: - body["idle_instance_autotermination_minutes"] = self.idle_instance_autotermination_minutes - if self.instance_pool_id is not None: - body["instance_pool_id"] = self.instance_pool_id - if self.instance_pool_name is not None: - body["instance_pool_name"] = self.instance_pool_name - if self.max_capacity is not None: - body["max_capacity"] = self.max_capacity - if self.min_idle_instances is not None: - body["min_idle_instances"] = self.min_idle_instances - if self.node_type_id is not None: - body["node_type_id"] = self.node_type_id - if self.remote_disk_throughput is not None: - body["remote_disk_throughput"] = self.remote_disk_throughput - if self.total_initial_remote_disk_size is not None: - body["total_initial_remote_disk_size"] = self.total_initial_remote_disk_size + if self.basic_auth: + body["basic_auth"] = self.basic_auth.as_dict() + if self.url is not None: + body["url"] = self.url return body def as_shallow_dict(self) -> dict: - """Serializes the EditInstancePool into a shallow dictionary of its immediate attributes.""" + """Serializes the DockerImage into a shallow dictionary of its immediate attributes.""" body = {} - if self.custom_tags: - body["custom_tags"] = self.custom_tags - if self.idle_instance_autotermination_minutes is not None: - body["idle_instance_autotermination_minutes"] = self.idle_instance_autotermination_minutes - if self.instance_pool_id is not None: - body["instance_pool_id"] = self.instance_pool_id - if self.instance_pool_name is not None: - body["instance_pool_name"] = self.instance_pool_name - if self.max_capacity is not None: - body["max_capacity"] = self.max_capacity - if self.min_idle_instances is not None: - body["min_idle_instances"] = self.min_idle_instances - if self.node_type_id is not None: - body["node_type_id"] = self.node_type_id - if self.remote_disk_throughput is not None: - body["remote_disk_throughput"] = 
self.remote_disk_throughput - if self.total_initial_remote_disk_size is not None: - body["total_initial_remote_disk_size"] = self.total_initial_remote_disk_size + if self.basic_auth: + body["basic_auth"] = self.basic_auth + if self.url is not None: + body["url"] = self.url return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> EditInstancePool: - """Deserializes the EditInstancePool from a dictionary.""" - return cls( - custom_tags=d.get("custom_tags", None), - idle_instance_autotermination_minutes=d.get("idle_instance_autotermination_minutes", None), - instance_pool_id=d.get("instance_pool_id", None), - instance_pool_name=d.get("instance_pool_name", None), - max_capacity=d.get("max_capacity", None), - min_idle_instances=d.get("min_idle_instances", None), - node_type_id=d.get("node_type_id", None), - remote_disk_throughput=d.get("remote_disk_throughput", None), - total_initial_remote_disk_size=d.get("total_initial_remote_disk_size", None), - ) + def from_dict(cls, d: Dict[str, Any]) -> DockerImage: + """Deserializes the DockerImage from a dictionary.""" + return cls(basic_auth=_from_dict(d, "basic_auth", DockerBasicAuth), url=d.get("url", None)) + + +class EbsVolumeType(Enum): + """All EBS volume types that Databricks supports. 
See https://aws.amazon.com/ebs/details/ for + details.""" + + GENERAL_PURPOSE_SSD = "GENERAL_PURPOSE_SSD" + THROUGHPUT_OPTIMIZED_HDD = "THROUGHPUT_OPTIMIZED_HDD" @dataclass -class EditInstancePoolResponse: +class EditClusterResponse: def as_dict(self) -> dict: - """Serializes the EditInstancePoolResponse into a dictionary suitable for use as a JSON request body.""" + """Serializes the EditClusterResponse into a dictionary suitable for use as a JSON request body.""" body = {} return body def as_shallow_dict(self) -> dict: - """Serializes the EditInstancePoolResponse into a shallow dictionary of its immediate attributes.""" + """Serializes the EditClusterResponse into a shallow dictionary of its immediate attributes.""" body = {} return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> EditInstancePoolResponse: - """Deserializes the EditInstancePoolResponse from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> EditClusterResponse: + """Deserializes the EditClusterResponse from a dictionary.""" return cls() @dataclass -class EditPolicy: - policy_id: str - """The ID of the policy to update.""" - - definition: Optional[str] = None - """Policy definition document expressed in [Databricks Cluster Policy Definition Language]. - - [Databricks Cluster Policy Definition Language]: https://docs.databricks.com/administration-guide/clusters/policy-definition.html""" - - description: Optional[str] = None - """Additional human-readable description of the cluster policy.""" - - libraries: Optional[List[Library]] = None - """A list of libraries to be installed on the next cluster restart that uses this policy. The - maximum number of libraries is 500.""" - - max_clusters_per_user: Optional[int] = None - """Max number of clusters per user that can be active using this policy. If not present, there is - no max limit.""" - - name: Optional[str] = None - """Cluster Policy name requested by the user. This has to be unique. 
Length must be between 1 and - 100 characters.""" - - policy_family_definition_overrides: Optional[str] = None - """Policy definition JSON document expressed in [Databricks Policy Definition Language]. The JSON - document must be passed as a string and cannot be embedded in the requests. - - You can use this to customize the policy definition inherited from the policy family. Policy - rules specified here are merged into the inherited policy definition. - - [Databricks Policy Definition Language]: https://docs.databricks.com/administration-guide/clusters/policy-definition.html""" - - policy_family_id: Optional[str] = None - """ID of the policy family. The cluster policy's policy definition inherits the policy family's - policy definition. - - Cannot be used with `definition`. Use `policy_family_definition_overrides` instead to customize - the policy definition.""" - +class EditInstancePoolResponse: def as_dict(self) -> dict: - """Serializes the EditPolicy into a dictionary suitable for use as a JSON request body.""" + """Serializes the EditInstancePoolResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.definition is not None: - body["definition"] = self.definition - if self.description is not None: - body["description"] = self.description - if self.libraries: - body["libraries"] = [v.as_dict() for v in self.libraries] - if self.max_clusters_per_user is not None: - body["max_clusters_per_user"] = self.max_clusters_per_user - if self.name is not None: - body["name"] = self.name - if self.policy_family_definition_overrides is not None: - body["policy_family_definition_overrides"] = self.policy_family_definition_overrides - if self.policy_family_id is not None: - body["policy_family_id"] = self.policy_family_id - if self.policy_id is not None: - body["policy_id"] = self.policy_id return body def as_shallow_dict(self) -> dict: - """Serializes the EditPolicy into a shallow dictionary of its immediate attributes.""" + """Serializes 
the EditInstancePoolResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.definition is not None: - body["definition"] = self.definition - if self.description is not None: - body["description"] = self.description - if self.libraries: - body["libraries"] = self.libraries - if self.max_clusters_per_user is not None: - body["max_clusters_per_user"] = self.max_clusters_per_user - if self.name is not None: - body["name"] = self.name - if self.policy_family_definition_overrides is not None: - body["policy_family_definition_overrides"] = self.policy_family_definition_overrides - if self.policy_family_id is not None: - body["policy_family_id"] = self.policy_family_id - if self.policy_id is not None: - body["policy_id"] = self.policy_id return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> EditPolicy: - """Deserializes the EditPolicy from a dictionary.""" - return cls( - definition=d.get("definition", None), - description=d.get("description", None), - libraries=_repeated_dict(d, "libraries", Library), - max_clusters_per_user=d.get("max_clusters_per_user", None), - name=d.get("name", None), - policy_family_definition_overrides=d.get("policy_family_definition_overrides", None), - policy_family_id=d.get("policy_family_id", None), - policy_id=d.get("policy_id", None), - ) + def from_dict(cls, d: Dict[str, Any]) -> EditInstancePoolResponse: + """Deserializes the EditInstancePoolResponse from a dictionary.""" + return cls() @dataclass @@ -4625,39 +3081,6 @@ def from_dict(cls, d: Dict[str, Any]) -> EditResponse: return cls() -@dataclass -class EnforceClusterComplianceRequest: - cluster_id: str - """The ID of the cluster you want to enforce policy compliance on.""" - - validate_only: Optional[bool] = None - """If set, previews the changes that would be made to a cluster to enforce compliance but does not - update the cluster.""" - - def as_dict(self) -> dict: - """Serializes the EnforceClusterComplianceRequest into a dictionary 
suitable for use as a JSON request body.""" - body = {} - if self.cluster_id is not None: - body["cluster_id"] = self.cluster_id - if self.validate_only is not None: - body["validate_only"] = self.validate_only - return body - - def as_shallow_dict(self) -> dict: - """Serializes the EnforceClusterComplianceRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.cluster_id is not None: - body["cluster_id"] = self.cluster_id - if self.validate_only is not None: - body["validate_only"] = self.validate_only - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> EnforceClusterComplianceRequest: - """Deserializes the EnforceClusterComplianceRequest from a dictionary.""" - return cls(cluster_id=d.get("cluster_id", None), validate_only=d.get("validate_only", None)) - - @dataclass class EnforceClusterComplianceResponse: changes: Optional[List[ClusterSettingsChange]] = None @@ -5613,65 +4036,6 @@ def from_dict(cls, d: Dict[str, Any]) -> GetSparkVersionsResponse: return cls(versions=_repeated_dict(d, "versions", SparkVersion)) -@dataclass -class GlobalInitScriptCreateRequest: - name: str - """The name of the script""" - - script: str - """The Base64-encoded content of the script.""" - - enabled: Optional[bool] = None - """Specifies whether the script is enabled. The script runs only if enabled.""" - - position: Optional[int] = None - """The position of a global init script, where 0 represents the first script to run, 1 is the - second script to run, in ascending order. - - If you omit the numeric position for a new global init script, it defaults to last position. It - will run after all current scripts. Setting any value greater than the position of the last - script is equivalent to the last position. Example: Take three existing scripts with positions - 0, 1, and 2. Any position of (3) or greater puts the script in the last position. 
If an explicit - position value conflicts with an existing script value, your request succeeds, but the original - script at that position and all later scripts have their positions incremented by 1.""" - - def as_dict(self) -> dict: - """Serializes the GlobalInitScriptCreateRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.enabled is not None: - body["enabled"] = self.enabled - if self.name is not None: - body["name"] = self.name - if self.position is not None: - body["position"] = self.position - if self.script is not None: - body["script"] = self.script - return body - - def as_shallow_dict(self) -> dict: - """Serializes the GlobalInitScriptCreateRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.enabled is not None: - body["enabled"] = self.enabled - if self.name is not None: - body["name"] = self.name - if self.position is not None: - body["position"] = self.position - if self.script is not None: - body["script"] = self.script - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> GlobalInitScriptCreateRequest: - """Deserializes the GlobalInitScriptCreateRequest from a dictionary.""" - return cls( - enabled=d.get("enabled", None), - name=d.get("name", None), - position=d.get("position", None), - script=d.get("script", None), - ) - - @dataclass class GlobalInitScriptDetails: created_at: Optional[int] = None @@ -5848,73 +4212,6 @@ def from_dict(cls, d: Dict[str, Any]) -> GlobalInitScriptDetailsWithContent: ) -@dataclass -class GlobalInitScriptUpdateRequest: - name: str - """The name of the script""" - - script: str - """The Base64-encoded content of the script.""" - - enabled: Optional[bool] = None - """Specifies whether the script is enabled. The script runs only if enabled.""" - - position: Optional[int] = None - """The position of a script, where 0 represents the first script to run, 1 is the second script to - run, in ascending order. 
To move the script to run first, set its position to 0. - - To move the script to the end, set its position to any value greater or equal to the position of - the last script. Example, three existing scripts with positions 0, 1, and 2. Any position value - of 2 or greater puts the script in the last position (2). - - If an explicit position value conflicts with an existing script, your request succeeds, but the - original script at that position and all later scripts have their positions incremented by 1.""" - - script_id: Optional[str] = None - """The ID of the global init script.""" - - def as_dict(self) -> dict: - """Serializes the GlobalInitScriptUpdateRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.enabled is not None: - body["enabled"] = self.enabled - if self.name is not None: - body["name"] = self.name - if self.position is not None: - body["position"] = self.position - if self.script is not None: - body["script"] = self.script - if self.script_id is not None: - body["script_id"] = self.script_id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the GlobalInitScriptUpdateRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.enabled is not None: - body["enabled"] = self.enabled - if self.name is not None: - body["name"] = self.name - if self.position is not None: - body["position"] = self.position - if self.script is not None: - body["script"] = self.script - if self.script_id is not None: - body["script_id"] = self.script_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> GlobalInitScriptUpdateRequest: - """Deserializes the GlobalInitScriptUpdateRequest from a dictionary.""" - return cls( - enabled=d.get("enabled", None), - name=d.get("name", None), - position=d.get("position", None), - script=d.get("script", None), - script_id=d.get("script_id", None), - ) - - @dataclass class InitScriptEventDetails: cluster: 
Optional[List[InitScriptInfoAndExecutionDetails]] = None @@ -6140,62 +4437,30 @@ def as_shallow_dict(self) -> dict: if self.gcs: body["gcs"] = self.gcs if self.s3: - body["s3"] = self.s3 - if self.status is not None: - body["status"] = self.status - if self.volumes: - body["volumes"] = self.volumes - if self.workspace: - body["workspace"] = self.workspace - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> InitScriptInfoAndExecutionDetails: - """Deserializes the InitScriptInfoAndExecutionDetails from a dictionary.""" - return cls( - abfss=_from_dict(d, "abfss", Adlsgen2Info), - dbfs=_from_dict(d, "dbfs", DbfsStorageInfo), - error_message=d.get("error_message", None), - execution_duration_seconds=d.get("execution_duration_seconds", None), - file=_from_dict(d, "file", LocalFileInfo), - gcs=_from_dict(d, "gcs", GcsStorageInfo), - s3=_from_dict(d, "s3", S3StorageInfo), - status=_enum(d, "status", InitScriptExecutionDetailsInitScriptExecutionStatus), - volumes=_from_dict(d, "volumes", VolumesStorageInfo), - workspace=_from_dict(d, "workspace", WorkspaceStorageInfo), - ) - - -@dataclass -class InstallLibraries: - cluster_id: str - """Unique identifier for the cluster on which to install these libraries.""" - - libraries: List[Library] - """The libraries to install.""" - - def as_dict(self) -> dict: - """Serializes the InstallLibraries into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.cluster_id is not None: - body["cluster_id"] = self.cluster_id - if self.libraries: - body["libraries"] = [v.as_dict() for v in self.libraries] - return body - - def as_shallow_dict(self) -> dict: - """Serializes the InstallLibraries into a shallow dictionary of its immediate attributes.""" - body = {} - if self.cluster_id is not None: - body["cluster_id"] = self.cluster_id - if self.libraries: - body["libraries"] = self.libraries + body["s3"] = self.s3 + if self.status is not None: + body["status"] = self.status + if self.volumes: + 
body["volumes"] = self.volumes + if self.workspace: + body["workspace"] = self.workspace return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> InstallLibraries: - """Deserializes the InstallLibraries from a dictionary.""" - return cls(cluster_id=d.get("cluster_id", None), libraries=_repeated_dict(d, "libraries", Library)) + def from_dict(cls, d: Dict[str, Any]) -> InitScriptInfoAndExecutionDetails: + """Deserializes the InitScriptInfoAndExecutionDetails from a dictionary.""" + return cls( + abfss=_from_dict(d, "abfss", Adlsgen2Info), + dbfs=_from_dict(d, "dbfs", DbfsStorageInfo), + error_message=d.get("error_message", None), + execution_duration_seconds=d.get("execution_duration_seconds", None), + file=_from_dict(d, "file", LocalFileInfo), + gcs=_from_dict(d, "gcs", GcsStorageInfo), + s3=_from_dict(d, "s3", S3StorageInfo), + status=_enum(d, "status", InitScriptExecutionDetailsInitScriptExecutionStatus), + volumes=_from_dict(d, "volumes", VolumesStorageInfo), + workspace=_from_dict(d, "workspace", WorkspaceStorageInfo), + ) @dataclass @@ -6824,40 +5089,6 @@ def from_dict(cls, d: Dict[str, Any]) -> InstancePoolPermissionsDescription: ) -@dataclass -class InstancePoolPermissionsRequest: - access_control_list: Optional[List[InstancePoolAccessControlRequest]] = None - - instance_pool_id: Optional[str] = None - """The instance pool for which to get or manage permissions.""" - - def as_dict(self) -> dict: - """Serializes the InstancePoolPermissionsRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.access_control_list: - body["access_control_list"] = [v.as_dict() for v in self.access_control_list] - if self.instance_pool_id is not None: - body["instance_pool_id"] = self.instance_pool_id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the InstancePoolPermissionsRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.access_control_list: - body["access_control_list"] = 
self.access_control_list - if self.instance_pool_id is not None: - body["instance_pool_id"] = self.instance_pool_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> InstancePoolPermissionsRequest: - """Deserializes the InstancePoolPermissionsRequest from a dictionary.""" - return cls( - access_control_list=_repeated_dict(d, "access_control_list", InstancePoolAccessControlRequest), - instance_pool_id=d.get("instance_pool_id", None), - ) - - class InstancePoolState(Enum): """The state of a Cluster. The current allowable state transitions are as follows: @@ -8017,31 +6248,6 @@ def from_dict(cls, d: Dict[str, Any]) -> PendingInstanceError: return cls(instance_id=d.get("instance_id", None), message=d.get("message", None)) -@dataclass -class PermanentDeleteCluster: - cluster_id: str - """The cluster to be deleted.""" - - def as_dict(self) -> dict: - """Serializes the PermanentDeleteCluster into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.cluster_id is not None: - body["cluster_id"] = self.cluster_id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the PermanentDeleteCluster into a shallow dictionary of its immediate attributes.""" - body = {} - if self.cluster_id is not None: - body["cluster_id"] = self.cluster_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> PermanentDeleteCluster: - """Deserializes the PermanentDeleteCluster from a dictionary.""" - return cls(cluster_id=d.get("cluster_id", None)) - - @dataclass class PermanentDeleteClusterResponse: def as_dict(self) -> dict: @@ -8060,30 +6266,6 @@ def from_dict(cls, d: Dict[str, Any]) -> PermanentDeleteClusterResponse: return cls() -@dataclass -class PinCluster: - cluster_id: str - - def as_dict(self) -> dict: - """Serializes the PinCluster into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.cluster_id is not None: - body["cluster_id"] = self.cluster_id - return body - - def 
as_shallow_dict(self) -> dict: - """Serializes the PinCluster into a shallow dictionary of its immediate attributes.""" - body = {} - if self.cluster_id is not None: - body["cluster_id"] = self.cluster_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> PinCluster: - """Deserializes the PinCluster from a dictionary.""" - return cls(cluster_id=d.get("cluster_id", None)) - - @dataclass class PinClusterResponse: def as_dict(self) -> dict: @@ -8346,31 +6528,6 @@ def from_dict(cls, d: Dict[str, Any]) -> RCranLibrary: return cls(package=d.get("package", None), repo=d.get("repo", None)) -@dataclass -class RemoveInstanceProfile: - instance_profile_arn: str - """The ARN of the instance profile to remove. This field is required.""" - - def as_dict(self) -> dict: - """Serializes the RemoveInstanceProfile into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.instance_profile_arn is not None: - body["instance_profile_arn"] = self.instance_profile_arn - return body - - def as_shallow_dict(self) -> dict: - """Serializes the RemoveInstanceProfile into a shallow dictionary of its immediate attributes.""" - body = {} - if self.instance_profile_arn is not None: - body["instance_profile_arn"] = self.instance_profile_arn - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> RemoveInstanceProfile: - """Deserializes the RemoveInstanceProfile from a dictionary.""" - return cls(instance_profile_arn=d.get("instance_profile_arn", None)) - - @dataclass class RemoveResponse: def as_dict(self) -> dict: @@ -8389,57 +6546,6 @@ def from_dict(cls, d: Dict[str, Any]) -> RemoveResponse: return cls() -@dataclass -class ResizeCluster: - cluster_id: str - """The cluster to be resized.""" - - autoscale: Optional[AutoScale] = None - """Parameters needed in order to automatically scale clusters up and down based on load. 
Note: - autoscaling works best with DB runtime versions 3.0 or later.""" - - num_workers: Optional[int] = None - """Number of worker nodes that this cluster should have. A cluster has one Spark Driver and - `num_workers` Executors for a total of `num_workers` + 1 Spark nodes. - - Note: When reading the properties of a cluster, this field reflects the desired number of - workers rather than the actual current number of workers. For instance, if a cluster is resized - from 5 to 10 workers, this field will immediately be updated to reflect the target size of 10 - workers, whereas the workers listed in `spark_info` will gradually increase from 5 to 10 as the - new nodes are provisioned.""" - - def as_dict(self) -> dict: - """Serializes the ResizeCluster into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.autoscale: - body["autoscale"] = self.autoscale.as_dict() - if self.cluster_id is not None: - body["cluster_id"] = self.cluster_id - if self.num_workers is not None: - body["num_workers"] = self.num_workers - return body - - def as_shallow_dict(self) -> dict: - """Serializes the ResizeCluster into a shallow dictionary of its immediate attributes.""" - body = {} - if self.autoscale: - body["autoscale"] = self.autoscale - if self.cluster_id is not None: - body["cluster_id"] = self.cluster_id - if self.num_workers is not None: - body["num_workers"] = self.num_workers - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ResizeCluster: - """Deserializes the ResizeCluster from a dictionary.""" - return cls( - autoscale=_from_dict(d, "autoscale", AutoScale), - cluster_id=d.get("cluster_id", None), - num_workers=d.get("num_workers", None), - ) - - @dataclass class ResizeClusterResponse: def as_dict(self) -> dict: @@ -8458,37 +6564,6 @@ def from_dict(cls, d: Dict[str, Any]) -> ResizeClusterResponse: return cls() -@dataclass -class RestartCluster: - cluster_id: str - """The cluster to be started.""" - - restart_user: 
Optional[str] = None - - def as_dict(self) -> dict: - """Serializes the RestartCluster into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.cluster_id is not None: - body["cluster_id"] = self.cluster_id - if self.restart_user is not None: - body["restart_user"] = self.restart_user - return body - - def as_shallow_dict(self) -> dict: - """Serializes the RestartCluster into a shallow dictionary of its immediate attributes.""" - body = {} - if self.cluster_id is not None: - body["cluster_id"] = self.cluster_id - if self.restart_user is not None: - body["restart_user"] = self.restart_user - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> RestartCluster: - """Deserializes the RestartCluster from a dictionary.""" - return cls(cluster_id=d.get("cluster_id", None), restart_user=d.get("restart_user", None)) - - @dataclass class RestartClusterResponse: def as_dict(self) -> dict: @@ -8850,31 +6925,6 @@ def from_dict(cls, d: Dict[str, Any]) -> SparkVersion: return cls(key=d.get("key", None), name=d.get("name", None)) -@dataclass -class StartCluster: - cluster_id: str - """The cluster to be started.""" - - def as_dict(self) -> dict: - """Serializes the StartCluster into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.cluster_id is not None: - body["cluster_id"] = self.cluster_id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the StartCluster into a shallow dictionary of its immediate attributes.""" - body = {} - if self.cluster_id is not None: - body["cluster_id"] = self.cluster_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> StartCluster: - """Deserializes the StartCluster from a dictionary.""" - return cls(cluster_id=d.get("cluster_id", None)) - - @dataclass class StartClusterResponse: def as_dict(self) -> dict: @@ -9019,6 +7069,7 @@ class TerminationReasonCode(Enum): DOCKER_IMAGE_PULL_FAILURE = "DOCKER_IMAGE_PULL_FAILURE" 
DOCKER_IMAGE_TOO_LARGE_FOR_INSTANCE_EXCEPTION = "DOCKER_IMAGE_TOO_LARGE_FOR_INSTANCE_EXCEPTION" DOCKER_INVALID_OS_EXCEPTION = "DOCKER_INVALID_OS_EXCEPTION" + DRIVER_DNS_RESOLUTION_FAILURE = "DRIVER_DNS_RESOLUTION_FAILURE" DRIVER_EVICTION = "DRIVER_EVICTION" DRIVER_LAUNCH_TIMEOUT = "DRIVER_LAUNCH_TIMEOUT" DRIVER_NODE_UNREACHABLE = "DRIVER_NODE_UNREACHABLE" @@ -9141,38 +7192,6 @@ class TerminationReasonType(Enum): SUCCESS = "SUCCESS" -@dataclass -class UninstallLibraries: - cluster_id: str - """Unique identifier for the cluster on which to uninstall these libraries.""" - - libraries: List[Library] - """The libraries to uninstall.""" - - def as_dict(self) -> dict: - """Serializes the UninstallLibraries into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.cluster_id is not None: - body["cluster_id"] = self.cluster_id - if self.libraries: - body["libraries"] = [v.as_dict() for v in self.libraries] - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UninstallLibraries into a shallow dictionary of its immediate attributes.""" - body = {} - if self.cluster_id is not None: - body["cluster_id"] = self.cluster_id - if self.libraries: - body["libraries"] = self.libraries - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UninstallLibraries: - """Deserializes the UninstallLibraries from a dictionary.""" - return cls(cluster_id=d.get("cluster_id", None), libraries=_repeated_dict(d, "libraries", Library)) - - @dataclass class UninstallLibrariesResponse: def as_dict(self) -> dict: @@ -9191,30 +7210,6 @@ def from_dict(cls, d: Dict[str, Any]) -> UninstallLibrariesResponse: return cls() -@dataclass -class UnpinCluster: - cluster_id: str - - def as_dict(self) -> dict: - """Serializes the UnpinCluster into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.cluster_id is not None: - body["cluster_id"] = self.cluster_id - return body - - def as_shallow_dict(self) -> dict: - 
"""Serializes the UnpinCluster into a shallow dictionary of its immediate attributes.""" - body = {} - if self.cluster_id is not None: - body["cluster_id"] = self.cluster_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UnpinCluster: - """Deserializes the UnpinCluster from a dictionary.""" - return cls(cluster_id=d.get("cluster_id", None)) - - @dataclass class UnpinClusterResponse: def as_dict(self) -> dict: @@ -9233,60 +7228,6 @@ def from_dict(cls, d: Dict[str, Any]) -> UnpinClusterResponse: return cls() -@dataclass -class UpdateCluster: - cluster_id: str - """ID of the cluster.""" - - update_mask: str - """Used to specify which cluster attributes and size fields to update. See - https://google.aip.dev/161 for more details. - - The field mask must be a single string, with multiple fields separated by commas (no spaces). - The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields - (e.g., `author.given_name`). Specification of elements in sequence or map fields is not allowed, - as only the entire collection field can be specified. Field names must exactly match the - resource field names. - - A field mask of `*` indicates full replacement. 
It’s recommended to always explicitly list the - fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the - API changes in the future.""" - - cluster: Optional[UpdateClusterResource] = None - """The cluster to be updated.""" - - def as_dict(self) -> dict: - """Serializes the UpdateCluster into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.cluster: - body["cluster"] = self.cluster.as_dict() - if self.cluster_id is not None: - body["cluster_id"] = self.cluster_id - if self.update_mask is not None: - body["update_mask"] = self.update_mask - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateCluster into a shallow dictionary of its immediate attributes.""" - body = {} - if self.cluster: - body["cluster"] = self.cluster - if self.cluster_id is not None: - body["cluster_id"] = self.cluster_id - if self.update_mask is not None: - body["update_mask"] = self.update_mask - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateCluster: - """Deserializes the UpdateCluster from a dictionary.""" - return cls( - cluster=_from_dict(d, "cluster", UpdateClusterResource), - cluster_id=d.get("cluster_id", None), - update_mask=d.get("update_mask", None), - ) - - @dataclass class UpdateClusterResource: autoscale: Optional[AutoScale] = None diff --git a/databricks/sdk/service/dashboards.py b/databricks/sdk/service/dashboards.py index 1276734bb..01ac655a4 100755 --- a/databricks/sdk/service/dashboards.py +++ b/databricks/sdk/service/dashboards.py @@ -413,49 +413,6 @@ def from_dict(cls, d: Dict[str, Any]) -> GenieConversationSummary: ) -@dataclass -class GenieCreateConversationMessageRequest: - content: str - """User message content.""" - - conversation_id: Optional[str] = None - """The ID associated with the conversation.""" - - space_id: Optional[str] = None - """The ID associated with the Genie space where the conversation is started.""" - - def as_dict(self) -> 
dict: - """Serializes the GenieCreateConversationMessageRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.content is not None: - body["content"] = self.content - if self.conversation_id is not None: - body["conversation_id"] = self.conversation_id - if self.space_id is not None: - body["space_id"] = self.space_id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the GenieCreateConversationMessageRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.content is not None: - body["content"] = self.content - if self.conversation_id is not None: - body["conversation_id"] = self.conversation_id - if self.space_id is not None: - body["space_id"] = self.space_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> GenieCreateConversationMessageRequest: - """Deserializes the GenieCreateConversationMessageRequest from a dictionary.""" - return cls( - content=d.get("content", None), - conversation_id=d.get("conversation_id", None), - space_id=d.get("space_id", None), - ) - - @dataclass class GenieGetMessageQueryResultResponse: statement_response: Optional[sql.StatementResponse] = None @@ -810,38 +767,6 @@ def from_dict(cls, d: Dict[str, Any]) -> GenieSpace: return cls(description=d.get("description", None), space_id=d.get("space_id", None), title=d.get("title", None)) -@dataclass -class GenieStartConversationMessageRequest: - content: str - """The text of the message that starts the conversation.""" - - space_id: Optional[str] = None - """The ID associated with the Genie space where you want to start a conversation.""" - - def as_dict(self) -> dict: - """Serializes the GenieStartConversationMessageRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.content is not None: - body["content"] = self.content - if self.space_id is not None: - body["space_id"] = self.space_id - return body - - def as_shallow_dict(self) -> dict: - 
"""Serializes the GenieStartConversationMessageRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.content is not None: - body["content"] = self.content - if self.space_id is not None: - body["space_id"] = self.space_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> GenieStartConversationMessageRequest: - """Deserializes the GenieStartConversationMessageRequest from a dictionary.""" - return cls(content=d.get("content", None), space_id=d.get("space_id", None)) - - @dataclass class GenieStartConversationResponse: message_id: str @@ -1159,102 +1084,6 @@ class MessageStatus(Enum): SUBMITTED = "SUBMITTED" -@dataclass -class MigrateDashboardRequest: - source_dashboard_id: str - """UUID of the dashboard to be migrated.""" - - display_name: Optional[str] = None - """Display name for the new Lakeview dashboard.""" - - parent_path: Optional[str] = None - """The workspace path of the folder to contain the migrated Lakeview dashboard.""" - - update_parameter_syntax: Optional[bool] = None - """Flag to indicate if mustache parameter syntax ({{ param }}) should be auto-updated to named - syntax (:param) when converting datasets in the dashboard.""" - - def as_dict(self) -> dict: - """Serializes the MigrateDashboardRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.display_name is not None: - body["display_name"] = self.display_name - if self.parent_path is not None: - body["parent_path"] = self.parent_path - if self.source_dashboard_id is not None: - body["source_dashboard_id"] = self.source_dashboard_id - if self.update_parameter_syntax is not None: - body["update_parameter_syntax"] = self.update_parameter_syntax - return body - - def as_shallow_dict(self) -> dict: - """Serializes the MigrateDashboardRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.display_name is not None: - body["display_name"] = self.display_name - if self.parent_path 
is not None: - body["parent_path"] = self.parent_path - if self.source_dashboard_id is not None: - body["source_dashboard_id"] = self.source_dashboard_id - if self.update_parameter_syntax is not None: - body["update_parameter_syntax"] = self.update_parameter_syntax - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> MigrateDashboardRequest: - """Deserializes the MigrateDashboardRequest from a dictionary.""" - return cls( - display_name=d.get("display_name", None), - parent_path=d.get("parent_path", None), - source_dashboard_id=d.get("source_dashboard_id", None), - update_parameter_syntax=d.get("update_parameter_syntax", None), - ) - - -@dataclass -class PublishRequest: - dashboard_id: Optional[str] = None - """UUID identifying the dashboard to be published.""" - - embed_credentials: Optional[bool] = None - """Flag to indicate if the publisher's credentials should be embedded in the published dashboard. - These embedded credentials will be used to execute the published dashboard's queries.""" - - warehouse_id: Optional[str] = None - """The ID of the warehouse that can be used to override the warehouse which was set in the draft.""" - - def as_dict(self) -> dict: - """Serializes the PublishRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.dashboard_id is not None: - body["dashboard_id"] = self.dashboard_id - if self.embed_credentials is not None: - body["embed_credentials"] = self.embed_credentials - if self.warehouse_id is not None: - body["warehouse_id"] = self.warehouse_id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the PublishRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.dashboard_id is not None: - body["dashboard_id"] = self.dashboard_id - if self.embed_credentials is not None: - body["embed_credentials"] = self.embed_credentials - if self.warehouse_id is not None: - body["warehouse_id"] = self.warehouse_id - return body - - 
@classmethod - def from_dict(cls, d: Dict[str, Any]) -> PublishRequest: - """Deserializes the PublishRequest from a dictionary.""" - return cls( - dashboard_id=d.get("dashboard_id", None), - embed_credentials=d.get("embed_credentials", None), - warehouse_id=d.get("warehouse_id", None), - ) - - @dataclass class PublishedDashboard: display_name: Optional[str] = None @@ -2549,12 +2378,7 @@ def __init__(self, api_client): def get_published_dashboard_token_info( self, dashboard_id: str, *, external_value: Optional[str] = None, external_viewer_id: Optional[str] = None ) -> GetPublishedDashboardTokenInfoResponse: - """Get a required authorization details and scopes of a published dashboard to mint an OAuth token. The - `authorization_details` can be enriched to apply additional restriction. - - Example: Adding the following `authorization_details` object to downscope the viewer permission to - specific table ``` { type: "unity_catalog_privileges", privileges: ["SELECT"], object_type: "TABLE", - object_full_path: "main.default.testdata" } ``` + """Get a required authorization details and scopes of a published dashboard to mint an OAuth token. :param dashboard_id: str UUID identifying the published dashboard. 
diff --git a/databricks/sdk/service/database.py b/databricks/sdk/service/database.py index 9dd59f0ac..183d03140 100755 --- a/databricks/sdk/service/database.py +++ b/databricks/sdk/service/database.py @@ -586,51 +586,6 @@ def from_dict(cls, d: Dict[str, Any]) -> DeltaTableSyncInfo: ) -@dataclass -class GenerateDatabaseCredentialRequest: - """Generates a credential that can be used to access database instances""" - - claims: Optional[List[RequestedClaims]] = None - """The returned token will be scoped to the union of instance_names and instances containing the - specified UC tables, so instance_names is allowed to be empty.""" - - instance_names: Optional[List[str]] = None - """Instances to which the token will be scoped.""" - - request_id: Optional[str] = None - - def as_dict(self) -> dict: - """Serializes the GenerateDatabaseCredentialRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.claims: - body["claims"] = [v.as_dict() for v in self.claims] - if self.instance_names: - body["instance_names"] = [v for v in self.instance_names] - if self.request_id is not None: - body["request_id"] = self.request_id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the GenerateDatabaseCredentialRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.claims: - body["claims"] = self.claims - if self.instance_names: - body["instance_names"] = self.instance_names - if self.request_id is not None: - body["request_id"] = self.request_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> GenerateDatabaseCredentialRequest: - """Deserializes the GenerateDatabaseCredentialRequest from a dictionary.""" - return cls( - claims=_repeated_dict(d, "claims", RequestedClaims), - instance_names=d.get("instance_names", None), - request_id=d.get("request_id", None), - ) - - @dataclass class ListDatabaseInstanceRolesResponse: database_instance_roles: Optional[List[DatabaseInstanceRole]] 
= None @@ -707,11 +662,15 @@ class NewPipelineSpec: fields of pipeline are still inferred by table def internally""" storage_catalog: Optional[str] = None - """UC catalog for the pipeline to store intermediate files (checkpoints, event logs etc). This + """This field needs to be specified if the destination catalog is a managed postgres catalog. + + UC catalog for the pipeline to store intermediate files (checkpoints, event logs etc). This needs to be a standard catalog where the user has permissions to create Delta tables.""" storage_schema: Optional[str] = None - """UC schema for the pipeline to store intermediate files (checkpoints, event logs etc). This needs + """This field needs to be specified if the destination catalog is a managed postgres catalog. + + UC schema for the pipeline to store intermediate files (checkpoints, event logs etc). This needs to be in the standard catalog where the user has permissions to create Delta tables.""" def as_dict(self) -> dict: @@ -1144,13 +1103,20 @@ class SyncedTableSpec: do not already exist.""" existing_pipeline_id: Optional[str] = None - """User-specified ID of a pre-existing pipeline to bin pack. This field is optional, and should be - empty if new_pipeline_spec is set. This field will only be set by the server in response - messages if it is specified in the request. The SyncedTableStatus message will always contain - the effective pipeline ID (either client provided or server generated), however.""" + """At most one of existing_pipeline_id and new_pipeline_spec should be defined. + + If existing_pipeline_id is defined, the synced table will be bin packed into the existing + pipeline referenced. This avoids creating a new pipeline and allows sharing existing compute. In + this case, the scheduling_policy of this synced table must match the scheduling policy of the + existing pipeline.""" new_pipeline_spec: Optional[NewPipelineSpec] = None - """Spec of new pipeline. 
Should be empty if pipeline_id / existing_pipeline_id is set""" + """At most one of existing_pipeline_id and new_pipeline_spec should be defined. + + If new_pipeline_spec is defined, a new pipeline is created for this synced table. The location + pointed to is used to store intermediate files (checkpoints, event logs etc). The caller must + have write permissions to create Delta tables in the specified catalog and schema. Again, note + this requires write permissions, whereas the source table only requires read permissions.""" primary_key_columns: Optional[List[str]] = None """Primary Key columns to be used for data insert/update in the destination.""" diff --git a/databricks/sdk/service/files.py b/databricks/sdk/service/files.py index f912a510f..2117a09f3 100755 --- a/databricks/sdk/service/files.py +++ b/databricks/sdk/service/files.py @@ -14,38 +14,6 @@ # all definitions in this file are in alphabetical order -@dataclass -class AddBlock: - handle: int - """The handle on an open stream.""" - - data: str - """The base64-encoded data to append to the stream. 
This has a limit of 1 MB.""" - - def as_dict(self) -> dict: - """Serializes the AddBlock into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.data is not None: - body["data"] = self.data - if self.handle is not None: - body["handle"] = self.handle - return body - - def as_shallow_dict(self) -> dict: - """Serializes the AddBlock into a shallow dictionary of its immediate attributes.""" - body = {} - if self.data is not None: - body["data"] = self.data - if self.handle is not None: - body["handle"] = self.handle - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> AddBlock: - """Deserializes the AddBlock from a dictionary.""" - return cls(data=d.get("data", None), handle=d.get("handle", None)) - - @dataclass class AddBlockResponse: def as_dict(self) -> dict: @@ -64,31 +32,6 @@ def from_dict(cls, d: Dict[str, Any]) -> AddBlockResponse: return cls() -@dataclass -class Close: - handle: int - """The handle on an open stream.""" - - def as_dict(self) -> dict: - """Serializes the Close into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.handle is not None: - body["handle"] = self.handle - return body - - def as_shallow_dict(self) -> dict: - """Serializes the Close into a shallow dictionary of its immediate attributes.""" - body = {} - if self.handle is not None: - body["handle"] = self.handle - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> Close: - """Deserializes the Close from a dictionary.""" - return cls(handle=d.get("handle", None)) - - @dataclass class CloseResponse: def as_dict(self) -> dict: @@ -107,38 +50,6 @@ def from_dict(cls, d: Dict[str, Any]) -> CloseResponse: return cls() -@dataclass -class Create: - path: str - """The path of the new file. 
The path should be the absolute DBFS path.""" - - overwrite: Optional[bool] = None - """The flag that specifies whether to overwrite existing file/files.""" - - def as_dict(self) -> dict: - """Serializes the Create into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.overwrite is not None: - body["overwrite"] = self.overwrite - if self.path is not None: - body["path"] = self.path - return body - - def as_shallow_dict(self) -> dict: - """Serializes the Create into a shallow dictionary of its immediate attributes.""" - body = {} - if self.overwrite is not None: - body["overwrite"] = self.overwrite - if self.path is not None: - body["path"] = self.path - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> Create: - """Deserializes the Create from a dictionary.""" - return cls(overwrite=d.get("overwrite", None), path=d.get("path", None)) - - @dataclass class CreateDirectoryResponse: def as_dict(self) -> dict: @@ -183,39 +94,6 @@ def from_dict(cls, d: Dict[str, Any]) -> CreateResponse: return cls(handle=d.get("handle", None)) -@dataclass -class Delete: - path: str - """The path of the file or directory to delete. The path should be the absolute DBFS path.""" - - recursive: Optional[bool] = None - """Whether or not to recursively delete the directory's contents. 
Deleting empty directories can be - done without providing the recursive flag.""" - - def as_dict(self) -> dict: - """Serializes the Delete into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.path is not None: - body["path"] = self.path - if self.recursive is not None: - body["recursive"] = self.recursive - return body - - def as_shallow_dict(self) -> dict: - """Serializes the Delete into a shallow dictionary of its immediate attributes.""" - body = {} - if self.path is not None: - body["path"] = self.path - if self.recursive is not None: - body["recursive"] = self.recursive - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> Delete: - """Deserializes the Delete from a dictionary.""" - return cls(path=d.get("path", None), recursive=d.get("recursive", None)) - - @dataclass class DeleteDirectoryResponse: def as_dict(self) -> dict: @@ -530,31 +408,6 @@ def from_dict(cls, d: Dict[str, Any]) -> ListStatusResponse: return cls(files=_repeated_dict(d, "files", FileInfo)) -@dataclass -class MkDirs: - path: str - """The path of the new directory. The path should be the absolute DBFS path.""" - - def as_dict(self) -> dict: - """Serializes the MkDirs into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.path is not None: - body["path"] = self.path - return body - - def as_shallow_dict(self) -> dict: - """Serializes the MkDirs into a shallow dictionary of its immediate attributes.""" - body = {} - if self.path is not None: - body["path"] = self.path - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> MkDirs: - """Deserializes the MkDirs from a dictionary.""" - return cls(path=d.get("path", None)) - - @dataclass class MkDirsResponse: def as_dict(self) -> dict: @@ -573,38 +426,6 @@ def from_dict(cls, d: Dict[str, Any]) -> MkDirsResponse: return cls() -@dataclass -class Move: - source_path: str - """The source path of the file or directory. 
The path should be the absolute DBFS path.""" - - destination_path: str - """The destination path of the file or directory. The path should be the absolute DBFS path.""" - - def as_dict(self) -> dict: - """Serializes the Move into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.destination_path is not None: - body["destination_path"] = self.destination_path - if self.source_path is not None: - body["source_path"] = self.source_path - return body - - def as_shallow_dict(self) -> dict: - """Serializes the Move into a shallow dictionary of its immediate attributes.""" - body = {} - if self.destination_path is not None: - body["destination_path"] = self.destination_path - if self.source_path is not None: - body["source_path"] = self.source_path - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> Move: - """Deserializes the Move from a dictionary.""" - return cls(destination_path=d.get("destination_path", None), source_path=d.get("source_path", None)) - - @dataclass class MoveResponse: def as_dict(self) -> dict: @@ -623,45 +444,6 @@ def from_dict(cls, d: Dict[str, Any]) -> MoveResponse: return cls() -@dataclass -class Put: - path: str - """The path of the new file. 
The path should be the absolute DBFS path.""" - - contents: Optional[str] = None - """This parameter might be absent, and instead a posted file will be used.""" - - overwrite: Optional[bool] = None - """The flag that specifies whether to overwrite existing file/files.""" - - def as_dict(self) -> dict: - """Serializes the Put into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.contents is not None: - body["contents"] = self.contents - if self.overwrite is not None: - body["overwrite"] = self.overwrite - if self.path is not None: - body["path"] = self.path - return body - - def as_shallow_dict(self) -> dict: - """Serializes the Put into a shallow dictionary of its immediate attributes.""" - body = {} - if self.contents is not None: - body["contents"] = self.contents - if self.overwrite is not None: - body["overwrite"] = self.overwrite - if self.path is not None: - body["path"] = self.path - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> Put: - """Deserializes the Put from a dictionary.""" - return cls(contents=d.get("contents", None), overwrite=d.get("overwrite", None), path=d.get("path", None)) - - @dataclass class PutResponse: def as_dict(self) -> dict: @@ -1097,6 +879,7 @@ def download(self, file_path: str) -> DownloadResponse: headers = { "Accept": "application/octet-stream", } + response_headers = [ "content-length", "content-type", @@ -1142,6 +925,7 @@ def get_metadata(self, file_path: str) -> GetMetadataResponse: """ headers = {} + response_headers = [ "content-length", "content-type", diff --git a/databricks/sdk/service/iam.py b/databricks/sdk/service/iam.py index a25767c7a..09166b04f 100755 --- a/databricks/sdk/service/iam.py +++ b/databricks/sdk/service/iam.py @@ -697,57 +697,6 @@ def from_dict(cls, d: Dict[str, Any]) -> ListUsersResponse: ) -@dataclass -class MigratePermissionsRequest: - workspace_id: int - """WorkspaceId of the associated workspace where the permission migration will occur.""" - - 
from_workspace_group_name: str - """The name of the workspace group that permissions will be migrated from.""" - - to_account_group_name: str - """The name of the account group that permissions will be migrated to.""" - - size: Optional[int] = None - """The maximum number of permissions that will be migrated.""" - - def as_dict(self) -> dict: - """Serializes the MigratePermissionsRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.from_workspace_group_name is not None: - body["from_workspace_group_name"] = self.from_workspace_group_name - if self.size is not None: - body["size"] = self.size - if self.to_account_group_name is not None: - body["to_account_group_name"] = self.to_account_group_name - if self.workspace_id is not None: - body["workspace_id"] = self.workspace_id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the MigratePermissionsRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.from_workspace_group_name is not None: - body["from_workspace_group_name"] = self.from_workspace_group_name - if self.size is not None: - body["size"] = self.size - if self.to_account_group_name is not None: - body["to_account_group_name"] = self.to_account_group_name - if self.workspace_id is not None: - body["workspace_id"] = self.workspace_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> MigratePermissionsRequest: - """Deserializes the MigratePermissionsRequest from a dictionary.""" - return cls( - from_workspace_group_name=d.get("from_workspace_group_name", None), - size=d.get("size", None), - to_account_group_name=d.get("to_account_group_name", None), - workspace_id=d.get("workspace_id", None), - ) - - @dataclass class MigratePermissionsResponse: permissions_migrated: Optional[int] = None @@ -845,48 +794,6 @@ def from_dict(cls, d: Dict[str, Any]) -> ObjectPermissions: ) -@dataclass -class PartialUpdate: - id: Optional[str] = None - """Unique ID in 
the Databricks workspace.""" - - operations: Optional[List[Patch]] = None - - schemas: Optional[List[PatchSchema]] = None - """The schema of the patch request. Must be ["urn:ietf:params:scim:api:messages:2.0:PatchOp"].""" - - def as_dict(self) -> dict: - """Serializes the PartialUpdate into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.id is not None: - body["id"] = self.id - if self.operations: - body["Operations"] = [v.as_dict() for v in self.operations] - if self.schemas: - body["schemas"] = [v.value for v in self.schemas] - return body - - def as_shallow_dict(self) -> dict: - """Serializes the PartialUpdate into a shallow dictionary of its immediate attributes.""" - body = {} - if self.id is not None: - body["id"] = self.id - if self.operations: - body["Operations"] = self.operations - if self.schemas: - body["schemas"] = self.schemas - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> PartialUpdate: - """Deserializes the PartialUpdate from a dictionary.""" - return cls( - id=d.get("id", None), - operations=_repeated_dict(d, "Operations", Patch), - schemas=_repeated_enum(d, "schemas", PatchSchema), - ) - - @dataclass class PasswordAccessControlRequest: group_name: Optional[str] = None @@ -1118,30 +1025,6 @@ def from_dict(cls, d: Dict[str, Any]) -> PasswordPermissionsDescription: ) -@dataclass -class PasswordPermissionsRequest: - access_control_list: Optional[List[PasswordAccessControlRequest]] = None - - def as_dict(self) -> dict: - """Serializes the PasswordPermissionsRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.access_control_list: - body["access_control_list"] = [v.as_dict() for v in self.access_control_list] - return body - - def as_shallow_dict(self) -> dict: - """Serializes the PasswordPermissionsRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.access_control_list: - body["access_control_list"] = 
self.access_control_list - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> PasswordPermissionsRequest: - """Deserializes the PasswordPermissionsRequest from a dictionary.""" - return cls(access_control_list=_repeated_dict(d, "access_control_list", PasswordAccessControlRequest)) - - @dataclass class Patch: op: Optional[PatchOp] = None @@ -1764,94 +1647,6 @@ class ServicePrincipalSchema(Enum): URN_IETF_PARAMS_SCIM_SCHEMAS_CORE_2_0_SERVICE_PRINCIPAL = "urn:ietf:params:scim:schemas:core:2.0:ServicePrincipal" -@dataclass -class SetObjectPermissions: - access_control_list: Optional[List[AccessControlRequest]] = None - - request_object_id: Optional[str] = None - """The id of the request object.""" - - request_object_type: Optional[str] = None - """The type of the request object. Can be one of the following: alerts, authorization, clusters, - cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files, instance-pools, - jobs, notebooks, pipelines, queries, registered-models, repos, serving-endpoints, or warehouses.""" - - def as_dict(self) -> dict: - """Serializes the SetObjectPermissions into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.access_control_list: - body["access_control_list"] = [v.as_dict() for v in self.access_control_list] - if self.request_object_id is not None: - body["request_object_id"] = self.request_object_id - if self.request_object_type is not None: - body["request_object_type"] = self.request_object_type - return body - - def as_shallow_dict(self) -> dict: - """Serializes the SetObjectPermissions into a shallow dictionary of its immediate attributes.""" - body = {} - if self.access_control_list: - body["access_control_list"] = self.access_control_list - if self.request_object_id is not None: - body["request_object_id"] = self.request_object_id - if self.request_object_type is not None: - body["request_object_type"] = self.request_object_type - return body - - @classmethod - 
def from_dict(cls, d: Dict[str, Any]) -> SetObjectPermissions: - """Deserializes the SetObjectPermissions from a dictionary.""" - return cls( - access_control_list=_repeated_dict(d, "access_control_list", AccessControlRequest), - request_object_id=d.get("request_object_id", None), - request_object_type=d.get("request_object_type", None), - ) - - -@dataclass -class UpdateObjectPermissions: - access_control_list: Optional[List[AccessControlRequest]] = None - - request_object_id: Optional[str] = None - """The id of the request object.""" - - request_object_type: Optional[str] = None - """The type of the request object. Can be one of the following: alerts, authorization, clusters, - cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files, instance-pools, - jobs, notebooks, pipelines, queries, registered-models, repos, serving-endpoints, or warehouses.""" - - def as_dict(self) -> dict: - """Serializes the UpdateObjectPermissions into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.access_control_list: - body["access_control_list"] = [v.as_dict() for v in self.access_control_list] - if self.request_object_id is not None: - body["request_object_id"] = self.request_object_id - if self.request_object_type is not None: - body["request_object_type"] = self.request_object_type - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateObjectPermissions into a shallow dictionary of its immediate attributes.""" - body = {} - if self.access_control_list: - body["access_control_list"] = self.access_control_list - if self.request_object_id is not None: - body["request_object_id"] = self.request_object_id - if self.request_object_type is not None: - body["request_object_type"] = self.request_object_type - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateObjectPermissions: - """Deserializes the UpdateObjectPermissions from a dictionary.""" - return cls( - 
access_control_list=_repeated_dict(d, "access_control_list", AccessControlRequest), - request_object_id=d.get("request_object_id", None), - request_object_type=d.get("request_object_type", None), - ) - - @dataclass class UpdateResponse: def as_dict(self) -> dict: @@ -1870,84 +1665,6 @@ def from_dict(cls, d: Dict[str, Any]) -> UpdateResponse: return cls() -@dataclass -class UpdateRuleSetRequest: - name: str - """Name of the rule set.""" - - rule_set: RuleSetUpdateRequest - - def as_dict(self) -> dict: - """Serializes the UpdateRuleSetRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.name is not None: - body["name"] = self.name - if self.rule_set: - body["rule_set"] = self.rule_set.as_dict() - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateRuleSetRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.name is not None: - body["name"] = self.name - if self.rule_set: - body["rule_set"] = self.rule_set - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateRuleSetRequest: - """Deserializes the UpdateRuleSetRequest from a dictionary.""" - return cls(name=d.get("name", None), rule_set=_from_dict(d, "rule_set", RuleSetUpdateRequest)) - - -@dataclass -class UpdateWorkspaceAssignments: - permissions: Optional[List[WorkspacePermission]] = None - """Array of permissions assignments to update on the workspace. Valid values are "USER" and "ADMIN" - (case-sensitive). If both "USER" and "ADMIN" are provided, "ADMIN" takes precedence. Other - values will be ignored. 
Note that excluding this field, or providing unsupported values, will - have the same effect as providing an empty list, which will result in the deletion of all - permissions for the principal.""" - - principal_id: Optional[int] = None - """The ID of the user, service principal, or group.""" - - workspace_id: Optional[int] = None - """The workspace ID.""" - - def as_dict(self) -> dict: - """Serializes the UpdateWorkspaceAssignments into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.permissions: - body["permissions"] = [v.value for v in self.permissions] - if self.principal_id is not None: - body["principal_id"] = self.principal_id - if self.workspace_id is not None: - body["workspace_id"] = self.workspace_id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateWorkspaceAssignments into a shallow dictionary of its immediate attributes.""" - body = {} - if self.permissions: - body["permissions"] = self.permissions - if self.principal_id is not None: - body["principal_id"] = self.principal_id - if self.workspace_id is not None: - body["workspace_id"] = self.workspace_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateWorkspaceAssignments: - """Deserializes the UpdateWorkspaceAssignments from a dictionary.""" - return cls( - permissions=_repeated_enum(d, "permissions", WorkspacePermission), - principal_id=d.get("principal_id", None), - workspace_id=d.get("workspace_id", None), - ) - - @dataclass class User: active: Optional[bool] = None @@ -3608,9 +3325,10 @@ def get(self, request_object_type: str, request_object_id: str) -> ObjectPermiss object. :param request_object_type: str - The type of the request object. Can be one of the following: alerts, authorization, clusters, - cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files, instance-pools, - jobs, notebooks, pipelines, queries, registered-models, repos, serving-endpoints, or warehouses. 
+ The type of the request object. Can be one of the following: alerts, alertsv2, authorization, + clusters, cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files, + instance-pools, jobs, notebooks, pipelines, queries, registered-models, repos, serving-endpoints, or + warehouses. :param request_object_id: str The id of the request object. @@ -3628,9 +3346,10 @@ def get_permission_levels(self, request_object_type: str, request_object_id: str """Gets the permission levels that a user can have on an object. :param request_object_type: str - The type of the request object. Can be one of the following: alerts, authorization, clusters, - cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files, instance-pools, - jobs, notebooks, pipelines, queries, registered-models, repos, serving-endpoints, or warehouses. + The type of the request object. Can be one of the following: alerts, alertsv2, authorization, + clusters, cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files, + instance-pools, jobs, notebooks, pipelines, queries, registered-models, repos, serving-endpoints, or + warehouses. :param request_object_id: str :returns: :class:`GetPermissionLevelsResponse` @@ -3657,9 +3376,10 @@ def set( object. :param request_object_type: str - The type of the request object. Can be one of the following: alerts, authorization, clusters, - cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files, instance-pools, - jobs, notebooks, pipelines, queries, registered-models, repos, serving-endpoints, or warehouses. + The type of the request object. Can be one of the following: alerts, alertsv2, authorization, + clusters, cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files, + instance-pools, jobs, notebooks, pipelines, queries, registered-models, repos, serving-endpoints, or + warehouses. :param request_object_id: str The id of the request object. 
:param access_control_list: List[:class:`AccessControlRequest`] (optional) @@ -3690,9 +3410,10 @@ def update( root object. :param request_object_type: str - The type of the request object. Can be one of the following: alerts, authorization, clusters, - cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files, instance-pools, - jobs, notebooks, pipelines, queries, registered-models, repos, serving-endpoints, or warehouses. + The type of the request object. Can be one of the following: alerts, alertsv2, authorization, + clusters, cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files, + instance-pools, jobs, notebooks, pipelines, queries, registered-models, repos, serving-endpoints, or + warehouses. :param request_object_id: str The id of the request object. :param access_control_list: List[:class:`AccessControlRequest`] (optional) diff --git a/databricks/sdk/service/jobs.py b/databricks/sdk/service/jobs.py index 5b6c4442b..50c7ddb83 100755 --- a/databricks/sdk/service/jobs.py +++ b/databricks/sdk/service/jobs.py @@ -432,39 +432,6 @@ def from_dict(cls, d: Dict[str, Any]) -> BaseRun: ) -@dataclass -class CancelAllRuns: - all_queued_runs: Optional[bool] = None - """Optional boolean parameter to cancel all queued runs. 
If no job_id is provided, all queued runs - in the workspace are canceled.""" - - job_id: Optional[int] = None - """The canonical identifier of the job to cancel all runs of.""" - - def as_dict(self) -> dict: - """Serializes the CancelAllRuns into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.all_queued_runs is not None: - body["all_queued_runs"] = self.all_queued_runs - if self.job_id is not None: - body["job_id"] = self.job_id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CancelAllRuns into a shallow dictionary of its immediate attributes.""" - body = {} - if self.all_queued_runs is not None: - body["all_queued_runs"] = self.all_queued_runs - if self.job_id is not None: - body["job_id"] = self.job_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CancelAllRuns: - """Deserializes the CancelAllRuns from a dictionary.""" - return cls(all_queued_runs=d.get("all_queued_runs", None), job_id=d.get("job_id", None)) - - @dataclass class CancelAllRunsResponse: def as_dict(self) -> dict: @@ -483,31 +450,6 @@ def from_dict(cls, d: Dict[str, Any]) -> CancelAllRunsResponse: return cls() -@dataclass -class CancelRun: - run_id: int - """This field is required.""" - - def as_dict(self) -> dict: - """Serializes the CancelRun into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.run_id is not None: - body["run_id"] = self.run_id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CancelRun into a shallow dictionary of its immediate attributes.""" - body = {} - if self.run_id is not None: - body["run_id"] = self.run_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CancelRun: - """Deserializes the CancelRun from a dictionary.""" - return cls(run_id=d.get("run_id", None)) - - @dataclass class CancelRunResponse: def as_dict(self) -> dict: @@ -932,267 +874,7 @@ def as_shallow_dict(self) -> dict: @classmethod def 
from_dict(cls, d: Dict[str, Any]) -> Continuous: """Deserializes the Continuous from a dictionary.""" - return cls(pause_status=_enum(d, "pause_status", PauseStatus)) - - -@dataclass -class CreateJob: - access_control_list: Optional[List[JobAccessControlRequest]] = None - """List of permissions to set on the job.""" - - budget_policy_id: Optional[str] = None - """The id of the user specified budget policy to use for this job. If not specified, a default - budget policy may be applied when creating or modifying the job. See - `effective_budget_policy_id` for the budget policy used by this workload.""" - - continuous: Optional[Continuous] = None - """An optional continuous property for this job. The continuous property will ensure that there is - always one run executing. Only one of `schedule` and `continuous` can be used.""" - - deployment: Optional[JobDeployment] = None - """Deployment information for jobs managed by external sources.""" - - description: Optional[str] = None - """An optional description for the job. The maximum length is 27700 characters in UTF-8 encoding.""" - - edit_mode: Optional[JobEditMode] = None - """Edit mode of the job. - - * `UI_LOCKED`: The job is in a locked UI state and cannot be modified. * `EDITABLE`: The job is - in an editable state and can be modified.""" - - email_notifications: Optional[JobEmailNotifications] = None - """An optional set of email addresses that is notified when runs of this job begin or complete as - well as when this job is deleted.""" - - environments: Optional[List[JobEnvironment]] = None - """A list of task execution environment specifications that can be referenced by serverless tasks - of this job. An environment is required to be present for serverless tasks. For serverless - notebook tasks, the environment is accessible in the notebook environment panel. 
For other - serverless tasks, the task environment is required to be specified using environment_key in the - task settings.""" - - format: Optional[Format] = None - """Used to tell what is the format of the job. This field is ignored in Create/Update/Reset calls. - When using the Jobs API 2.1 this value is always set to `"MULTI_TASK"`.""" - - git_source: Optional[GitSource] = None - """An optional specification for a remote Git repository containing the source code used by tasks. - Version-controlled source code is supported by notebook, dbt, Python script, and SQL File tasks. - - If `git_source` is set, these tasks retrieve the file from the remote repository by default. - However, this behavior can be overridden by setting `source` to `WORKSPACE` on the task. - - Note: dbt and SQL File tasks support only version-controlled sources. If dbt or SQL File tasks - are used, `git_source` must be defined on the job.""" - - health: Optional[JobsHealthRules] = None - - job_clusters: Optional[List[JobCluster]] = None - """A list of job cluster specifications that can be shared and reused by tasks of this job. - Libraries cannot be declared in a shared job cluster. You must declare dependent libraries in - task settings.""" - - max_concurrent_runs: Optional[int] = None - """An optional maximum allowed number of concurrent runs of the job. Set this value if you want to - be able to execute multiple runs of the same job concurrently. This is useful for example if you - trigger your job on a frequent schedule and want to allow consecutive runs to overlap with each - other, or if you want to trigger multiple runs which differ by their input parameters. This - setting affects only new runs. For example, suppose the job’s concurrency is 4 and there are 4 - concurrent active runs. Then setting the concurrency to 3 won’t kill any of the active runs. - However, from then on, new runs are skipped unless there are fewer than 3 active runs. This - value cannot exceed 1000. 
Setting this value to `0` causes all new runs to be skipped.""" - - name: Optional[str] = None - """An optional name for the job. The maximum length is 4096 bytes in UTF-8 encoding.""" - - notification_settings: Optional[JobNotificationSettings] = None - """Optional notification settings that are used when sending notifications to each of the - `email_notifications` and `webhook_notifications` for this job.""" - - parameters: Optional[List[JobParameterDefinition]] = None - """Job-level parameter definitions""" - - performance_target: Optional[PerformanceTarget] = None - """The performance mode on a serverless job. This field determines the level of compute performance - or cost-efficiency for the run. - - * `STANDARD`: Enables cost-efficient execution of serverless workloads. * - `PERFORMANCE_OPTIMIZED`: Prioritizes fast startup and execution times through rapid scaling and - optimized cluster performance.""" - - queue: Optional[QueueSettings] = None - """The queue settings of the job.""" - - run_as: Optional[JobRunAs] = None - - schedule: Optional[CronSchedule] = None - """An optional periodic schedule for this job. The default behavior is that the job only runs when - triggered by clicking “Run Now” in the Jobs UI or sending an API request to `runNow`.""" - - tags: Optional[Dict[str, str]] = None - """A map of tags associated with the job. These are forwarded to the cluster as cluster tags for - jobs clusters, and are subject to the same limitations as cluster tags. A maximum of 25 tags can - be added to the job.""" - - tasks: Optional[List[Task]] = None - """A list of task specifications to be executed by this job. It supports up to 1000 elements in - write endpoints (:method:jobs/create, :method:jobs/reset, :method:jobs/update, - :method:jobs/submit). Read endpoints return only 100 tasks. If more than 100 tasks are - available, you can paginate through them using :method:jobs/get. 
Use the `next_page_token` field - at the object root to determine if more results are available.""" - - timeout_seconds: Optional[int] = None - """An optional timeout applied to each run of this job. A value of `0` means no timeout.""" - - trigger: Optional[TriggerSettings] = None - """A configuration to trigger a run when certain conditions are met. The default behavior is that - the job runs only when triggered by clicking “Run Now” in the Jobs UI or sending an API - request to `runNow`.""" - - webhook_notifications: Optional[WebhookNotifications] = None - """A collection of system notification IDs to notify when runs of this job begin or complete.""" - - def as_dict(self) -> dict: - """Serializes the CreateJob into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.access_control_list: - body["access_control_list"] = [v.as_dict() for v in self.access_control_list] - if self.budget_policy_id is not None: - body["budget_policy_id"] = self.budget_policy_id - if self.continuous: - body["continuous"] = self.continuous.as_dict() - if self.deployment: - body["deployment"] = self.deployment.as_dict() - if self.description is not None: - body["description"] = self.description - if self.edit_mode is not None: - body["edit_mode"] = self.edit_mode.value - if self.email_notifications: - body["email_notifications"] = self.email_notifications.as_dict() - if self.environments: - body["environments"] = [v.as_dict() for v in self.environments] - if self.format is not None: - body["format"] = self.format.value - if self.git_source: - body["git_source"] = self.git_source.as_dict() - if self.health: - body["health"] = self.health.as_dict() - if self.job_clusters: - body["job_clusters"] = [v.as_dict() for v in self.job_clusters] - if self.max_concurrent_runs is not None: - body["max_concurrent_runs"] = self.max_concurrent_runs - if self.name is not None: - body["name"] = self.name - if self.notification_settings: - body["notification_settings"] = 
self.notification_settings.as_dict() - if self.parameters: - body["parameters"] = [v.as_dict() for v in self.parameters] - if self.performance_target is not None: - body["performance_target"] = self.performance_target.value - if self.queue: - body["queue"] = self.queue.as_dict() - if self.run_as: - body["run_as"] = self.run_as.as_dict() - if self.schedule: - body["schedule"] = self.schedule.as_dict() - if self.tags: - body["tags"] = self.tags - if self.tasks: - body["tasks"] = [v.as_dict() for v in self.tasks] - if self.timeout_seconds is not None: - body["timeout_seconds"] = self.timeout_seconds - if self.trigger: - body["trigger"] = self.trigger.as_dict() - if self.webhook_notifications: - body["webhook_notifications"] = self.webhook_notifications.as_dict() - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreateJob into a shallow dictionary of its immediate attributes.""" - body = {} - if self.access_control_list: - body["access_control_list"] = self.access_control_list - if self.budget_policy_id is not None: - body["budget_policy_id"] = self.budget_policy_id - if self.continuous: - body["continuous"] = self.continuous - if self.deployment: - body["deployment"] = self.deployment - if self.description is not None: - body["description"] = self.description - if self.edit_mode is not None: - body["edit_mode"] = self.edit_mode - if self.email_notifications: - body["email_notifications"] = self.email_notifications - if self.environments: - body["environments"] = self.environments - if self.format is not None: - body["format"] = self.format - if self.git_source: - body["git_source"] = self.git_source - if self.health: - body["health"] = self.health - if self.job_clusters: - body["job_clusters"] = self.job_clusters - if self.max_concurrent_runs is not None: - body["max_concurrent_runs"] = self.max_concurrent_runs - if self.name is not None: - body["name"] = self.name - if self.notification_settings: - body["notification_settings"] = 
self.notification_settings - if self.parameters: - body["parameters"] = self.parameters - if self.performance_target is not None: - body["performance_target"] = self.performance_target - if self.queue: - body["queue"] = self.queue - if self.run_as: - body["run_as"] = self.run_as - if self.schedule: - body["schedule"] = self.schedule - if self.tags: - body["tags"] = self.tags - if self.tasks: - body["tasks"] = self.tasks - if self.timeout_seconds is not None: - body["timeout_seconds"] = self.timeout_seconds - if self.trigger: - body["trigger"] = self.trigger - if self.webhook_notifications: - body["webhook_notifications"] = self.webhook_notifications - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateJob: - """Deserializes the CreateJob from a dictionary.""" - return cls( - access_control_list=_repeated_dict(d, "access_control_list", JobAccessControlRequest), - budget_policy_id=d.get("budget_policy_id", None), - continuous=_from_dict(d, "continuous", Continuous), - deployment=_from_dict(d, "deployment", JobDeployment), - description=d.get("description", None), - edit_mode=_enum(d, "edit_mode", JobEditMode), - email_notifications=_from_dict(d, "email_notifications", JobEmailNotifications), - environments=_repeated_dict(d, "environments", JobEnvironment), - format=_enum(d, "format", Format), - git_source=_from_dict(d, "git_source", GitSource), - health=_from_dict(d, "health", JobsHealthRules), - job_clusters=_repeated_dict(d, "job_clusters", JobCluster), - max_concurrent_runs=d.get("max_concurrent_runs", None), - name=d.get("name", None), - notification_settings=_from_dict(d, "notification_settings", JobNotificationSettings), - parameters=_repeated_dict(d, "parameters", JobParameterDefinition), - performance_target=_enum(d, "performance_target", PerformanceTarget), - queue=_from_dict(d, "queue", QueueSettings), - run_as=_from_dict(d, "run_as", JobRunAs), - schedule=_from_dict(d, "schedule", CronSchedule), - tags=d.get("tags", None), - 
tasks=_repeated_dict(d, "tasks", Task), - timeout_seconds=d.get("timeout_seconds", None), - trigger=_from_dict(d, "trigger", TriggerSettings), - webhook_notifications=_from_dict(d, "webhook_notifications", WebhookNotifications), - ) + return cls(pause_status=_enum(d, "pause_status", PauseStatus)) @dataclass @@ -1803,31 +1485,6 @@ def from_dict(cls, d: Dict[str, Any]) -> DbtTask: ) -@dataclass -class DeleteJob: - job_id: int - """The canonical identifier of the job to delete. This field is required.""" - - def as_dict(self) -> dict: - """Serializes the DeleteJob into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.job_id is not None: - body["job_id"] = self.job_id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the DeleteJob into a shallow dictionary of its immediate attributes.""" - body = {} - if self.job_id is not None: - body["job_id"] = self.job_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DeleteJob: - """Deserializes the DeleteJob from a dictionary.""" - return cls(job_id=d.get("job_id", None)) - - @dataclass class DeleteResponse: def as_dict(self) -> dict: @@ -1846,31 +1503,6 @@ def from_dict(cls, d: Dict[str, Any]) -> DeleteResponse: return cls() -@dataclass -class DeleteRun: - run_id: int - """ID of the run to delete.""" - - def as_dict(self) -> dict: - """Serializes the DeleteRun into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.run_id is not None: - body["run_id"] = self.run_id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the DeleteRun into a shallow dictionary of its immediate attributes.""" - body = {} - if self.run_id is not None: - body["run_id"] = self.run_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DeleteRun: - """Deserializes the DeleteRun from a dictionary.""" - return cls(run_id=d.get("run_id", None)) - - @dataclass class DeleteRunResponse: def as_dict(self) -> dict: @@ 
-1937,38 +1569,6 @@ def from_dict(cls, d: Dict[str, Any]) -> EnforcePolicyComplianceForJobResponseJo ) -@dataclass -class EnforcePolicyComplianceRequest: - job_id: int - """The ID of the job you want to enforce policy compliance on.""" - - validate_only: Optional[bool] = None - """If set, previews changes made to the job to comply with its policy, but does not update the job.""" - - def as_dict(self) -> dict: - """Serializes the EnforcePolicyComplianceRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.job_id is not None: - body["job_id"] = self.job_id - if self.validate_only is not None: - body["validate_only"] = self.validate_only - return body - - def as_shallow_dict(self) -> dict: - """Serializes the EnforcePolicyComplianceRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.job_id is not None: - body["job_id"] = self.job_id - if self.validate_only is not None: - body["validate_only"] = self.validate_only - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> EnforcePolicyComplianceRequest: - """Deserializes the EnforcePolicyComplianceRequest from a dictionary.""" - return cls(job_id=d.get("job_id", None), validate_only=d.get("validate_only", None)) - - @dataclass class EnforcePolicyComplianceResponse: has_changes: Optional[bool] = None @@ -3283,40 +2883,6 @@ def from_dict(cls, d: Dict[str, Any]) -> JobPermissionsDescription: ) -@dataclass -class JobPermissionsRequest: - access_control_list: Optional[List[JobAccessControlRequest]] = None - - job_id: Optional[str] = None - """The job for which to get or manage permissions.""" - - def as_dict(self) -> dict: - """Serializes the JobPermissionsRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.access_control_list: - body["access_control_list"] = [v.as_dict() for v in self.access_control_list] - if self.job_id is not None: - body["job_id"] = self.job_id - return body - - def 
as_shallow_dict(self) -> dict: - """Serializes the JobPermissionsRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.access_control_list: - body["access_control_list"] = self.access_control_list - if self.job_id is not None: - body["job_id"] = self.job_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> JobPermissionsRequest: - """Deserializes the JobPermissionsRequest from a dictionary.""" - return cls( - access_control_list=_repeated_dict(d, "access_control_list", JobAccessControlRequest), - job_id=d.get("job_id", None), - ) - - @dataclass class JobRunAs: """Write-only setting. Specifies the user or service principal that the job runs as. If not @@ -3443,6 +3009,9 @@ class JobSettings: """The queue settings of the job.""" run_as: Optional[JobRunAs] = None + """The user or service principal that the job runs as, if specified in the request. This field + indicates the explicit configuration of `run_as` for the job. To find the value in all cases, + explicit or implicit, use `run_as_user_name`.""" schedule: Optional[CronSchedule] = None """An optional periodic schedule for this job. 
The default behavior is that the job only runs when @@ -4533,240 +4102,42 @@ def as_shallow_dict(self) -> dict: body["effective_performance_target"] = self.effective_performance_target if self.end_time is not None: body["end_time"] = self.end_time - if self.id is not None: - body["id"] = self.id - if self.start_time is not None: - body["start_time"] = self.start_time - if self.state: - body["state"] = self.state - if self.status: - body["status"] = self.status - if self.task_run_ids: - body["task_run_ids"] = self.task_run_ids - if self.type is not None: - body["type"] = self.type - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> RepairHistoryItem: - """Deserializes the RepairHistoryItem from a dictionary.""" - return cls( - effective_performance_target=_enum(d, "effective_performance_target", PerformanceTarget), - end_time=d.get("end_time", None), - id=d.get("id", None), - start_time=d.get("start_time", None), - state=_from_dict(d, "state", RunState), - status=_from_dict(d, "status", RunStatus), - task_run_ids=d.get("task_run_ids", None), - type=_enum(d, "type", RepairHistoryItemType), - ) - - -class RepairHistoryItemType(Enum): - """The repair history item type. Indicates whether a run is the original run or a repair run.""" - - ORIGINAL = "ORIGINAL" - REPAIR = "REPAIR" - - -@dataclass -class RepairRun: - run_id: int - """The job run ID of the run to repair. The run must not be in progress.""" - - dbt_commands: Optional[List[str]] = None - """An array of commands to execute for jobs with the dbt task, for example `"dbt_commands": ["dbt - deps", "dbt seed", "dbt deps", "dbt seed", "dbt run"]`""" - - jar_params: Optional[List[str]] = None - """A list of parameters for jobs with Spark JAR tasks, for example `"jar_params": ["john doe", - "35"]`. The parameters are used to invoke the main function of the main class specified in the - Spark JAR task. If not specified upon `run-now`, it defaults to an empty list. 
jar_params cannot - be specified in conjunction with notebook_params. The JSON representation of this field (for - example `{"jar_params":["john doe","35"]}`) cannot exceed 10,000 bytes. - - Use [Task parameter variables] to set parameters containing information about job runs. - - [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables""" - - job_parameters: Optional[Dict[str, str]] = None - """Job-level parameters used in the run. for example `"param": "overriding_val"`""" - - latest_repair_id: Optional[int] = None - """The ID of the latest repair. This parameter is not required when repairing a run for the first - time, but must be provided on subsequent requests to repair the same run.""" - - notebook_params: Optional[Dict[str, str]] = None - """A map from keys to values for jobs with notebook task, for example `"notebook_params": {"name": - "john doe", "age": "35"}`. The map is passed to the notebook and is accessible through the - [dbutils.widgets.get] function. - - If not specified upon `run-now`, the triggered run uses the job’s base parameters. - - notebook_params cannot be specified in conjunction with jar_params. - - Use [Task parameter variables] to set parameters containing information about job runs. - - The JSON representation of this field (for example `{"notebook_params":{"name":"john - doe","age":"35"}}`) cannot exceed 10,000 bytes. - - [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables - [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html""" - - performance_target: Optional[PerformanceTarget] = None - """The performance mode on a serverless job. The performance target determines the level of compute - performance or cost-efficiency for the run. This field overrides the performance target defined - on the job level. - - * `STANDARD`: Enables cost-efficient execution of serverless workloads. 
* - `PERFORMANCE_OPTIMIZED`: Prioritizes fast startup and execution times through rapid scaling and - optimized cluster performance.""" - - pipeline_params: Optional[PipelineParams] = None - """Controls whether the pipeline should perform a full refresh""" - - python_named_params: Optional[Dict[str, str]] = None - - python_params: Optional[List[str]] = None - """A list of parameters for jobs with Python tasks, for example `"python_params": ["john doe", - "35"]`. The parameters are passed to Python file as command-line parameters. If specified upon - `run-now`, it would overwrite the parameters specified in job setting. The JSON representation - of this field (for example `{"python_params":["john doe","35"]}`) cannot exceed 10,000 bytes. - - Use [Task parameter variables] to set parameters containing information about job runs. - - Important - - These parameters accept only Latin characters (ASCII character set). Using non-ASCII characters - returns an error. Examples of invalid, non-ASCII characters are Chinese, Japanese kanjis, and - emojis. - - [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables""" - - rerun_all_failed_tasks: Optional[bool] = None - """If true, repair all failed tasks. Only one of `rerun_tasks` or `rerun_all_failed_tasks` can be - used.""" - - rerun_dependent_tasks: Optional[bool] = None - """If true, repair all tasks that depend on the tasks in `rerun_tasks`, even if they were - previously successful. Can be also used in combination with `rerun_all_failed_tasks`.""" - - rerun_tasks: Optional[List[str]] = None - """The task keys of the task runs to repair.""" - - spark_submit_params: Optional[List[str]] = None - """A list of parameters for jobs with spark submit task, for example `"spark_submit_params": - ["--class", "org.apache.spark.examples.SparkPi"]`. The parameters are passed to spark-submit - script as command-line parameters. 
If specified upon `run-now`, it would overwrite the - parameters specified in job setting. The JSON representation of this field (for example - `{"python_params":["john doe","35"]}`) cannot exceed 10,000 bytes. - - Use [Task parameter variables] to set parameters containing information about job runs - - Important - - These parameters accept only Latin characters (ASCII character set). Using non-ASCII characters - returns an error. Examples of invalid, non-ASCII characters are Chinese, Japanese kanjis, and - emojis. - - [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables""" - - sql_params: Optional[Dict[str, str]] = None - """A map from keys to values for jobs with SQL task, for example `"sql_params": {"name": "john - doe", "age": "35"}`. The SQL alert task does not support custom parameters.""" - - def as_dict(self) -> dict: - """Serializes the RepairRun into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.dbt_commands: - body["dbt_commands"] = [v for v in self.dbt_commands] - if self.jar_params: - body["jar_params"] = [v for v in self.jar_params] - if self.job_parameters: - body["job_parameters"] = self.job_parameters - if self.latest_repair_id is not None: - body["latest_repair_id"] = self.latest_repair_id - if self.notebook_params: - body["notebook_params"] = self.notebook_params - if self.performance_target is not None: - body["performance_target"] = self.performance_target.value - if self.pipeline_params: - body["pipeline_params"] = self.pipeline_params.as_dict() - if self.python_named_params: - body["python_named_params"] = self.python_named_params - if self.python_params: - body["python_params"] = [v for v in self.python_params] - if self.rerun_all_failed_tasks is not None: - body["rerun_all_failed_tasks"] = self.rerun_all_failed_tasks - if self.rerun_dependent_tasks is not None: - body["rerun_dependent_tasks"] = self.rerun_dependent_tasks - if self.rerun_tasks: - body["rerun_tasks"] = [v for 
v in self.rerun_tasks] - if self.run_id is not None: - body["run_id"] = self.run_id - if self.spark_submit_params: - body["spark_submit_params"] = [v for v in self.spark_submit_params] - if self.sql_params: - body["sql_params"] = self.sql_params - return body - - def as_shallow_dict(self) -> dict: - """Serializes the RepairRun into a shallow dictionary of its immediate attributes.""" - body = {} - if self.dbt_commands: - body["dbt_commands"] = self.dbt_commands - if self.jar_params: - body["jar_params"] = self.jar_params - if self.job_parameters: - body["job_parameters"] = self.job_parameters - if self.latest_repair_id is not None: - body["latest_repair_id"] = self.latest_repair_id - if self.notebook_params: - body["notebook_params"] = self.notebook_params - if self.performance_target is not None: - body["performance_target"] = self.performance_target - if self.pipeline_params: - body["pipeline_params"] = self.pipeline_params - if self.python_named_params: - body["python_named_params"] = self.python_named_params - if self.python_params: - body["python_params"] = self.python_params - if self.rerun_all_failed_tasks is not None: - body["rerun_all_failed_tasks"] = self.rerun_all_failed_tasks - if self.rerun_dependent_tasks is not None: - body["rerun_dependent_tasks"] = self.rerun_dependent_tasks - if self.rerun_tasks: - body["rerun_tasks"] = self.rerun_tasks - if self.run_id is not None: - body["run_id"] = self.run_id - if self.spark_submit_params: - body["spark_submit_params"] = self.spark_submit_params - if self.sql_params: - body["sql_params"] = self.sql_params + if self.id is not None: + body["id"] = self.id + if self.start_time is not None: + body["start_time"] = self.start_time + if self.state: + body["state"] = self.state + if self.status: + body["status"] = self.status + if self.task_run_ids: + body["task_run_ids"] = self.task_run_ids + if self.type is not None: + body["type"] = self.type return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> 
RepairRun: - """Deserializes the RepairRun from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> RepairHistoryItem: + """Deserializes the RepairHistoryItem from a dictionary.""" return cls( - dbt_commands=d.get("dbt_commands", None), - jar_params=d.get("jar_params", None), - job_parameters=d.get("job_parameters", None), - latest_repair_id=d.get("latest_repair_id", None), - notebook_params=d.get("notebook_params", None), - performance_target=_enum(d, "performance_target", PerformanceTarget), - pipeline_params=_from_dict(d, "pipeline_params", PipelineParams), - python_named_params=d.get("python_named_params", None), - python_params=d.get("python_params", None), - rerun_all_failed_tasks=d.get("rerun_all_failed_tasks", None), - rerun_dependent_tasks=d.get("rerun_dependent_tasks", None), - rerun_tasks=d.get("rerun_tasks", None), - run_id=d.get("run_id", None), - spark_submit_params=d.get("spark_submit_params", None), - sql_params=d.get("sql_params", None), + effective_performance_target=_enum(d, "effective_performance_target", PerformanceTarget), + end_time=d.get("end_time", None), + id=d.get("id", None), + start_time=d.get("start_time", None), + state=_from_dict(d, "state", RunState), + status=_from_dict(d, "status", RunStatus), + task_run_ids=d.get("task_run_ids", None), + type=_enum(d, "type", RepairHistoryItemType), ) +class RepairHistoryItemType(Enum): + """The repair history item type. Indicates whether a run is the original run or a repair run.""" + + ORIGINAL = "ORIGINAL" + REPAIR = "REPAIR" + + @dataclass class RepairRunResponse: """Run repair was initiated.""" @@ -4795,41 +4166,6 @@ def from_dict(cls, d: Dict[str, Any]) -> RepairRunResponse: return cls(repair_id=d.get("repair_id", None)) -@dataclass -class ResetJob: - job_id: int - """The canonical identifier of the job to reset. This field is required.""" - - new_settings: JobSettings - """The new settings of the job. These settings completely replace the old settings. 
- - Changes to the field `JobBaseSettings.timeout_seconds` are applied to active runs. Changes to - other fields are applied to future runs only.""" - - def as_dict(self) -> dict: - """Serializes the ResetJob into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.job_id is not None: - body["job_id"] = self.job_id - if self.new_settings: - body["new_settings"] = self.new_settings.as_dict() - return body - - def as_shallow_dict(self) -> dict: - """Serializes the ResetJob into a shallow dictionary of its immediate attributes.""" - body = {} - if self.job_id is not None: - body["job_id"] = self.job_id - if self.new_settings: - body["new_settings"] = self.new_settings - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ResetJob: - """Deserializes the ResetJob from a dictionary.""" - return cls(job_id=d.get("job_id", None), new_settings=_from_dict(d, "new_settings", JobSettings)) - - @dataclass class ResetResponse: def as_dict(self) -> dict: @@ -5587,249 +4923,50 @@ class RunIf(Enum): `AT_LEAST_ONE_SUCCESS`: At least one dependency has succeeded * `NONE_FAILED`: None of the dependencies have failed and at least one was executed * `ALL_DONE`: All dependencies have been completed * `AT_LEAST_ONE_FAILED`: At least one dependency failed * `ALL_FAILED`: ALl - dependencies have failed""" - - ALL_DONE = "ALL_DONE" - ALL_FAILED = "ALL_FAILED" - ALL_SUCCESS = "ALL_SUCCESS" - AT_LEAST_ONE_FAILED = "AT_LEAST_ONE_FAILED" - AT_LEAST_ONE_SUCCESS = "AT_LEAST_ONE_SUCCESS" - NONE_FAILED = "NONE_FAILED" - - -@dataclass -class RunJobOutput: - run_id: Optional[int] = None - """The run id of the triggered job run""" - - def as_dict(self) -> dict: - """Serializes the RunJobOutput into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.run_id is not None: - body["run_id"] = self.run_id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the RunJobOutput into a shallow dictionary of its immediate 
attributes.""" - body = {} - if self.run_id is not None: - body["run_id"] = self.run_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> RunJobOutput: - """Deserializes the RunJobOutput from a dictionary.""" - return cls(run_id=d.get("run_id", None)) - - -@dataclass -class RunJobTask: - job_id: int - """ID of the job to trigger.""" - - dbt_commands: Optional[List[str]] = None - """An array of commands to execute for jobs with the dbt task, for example `"dbt_commands": ["dbt - deps", "dbt seed", "dbt deps", "dbt seed", "dbt run"]`""" - - jar_params: Optional[List[str]] = None - """A list of parameters for jobs with Spark JAR tasks, for example `"jar_params": ["john doe", - "35"]`. The parameters are used to invoke the main function of the main class specified in the - Spark JAR task. If not specified upon `run-now`, it defaults to an empty list. jar_params cannot - be specified in conjunction with notebook_params. The JSON representation of this field (for - example `{"jar_params":["john doe","35"]}`) cannot exceed 10,000 bytes. - - Use [Task parameter variables] to set parameters containing information about job runs. - - [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables""" - - job_parameters: Optional[Dict[str, str]] = None - """Job-level parameters used to trigger the job.""" - - notebook_params: Optional[Dict[str, str]] = None - """A map from keys to values for jobs with notebook task, for example `"notebook_params": {"name": - "john doe", "age": "35"}`. The map is passed to the notebook and is accessible through the - [dbutils.widgets.get] function. - - If not specified upon `run-now`, the triggered run uses the job’s base parameters. - - notebook_params cannot be specified in conjunction with jar_params. - - Use [Task parameter variables] to set parameters containing information about job runs. 
- - The JSON representation of this field (for example `{"notebook_params":{"name":"john - doe","age":"35"}}`) cannot exceed 10,000 bytes. - - [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables - [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html""" - - pipeline_params: Optional[PipelineParams] = None - """Controls whether the pipeline should perform a full refresh""" - - python_named_params: Optional[Dict[str, str]] = None - - python_params: Optional[List[str]] = None - """A list of parameters for jobs with Python tasks, for example `"python_params": ["john doe", - "35"]`. The parameters are passed to Python file as command-line parameters. If specified upon - `run-now`, it would overwrite the parameters specified in job setting. The JSON representation - of this field (for example `{"python_params":["john doe","35"]}`) cannot exceed 10,000 bytes. - - Use [Task parameter variables] to set parameters containing information about job runs. - - Important - - These parameters accept only Latin characters (ASCII character set). Using non-ASCII characters - returns an error. Examples of invalid, non-ASCII characters are Chinese, Japanese kanjis, and - emojis. - - [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables""" - - spark_submit_params: Optional[List[str]] = None - """A list of parameters for jobs with spark submit task, for example `"spark_submit_params": - ["--class", "org.apache.spark.examples.SparkPi"]`. The parameters are passed to spark-submit - script as command-line parameters. If specified upon `run-now`, it would overwrite the - parameters specified in job setting. The JSON representation of this field (for example - `{"python_params":["john doe","35"]}`) cannot exceed 10,000 bytes. 
- - Use [Task parameter variables] to set parameters containing information about job runs - - Important - - These parameters accept only Latin characters (ASCII character set). Using non-ASCII characters - returns an error. Examples of invalid, non-ASCII characters are Chinese, Japanese kanjis, and - emojis. - - [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables""" - - sql_params: Optional[Dict[str, str]] = None - """A map from keys to values for jobs with SQL task, for example `"sql_params": {"name": "john - doe", "age": "35"}`. The SQL alert task does not support custom parameters.""" - - def as_dict(self) -> dict: - """Serializes the RunJobTask into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.dbt_commands: - body["dbt_commands"] = [v for v in self.dbt_commands] - if self.jar_params: - body["jar_params"] = [v for v in self.jar_params] - if self.job_id is not None: - body["job_id"] = self.job_id - if self.job_parameters: - body["job_parameters"] = self.job_parameters - if self.notebook_params: - body["notebook_params"] = self.notebook_params - if self.pipeline_params: - body["pipeline_params"] = self.pipeline_params.as_dict() - if self.python_named_params: - body["python_named_params"] = self.python_named_params - if self.python_params: - body["python_params"] = [v for v in self.python_params] - if self.spark_submit_params: - body["spark_submit_params"] = [v for v in self.spark_submit_params] - if self.sql_params: - body["sql_params"] = self.sql_params - return body - - def as_shallow_dict(self) -> dict: - """Serializes the RunJobTask into a shallow dictionary of its immediate attributes.""" - body = {} - if self.dbt_commands: - body["dbt_commands"] = self.dbt_commands - if self.jar_params: - body["jar_params"] = self.jar_params - if self.job_id is not None: - body["job_id"] = self.job_id - if self.job_parameters: - body["job_parameters"] = self.job_parameters - if self.notebook_params: - 
body["notebook_params"] = self.notebook_params - if self.pipeline_params: - body["pipeline_params"] = self.pipeline_params - if self.python_named_params: - body["python_named_params"] = self.python_named_params - if self.python_params: - body["python_params"] = self.python_params - if self.spark_submit_params: - body["spark_submit_params"] = self.spark_submit_params - if self.sql_params: - body["sql_params"] = self.sql_params - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> RunJobTask: - """Deserializes the RunJobTask from a dictionary.""" - return cls( - dbt_commands=d.get("dbt_commands", None), - jar_params=d.get("jar_params", None), - job_id=d.get("job_id", None), - job_parameters=d.get("job_parameters", None), - notebook_params=d.get("notebook_params", None), - pipeline_params=_from_dict(d, "pipeline_params", PipelineParams), - python_named_params=d.get("python_named_params", None), - python_params=d.get("python_params", None), - spark_submit_params=d.get("spark_submit_params", None), - sql_params=d.get("sql_params", None), - ) - - -class RunLifeCycleState(Enum): - """A value indicating the run's lifecycle state. The possible values are: * `QUEUED`: The run is - queued. * `PENDING`: The run is waiting to be executed while the cluster and execution context - are being prepared. * `RUNNING`: The task of this run is being executed. * `TERMINATING`: The - task of this run has completed, and the cluster and execution context are being cleaned up. * - `TERMINATED`: The task of this run has completed, and the cluster and execution context have - been cleaned up. This state is terminal. * `SKIPPED`: This run was aborted because a previous - run of the same job was already active. This state is terminal. * `INTERNAL_ERROR`: An - exceptional state that indicates a failure in the Jobs service, such as network failure over a - long period. 
If a run on a new cluster ends in the `INTERNAL_ERROR` state, the Jobs service - terminates the cluster as soon as possible. This state is terminal. * `BLOCKED`: The run is - blocked on an upstream dependency. * `WAITING_FOR_RETRY`: The run is waiting for a retry.""" + dependencies have failed""" - BLOCKED = "BLOCKED" - INTERNAL_ERROR = "INTERNAL_ERROR" - PENDING = "PENDING" - QUEUED = "QUEUED" - RUNNING = "RUNNING" - SKIPPED = "SKIPPED" - TERMINATED = "TERMINATED" - TERMINATING = "TERMINATING" - WAITING_FOR_RETRY = "WAITING_FOR_RETRY" + ALL_DONE = "ALL_DONE" + ALL_FAILED = "ALL_FAILED" + ALL_SUCCESS = "ALL_SUCCESS" + AT_LEAST_ONE_FAILED = "AT_LEAST_ONE_FAILED" + AT_LEAST_ONE_SUCCESS = "AT_LEAST_ONE_SUCCESS" + NONE_FAILED = "NONE_FAILED" -class RunLifecycleStateV2State(Enum): - """The current state of the run.""" +@dataclass +class RunJobOutput: + run_id: Optional[int] = None + """The run id of the triggered job run""" - BLOCKED = "BLOCKED" - PENDING = "PENDING" - QUEUED = "QUEUED" - RUNNING = "RUNNING" - TERMINATED = "TERMINATED" - TERMINATING = "TERMINATING" - WAITING = "WAITING" + def as_dict(self) -> dict: + """Serializes the RunJobOutput into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.run_id is not None: + body["run_id"] = self.run_id + return body + + def as_shallow_dict(self) -> dict: + """Serializes the RunJobOutput into a shallow dictionary of its immediate attributes.""" + body = {} + if self.run_id is not None: + body["run_id"] = self.run_id + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> RunJobOutput: + """Deserializes the RunJobOutput from a dictionary.""" + return cls(run_id=d.get("run_id", None)) @dataclass -class RunNow: +class RunJobTask: job_id: int - """The ID of the job to be executed""" + """ID of the job to trigger.""" dbt_commands: Optional[List[str]] = None """An array of commands to execute for jobs with the dbt task, for example `"dbt_commands": ["dbt deps", "dbt seed", "dbt 
deps", "dbt seed", "dbt run"]`""" - idempotency_token: Optional[str] = None - """An optional token to guarantee the idempotency of job run requests. If a run with the provided - token already exists, the request does not create a new run but returns the ID of the existing - run instead. If a run with the provided token is deleted, an error is returned. - - If you specify the idempotency token, upon failure you can retry until the request succeeds. - Databricks guarantees that exactly one run is launched with that idempotency token. - - This token must have at most 64 characters. - - For more information, see [How to ensure idempotency for jobs]. - - [How to ensure idempotency for jobs]: https://kb.databricks.com/jobs/jobs-idempotency.html""" - jar_params: Optional[List[str]] = None """A list of parameters for jobs with Spark JAR tasks, for example `"jar_params": ["john doe", "35"]`. The parameters are used to invoke the main function of the main class specified in the @@ -5842,7 +4979,7 @@ class RunNow: [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables""" job_parameters: Optional[Dict[str, str]] = None - """Job-level parameters used in the run. for example `"param": "overriding_val"`""" + """Job-level parameters used to trigger the job.""" notebook_params: Optional[Dict[str, str]] = None """A map from keys to values for jobs with notebook task, for example `"notebook_params": {"name": @@ -5861,19 +4998,6 @@ class RunNow: [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html""" - only: Optional[List[str]] = None - """A list of task keys to run inside of the job. If this field is not provided, all tasks in the - job will be run.""" - - performance_target: Optional[PerformanceTarget] = None - """The performance mode on a serverless job. 
The performance target determines the level of compute - performance or cost-efficiency for the run. This field overrides the performance target defined - on the job level. - - * `STANDARD`: Enables cost-efficient execution of serverless workloads. * - `PERFORMANCE_OPTIMIZED`: Prioritizes fast startup and execution times through rapid scaling and - optimized cluster performance.""" - pipeline_params: Optional[PipelineParams] = None """Controls whether the pipeline should perform a full refresh""" @@ -5895,9 +5019,6 @@ class RunNow: [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables""" - queue: Optional[QueueSettings] = None - """The queue settings of the run.""" - spark_submit_params: Optional[List[str]] = None """A list of parameters for jobs with spark submit task, for example `"spark_submit_params": ["--class", "org.apache.spark.examples.SparkPi"]`. The parameters are passed to spark-submit @@ -5920,12 +5041,10 @@ class RunNow: doe", "age": "35"}`. The SQL alert task does not support custom parameters.""" def as_dict(self) -> dict: - """Serializes the RunNow into a dictionary suitable for use as a JSON request body.""" + """Serializes the RunJobTask into a dictionary suitable for use as a JSON request body.""" body = {} if self.dbt_commands: body["dbt_commands"] = [v for v in self.dbt_commands] - if self.idempotency_token is not None: - body["idempotency_token"] = self.idempotency_token if self.jar_params: body["jar_params"] = [v for v in self.jar_params] if self.job_id is not None: @@ -5934,18 +5053,12 @@ def as_dict(self) -> dict: body["job_parameters"] = self.job_parameters if self.notebook_params: body["notebook_params"] = self.notebook_params - if self.only: - body["only"] = [v for v in self.only] - if self.performance_target is not None: - body["performance_target"] = self.performance_target.value if self.pipeline_params: body["pipeline_params"] = self.pipeline_params.as_dict() if self.python_named_params: 
body["python_named_params"] = self.python_named_params if self.python_params: body["python_params"] = [v for v in self.python_params] - if self.queue: - body["queue"] = self.queue.as_dict() if self.spark_submit_params: body["spark_submit_params"] = [v for v in self.spark_submit_params] if self.sql_params: @@ -5953,12 +5066,10 @@ def as_dict(self) -> dict: return body def as_shallow_dict(self) -> dict: - """Serializes the RunNow into a shallow dictionary of its immediate attributes.""" + """Serializes the RunJobTask into a shallow dictionary of its immediate attributes.""" body = {} if self.dbt_commands: body["dbt_commands"] = self.dbt_commands - if self.idempotency_token is not None: - body["idempotency_token"] = self.idempotency_token if self.jar_params: body["jar_params"] = self.jar_params if self.job_id is not None: @@ -5967,18 +5078,12 @@ def as_shallow_dict(self) -> dict: body["job_parameters"] = self.job_parameters if self.notebook_params: body["notebook_params"] = self.notebook_params - if self.only: - body["only"] = self.only - if self.performance_target is not None: - body["performance_target"] = self.performance_target if self.pipeline_params: body["pipeline_params"] = self.pipeline_params if self.python_named_params: body["python_named_params"] = self.python_named_params if self.python_params: body["python_params"] = self.python_params - if self.queue: - body["queue"] = self.queue if self.spark_submit_params: body["spark_submit_params"] = self.spark_submit_params if self.sql_params: @@ -5986,26 +5091,58 @@ def as_shallow_dict(self) -> dict: return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> RunNow: - """Deserializes the RunNow from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> RunJobTask: + """Deserializes the RunJobTask from a dictionary.""" return cls( dbt_commands=d.get("dbt_commands", None), - idempotency_token=d.get("idempotency_token", None), jar_params=d.get("jar_params", None), job_id=d.get("job_id", None), 
job_parameters=d.get("job_parameters", None), notebook_params=d.get("notebook_params", None), - only=d.get("only", None), - performance_target=_enum(d, "performance_target", PerformanceTarget), pipeline_params=_from_dict(d, "pipeline_params", PipelineParams), python_named_params=d.get("python_named_params", None), python_params=d.get("python_params", None), - queue=_from_dict(d, "queue", QueueSettings), spark_submit_params=d.get("spark_submit_params", None), sql_params=d.get("sql_params", None), ) +class RunLifeCycleState(Enum): + """A value indicating the run's lifecycle state. The possible values are: * `QUEUED`: The run is + queued. * `PENDING`: The run is waiting to be executed while the cluster and execution context + are being prepared. * `RUNNING`: The task of this run is being executed. * `TERMINATING`: The + task of this run has completed, and the cluster and execution context are being cleaned up. * + `TERMINATED`: The task of this run has completed, and the cluster and execution context have + been cleaned up. This state is terminal. * `SKIPPED`: This run was aborted because a previous + run of the same job was already active. This state is terminal. * `INTERNAL_ERROR`: An + exceptional state that indicates a failure in the Jobs service, such as network failure over a + long period. If a run on a new cluster ends in the `INTERNAL_ERROR` state, the Jobs service + terminates the cluster as soon as possible. This state is terminal. * `BLOCKED`: The run is + blocked on an upstream dependency. 
* `WAITING_FOR_RETRY`: The run is waiting for a retry.""" + + BLOCKED = "BLOCKED" + INTERNAL_ERROR = "INTERNAL_ERROR" + PENDING = "PENDING" + QUEUED = "QUEUED" + RUNNING = "RUNNING" + SKIPPED = "SKIPPED" + TERMINATED = "TERMINATED" + TERMINATING = "TERMINATING" + WAITING_FOR_RETRY = "WAITING_FOR_RETRY" + + +class RunLifecycleStateV2State(Enum): + """The current state of the run.""" + + BLOCKED = "BLOCKED" + PENDING = "PENDING" + QUEUED = "QUEUED" + RUNNING = "RUNNING" + TERMINATED = "TERMINATED" + TERMINATING = "TERMINATING" + WAITING = "WAITING" + + @dataclass class RunNowResponse: """Run was started successfully.""" @@ -7690,156 +6827,6 @@ class StorageMode(Enum): IMPORT = "IMPORT" -@dataclass -class SubmitRun: - access_control_list: Optional[List[JobAccessControlRequest]] = None - """List of permissions to set on the job.""" - - budget_policy_id: Optional[str] = None - """The user specified id of the budget policy to use for this one-time run. If not specified, the - run will be not be attributed to any budget policy.""" - - email_notifications: Optional[JobEmailNotifications] = None - """An optional set of email addresses notified when the run begins or completes.""" - - environments: Optional[List[JobEnvironment]] = None - """A list of task execution environment specifications that can be referenced by tasks of this run.""" - - git_source: Optional[GitSource] = None - """An optional specification for a remote Git repository containing the source code used by tasks. - Version-controlled source code is supported by notebook, dbt, Python script, and SQL File tasks. - - If `git_source` is set, these tasks retrieve the file from the remote repository by default. - However, this behavior can be overridden by setting `source` to `WORKSPACE` on the task. - - Note: dbt and SQL File tasks support only version-controlled sources. 
If dbt or SQL File tasks - are used, `git_source` must be defined on the job.""" - - health: Optional[JobsHealthRules] = None - - idempotency_token: Optional[str] = None - """An optional token that can be used to guarantee the idempotency of job run requests. If a run - with the provided token already exists, the request does not create a new run but returns the ID - of the existing run instead. If a run with the provided token is deleted, an error is returned. - - If you specify the idempotency token, upon failure you can retry until the request succeeds. - Databricks guarantees that exactly one run is launched with that idempotency token. - - This token must have at most 64 characters. - - For more information, see [How to ensure idempotency for jobs]. - - [How to ensure idempotency for jobs]: https://kb.databricks.com/jobs/jobs-idempotency.html""" - - notification_settings: Optional[JobNotificationSettings] = None - """Optional notification settings that are used when sending notifications to each of the - `email_notifications` and `webhook_notifications` for this run.""" - - queue: Optional[QueueSettings] = None - """The queue settings of the one-time run.""" - - run_as: Optional[JobRunAs] = None - """Specifies the user or service principal that the job runs as. If not specified, the job runs as - the user who submits the request.""" - - run_name: Optional[str] = None - """An optional name for the run. The default value is `Untitled`.""" - - tasks: Optional[List[SubmitTask]] = None - - timeout_seconds: Optional[int] = None - """An optional timeout applied to each run of this job. 
A value of `0` means no timeout.""" - - webhook_notifications: Optional[WebhookNotifications] = None - """A collection of system notification IDs to notify when the run begins or completes.""" - - def as_dict(self) -> dict: - """Serializes the SubmitRun into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.access_control_list: - body["access_control_list"] = [v.as_dict() for v in self.access_control_list] - if self.budget_policy_id is not None: - body["budget_policy_id"] = self.budget_policy_id - if self.email_notifications: - body["email_notifications"] = self.email_notifications.as_dict() - if self.environments: - body["environments"] = [v.as_dict() for v in self.environments] - if self.git_source: - body["git_source"] = self.git_source.as_dict() - if self.health: - body["health"] = self.health.as_dict() - if self.idempotency_token is not None: - body["idempotency_token"] = self.idempotency_token - if self.notification_settings: - body["notification_settings"] = self.notification_settings.as_dict() - if self.queue: - body["queue"] = self.queue.as_dict() - if self.run_as: - body["run_as"] = self.run_as.as_dict() - if self.run_name is not None: - body["run_name"] = self.run_name - if self.tasks: - body["tasks"] = [v.as_dict() for v in self.tasks] - if self.timeout_seconds is not None: - body["timeout_seconds"] = self.timeout_seconds - if self.webhook_notifications: - body["webhook_notifications"] = self.webhook_notifications.as_dict() - return body - - def as_shallow_dict(self) -> dict: - """Serializes the SubmitRun into a shallow dictionary of its immediate attributes.""" - body = {} - if self.access_control_list: - body["access_control_list"] = self.access_control_list - if self.budget_policy_id is not None: - body["budget_policy_id"] = self.budget_policy_id - if self.email_notifications: - body["email_notifications"] = self.email_notifications - if self.environments: - body["environments"] = self.environments - if self.git_source: - 
body["git_source"] = self.git_source - if self.health: - body["health"] = self.health - if self.idempotency_token is not None: - body["idempotency_token"] = self.idempotency_token - if self.notification_settings: - body["notification_settings"] = self.notification_settings - if self.queue: - body["queue"] = self.queue - if self.run_as: - body["run_as"] = self.run_as - if self.run_name is not None: - body["run_name"] = self.run_name - if self.tasks: - body["tasks"] = self.tasks - if self.timeout_seconds is not None: - body["timeout_seconds"] = self.timeout_seconds - if self.webhook_notifications: - body["webhook_notifications"] = self.webhook_notifications - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> SubmitRun: - """Deserializes the SubmitRun from a dictionary.""" - return cls( - access_control_list=_repeated_dict(d, "access_control_list", JobAccessControlRequest), - budget_policy_id=d.get("budget_policy_id", None), - email_notifications=_from_dict(d, "email_notifications", JobEmailNotifications), - environments=_repeated_dict(d, "environments", JobEnvironment), - git_source=_from_dict(d, "git_source", GitSource), - health=_from_dict(d, "health", JobsHealthRules), - idempotency_token=d.get("idempotency_token", None), - notification_settings=_from_dict(d, "notification_settings", JobNotificationSettings), - queue=_from_dict(d, "queue", QueueSettings), - run_as=_from_dict(d, "run_as", JobRunAs), - run_name=d.get("run_name", None), - tasks=_repeated_dict(d, "tasks", SubmitTask), - timeout_seconds=d.get("timeout_seconds", None), - webhook_notifications=_from_dict(d, "webhook_notifications", WebhookNotifications), - ) - - @dataclass class SubmitRunResponse: """Run was created and started successfully.""" @@ -9049,59 +8036,6 @@ class TriggerType(Enum): TABLE = "TABLE" -@dataclass -class UpdateJob: - job_id: int - """The canonical identifier of the job to update. 
This field is required.""" - - fields_to_remove: Optional[List[str]] = None - """Remove top-level fields in the job settings. Removing nested fields is not supported, except for - tasks and job clusters (`tasks/task_1`). This field is optional.""" - - new_settings: Optional[JobSettings] = None - """The new settings for the job. - - Top-level fields specified in `new_settings` are completely replaced, except for arrays which - are merged. That is, new and existing entries are completely replaced based on the respective - key fields, i.e. `task_key` or `job_cluster_key`, while previous entries are kept. - - Partially updating nested fields is not supported. - - Changes to the field `JobSettings.timeout_seconds` are applied to active runs. Changes to other - fields are applied to future runs only.""" - - def as_dict(self) -> dict: - """Serializes the UpdateJob into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.fields_to_remove: - body["fields_to_remove"] = [v for v in self.fields_to_remove] - if self.job_id is not None: - body["job_id"] = self.job_id - if self.new_settings: - body["new_settings"] = self.new_settings.as_dict() - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateJob into a shallow dictionary of its immediate attributes.""" - body = {} - if self.fields_to_remove: - body["fields_to_remove"] = self.fields_to_remove - if self.job_id is not None: - body["job_id"] = self.job_id - if self.new_settings: - body["new_settings"] = self.new_settings - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateJob: - """Deserializes the UpdateJob from a dictionary.""" - return cls( - fields_to_remove=d.get("fields_to_remove", None), - job_id=d.get("job_id", None), - new_settings=_from_dict(d, "new_settings", JobSettings), - ) - - @dataclass class UpdateResponse: def as_dict(self) -> dict: @@ -9499,6 +8433,9 @@ def create( :param queue: :class:`QueueSettings` (optional) The queue settings 
of the job. :param run_as: :class:`JobRunAs` (optional) + The user or service principal that the job runs as, if specified in the request. This field + indicates the explicit configuration of `run_as` for the job. To find the value in all cases, + explicit or implicit, use `run_as_user_name`. :param schedule: :class:`CronSchedule` (optional) An optional periodic schedule for this job. The default behavior is that the job only runs when triggered by clicking “Run Now” in the Jobs UI or sending an API request to `runNow`. diff --git a/databricks/sdk/service/marketplace.py b/databricks/sdk/service/marketplace.py index 961480be7..a199010ab 100755 --- a/databricks/sdk/service/marketplace.py +++ b/databricks/sdk/service/marketplace.py @@ -15,36 +15,6 @@ # all definitions in this file are in alphabetical order -@dataclass -class AddExchangeForListingRequest: - listing_id: str - - exchange_id: str - - def as_dict(self) -> dict: - """Serializes the AddExchangeForListingRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.exchange_id is not None: - body["exchange_id"] = self.exchange_id - if self.listing_id is not None: - body["listing_id"] = self.listing_id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the AddExchangeForListingRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.exchange_id is not None: - body["exchange_id"] = self.exchange_id - if self.listing_id is not None: - body["listing_id"] = self.listing_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> AddExchangeForListingRequest: - """Deserializes the AddExchangeForListingRequest from a dictionary.""" - return cls(exchange_id=d.get("exchange_id", None), listing_id=d.get("listing_id", None)) - - @dataclass class AddExchangeForListingResponse: exchange_for_listing: Optional[ExchangeListing] = None @@ -233,30 +203,6 @@ class Cost(Enum): PAID = "PAID" -@dataclass -class 
CreateExchangeFilterRequest: - filter: ExchangeFilter - - def as_dict(self) -> dict: - """Serializes the CreateExchangeFilterRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.filter: - body["filter"] = self.filter.as_dict() - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreateExchangeFilterRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.filter: - body["filter"] = self.filter - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateExchangeFilterRequest: - """Deserializes the CreateExchangeFilterRequest from a dictionary.""" - return cls(filter=_from_dict(d, "filter", ExchangeFilter)) - - @dataclass class CreateExchangeFilterResponse: filter_id: Optional[str] = None @@ -281,30 +227,6 @@ def from_dict(cls, d: Dict[str, Any]) -> CreateExchangeFilterResponse: return cls(filter_id=d.get("filter_id", None)) -@dataclass -class CreateExchangeRequest: - exchange: Exchange - - def as_dict(self) -> dict: - """Serializes the CreateExchangeRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.exchange: - body["exchange"] = self.exchange.as_dict() - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreateExchangeRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.exchange: - body["exchange"] = self.exchange - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateExchangeRequest: - """Deserializes the CreateExchangeRequest from a dictionary.""" - return cls(exchange=_from_dict(d, "exchange", Exchange)) - - @dataclass class CreateExchangeResponse: exchange_id: Optional[str] = None @@ -329,53 +251,6 @@ def from_dict(cls, d: Dict[str, Any]) -> CreateExchangeResponse: return cls(exchange_id=d.get("exchange_id", None)) -@dataclass -class CreateFileRequest: - file_parent: FileParent - - marketplace_file_type: MarketplaceFileType - - 
mime_type: str - - display_name: Optional[str] = None - - def as_dict(self) -> dict: - """Serializes the CreateFileRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.display_name is not None: - body["display_name"] = self.display_name - if self.file_parent: - body["file_parent"] = self.file_parent.as_dict() - if self.marketplace_file_type is not None: - body["marketplace_file_type"] = self.marketplace_file_type.value - if self.mime_type is not None: - body["mime_type"] = self.mime_type - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreateFileRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.display_name is not None: - body["display_name"] = self.display_name - if self.file_parent: - body["file_parent"] = self.file_parent - if self.marketplace_file_type is not None: - body["marketplace_file_type"] = self.marketplace_file_type - if self.mime_type is not None: - body["mime_type"] = self.mime_type - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateFileRequest: - """Deserializes the CreateFileRequest from a dictionary.""" - return cls( - display_name=d.get("display_name", None), - file_parent=_from_dict(d, "file_parent", FileParent), - marketplace_file_type=_enum(d, "marketplace_file_type", MarketplaceFileType), - mime_type=d.get("mime_type", None), - ) - - @dataclass class CreateFileResponse: file_info: Optional[FileInfo] = None @@ -407,92 +282,6 @@ def from_dict(cls, d: Dict[str, Any]) -> CreateFileResponse: return cls(file_info=_from_dict(d, "file_info", FileInfo), upload_url=d.get("upload_url", None)) -@dataclass -class CreateInstallationRequest: - accepted_consumer_terms: Optional[ConsumerTerms] = None - - catalog_name: Optional[str] = None - - listing_id: Optional[str] = None - - recipient_type: Optional[DeltaSharingRecipientType] = None - - repo_detail: Optional[RepoInstallation] = None - """for git repo installations""" - - 
share_name: Optional[str] = None - - def as_dict(self) -> dict: - """Serializes the CreateInstallationRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.accepted_consumer_terms: - body["accepted_consumer_terms"] = self.accepted_consumer_terms.as_dict() - if self.catalog_name is not None: - body["catalog_name"] = self.catalog_name - if self.listing_id is not None: - body["listing_id"] = self.listing_id - if self.recipient_type is not None: - body["recipient_type"] = self.recipient_type.value - if self.repo_detail: - body["repo_detail"] = self.repo_detail.as_dict() - if self.share_name is not None: - body["share_name"] = self.share_name - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreateInstallationRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.accepted_consumer_terms: - body["accepted_consumer_terms"] = self.accepted_consumer_terms - if self.catalog_name is not None: - body["catalog_name"] = self.catalog_name - if self.listing_id is not None: - body["listing_id"] = self.listing_id - if self.recipient_type is not None: - body["recipient_type"] = self.recipient_type - if self.repo_detail: - body["repo_detail"] = self.repo_detail - if self.share_name is not None: - body["share_name"] = self.share_name - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateInstallationRequest: - """Deserializes the CreateInstallationRequest from a dictionary.""" - return cls( - accepted_consumer_terms=_from_dict(d, "accepted_consumer_terms", ConsumerTerms), - catalog_name=d.get("catalog_name", None), - listing_id=d.get("listing_id", None), - recipient_type=_enum(d, "recipient_type", DeltaSharingRecipientType), - repo_detail=_from_dict(d, "repo_detail", RepoInstallation), - share_name=d.get("share_name", None), - ) - - -@dataclass -class CreateListingRequest: - listing: Listing - - def as_dict(self) -> dict: - """Serializes the CreateListingRequest into a 
dictionary suitable for use as a JSON request body.""" - body = {} - if self.listing: - body["listing"] = self.listing.as_dict() - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreateListingRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.listing: - body["listing"] = self.listing - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateListingRequest: - """Deserializes the CreateListingRequest from a dictionary.""" - return cls(listing=_from_dict(d, "listing", Listing)) - - @dataclass class CreateListingResponse: listing_id: Optional[str] = None @@ -517,90 +306,6 @@ def from_dict(cls, d: Dict[str, Any]) -> CreateListingResponse: return cls(listing_id=d.get("listing_id", None)) -@dataclass -class CreatePersonalizationRequest: - """Data request messages also creates a lead (maybe)""" - - intended_use: str - - accepted_consumer_terms: ConsumerTerms - - comment: Optional[str] = None - - company: Optional[str] = None - - first_name: Optional[str] = None - - is_from_lighthouse: Optional[bool] = None - - last_name: Optional[str] = None - - listing_id: Optional[str] = None - - recipient_type: Optional[DeltaSharingRecipientType] = None - - def as_dict(self) -> dict: - """Serializes the CreatePersonalizationRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.accepted_consumer_terms: - body["accepted_consumer_terms"] = self.accepted_consumer_terms.as_dict() - if self.comment is not None: - body["comment"] = self.comment - if self.company is not None: - body["company"] = self.company - if self.first_name is not None: - body["first_name"] = self.first_name - if self.intended_use is not None: - body["intended_use"] = self.intended_use - if self.is_from_lighthouse is not None: - body["is_from_lighthouse"] = self.is_from_lighthouse - if self.last_name is not None: - body["last_name"] = self.last_name - if self.listing_id is not None: - body["listing_id"] = 
self.listing_id - if self.recipient_type is not None: - body["recipient_type"] = self.recipient_type.value - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreatePersonalizationRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.accepted_consumer_terms: - body["accepted_consumer_terms"] = self.accepted_consumer_terms - if self.comment is not None: - body["comment"] = self.comment - if self.company is not None: - body["company"] = self.company - if self.first_name is not None: - body["first_name"] = self.first_name - if self.intended_use is not None: - body["intended_use"] = self.intended_use - if self.is_from_lighthouse is not None: - body["is_from_lighthouse"] = self.is_from_lighthouse - if self.last_name is not None: - body["last_name"] = self.last_name - if self.listing_id is not None: - body["listing_id"] = self.listing_id - if self.recipient_type is not None: - body["recipient_type"] = self.recipient_type - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreatePersonalizationRequest: - """Deserializes the CreatePersonalizationRequest from a dictionary.""" - return cls( - accepted_consumer_terms=_from_dict(d, "accepted_consumer_terms", ConsumerTerms), - comment=d.get("comment", None), - company=d.get("company", None), - first_name=d.get("first_name", None), - intended_use=d.get("intended_use", None), - is_from_lighthouse=d.get("is_from_lighthouse", None), - last_name=d.get("last_name", None), - listing_id=d.get("listing_id", None), - recipient_type=_enum(d, "recipient_type", DeltaSharingRecipientType), - ) - - @dataclass class CreatePersonalizationRequestResponse: id: Optional[str] = None @@ -625,30 +330,6 @@ def from_dict(cls, d: Dict[str, Any]) -> CreatePersonalizationRequestResponse: return cls(id=d.get("id", None)) -@dataclass -class CreateProviderRequest: - provider: ProviderInfo - - def as_dict(self) -> dict: - """Serializes the CreateProviderRequest into a dictionary 
suitable for use as a JSON request body.""" - body = {} - if self.provider: - body["provider"] = self.provider.as_dict() - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreateProviderRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.provider: - body["provider"] = self.provider - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateProviderRequest: - """Deserializes the CreateProviderRequest from a dictionary.""" - return cls(provider=_from_dict(d, "provider", ProviderInfo)) - - @dataclass class CreateProviderResponse: id: Optional[str] = None @@ -3059,36 +2740,6 @@ def from_dict(cls, d: Dict[str, Any]) -> TokenInfo: ) -@dataclass -class UpdateExchangeFilterRequest: - filter: ExchangeFilter - - id: Optional[str] = None - - def as_dict(self) -> dict: - """Serializes the UpdateExchangeFilterRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.filter: - body["filter"] = self.filter.as_dict() - if self.id is not None: - body["id"] = self.id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateExchangeFilterRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.filter: - body["filter"] = self.filter - if self.id is not None: - body["id"] = self.id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateExchangeFilterRequest: - """Deserializes the UpdateExchangeFilterRequest from a dictionary.""" - return cls(filter=_from_dict(d, "filter", ExchangeFilter), id=d.get("id", None)) - - @dataclass class UpdateExchangeFilterResponse: filter: Optional[ExchangeFilter] = None @@ -3113,36 +2764,6 @@ def from_dict(cls, d: Dict[str, Any]) -> UpdateExchangeFilterResponse: return cls(filter=_from_dict(d, "filter", ExchangeFilter)) -@dataclass -class UpdateExchangeRequest: - exchange: Exchange - - id: Optional[str] = None - - def as_dict(self) -> dict: - """Serializes the 
UpdateExchangeRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.exchange: - body["exchange"] = self.exchange.as_dict() - if self.id is not None: - body["id"] = self.id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateExchangeRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.exchange: - body["exchange"] = self.exchange - if self.id is not None: - body["id"] = self.id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateExchangeRequest: - """Deserializes the UpdateExchangeRequest from a dictionary.""" - return cls(exchange=_from_dict(d, "exchange", Exchange), id=d.get("id", None)) - - @dataclass class UpdateExchangeResponse: exchange: Optional[Exchange] = None @@ -3167,53 +2788,6 @@ def from_dict(cls, d: Dict[str, Any]) -> UpdateExchangeResponse: return cls(exchange=_from_dict(d, "exchange", Exchange)) -@dataclass -class UpdateInstallationRequest: - installation: InstallationDetail - - installation_id: Optional[str] = None - - listing_id: Optional[str] = None - - rotate_token: Optional[bool] = None - - def as_dict(self) -> dict: - """Serializes the UpdateInstallationRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.installation: - body["installation"] = self.installation.as_dict() - if self.installation_id is not None: - body["installation_id"] = self.installation_id - if self.listing_id is not None: - body["listing_id"] = self.listing_id - if self.rotate_token is not None: - body["rotate_token"] = self.rotate_token - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateInstallationRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.installation: - body["installation"] = self.installation - if self.installation_id is not None: - body["installation_id"] = self.installation_id - if self.listing_id is not None: - body["listing_id"] = 
self.listing_id - if self.rotate_token is not None: - body["rotate_token"] = self.rotate_token - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateInstallationRequest: - """Deserializes the UpdateInstallationRequest from a dictionary.""" - return cls( - installation=_from_dict(d, "installation", InstallationDetail), - installation_id=d.get("installation_id", None), - listing_id=d.get("listing_id", None), - rotate_token=d.get("rotate_token", None), - ) - - @dataclass class UpdateInstallationResponse: installation: Optional[InstallationDetail] = None @@ -3238,36 +2812,6 @@ def from_dict(cls, d: Dict[str, Any]) -> UpdateInstallationResponse: return cls(installation=_from_dict(d, "installation", InstallationDetail)) -@dataclass -class UpdateListingRequest: - listing: Listing - - id: Optional[str] = None - - def as_dict(self) -> dict: - """Serializes the UpdateListingRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.id is not None: - body["id"] = self.id - if self.listing: - body["listing"] = self.listing.as_dict() - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateListingRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.id is not None: - body["id"] = self.id - if self.listing: - body["listing"] = self.listing - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateListingRequest: - """Deserializes the UpdateListingRequest from a dictionary.""" - return cls(id=d.get("id", None), listing=_from_dict(d, "listing", Listing)) - - @dataclass class UpdateListingResponse: listing: Optional[Listing] = None @@ -3292,60 +2836,6 @@ def from_dict(cls, d: Dict[str, Any]) -> UpdateListingResponse: return cls(listing=_from_dict(d, "listing", Listing)) -@dataclass -class UpdatePersonalizationRequestRequest: - status: PersonalizationRequestStatus - - listing_id: Optional[str] = None - - reason: Optional[str] = None - - request_id: 
Optional[str] = None - - share: Optional[ShareInfo] = None - - def as_dict(self) -> dict: - """Serializes the UpdatePersonalizationRequestRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.listing_id is not None: - body["listing_id"] = self.listing_id - if self.reason is not None: - body["reason"] = self.reason - if self.request_id is not None: - body["request_id"] = self.request_id - if self.share: - body["share"] = self.share.as_dict() - if self.status is not None: - body["status"] = self.status.value - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdatePersonalizationRequestRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.listing_id is not None: - body["listing_id"] = self.listing_id - if self.reason is not None: - body["reason"] = self.reason - if self.request_id is not None: - body["request_id"] = self.request_id - if self.share: - body["share"] = self.share - if self.status is not None: - body["status"] = self.status - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdatePersonalizationRequestRequest: - """Deserializes the UpdatePersonalizationRequestRequest from a dictionary.""" - return cls( - listing_id=d.get("listing_id", None), - reason=d.get("reason", None), - request_id=d.get("request_id", None), - share=_from_dict(d, "share", ShareInfo), - status=_enum(d, "status", PersonalizationRequestStatus), - ) - - @dataclass class UpdatePersonalizationRequestResponse: request: Optional[PersonalizationRequest] = None @@ -3370,39 +2860,6 @@ def from_dict(cls, d: Dict[str, Any]) -> UpdatePersonalizationRequestResponse: return cls(request=_from_dict(d, "request", PersonalizationRequest)) -@dataclass -class UpdateProviderAnalyticsDashboardRequest: - id: Optional[str] = None - """id is immutable property and can't be updated.""" - - version: Optional[int] = None - """this is the version of the dashboard template we want to update our user 
to current expectation - is that it should be equal to latest version of the dashboard template""" - - def as_dict(self) -> dict: - """Serializes the UpdateProviderAnalyticsDashboardRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.id is not None: - body["id"] = self.id - if self.version is not None: - body["version"] = self.version - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateProviderAnalyticsDashboardRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.id is not None: - body["id"] = self.id - if self.version is not None: - body["version"] = self.version - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateProviderAnalyticsDashboardRequest: - """Deserializes the UpdateProviderAnalyticsDashboardRequest from a dictionary.""" - return cls(id=d.get("id", None), version=d.get("version", None)) - - @dataclass class UpdateProviderAnalyticsDashboardResponse: id: str @@ -3441,36 +2898,6 @@ def from_dict(cls, d: Dict[str, Any]) -> UpdateProviderAnalyticsDashboardRespons return cls(dashboard_id=d.get("dashboard_id", None), id=d.get("id", None), version=d.get("version", None)) -@dataclass -class UpdateProviderRequest: - provider: ProviderInfo - - id: Optional[str] = None - - def as_dict(self) -> dict: - """Serializes the UpdateProviderRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.id is not None: - body["id"] = self.id - if self.provider: - body["provider"] = self.provider.as_dict() - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateProviderRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.id is not None: - body["id"] = self.id - if self.provider: - body["provider"] = self.provider - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateProviderRequest: - """Deserializes the UpdateProviderRequest from a 
dictionary.""" - return cls(id=d.get("id", None), provider=_from_dict(d, "provider", ProviderInfo)) - - @dataclass class UpdateProviderResponse: provider: Optional[ProviderInfo] = None diff --git a/databricks/sdk/service/ml.py b/databricks/sdk/service/ml.py index 3d22a2ad0..9c8c90627 100755 --- a/databricks/sdk/service/ml.py +++ b/databricks/sdk/service/ml.py @@ -174,75 +174,6 @@ class ActivityType(Enum): SYSTEM_TRANSITION = "SYSTEM_TRANSITION" -@dataclass -class ApproveTransitionRequest: - """Details required to identify and approve a model version stage transition request.""" - - name: str - """Name of the model.""" - - version: str - """Version of the model.""" - - stage: str - """Target stage of the transition. Valid values are: - - * `None`: The initial stage of a model version. - - * `Staging`: Staging or pre-production stage. - - * `Production`: Production stage. - - * `Archived`: Archived stage.""" - - archive_existing_versions: bool - """Specifies whether to archive all current model versions in the target stage.""" - - comment: Optional[str] = None - """User-provided comment on the action.""" - - def as_dict(self) -> dict: - """Serializes the ApproveTransitionRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.archive_existing_versions is not None: - body["archive_existing_versions"] = self.archive_existing_versions - if self.comment is not None: - body["comment"] = self.comment - if self.name is not None: - body["name"] = self.name - if self.stage is not None: - body["stage"] = self.stage - if self.version is not None: - body["version"] = self.version - return body - - def as_shallow_dict(self) -> dict: - """Serializes the ApproveTransitionRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.archive_existing_versions is not None: - body["archive_existing_versions"] = self.archive_existing_versions - if self.comment is not None: - body["comment"] = self.comment - if self.name is 
not None: - body["name"] = self.name - if self.stage is not None: - body["stage"] = self.stage - if self.version is not None: - body["version"] = self.version - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ApproveTransitionRequest: - """Deserializes the ApproveTransitionRequest from a dictionary.""" - return cls( - archive_existing_versions=d.get("archive_existing_versions", None), - comment=d.get("comment", None), - name=d.get("name", None), - stage=d.get("stage", None), - version=d.get("version", None), - ) - - @dataclass class ApproveTransitionRequestResponse: activity: Optional[Activity] = None @@ -358,47 +289,6 @@ def from_dict(cls, d: Dict[str, Any]) -> CommentObject: ) -@dataclass -class CreateComment: - """Details required to create a comment on a model version.""" - - name: str - """Name of the model.""" - - version: str - """Version of the model.""" - - comment: str - """User-provided comment on the action.""" - - def as_dict(self) -> dict: - """Serializes the CreateComment into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.comment is not None: - body["comment"] = self.comment - if self.name is not None: - body["name"] = self.name - if self.version is not None: - body["version"] = self.version - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreateComment into a shallow dictionary of its immediate attributes.""" - body = {} - if self.comment is not None: - body["comment"] = self.comment - if self.name is not None: - body["name"] = self.name - if self.version is not None: - body["version"] = self.version - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateComment: - """Deserializes the CreateComment from a dictionary.""" - return cls(comment=d.get("comment", None), name=d.get("name", None), version=d.get("version", None)) - - @dataclass class CreateCommentResponse: comment: Optional[CommentObject] = None @@ -424,53 +314,6 @@ def from_dict(cls, d: 
Dict[str, Any]) -> CreateCommentResponse: return cls(comment=_from_dict(d, "comment", CommentObject)) -@dataclass -class CreateExperiment: - name: str - """Experiment name.""" - - artifact_location: Optional[str] = None - """Location where all artifacts for the experiment are stored. If not provided, the remote server - will select an appropriate default.""" - - tags: Optional[List[ExperimentTag]] = None - """A collection of tags to set on the experiment. Maximum tag size and number of tags per request - depends on the storage backend. All storage backends are guaranteed to support tag keys up to - 250 bytes in size and tag values up to 5000 bytes in size. All storage backends are also - guaranteed to support up to 20 tags per request.""" - - def as_dict(self) -> dict: - """Serializes the CreateExperiment into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.artifact_location is not None: - body["artifact_location"] = self.artifact_location - if self.name is not None: - body["name"] = self.name - if self.tags: - body["tags"] = [v.as_dict() for v in self.tags] - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreateExperiment into a shallow dictionary of its immediate attributes.""" - body = {} - if self.artifact_location is not None: - body["artifact_location"] = self.artifact_location - if self.name is not None: - body["name"] = self.name - if self.tags: - body["tags"] = self.tags - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateExperiment: - """Deserializes the CreateExperiment from a dictionary.""" - return cls( - artifact_location=d.get("artifact_location", None), - name=d.get("name", None), - tags=_repeated_dict(d, "tags", ExperimentTag), - ) - - @dataclass class CreateExperimentResponse: experiment_id: Optional[str] = None @@ -496,177 +339,6 @@ def from_dict(cls, d: Dict[str, Any]) -> CreateExperimentResponse: return cls(experiment_id=d.get("experiment_id", None)) -@dataclass 
-class CreateForecastingExperimentRequest: - train_data_path: str - """The fully qualified path of a Unity Catalog table, formatted as - catalog_name.schema_name.table_name, used as training data for the forecasting model.""" - - target_column: str - """The column in the input training table used as the prediction target for model training. The - values in this column are used as the ground truth for model training.""" - - time_column: str - """The column in the input training table that represents each row's timestamp.""" - - forecast_granularity: str - """The time interval between consecutive rows in the time series data. Possible values include: '1 - second', '1 minute', '5 minutes', '10 minutes', '15 minutes', '30 minutes', 'Hourly', 'Daily', - 'Weekly', 'Monthly', 'Quarterly', 'Yearly'.""" - - forecast_horizon: int - """The number of time steps into the future to make predictions, calculated as a multiple of - forecast_granularity. This value represents how far ahead the model should forecast.""" - - custom_weights_column: Optional[str] = None - """The column in the training table used to customize weights for each time series.""" - - experiment_path: Optional[str] = None - """The path in the workspace to store the created experiment.""" - - future_feature_data_path: Optional[str] = None - """The fully qualified path of a Unity Catalog table, formatted as - catalog_name.schema_name.table_name, used to store future feature data for predictions.""" - - holiday_regions: Optional[List[str]] = None - """The region code(s) to automatically add holiday features. Currently supports only one region.""" - - include_features: Optional[List[str]] = None - """Specifies the list of feature columns to include in model training. These columns must exist in - the training data and be of type string, numerical, or boolean. If not specified, no additional - features will be included. 
Note: Certain columns are automatically handled: - Automatically - excluded: split_column, target_column, custom_weights_column. - Automatically included: - time_column.""" - - max_runtime: Optional[int] = None - """The maximum duration for the experiment in minutes. The experiment stops automatically if it - exceeds this limit.""" - - prediction_data_path: Optional[str] = None - """The fully qualified path of a Unity Catalog table, formatted as - catalog_name.schema_name.table_name, used to store predictions.""" - - primary_metric: Optional[str] = None - """The evaluation metric used to optimize the forecasting model.""" - - register_to: Optional[str] = None - """The fully qualified path of a Unity Catalog model, formatted as - catalog_name.schema_name.model_name, used to store the best model.""" - - split_column: Optional[str] = None - """// The column in the training table used for custom data splits. Values must be 'train', - 'validate', or 'test'.""" - - timeseries_identifier_columns: Optional[List[str]] = None - """The column in the training table used to group the dataset for predicting individual time - series.""" - - training_frameworks: Optional[List[str]] = None - """List of frameworks to include for model tuning. Possible values are 'Prophet', 'ARIMA', - 'DeepAR'. 
An empty list includes all supported frameworks.""" - - def as_dict(self) -> dict: - """Serializes the CreateForecastingExperimentRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.custom_weights_column is not None: - body["custom_weights_column"] = self.custom_weights_column - if self.experiment_path is not None: - body["experiment_path"] = self.experiment_path - if self.forecast_granularity is not None: - body["forecast_granularity"] = self.forecast_granularity - if self.forecast_horizon is not None: - body["forecast_horizon"] = self.forecast_horizon - if self.future_feature_data_path is not None: - body["future_feature_data_path"] = self.future_feature_data_path - if self.holiday_regions: - body["holiday_regions"] = [v for v in self.holiday_regions] - if self.include_features: - body["include_features"] = [v for v in self.include_features] - if self.max_runtime is not None: - body["max_runtime"] = self.max_runtime - if self.prediction_data_path is not None: - body["prediction_data_path"] = self.prediction_data_path - if self.primary_metric is not None: - body["primary_metric"] = self.primary_metric - if self.register_to is not None: - body["register_to"] = self.register_to - if self.split_column is not None: - body["split_column"] = self.split_column - if self.target_column is not None: - body["target_column"] = self.target_column - if self.time_column is not None: - body["time_column"] = self.time_column - if self.timeseries_identifier_columns: - body["timeseries_identifier_columns"] = [v for v in self.timeseries_identifier_columns] - if self.train_data_path is not None: - body["train_data_path"] = self.train_data_path - if self.training_frameworks: - body["training_frameworks"] = [v for v in self.training_frameworks] - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreateForecastingExperimentRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if 
self.custom_weights_column is not None: - body["custom_weights_column"] = self.custom_weights_column - if self.experiment_path is not None: - body["experiment_path"] = self.experiment_path - if self.forecast_granularity is not None: - body["forecast_granularity"] = self.forecast_granularity - if self.forecast_horizon is not None: - body["forecast_horizon"] = self.forecast_horizon - if self.future_feature_data_path is not None: - body["future_feature_data_path"] = self.future_feature_data_path - if self.holiday_regions: - body["holiday_regions"] = self.holiday_regions - if self.include_features: - body["include_features"] = self.include_features - if self.max_runtime is not None: - body["max_runtime"] = self.max_runtime - if self.prediction_data_path is not None: - body["prediction_data_path"] = self.prediction_data_path - if self.primary_metric is not None: - body["primary_metric"] = self.primary_metric - if self.register_to is not None: - body["register_to"] = self.register_to - if self.split_column is not None: - body["split_column"] = self.split_column - if self.target_column is not None: - body["target_column"] = self.target_column - if self.time_column is not None: - body["time_column"] = self.time_column - if self.timeseries_identifier_columns: - body["timeseries_identifier_columns"] = self.timeseries_identifier_columns - if self.train_data_path is not None: - body["train_data_path"] = self.train_data_path - if self.training_frameworks: - body["training_frameworks"] = self.training_frameworks - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateForecastingExperimentRequest: - """Deserializes the CreateForecastingExperimentRequest from a dictionary.""" - return cls( - custom_weights_column=d.get("custom_weights_column", None), - experiment_path=d.get("experiment_path", None), - forecast_granularity=d.get("forecast_granularity", None), - forecast_horizon=d.get("forecast_horizon", None), - 
future_feature_data_path=d.get("future_feature_data_path", None), - holiday_regions=d.get("holiday_regions", None), - include_features=d.get("include_features", None), - max_runtime=d.get("max_runtime", None), - prediction_data_path=d.get("prediction_data_path", None), - primary_metric=d.get("primary_metric", None), - register_to=d.get("register_to", None), - split_column=d.get("split_column", None), - target_column=d.get("target_column", None), - time_column=d.get("time_column", None), - timeseries_identifier_columns=d.get("timeseries_identifier_columns", None), - train_data_path=d.get("train_data_path", None), - training_frameworks=d.get("training_frameworks", None), - ) - - @dataclass class CreateForecastingExperimentResponse: experiment_id: Optional[str] = None @@ -692,73 +364,6 @@ def from_dict(cls, d: Dict[str, Any]) -> CreateForecastingExperimentResponse: return cls(experiment_id=d.get("experiment_id", None)) -@dataclass -class CreateLoggedModelRequest: - experiment_id: str - """The ID of the experiment that owns the model.""" - - model_type: Optional[str] = None - """The type of the model, such as ``"Agent"``, ``"Classifier"``, ``"LLM"``.""" - - name: Optional[str] = None - """The name of the model (optional). 
If not specified one will be generated.""" - - params: Optional[List[LoggedModelParameter]] = None - """Parameters attached to the model.""" - - source_run_id: Optional[str] = None - """The ID of the run that created the model.""" - - tags: Optional[List[LoggedModelTag]] = None - """Tags attached to the model.""" - - def as_dict(self) -> dict: - """Serializes the CreateLoggedModelRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.experiment_id is not None: - body["experiment_id"] = self.experiment_id - if self.model_type is not None: - body["model_type"] = self.model_type - if self.name is not None: - body["name"] = self.name - if self.params: - body["params"] = [v.as_dict() for v in self.params] - if self.source_run_id is not None: - body["source_run_id"] = self.source_run_id - if self.tags: - body["tags"] = [v.as_dict() for v in self.tags] - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreateLoggedModelRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.experiment_id is not None: - body["experiment_id"] = self.experiment_id - if self.model_type is not None: - body["model_type"] = self.model_type - if self.name is not None: - body["name"] = self.name - if self.params: - body["params"] = self.params - if self.source_run_id is not None: - body["source_run_id"] = self.source_run_id - if self.tags: - body["tags"] = self.tags - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateLoggedModelRequest: - """Deserializes the CreateLoggedModelRequest from a dictionary.""" - return cls( - experiment_id=d.get("experiment_id", None), - model_type=d.get("model_type", None), - name=d.get("name", None), - params=_repeated_dict(d, "params", LoggedModelParameter), - source_run_id=d.get("source_run_id", None), - tags=_repeated_dict(d, "tags", LoggedModelTag), - ) - - @dataclass class CreateLoggedModelResponse: model: Optional[LoggedModel] = None @@ -784,47 
+389,6 @@ def from_dict(cls, d: Dict[str, Any]) -> CreateLoggedModelResponse: return cls(model=_from_dict(d, "model", LoggedModel)) -@dataclass -class CreateModelRequest: - name: str - """Register models under this name""" - - description: Optional[str] = None - """Optional description for registered model.""" - - tags: Optional[List[ModelTag]] = None - """Additional metadata for registered model.""" - - def as_dict(self) -> dict: - """Serializes the CreateModelRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.description is not None: - body["description"] = self.description - if self.name is not None: - body["name"] = self.name - if self.tags: - body["tags"] = [v.as_dict() for v in self.tags] - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreateModelRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.description is not None: - body["description"] = self.description - if self.name is not None: - body["name"] = self.name - if self.tags: - body["tags"] = self.tags - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateModelRequest: - """Deserializes the CreateModelRequest from a dictionary.""" - return cls( - description=d.get("description", None), name=d.get("name", None), tags=_repeated_dict(d, "tags", ModelTag) - ) - - @dataclass class CreateModelResponse: registered_model: Optional[Model] = None @@ -850,276 +414,44 @@ def from_dict(cls, d: Dict[str, Any]) -> CreateModelResponse: @dataclass -class CreateModelVersionRequest: - name: str - """Register model under this name""" +class CreateModelVersionResponse: + model_version: Optional[ModelVersion] = None + """Return new version number generated for this model in registry.""" - source: str - """URI indicating the location of the model artifacts.""" + def as_dict(self) -> dict: + """Serializes the CreateModelVersionResponse into a dictionary suitable for use as a JSON request body.""" + 
body = {} + if self.model_version: + body["model_version"] = self.model_version.as_dict() + return body - description: Optional[str] = None - """Optional description for model version.""" + def as_shallow_dict(self) -> dict: + """Serializes the CreateModelVersionResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.model_version: + body["model_version"] = self.model_version + return body - run_id: Optional[str] = None - """MLflow run ID for correlation, if `source` was generated by an experiment run in MLflow tracking - server""" + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> CreateModelVersionResponse: + """Deserializes the CreateModelVersionResponse from a dictionary.""" + return cls(model_version=_from_dict(d, "model_version", ModelVersion)) - run_link: Optional[str] = None - """MLflow run link - this is the exact link of the run that generated this model version, - potentially hosted at another instance of MLflow.""" - tags: Optional[List[ModelVersionTag]] = None - """Additional metadata for model version.""" +@dataclass +class CreateRunResponse: + run: Optional[Run] = None + """The newly created run.""" def as_dict(self) -> dict: - """Serializes the CreateModelVersionRequest into a dictionary suitable for use as a JSON request body.""" + """Serializes the CreateRunResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.description is not None: - body["description"] = self.description - if self.name is not None: - body["name"] = self.name - if self.run_id is not None: - body["run_id"] = self.run_id - if self.run_link is not None: - body["run_link"] = self.run_link - if self.source is not None: - body["source"] = self.source - if self.tags: - body["tags"] = [v.as_dict() for v in self.tags] + if self.run: + body["run"] = self.run.as_dict() return body def as_shallow_dict(self) -> dict: - """Serializes the CreateModelVersionRequest into a shallow dictionary of its immediate 
attributes.""" - body = {} - if self.description is not None: - body["description"] = self.description - if self.name is not None: - body["name"] = self.name - if self.run_id is not None: - body["run_id"] = self.run_id - if self.run_link is not None: - body["run_link"] = self.run_link - if self.source is not None: - body["source"] = self.source - if self.tags: - body["tags"] = self.tags - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateModelVersionRequest: - """Deserializes the CreateModelVersionRequest from a dictionary.""" - return cls( - description=d.get("description", None), - name=d.get("name", None), - run_id=d.get("run_id", None), - run_link=d.get("run_link", None), - source=d.get("source", None), - tags=_repeated_dict(d, "tags", ModelVersionTag), - ) - - -@dataclass -class CreateModelVersionResponse: - model_version: Optional[ModelVersion] = None - """Return new version number generated for this model in registry.""" - - def as_dict(self) -> dict: - """Serializes the CreateModelVersionResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.model_version: - body["model_version"] = self.model_version.as_dict() - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreateModelVersionResponse into a shallow dictionary of its immediate attributes.""" - body = {} - if self.model_version: - body["model_version"] = self.model_version - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateModelVersionResponse: - """Deserializes the CreateModelVersionResponse from a dictionary.""" - return cls(model_version=_from_dict(d, "model_version", ModelVersion)) - - -@dataclass -class CreateRegistryWebhook: - """Details required to create a registry webhook.""" - - events: List[RegistryWebhookEvent] - """Events that can trigger a registry webhook: * `MODEL_VERSION_CREATED`: A new model version was - created for the associated model. 
- - * `MODEL_VERSION_TRANSITIONED_STAGE`: A model version’s stage was changed. - - * `TRANSITION_REQUEST_CREATED`: A user requested a model version’s stage be transitioned. - - * `COMMENT_CREATED`: A user wrote a comment on a registered model. - - * `REGISTERED_MODEL_CREATED`: A new registered model was created. This event type can only be - specified for a registry-wide webhook, which can be created by not specifying a model name in - the create request. - - * `MODEL_VERSION_TAG_SET`: A user set a tag on the model version. - - * `MODEL_VERSION_TRANSITIONED_TO_STAGING`: A model version was transitioned to staging. - - * `MODEL_VERSION_TRANSITIONED_TO_PRODUCTION`: A model version was transitioned to production. - - * `MODEL_VERSION_TRANSITIONED_TO_ARCHIVED`: A model version was archived. - - * `TRANSITION_REQUEST_TO_STAGING_CREATED`: A user requested a model version be transitioned to - staging. - - * `TRANSITION_REQUEST_TO_PRODUCTION_CREATED`: A user requested a model version be transitioned - to production. - - * `TRANSITION_REQUEST_TO_ARCHIVED_CREATED`: A user requested a model version be archived.""" - - description: Optional[str] = None - """User-specified description for the webhook.""" - - http_url_spec: Optional[HttpUrlSpec] = None - """External HTTPS URL called on event trigger (by using a POST request).""" - - job_spec: Optional[JobSpec] = None - """ID of the job that the webhook runs.""" - - model_name: Optional[str] = None - """If model name is not specified, a registry-wide webhook is created that listens for the - specified events across all versions of all registered models.""" - - status: Optional[RegistryWebhookStatus] = None - """Enable or disable triggering the webhook, or put the webhook into test mode. The default is - `ACTIVE`: * `ACTIVE`: Webhook is triggered when an associated event happens. - - * `DISABLED`: Webhook is not triggered. 
- - * `TEST_MODE`: Webhook can be triggered through the test endpoint, but is not triggered on a - real event.""" - - def as_dict(self) -> dict: - """Serializes the CreateRegistryWebhook into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.description is not None: - body["description"] = self.description - if self.events: - body["events"] = [v.value for v in self.events] - if self.http_url_spec: - body["http_url_spec"] = self.http_url_spec.as_dict() - if self.job_spec: - body["job_spec"] = self.job_spec.as_dict() - if self.model_name is not None: - body["model_name"] = self.model_name - if self.status is not None: - body["status"] = self.status.value - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreateRegistryWebhook into a shallow dictionary of its immediate attributes.""" - body = {} - if self.description is not None: - body["description"] = self.description - if self.events: - body["events"] = self.events - if self.http_url_spec: - body["http_url_spec"] = self.http_url_spec - if self.job_spec: - body["job_spec"] = self.job_spec - if self.model_name is not None: - body["model_name"] = self.model_name - if self.status is not None: - body["status"] = self.status - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateRegistryWebhook: - """Deserializes the CreateRegistryWebhook from a dictionary.""" - return cls( - description=d.get("description", None), - events=_repeated_enum(d, "events", RegistryWebhookEvent), - http_url_spec=_from_dict(d, "http_url_spec", HttpUrlSpec), - job_spec=_from_dict(d, "job_spec", JobSpec), - model_name=d.get("model_name", None), - status=_enum(d, "status", RegistryWebhookStatus), - ) - - -@dataclass -class CreateRun: - experiment_id: Optional[str] = None - """ID of the associated experiment.""" - - run_name: Optional[str] = None - """The name of the run.""" - - start_time: Optional[int] = None - """Unix timestamp in milliseconds of when the run started.""" 
- - tags: Optional[List[RunTag]] = None - """Additional metadata for run.""" - - user_id: Optional[str] = None - """ID of the user executing the run. This field is deprecated as of MLflow 1.0, and will be removed - in a future MLflow release. Use 'mlflow.user' tag instead.""" - - def as_dict(self) -> dict: - """Serializes the CreateRun into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.experiment_id is not None: - body["experiment_id"] = self.experiment_id - if self.run_name is not None: - body["run_name"] = self.run_name - if self.start_time is not None: - body["start_time"] = self.start_time - if self.tags: - body["tags"] = [v.as_dict() for v in self.tags] - if self.user_id is not None: - body["user_id"] = self.user_id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreateRun into a shallow dictionary of its immediate attributes.""" - body = {} - if self.experiment_id is not None: - body["experiment_id"] = self.experiment_id - if self.run_name is not None: - body["run_name"] = self.run_name - if self.start_time is not None: - body["start_time"] = self.start_time - if self.tags: - body["tags"] = self.tags - if self.user_id is not None: - body["user_id"] = self.user_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateRun: - """Deserializes the CreateRun from a dictionary.""" - return cls( - experiment_id=d.get("experiment_id", None), - run_name=d.get("run_name", None), - start_time=d.get("start_time", None), - tags=_repeated_dict(d, "tags", RunTag), - user_id=d.get("user_id", None), - ) - - -@dataclass -class CreateRunResponse: - run: Optional[Run] = None - """The newly created run.""" - - def as_dict(self) -> dict: - """Serializes the CreateRunResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.run: - body["run"] = self.run.as_dict() - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreateRunResponse into a 
shallow dictionary of its immediate attributes.""" + """Serializes the CreateRunResponse into a shallow dictionary of its immediate attributes.""" body = {} if self.run: body["run"] = self.run @@ -1131,67 +463,6 @@ def from_dict(cls, d: Dict[str, Any]) -> CreateRunResponse: return cls(run=_from_dict(d, "run", Run)) -@dataclass -class CreateTransitionRequest: - """Details required to create a model version stage transition request.""" - - name: str - """Name of the model.""" - - version: str - """Version of the model.""" - - stage: str - """Target stage of the transition. Valid values are: - - * `None`: The initial stage of a model version. - - * `Staging`: Staging or pre-production stage. - - * `Production`: Production stage. - - * `Archived`: Archived stage.""" - - comment: Optional[str] = None - """User-provided comment on the action.""" - - def as_dict(self) -> dict: - """Serializes the CreateTransitionRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.comment is not None: - body["comment"] = self.comment - if self.name is not None: - body["name"] = self.name - if self.stage is not None: - body["stage"] = self.stage - if self.version is not None: - body["version"] = self.version - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreateTransitionRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.comment is not None: - body["comment"] = self.comment - if self.name is not None: - body["name"] = self.name - if self.stage is not None: - body["stage"] = self.stage - if self.version is not None: - body["version"] = self.version - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateTransitionRequest: - """Deserializes the CreateTransitionRequest from a dictionary.""" - return cls( - comment=d.get("comment", None), - name=d.get("name", None), - stage=d.get("stage", None), - version=d.get("version", None), - ) - - @dataclass class 
CreateTransitionRequestResponse: request: Optional[TransitionRequest] = None @@ -1367,31 +638,6 @@ def from_dict(cls, d: Dict[str, Any]) -> DeleteCommentResponse: return cls() -@dataclass -class DeleteExperiment: - experiment_id: str - """ID of the associated experiment.""" - - def as_dict(self) -> dict: - """Serializes the DeleteExperiment into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.experiment_id is not None: - body["experiment_id"] = self.experiment_id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the DeleteExperiment into a shallow dictionary of its immediate attributes.""" - body = {} - if self.experiment_id is not None: - body["experiment_id"] = self.experiment_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DeleteExperiment: - """Deserializes the DeleteExperiment from a dictionary.""" - return cls(experiment_id=d.get("experiment_id", None)) - - @dataclass class DeleteExperimentResponse: def as_dict(self) -> dict: @@ -1518,31 +764,6 @@ def from_dict(cls, d: Dict[str, Any]) -> DeleteModelVersionTagResponse: return cls() -@dataclass -class DeleteRun: - run_id: str - """ID of the run to delete.""" - - def as_dict(self) -> dict: - """Serializes the DeleteRun into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.run_id is not None: - body["run_id"] = self.run_id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the DeleteRun into a shallow dictionary of its immediate attributes.""" - body = {} - if self.run_id is not None: - body["run_id"] = self.run_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DeleteRun: - """Deserializes the DeleteRun from a dictionary.""" - return cls(run_id=d.get("run_id", None)) - - @dataclass class DeleteRunResponse: def as_dict(self) -> dict: @@ -1561,51 +782,6 @@ def from_dict(cls, d: Dict[str, Any]) -> DeleteRunResponse: return cls() -@dataclass -class DeleteRuns: - 
experiment_id: str - """The ID of the experiment containing the runs to delete.""" - - max_timestamp_millis: int - """The maximum creation timestamp in milliseconds since the UNIX epoch for deleting runs. Only runs - created prior to or at this timestamp are deleted.""" - - max_runs: Optional[int] = None - """An optional positive integer indicating the maximum number of runs to delete. The maximum - allowed value for max_runs is 10000.""" - - def as_dict(self) -> dict: - """Serializes the DeleteRuns into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.experiment_id is not None: - body["experiment_id"] = self.experiment_id - if self.max_runs is not None: - body["max_runs"] = self.max_runs - if self.max_timestamp_millis is not None: - body["max_timestamp_millis"] = self.max_timestamp_millis - return body - - def as_shallow_dict(self) -> dict: - """Serializes the DeleteRuns into a shallow dictionary of its immediate attributes.""" - body = {} - if self.experiment_id is not None: - body["experiment_id"] = self.experiment_id - if self.max_runs is not None: - body["max_runs"] = self.max_runs - if self.max_timestamp_millis is not None: - body["max_timestamp_millis"] = self.max_timestamp_millis - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DeleteRuns: - """Deserializes the DeleteRuns from a dictionary.""" - return cls( - experiment_id=d.get("experiment_id", None), - max_runs=d.get("max_runs", None), - max_timestamp_millis=d.get("max_timestamp_millis", None), - ) - - @dataclass class DeleteRunsResponse: runs_deleted: Optional[int] = None @@ -1631,38 +807,6 @@ def from_dict(cls, d: Dict[str, Any]) -> DeleteRunsResponse: return cls(runs_deleted=d.get("runs_deleted", None)) -@dataclass -class DeleteTag: - run_id: str - """ID of the run that the tag was logged under. Must be provided.""" - - key: str - """Name of the tag. Maximum size is 255 bytes. 
Must be provided.""" - - def as_dict(self) -> dict: - """Serializes the DeleteTag into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.key is not None: - body["key"] = self.key - if self.run_id is not None: - body["run_id"] = self.run_id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the DeleteTag into a shallow dictionary of its immediate attributes.""" - body = {} - if self.key is not None: - body["key"] = self.key - if self.run_id is not None: - body["run_id"] = self.run_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DeleteTag: - """Deserializes the DeleteTag from a dictionary.""" - return cls(key=d.get("key", None), run_id=d.get("run_id", None)) - - @dataclass class DeleteTagResponse: def as_dict(self) -> dict: @@ -2032,40 +1176,6 @@ def from_dict(cls, d: Dict[str, Any]) -> ExperimentPermissionsDescription: ) -@dataclass -class ExperimentPermissionsRequest: - access_control_list: Optional[List[ExperimentAccessControlRequest]] = None - - experiment_id: Optional[str] = None - """The experiment for which to get or manage permissions.""" - - def as_dict(self) -> dict: - """Serializes the ExperimentPermissionsRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.access_control_list: - body["access_control_list"] = [v.as_dict() for v in self.access_control_list] - if self.experiment_id is not None: - body["experiment_id"] = self.experiment_id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the ExperimentPermissionsRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.access_control_list: - body["access_control_list"] = self.access_control_list - if self.experiment_id is not None: - body["experiment_id"] = self.experiment_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ExperimentPermissionsRequest: - """Deserializes the ExperimentPermissionsRequest from a 
dictionary.""" - return cls( - access_control_list=_repeated_dict(d, "access_control_list", ExperimentAccessControlRequest), - experiment_id=d.get("experiment_id", None), - ) - - @dataclass class ExperimentTag: """A tag for an experiment.""" @@ -2367,46 +1477,13 @@ def as_shallow_dict(self) -> dict: if self.is_dir is not None: body["is_dir"] = self.is_dir if self.path is not None: - body["path"] = self.path - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> FileInfo: - """Deserializes the FileInfo from a dictionary.""" - return cls(file_size=d.get("file_size", None), is_dir=d.get("is_dir", None), path=d.get("path", None)) - - -@dataclass -class FinalizeLoggedModelRequest: - status: LoggedModelStatus - """Whether or not the model is ready for use. ``"LOGGED_MODEL_UPLOAD_FAILED"`` indicates that - something went wrong when logging the model weights / agent code.""" - - model_id: Optional[str] = None - """The ID of the logged model to finalize.""" - - def as_dict(self) -> dict: - """Serializes the FinalizeLoggedModelRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.model_id is not None: - body["model_id"] = self.model_id - if self.status is not None: - body["status"] = self.status.value - return body - - def as_shallow_dict(self) -> dict: - """Serializes the FinalizeLoggedModelRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.model_id is not None: - body["model_id"] = self.model_id - if self.status is not None: - body["status"] = self.status + body["path"] = self.path return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> FinalizeLoggedModelRequest: - """Deserializes the FinalizeLoggedModelRequest from a dictionary.""" - return cls(model_id=d.get("model_id", None), status=_enum(d, "status", LoggedModelStatus)) + def from_dict(cls, d: Dict[str, Any]) -> FileInfo: + """Deserializes the FileInfo from a dictionary.""" + return 
cls(file_size=d.get("file_size", None), is_dir=d.get("is_dir", None), path=d.get("path", None)) @dataclass @@ -2563,38 +1640,6 @@ def from_dict(cls, d: Dict[str, Any]) -> GetExperimentResponse: return cls(experiment=_from_dict(d, "experiment", Experiment)) -@dataclass -class GetLatestVersionsRequest: - name: str - """Registered model unique name identifier.""" - - stages: Optional[List[str]] = None - """List of stages.""" - - def as_dict(self) -> dict: - """Serializes the GetLatestVersionsRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.name is not None: - body["name"] = self.name - if self.stages: - body["stages"] = [v for v in self.stages] - return body - - def as_shallow_dict(self) -> dict: - """Serializes the GetLatestVersionsRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.name is not None: - body["name"] = self.name - if self.stages: - body["stages"] = self.stages - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> GetLatestVersionsRequest: - """Deserializes the GetLatestVersionsRequest from a dictionary.""" - return cls(name=d.get("name", None), stages=d.get("stages", None)) - - @dataclass class GetLatestVersionsResponse: model_versions: Optional[List[ModelVersion]] = None @@ -3250,60 +2295,6 @@ def from_dict(cls, d: Dict[str, Any]) -> ListTransitionRequestsResponse: return cls(requests=_repeated_dict(d, "requests", Activity)) -@dataclass -class LogBatch: - metrics: Optional[List[Metric]] = None - """Metrics to log. A single request can contain up to 1000 metrics, and up to 1000 metrics, params, - and tags in total.""" - - params: Optional[List[Param]] = None - """Params to log. A single request can contain up to 100 params, and up to 1000 metrics, params, - and tags in total.""" - - run_id: Optional[str] = None - """ID of the run to log under""" - - tags: Optional[List[RunTag]] = None - """Tags to log. 
A single request can contain up to 100 tags, and up to 1000 metrics, params, and - tags in total.""" - - def as_dict(self) -> dict: - """Serializes the LogBatch into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.metrics: - body["metrics"] = [v.as_dict() for v in self.metrics] - if self.params: - body["params"] = [v.as_dict() for v in self.params] - if self.run_id is not None: - body["run_id"] = self.run_id - if self.tags: - body["tags"] = [v.as_dict() for v in self.tags] - return body - - def as_shallow_dict(self) -> dict: - """Serializes the LogBatch into a shallow dictionary of its immediate attributes.""" - body = {} - if self.metrics: - body["metrics"] = self.metrics - if self.params: - body["params"] = self.params - if self.run_id is not None: - body["run_id"] = self.run_id - if self.tags: - body["tags"] = self.tags - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> LogBatch: - """Deserializes the LogBatch from a dictionary.""" - return cls( - metrics=_repeated_dict(d, "metrics", Metric), - params=_repeated_dict(d, "params", Param), - run_id=d.get("run_id", None), - tags=_repeated_dict(d, "tags", RunTag), - ) - - @dataclass class LogBatchResponse: def as_dict(self) -> dict: @@ -3322,49 +2313,6 @@ def from_dict(cls, d: Dict[str, Any]) -> LogBatchResponse: return cls() -@dataclass -class LogInputs: - run_id: str - """ID of the run to log under""" - - datasets: Optional[List[DatasetInput]] = None - """Dataset inputs""" - - models: Optional[List[ModelInput]] = None - """Model inputs""" - - def as_dict(self) -> dict: - """Serializes the LogInputs into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.datasets: - body["datasets"] = [v.as_dict() for v in self.datasets] - if self.models: - body["models"] = [v.as_dict() for v in self.models] - if self.run_id is not None: - body["run_id"] = self.run_id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the LogInputs 
into a shallow dictionary of its immediate attributes.""" - body = {} - if self.datasets: - body["datasets"] = self.datasets - if self.models: - body["models"] = self.models - if self.run_id is not None: - body["run_id"] = self.run_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> LogInputs: - """Deserializes the LogInputs from a dictionary.""" - return cls( - datasets=_repeated_dict(d, "datasets", DatasetInput), - models=_repeated_dict(d, "models", ModelInput), - run_id=d.get("run_id", None), - ) - - @dataclass class LogInputsResponse: def as_dict(self) -> dict: @@ -3383,38 +2331,6 @@ def from_dict(cls, d: Dict[str, Any]) -> LogInputsResponse: return cls() -@dataclass -class LogLoggedModelParamsRequest: - model_id: Optional[str] = None - """The ID of the logged model to log params for.""" - - params: Optional[List[LoggedModelParameter]] = None - """Parameters to attach to the model.""" - - def as_dict(self) -> dict: - """Serializes the LogLoggedModelParamsRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.model_id is not None: - body["model_id"] = self.model_id - if self.params: - body["params"] = [v.as_dict() for v in self.params] - return body - - def as_shallow_dict(self) -> dict: - """Serializes the LogLoggedModelParamsRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.model_id is not None: - body["model_id"] = self.model_id - if self.params: - body["params"] = self.params - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> LogLoggedModelParamsRequest: - """Deserializes the LogLoggedModelParamsRequest from a dictionary.""" - return cls(model_id=d.get("model_id", None), params=_repeated_dict(d, "params", LoggedModelParameter)) - - @dataclass class LogLoggedModelParamsRequestResponse: def as_dict(self) -> dict: @@ -3433,100 +2349,6 @@ def from_dict(cls, d: Dict[str, Any]) -> LogLoggedModelParamsRequestResponse: return cls() -@dataclass 
-class LogMetric: - key: str - """Name of the metric.""" - - value: float - """Double value of the metric being logged.""" - - timestamp: int - """Unix timestamp in milliseconds at the time metric was logged.""" - - dataset_digest: Optional[str] = None - """Dataset digest of the dataset associated with the metric, e.g. an md5 hash of the dataset that - uniquely identifies it within datasets of the same name.""" - - dataset_name: Optional[str] = None - """The name of the dataset associated with the metric. E.g. “my.uc.table@2” - “nyc-taxi-dataset”, “fantastic-elk-3”""" - - model_id: Optional[str] = None - """ID of the logged model associated with the metric, if applicable""" - - run_id: Optional[str] = None - """ID of the run under which to log the metric. Must be provided.""" - - run_uuid: Optional[str] = None - """[Deprecated, use `run_id` instead] ID of the run under which to log the metric. This field will - be removed in a future MLflow version.""" - - step: Optional[int] = None - """Step at which to log the metric""" - - def as_dict(self) -> dict: - """Serializes the LogMetric into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.dataset_digest is not None: - body["dataset_digest"] = self.dataset_digest - if self.dataset_name is not None: - body["dataset_name"] = self.dataset_name - if self.key is not None: - body["key"] = self.key - if self.model_id is not None: - body["model_id"] = self.model_id - if self.run_id is not None: - body["run_id"] = self.run_id - if self.run_uuid is not None: - body["run_uuid"] = self.run_uuid - if self.step is not None: - body["step"] = self.step - if self.timestamp is not None: - body["timestamp"] = self.timestamp - if self.value is not None: - body["value"] = self.value - return body - - def as_shallow_dict(self) -> dict: - """Serializes the LogMetric into a shallow dictionary of its immediate attributes.""" - body = {} - if self.dataset_digest is not None: - body["dataset_digest"] = 
self.dataset_digest - if self.dataset_name is not None: - body["dataset_name"] = self.dataset_name - if self.key is not None: - body["key"] = self.key - if self.model_id is not None: - body["model_id"] = self.model_id - if self.run_id is not None: - body["run_id"] = self.run_id - if self.run_uuid is not None: - body["run_uuid"] = self.run_uuid - if self.step is not None: - body["step"] = self.step - if self.timestamp is not None: - body["timestamp"] = self.timestamp - if self.value is not None: - body["value"] = self.value - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> LogMetric: - """Deserializes the LogMetric from a dictionary.""" - return cls( - dataset_digest=d.get("dataset_digest", None), - dataset_name=d.get("dataset_name", None), - key=d.get("key", None), - model_id=d.get("model_id", None), - run_id=d.get("run_id", None), - run_uuid=d.get("run_uuid", None), - step=d.get("step", None), - timestamp=d.get("timestamp", None), - value=d.get("value", None), - ) - - @dataclass class LogMetricResponse: def as_dict(self) -> dict: @@ -3535,166 +2357,50 @@ def as_dict(self) -> dict: return body def as_shallow_dict(self) -> dict: - """Serializes the LogMetricResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> LogMetricResponse: - """Deserializes the LogMetricResponse from a dictionary.""" - return cls() - - -@dataclass -class LogModel: - model_json: Optional[str] = None - """MLmodel file in json format.""" - - run_id: Optional[str] = None - """ID of the run to log under""" - - def as_dict(self) -> dict: - """Serializes the LogModel into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.model_json is not None: - body["model_json"] = self.model_json - if self.run_id is not None: - body["run_id"] = self.run_id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the LogModel into a shallow dictionary of 
its immediate attributes.""" - body = {} - if self.model_json is not None: - body["model_json"] = self.model_json - if self.run_id is not None: - body["run_id"] = self.run_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> LogModel: - """Deserializes the LogModel from a dictionary.""" - return cls(model_json=d.get("model_json", None), run_id=d.get("run_id", None)) - - -@dataclass -class LogModelResponse: - def as_dict(self) -> dict: - """Serializes the LogModelResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the LogModelResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> LogModelResponse: - """Deserializes the LogModelResponse from a dictionary.""" - return cls() - - -@dataclass -class LogOutputsRequest: - run_id: str - """The ID of the Run from which to log outputs.""" - - models: Optional[List[ModelOutput]] = None - """The model outputs from the Run.""" - - def as_dict(self) -> dict: - """Serializes the LogOutputsRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.models: - body["models"] = [v.as_dict() for v in self.models] - if self.run_id is not None: - body["run_id"] = self.run_id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the LogOutputsRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.models: - body["models"] = self.models - if self.run_id is not None: - body["run_id"] = self.run_id + """Serializes the LogMetricResponse into a shallow dictionary of its immediate attributes.""" + body = {} return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> LogOutputsRequest: - """Deserializes the LogOutputsRequest from a dictionary.""" - return cls(models=_repeated_dict(d, "models", ModelOutput), run_id=d.get("run_id", None)) + def 
from_dict(cls, d: Dict[str, Any]) -> LogMetricResponse: + """Deserializes the LogMetricResponse from a dictionary.""" + return cls() @dataclass -class LogOutputsResponse: +class LogModelResponse: def as_dict(self) -> dict: - """Serializes the LogOutputsResponse into a dictionary suitable for use as a JSON request body.""" + """Serializes the LogModelResponse into a dictionary suitable for use as a JSON request body.""" body = {} return body def as_shallow_dict(self) -> dict: - """Serializes the LogOutputsResponse into a shallow dictionary of its immediate attributes.""" + """Serializes the LogModelResponse into a shallow dictionary of its immediate attributes.""" body = {} return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> LogOutputsResponse: - """Deserializes the LogOutputsResponse from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> LogModelResponse: + """Deserializes the LogModelResponse from a dictionary.""" return cls() @dataclass -class LogParam: - key: str - """Name of the param. Maximum size is 255 bytes.""" - - value: str - """String value of the param being logged. Maximum size is 500 bytes.""" - - run_id: Optional[str] = None - """ID of the run under which to log the param. Must be provided.""" - - run_uuid: Optional[str] = None - """[Deprecated, use `run_id` instead] ID of the run under which to log the param. 
This field will - be removed in a future MLflow version.""" - +class LogOutputsResponse: def as_dict(self) -> dict: - """Serializes the LogParam into a dictionary suitable for use as a JSON request body.""" + """Serializes the LogOutputsResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.key is not None: - body["key"] = self.key - if self.run_id is not None: - body["run_id"] = self.run_id - if self.run_uuid is not None: - body["run_uuid"] = self.run_uuid - if self.value is not None: - body["value"] = self.value return body def as_shallow_dict(self) -> dict: - """Serializes the LogParam into a shallow dictionary of its immediate attributes.""" + """Serializes the LogOutputsResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.key is not None: - body["key"] = self.key - if self.run_id is not None: - body["run_id"] = self.run_id - if self.run_uuid is not None: - body["run_uuid"] = self.run_uuid - if self.value is not None: - body["value"] = self.value return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> LogParam: - """Deserializes the LogParam from a dictionary.""" - return cls( - key=d.get("key", None), - run_id=d.get("run_id", None), - run_uuid=d.get("run_uuid", None), - value=d.get("value", None), - ) + def from_dict(cls, d: Dict[str, Any]) -> LogOutputsResponse: + """Deserializes the LogOutputsResponse from a dictionary.""" + return cls() @dataclass @@ -4671,6 +3377,9 @@ class OnlineStore: creator: Optional[str] = None """The email of the creator of the online store.""" + read_replica_count: Optional[int] = None + """The number of read replicas for the online store. 
Defaults to 0.""" + state: Optional[OnlineStoreState] = None """The current state of the online store.""" @@ -4685,6 +3394,8 @@ def as_dict(self) -> dict: body["creator"] = self.creator if self.name is not None: body["name"] = self.name + if self.read_replica_count is not None: + body["read_replica_count"] = self.read_replica_count if self.state is not None: body["state"] = self.state.value return body @@ -4700,6 +3411,8 @@ def as_shallow_dict(self) -> dict: body["creator"] = self.creator if self.name is not None: body["name"] = self.name + if self.read_replica_count is not None: + body["read_replica_count"] = self.read_replica_count if self.state is not None: body["state"] = self.state return body @@ -4712,6 +3425,7 @@ def from_dict(cls, d: Dict[str, Any]) -> OnlineStore: creation_time=d.get("creation_time", None), creator=d.get("creator", None), name=d.get("name", None), + read_replica_count=d.get("read_replica_count", None), state=_enum(d, "state", OnlineStoreState), ) @@ -4823,40 +3537,6 @@ class PublishSpecPublishMode(Enum): TRIGGERED = "TRIGGERED" -@dataclass -class PublishTableRequest: - publish_spec: PublishSpec - """The specification for publishing the online table from the source table.""" - - source_table_name: Optional[str] = None - """The full three-part (catalog, schema, table) name of the source table.""" - - def as_dict(self) -> dict: - """Serializes the PublishTableRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.publish_spec: - body["publish_spec"] = self.publish_spec.as_dict() - if self.source_table_name is not None: - body["source_table_name"] = self.source_table_name - return body - - def as_shallow_dict(self) -> dict: - """Serializes the PublishTableRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.publish_spec: - body["publish_spec"] = self.publish_spec - if self.source_table_name is not None: - body["source_table_name"] = self.source_table_name - return body - - 
@classmethod - def from_dict(cls, d: Dict[str, Any]) -> PublishTableRequest: - """Deserializes the PublishTableRequest from a dictionary.""" - return cls( - publish_spec=_from_dict(d, "publish_spec", PublishSpec), source_table_name=d.get("source_table_name", None) - ) - - @dataclass class PublishTableResponse: online_table_name: Optional[str] = None @@ -5121,40 +3801,6 @@ def from_dict(cls, d: Dict[str, Any]) -> RegisteredModelPermissionsDescription: ) -@dataclass -class RegisteredModelPermissionsRequest: - access_control_list: Optional[List[RegisteredModelAccessControlRequest]] = None - - registered_model_id: Optional[str] = None - """The registered model for which to get or manage permissions.""" - - def as_dict(self) -> dict: - """Serializes the RegisteredModelPermissionsRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.access_control_list: - body["access_control_list"] = [v.as_dict() for v in self.access_control_list] - if self.registered_model_id is not None: - body["registered_model_id"] = self.registered_model_id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the RegisteredModelPermissionsRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.access_control_list: - body["access_control_list"] = self.access_control_list - if self.registered_model_id is not None: - body["registered_model_id"] = self.registered_model_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> RegisteredModelPermissionsRequest: - """Deserializes the RegisteredModelPermissionsRequest from a dictionary.""" - return cls( - access_control_list=_repeated_dict(d, "access_control_list", RegisteredModelAccessControlRequest), - registered_model_id=d.get("registered_model_id", None), - ) - - class RegistryEmailSubscriptionType(Enum): """.. note:: Experimental: This entity may change or be removed in a future release without warning. 
Email subscription types for registry notifications: - `ALL_EVENTS`: Subscribed to all @@ -5312,67 +3958,6 @@ class RegistryWebhookStatus(Enum): TEST_MODE = "TEST_MODE" -@dataclass -class RejectTransitionRequest: - """Details required to identify and reject a model version stage transition request.""" - - name: str - """Name of the model.""" - - version: str - """Version of the model.""" - - stage: str - """Target stage of the transition. Valid values are: - - * `None`: The initial stage of a model version. - - * `Staging`: Staging or pre-production stage. - - * `Production`: Production stage. - - * `Archived`: Archived stage.""" - - comment: Optional[str] = None - """User-provided comment on the action.""" - - def as_dict(self) -> dict: - """Serializes the RejectTransitionRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.comment is not None: - body["comment"] = self.comment - if self.name is not None: - body["name"] = self.name - if self.stage is not None: - body["stage"] = self.stage - if self.version is not None: - body["version"] = self.version - return body - - def as_shallow_dict(self) -> dict: - """Serializes the RejectTransitionRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.comment is not None: - body["comment"] = self.comment - if self.name is not None: - body["name"] = self.name - if self.stage is not None: - body["stage"] = self.stage - if self.version is not None: - body["version"] = self.version - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> RejectTransitionRequest: - """Deserializes the RejectTransitionRequest from a dictionary.""" - return cls( - comment=d.get("comment", None), - name=d.get("name", None), - stage=d.get("stage", None), - version=d.get("version", None), - ) - - @dataclass class RejectTransitionRequestResponse: activity: Optional[Activity] = None @@ -5398,38 +3983,6 @@ def from_dict(cls, d: Dict[str, Any]) -> 
RejectTransitionRequestResponse: return cls(activity=_from_dict(d, "activity", Activity)) -@dataclass -class RenameModelRequest: - name: str - """Registered model unique name identifier.""" - - new_name: Optional[str] = None - """If provided, updates the name for this `registered_model`.""" - - def as_dict(self) -> dict: - """Serializes the RenameModelRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.name is not None: - body["name"] = self.name - if self.new_name is not None: - body["new_name"] = self.new_name - return body - - def as_shallow_dict(self) -> dict: - """Serializes the RenameModelRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.name is not None: - body["name"] = self.name - if self.new_name is not None: - body["new_name"] = self.new_name - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> RenameModelRequest: - """Deserializes the RenameModelRequest from a dictionary.""" - return cls(name=d.get("name", None), new_name=d.get("new_name", None)) - - @dataclass class RenameModelResponse: registered_model: Optional[Model] = None @@ -5454,31 +4007,6 @@ def from_dict(cls, d: Dict[str, Any]) -> RenameModelResponse: return cls(registered_model=_from_dict(d, "registered_model", Model)) -@dataclass -class RestoreExperiment: - experiment_id: str - """ID of the associated experiment.""" - - def as_dict(self) -> dict: - """Serializes the RestoreExperiment into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.experiment_id is not None: - body["experiment_id"] = self.experiment_id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the RestoreExperiment into a shallow dictionary of its immediate attributes.""" - body = {} - if self.experiment_id is not None: - body["experiment_id"] = self.experiment_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> RestoreExperiment: - """Deserializes the 
RestoreExperiment from a dictionary.""" - return cls(experiment_id=d.get("experiment_id", None)) - - @dataclass class RestoreExperimentResponse: def as_dict(self) -> dict: @@ -5497,31 +4025,6 @@ def from_dict(cls, d: Dict[str, Any]) -> RestoreExperimentResponse: return cls() -@dataclass -class RestoreRun: - run_id: str - """ID of the run to restore.""" - - def as_dict(self) -> dict: - """Serializes the RestoreRun into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.run_id is not None: - body["run_id"] = self.run_id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the RestoreRun into a shallow dictionary of its immediate attributes.""" - body = {} - if self.run_id is not None: - body["run_id"] = self.run_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> RestoreRun: - """Deserializes the RestoreRun from a dictionary.""" - return cls(run_id=d.get("run_id", None)) - - @dataclass class RestoreRunResponse: def as_dict(self) -> dict: @@ -5540,51 +4043,6 @@ def from_dict(cls, d: Dict[str, Any]) -> RestoreRunResponse: return cls() -@dataclass -class RestoreRuns: - experiment_id: str - """The ID of the experiment containing the runs to restore.""" - - min_timestamp_millis: int - """The minimum deletion timestamp in milliseconds since the UNIX epoch for restoring runs. Only - runs deleted no earlier than this timestamp are restored.""" - - max_runs: Optional[int] = None - """An optional positive integer indicating the maximum number of runs to restore. 
The maximum - allowed value for max_runs is 10000.""" - - def as_dict(self) -> dict: - """Serializes the RestoreRuns into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.experiment_id is not None: - body["experiment_id"] = self.experiment_id - if self.max_runs is not None: - body["max_runs"] = self.max_runs - if self.min_timestamp_millis is not None: - body["min_timestamp_millis"] = self.min_timestamp_millis - return body - - def as_shallow_dict(self) -> dict: - """Serializes the RestoreRuns into a shallow dictionary of its immediate attributes.""" - body = {} - if self.experiment_id is not None: - body["experiment_id"] = self.experiment_id - if self.max_runs is not None: - body["max_runs"] = self.max_runs - if self.min_timestamp_millis is not None: - body["min_timestamp_millis"] = self.min_timestamp_millis - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> RestoreRuns: - """Deserializes the RestoreRuns from a dictionary.""" - return cls( - experiment_id=d.get("experiment_id", None), - max_runs=d.get("max_runs", None), - min_timestamp_millis=d.get("min_timestamp_millis", None), - ) - - @dataclass class RestoreRunsResponse: runs_restored: Optional[int] = None @@ -5886,68 +4344,6 @@ def from_dict(cls, d: Dict[str, Any]) -> RunTag: return cls(key=d.get("key", None), value=d.get("value", None)) -@dataclass -class SearchExperiments: - filter: Optional[str] = None - """String representing a SQL filter condition (e.g. "name ILIKE 'my-experiment%'")""" - - max_results: Optional[int] = None - """Maximum number of experiments desired. Max threshold is 3000.""" - - order_by: Optional[List[str]] = None - """List of columns for ordering search results, which can include experiment name and last updated - timestamp with an optional "DESC" or "ASC" annotation, where "ASC" is the default. 
Tiebreaks are - done by experiment id DESC.""" - - page_token: Optional[str] = None - """Token indicating the page of experiments to fetch""" - - view_type: Optional[ViewType] = None - """Qualifier for type of experiments to be returned. If unspecified, return only active - experiments.""" - - def as_dict(self) -> dict: - """Serializes the SearchExperiments into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.filter is not None: - body["filter"] = self.filter - if self.max_results is not None: - body["max_results"] = self.max_results - if self.order_by: - body["order_by"] = [v for v in self.order_by] - if self.page_token is not None: - body["page_token"] = self.page_token - if self.view_type is not None: - body["view_type"] = self.view_type.value - return body - - def as_shallow_dict(self) -> dict: - """Serializes the SearchExperiments into a shallow dictionary of its immediate attributes.""" - body = {} - if self.filter is not None: - body["filter"] = self.filter - if self.max_results is not None: - body["max_results"] = self.max_results - if self.order_by: - body["order_by"] = self.order_by - if self.page_token is not None: - body["page_token"] = self.page_token - if self.view_type is not None: - body["view_type"] = self.view_type - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> SearchExperiments: - """Deserializes the SearchExperiments from a dictionary.""" - return cls( - filter=d.get("filter", None), - max_results=d.get("max_results", None), - order_by=d.get("order_by", None), - page_token=d.get("page_token", None), - view_type=_enum(d, "view_type", ViewType), - ) - - @dataclass class SearchExperimentsResponse: experiments: Optional[List[Experiment]] = None @@ -6060,87 +4456,13 @@ def as_shallow_dict(self) -> dict: return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> SearchLoggedModelsOrderBy: - """Deserializes the SearchLoggedModelsOrderBy from a dictionary.""" - return cls( - 
ascending=d.get("ascending", None), - dataset_digest=d.get("dataset_digest", None), - dataset_name=d.get("dataset_name", None), - field_name=d.get("field_name", None), - ) - - -@dataclass -class SearchLoggedModelsRequest: - datasets: Optional[List[SearchLoggedModelsDataset]] = None - """List of datasets on which to apply the metrics filter clauses. For example, a filter with - `metrics.accuracy > 0.9` and dataset info with name "test_dataset" means we will return all - logged models with accuracy > 0.9 on the test_dataset. Metric values from ANY dataset matching - the criteria are considered. If no datasets are specified, then metrics across all datasets are - considered in the filter.""" - - experiment_ids: Optional[List[str]] = None - """The IDs of the experiments in which to search for logged models.""" - - filter: Optional[str] = None - """A filter expression over logged model info and data that allows returning a subset of logged - models. The syntax is a subset of SQL that supports AND'ing together binary operations. - - Example: ``params.alpha < 0.3 AND metrics.accuracy > 0.9``.""" - - max_results: Optional[int] = None - """The maximum number of Logged Models to return. 
The maximum limit is 50.""" - - order_by: Optional[List[SearchLoggedModelsOrderBy]] = None - """The list of columns for ordering the results, with additional fields for sorting criteria.""" - - page_token: Optional[str] = None - """The token indicating the page of logged models to fetch.""" - - def as_dict(self) -> dict: - """Serializes the SearchLoggedModelsRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.datasets: - body["datasets"] = [v.as_dict() for v in self.datasets] - if self.experiment_ids: - body["experiment_ids"] = [v for v in self.experiment_ids] - if self.filter is not None: - body["filter"] = self.filter - if self.max_results is not None: - body["max_results"] = self.max_results - if self.order_by: - body["order_by"] = [v.as_dict() for v in self.order_by] - if self.page_token is not None: - body["page_token"] = self.page_token - return body - - def as_shallow_dict(self) -> dict: - """Serializes the SearchLoggedModelsRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.datasets: - body["datasets"] = self.datasets - if self.experiment_ids: - body["experiment_ids"] = self.experiment_ids - if self.filter is not None: - body["filter"] = self.filter - if self.max_results is not None: - body["max_results"] = self.max_results - if self.order_by: - body["order_by"] = self.order_by - if self.page_token is not None: - body["page_token"] = self.page_token - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> SearchLoggedModelsRequest: - """Deserializes the SearchLoggedModelsRequest from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> SearchLoggedModelsOrderBy: + """Deserializes the SearchLoggedModelsOrderBy from a dictionary.""" return cls( - datasets=_repeated_dict(d, "datasets", SearchLoggedModelsDataset), - experiment_ids=d.get("experiment_ids", None), - filter=d.get("filter", None), - max_results=d.get("max_results", None), - order_by=_repeated_dict(d, 
"order_by", SearchLoggedModelsOrderBy), - page_token=d.get("page_token", None), + ascending=d.get("ascending", None), + dataset_digest=d.get("dataset_digest", None), + dataset_name=d.get("dataset_name", None), + field_name=d.get("field_name", None), ) @@ -6246,86 +4568,6 @@ def from_dict(cls, d: Dict[str, Any]) -> SearchModelsResponse: ) -@dataclass -class SearchRuns: - experiment_ids: Optional[List[str]] = None - """List of experiment IDs to search over.""" - - filter: Optional[str] = None - """A filter expression over params, metrics, and tags, that allows returning a subset of runs. The - syntax is a subset of SQL that supports ANDing together binary operations between a param, - metric, or tag and a constant. - - Example: `metrics.rmse < 1 and params.model_class = 'LogisticRegression'` - - You can select columns with special characters (hyphen, space, period, etc.) by using double - quotes: `metrics."model class" = 'LinearRegression' and tags."user-name" = 'Tomas'` - - Supported operators are `=`, `!=`, `>`, `>=`, `<`, and `<=`.""" - - max_results: Optional[int] = None - """Maximum number of runs desired. Max threshold is 50000""" - - order_by: Optional[List[str]] = None - """List of columns to be ordered by, including attributes, params, metrics, and tags with an - optional `"DESC"` or `"ASC"` annotation, where `"ASC"` is the default. Example: `["params.input - DESC", "metrics.alpha ASC", "metrics.rmse"]`. Tiebreaks are done by start_time `DESC` followed - by `run_id` for runs with the same start time (and this is the default ordering criterion if - order_by is not provided).""" - - page_token: Optional[str] = None - """Token for the current page of runs.""" - - run_view_type: Optional[ViewType] = None - """Whether to display only active, only deleted, or all runs. 
Defaults to only active runs.""" - - def as_dict(self) -> dict: - """Serializes the SearchRuns into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.experiment_ids: - body["experiment_ids"] = [v for v in self.experiment_ids] - if self.filter is not None: - body["filter"] = self.filter - if self.max_results is not None: - body["max_results"] = self.max_results - if self.order_by: - body["order_by"] = [v for v in self.order_by] - if self.page_token is not None: - body["page_token"] = self.page_token - if self.run_view_type is not None: - body["run_view_type"] = self.run_view_type.value - return body - - def as_shallow_dict(self) -> dict: - """Serializes the SearchRuns into a shallow dictionary of its immediate attributes.""" - body = {} - if self.experiment_ids: - body["experiment_ids"] = self.experiment_ids - if self.filter is not None: - body["filter"] = self.filter - if self.max_results is not None: - body["max_results"] = self.max_results - if self.order_by: - body["order_by"] = self.order_by - if self.page_token is not None: - body["page_token"] = self.page_token - if self.run_view_type is not None: - body["run_view_type"] = self.run_view_type - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> SearchRuns: - """Deserializes the SearchRuns from a dictionary.""" - return cls( - experiment_ids=d.get("experiment_ids", None), - filter=d.get("filter", None), - max_results=d.get("max_results", None), - order_by=d.get("order_by", None), - page_token=d.get("page_token", None), - run_view_type=_enum(d, "run_view_type", ViewType), - ) - - @dataclass class SearchRunsResponse: next_page_token: Optional[str] = None @@ -6358,45 +4600,6 @@ def from_dict(cls, d: Dict[str, Any]) -> SearchRunsResponse: return cls(next_page_token=d.get("next_page_token", None), runs=_repeated_dict(d, "runs", Run)) -@dataclass -class SetExperimentTag: - experiment_id: str - """ID of the experiment under which to log the tag. 
Must be provided.""" - - key: str - """Name of the tag. Keys up to 250 bytes in size are supported.""" - - value: str - """String value of the tag being logged. Values up to 64KB in size are supported.""" - - def as_dict(self) -> dict: - """Serializes the SetExperimentTag into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.experiment_id is not None: - body["experiment_id"] = self.experiment_id - if self.key is not None: - body["key"] = self.key - if self.value is not None: - body["value"] = self.value - return body - - def as_shallow_dict(self) -> dict: - """Serializes the SetExperimentTag into a shallow dictionary of its immediate attributes.""" - body = {} - if self.experiment_id is not None: - body["experiment_id"] = self.experiment_id - if self.key is not None: - body["key"] = self.key - if self.value is not None: - body["value"] = self.value - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> SetExperimentTag: - """Deserializes the SetExperimentTag from a dictionary.""" - return cls(experiment_id=d.get("experiment_id", None), key=d.get("key", None), value=d.get("value", None)) - - @dataclass class SetExperimentTagResponse: def as_dict(self) -> dict: @@ -6415,38 +4618,6 @@ def from_dict(cls, d: Dict[str, Any]) -> SetExperimentTagResponse: return cls() -@dataclass -class SetLoggedModelTagsRequest: - model_id: Optional[str] = None - """The ID of the logged model to set the tags on.""" - - tags: Optional[List[LoggedModelTag]] = None - """The tags to set on the logged model.""" - - def as_dict(self) -> dict: - """Serializes the SetLoggedModelTagsRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.model_id is not None: - body["model_id"] = self.model_id - if self.tags: - body["tags"] = [v.as_dict() for v in self.tags] - return body - - def as_shallow_dict(self) -> dict: - """Serializes the SetLoggedModelTagsRequest into a shallow dictionary of its immediate attributes.""" - 
body = {} - if self.model_id is not None: - body["model_id"] = self.model_id - if self.tags: - body["tags"] = self.tags - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> SetLoggedModelTagsRequest: - """Deserializes the SetLoggedModelTagsRequest from a dictionary.""" - return cls(model_id=d.get("model_id", None), tags=_repeated_dict(d, "tags", LoggedModelTag)) - - @dataclass class SetLoggedModelTagsResponse: def as_dict(self) -> dict: @@ -6465,48 +4636,6 @@ def from_dict(cls, d: Dict[str, Any]) -> SetLoggedModelTagsResponse: return cls() -@dataclass -class SetModelTagRequest: - name: str - """Unique name of the model.""" - - key: str - """Name of the tag. Maximum size depends on storage backend. If a tag with this name already - exists, its preexisting value will be replaced by the specified `value`. All storage backends - are guaranteed to support key values up to 250 bytes in size.""" - - value: str - """String value of the tag being logged. Maximum size depends on storage backend. 
All storage - backends are guaranteed to support key values up to 5000 bytes in size.""" - - def as_dict(self) -> dict: - """Serializes the SetModelTagRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.key is not None: - body["key"] = self.key - if self.name is not None: - body["name"] = self.name - if self.value is not None: - body["value"] = self.value - return body - - def as_shallow_dict(self) -> dict: - """Serializes the SetModelTagRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.key is not None: - body["key"] = self.key - if self.name is not None: - body["name"] = self.name - if self.value is not None: - body["value"] = self.value - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> SetModelTagRequest: - """Deserializes the SetModelTagRequest from a dictionary.""" - return cls(key=d.get("key", None), name=d.get("name", None), value=d.get("value", None)) - - @dataclass class SetModelTagResponse: def as_dict(self) -> dict: @@ -6525,57 +4654,6 @@ def from_dict(cls, d: Dict[str, Any]) -> SetModelTagResponse: return cls() -@dataclass -class SetModelVersionTagRequest: - name: str - """Unique name of the model.""" - - version: str - """Model version number.""" - - key: str - """Name of the tag. Maximum size depends on storage backend. If a tag with this name already - exists, its preexisting value will be replaced by the specified `value`. All storage backends - are guaranteed to support key values up to 250 bytes in size.""" - - value: str - """String value of the tag being logged. Maximum size depends on storage backend. 
All storage - backends are guaranteed to support key values up to 5000 bytes in size.""" - - def as_dict(self) -> dict: - """Serializes the SetModelVersionTagRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.key is not None: - body["key"] = self.key - if self.name is not None: - body["name"] = self.name - if self.value is not None: - body["value"] = self.value - if self.version is not None: - body["version"] = self.version - return body - - def as_shallow_dict(self) -> dict: - """Serializes the SetModelVersionTagRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.key is not None: - body["key"] = self.key - if self.name is not None: - body["name"] = self.name - if self.value is not None: - body["value"] = self.value - if self.version is not None: - body["version"] = self.version - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> SetModelVersionTagRequest: - """Deserializes the SetModelVersionTagRequest from a dictionary.""" - return cls( - key=d.get("key", None), name=d.get("name", None), value=d.get("value", None), version=d.get("version", None) - ) - - @dataclass class SetModelVersionTagResponse: def as_dict(self) -> dict: @@ -6594,58 +4672,6 @@ def from_dict(cls, d: Dict[str, Any]) -> SetModelVersionTagResponse: return cls() -@dataclass -class SetTag: - key: str - """Name of the tag. Keys up to 250 bytes in size are supported.""" - - value: str - """String value of the tag being logged. Values up to 64KB in size are supported.""" - - run_id: Optional[str] = None - """ID of the run under which to log the tag. Must be provided.""" - - run_uuid: Optional[str] = None - """[Deprecated, use `run_id` instead] ID of the run under which to log the tag. 
This field will be - removed in a future MLflow version.""" - - def as_dict(self) -> dict: - """Serializes the SetTag into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.key is not None: - body["key"] = self.key - if self.run_id is not None: - body["run_id"] = self.run_id - if self.run_uuid is not None: - body["run_uuid"] = self.run_uuid - if self.value is not None: - body["value"] = self.value - return body - - def as_shallow_dict(self) -> dict: - """Serializes the SetTag into a shallow dictionary of its immediate attributes.""" - body = {} - if self.key is not None: - body["key"] = self.key - if self.run_id is not None: - body["run_id"] = self.run_id - if self.run_uuid is not None: - body["run_uuid"] = self.run_uuid - if self.value is not None: - body["value"] = self.value - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> SetTag: - """Deserializes the SetTag from a dictionary.""" - return cls( - key=d.get("key", None), - run_id=d.get("run_id", None), - run_uuid=d.get("run_uuid", None), - value=d.get("value", None), - ) - - @dataclass class SetTagResponse: def as_dict(self) -> dict: @@ -6677,140 +4703,36 @@ class Status(Enum): READY = "READY" -@dataclass -class TestRegistryWebhookRequest: - """Details required to test a registry webhook.""" - - id: str - """Webhook ID""" - - event: Optional[RegistryWebhookEvent] = None - """If `event` is specified, the test trigger uses the specified event. 
If `event` is not specified, - the test trigger uses a randomly chosen event associated with the webhook.""" - - def as_dict(self) -> dict: - """Serializes the TestRegistryWebhookRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.event is not None: - body["event"] = self.event.value - if self.id is not None: - body["id"] = self.id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the TestRegistryWebhookRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.event is not None: - body["event"] = self.event - if self.id is not None: - body["id"] = self.id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> TestRegistryWebhookRequest: - """Deserializes the TestRegistryWebhookRequest from a dictionary.""" - return cls(event=_enum(d, "event", RegistryWebhookEvent), id=d.get("id", None)) - - @dataclass class TestRegistryWebhookResponse: body: Optional[str] = None """Body of the response from the webhook URL""" - status_code: Optional[int] = None - """Status code returned by the webhook URL""" - - def as_dict(self) -> dict: - """Serializes the TestRegistryWebhookResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.body is not None: - body["body"] = self.body - if self.status_code is not None: - body["status_code"] = self.status_code - return body - - def as_shallow_dict(self) -> dict: - """Serializes the TestRegistryWebhookResponse into a shallow dictionary of its immediate attributes.""" - body = {} - if self.body is not None: - body["body"] = self.body - if self.status_code is not None: - body["status_code"] = self.status_code - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> TestRegistryWebhookResponse: - """Deserializes the TestRegistryWebhookResponse from a dictionary.""" - return cls(body=d.get("body", None), status_code=d.get("status_code", None)) - - -@dataclass -class 
TransitionModelVersionStageDatabricks: - """Details required to transition a model version's stage.""" - - name: str - """Name of the model.""" - - version: str - """Version of the model.""" - - stage: str - """Target stage of the transition. Valid values are: - - * `None`: The initial stage of a model version. - - * `Staging`: Staging or pre-production stage. - - * `Production`: Production stage. - - * `Archived`: Archived stage.""" - - archive_existing_versions: bool - """Specifies whether to archive all current model versions in the target stage.""" - - comment: Optional[str] = None - """User-provided comment on the action.""" + status_code: Optional[int] = None + """Status code returned by the webhook URL""" def as_dict(self) -> dict: - """Serializes the TransitionModelVersionStageDatabricks into a dictionary suitable for use as a JSON request body.""" + """Serializes the TestRegistryWebhookResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.archive_existing_versions is not None: - body["archive_existing_versions"] = self.archive_existing_versions - if self.comment is not None: - body["comment"] = self.comment - if self.name is not None: - body["name"] = self.name - if self.stage is not None: - body["stage"] = self.stage - if self.version is not None: - body["version"] = self.version + if self.body is not None: + body["body"] = self.body + if self.status_code is not None: + body["status_code"] = self.status_code return body def as_shallow_dict(self) -> dict: - """Serializes the TransitionModelVersionStageDatabricks into a shallow dictionary of its immediate attributes.""" + """Serializes the TestRegistryWebhookResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.archive_existing_versions is not None: - body["archive_existing_versions"] = self.archive_existing_versions - if self.comment is not None: - body["comment"] = self.comment - if self.name is not None: - body["name"] = self.name - if 
self.stage is not None: - body["stage"] = self.stage - if self.version is not None: - body["version"] = self.version + if self.body is not None: + body["body"] = self.body + if self.status_code is not None: + body["status_code"] = self.status_code return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> TransitionModelVersionStageDatabricks: - """Deserializes the TransitionModelVersionStageDatabricks from a dictionary.""" - return cls( - archive_existing_versions=d.get("archive_existing_versions", None), - comment=d.get("comment", None), - name=d.get("name", None), - stage=d.get("stage", None), - version=d.get("version", None), - ) + def from_dict(cls, d: Dict[str, Any]) -> TestRegistryWebhookResponse: + """Deserializes the TestRegistryWebhookResponse from a dictionary.""" + return cls(body=d.get("body", None), status_code=d.get("status_code", None)) @dataclass @@ -6908,40 +4830,6 @@ def from_dict(cls, d: Dict[str, Any]) -> TransitionStageResponse: return cls(model_version_databricks=_from_dict(d, "model_version_databricks", ModelVersionDatabricks)) -@dataclass -class UpdateComment: - """Details required to edit a comment on a model version.""" - - id: str - """Unique identifier of an activity""" - - comment: str - """User-provided comment on the action.""" - - def as_dict(self) -> dict: - """Serializes the UpdateComment into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.comment is not None: - body["comment"] = self.comment - if self.id is not None: - body["id"] = self.id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateComment into a shallow dictionary of its immediate attributes.""" - body = {} - if self.comment is not None: - body["comment"] = self.comment - if self.id is not None: - body["id"] = self.id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateComment: - """Deserializes the UpdateComment from a dictionary.""" - return cls(comment=d.get("comment", 
None), id=d.get("id", None)) - - @dataclass class UpdateCommentResponse: comment: Optional[CommentObject] = None @@ -6967,38 +4855,6 @@ def from_dict(cls, d: Dict[str, Any]) -> UpdateCommentResponse: return cls(comment=_from_dict(d, "comment", CommentObject)) -@dataclass -class UpdateExperiment: - experiment_id: str - """ID of the associated experiment.""" - - new_name: Optional[str] = None - """If provided, the experiment's name is changed to the new name. The new name must be unique.""" - - def as_dict(self) -> dict: - """Serializes the UpdateExperiment into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.experiment_id is not None: - body["experiment_id"] = self.experiment_id - if self.new_name is not None: - body["new_name"] = self.new_name - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateExperiment into a shallow dictionary of its immediate attributes.""" - body = {} - if self.experiment_id is not None: - body["experiment_id"] = self.experiment_id - if self.new_name is not None: - body["new_name"] = self.new_name - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateExperiment: - """Deserializes the UpdateExperiment from a dictionary.""" - return cls(experiment_id=d.get("experiment_id", None), new_name=d.get("new_name", None)) - - @dataclass class UpdateExperimentResponse: def as_dict(self) -> dict: @@ -7017,38 +4873,6 @@ def from_dict(cls, d: Dict[str, Any]) -> UpdateExperimentResponse: return cls() -@dataclass -class UpdateModelRequest: - name: str - """Registered model unique name identifier.""" - - description: Optional[str] = None - """If provided, updates the description for this `registered_model`.""" - - def as_dict(self) -> dict: - """Serializes the UpdateModelRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.description is not None: - body["description"] = self.description - if self.name is not None: - body["name"] = 
self.name - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateModelRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.description is not None: - body["description"] = self.description - if self.name is not None: - body["name"] = self.name - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateModelRequest: - """Deserializes the UpdateModelRequest from a dictionary.""" - return cls(description=d.get("description", None), name=d.get("name", None)) - - @dataclass class UpdateModelResponse: registered_model: Optional[Model] = None @@ -7073,45 +4897,6 @@ def from_dict(cls, d: Dict[str, Any]) -> UpdateModelResponse: return cls(registered_model=_from_dict(d, "registered_model", Model)) -@dataclass -class UpdateModelVersionRequest: - name: str - """Name of the registered model""" - - version: str - """Model version number""" - - description: Optional[str] = None - """If provided, updates the description for this `registered_model`.""" - - def as_dict(self) -> dict: - """Serializes the UpdateModelVersionRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.description is not None: - body["description"] = self.description - if self.name is not None: - body["name"] = self.name - if self.version is not None: - body["version"] = self.version - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateModelVersionRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.description is not None: - body["description"] = self.description - if self.name is not None: - body["name"] = self.name - if self.version is not None: - body["version"] = self.version - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateModelVersionRequest: - """Deserializes the UpdateModelVersionRequest from a dictionary.""" - return cls(description=d.get("description", None), name=d.get("name", None), 
version=d.get("version", None)) - - @dataclass class UpdateModelVersionResponse: model_version: Optional[ModelVersion] = None @@ -7137,160 +4922,6 @@ def from_dict(cls, d: Dict[str, Any]) -> UpdateModelVersionResponse: return cls(model_version=_from_dict(d, "model_version", ModelVersion)) -@dataclass -class UpdateRegistryWebhook: - """Details required to update a registry webhook. Only the fields that need to be updated should be - specified, and both `http_url_spec` and `job_spec` should not be specified in the same request.""" - - id: str - """Webhook ID""" - - description: Optional[str] = None - """User-specified description for the webhook.""" - - events: Optional[List[RegistryWebhookEvent]] = None - """Events that can trigger a registry webhook: * `MODEL_VERSION_CREATED`: A new model version was - created for the associated model. - - * `MODEL_VERSION_TRANSITIONED_STAGE`: A model version’s stage was changed. - - * `TRANSITION_REQUEST_CREATED`: A user requested a model version’s stage be transitioned. - - * `COMMENT_CREATED`: A user wrote a comment on a registered model. - - * `REGISTERED_MODEL_CREATED`: A new registered model was created. This event type can only be - specified for a registry-wide webhook, which can be created by not specifying a model name in - the create request. - - * `MODEL_VERSION_TAG_SET`: A user set a tag on the model version. - - * `MODEL_VERSION_TRANSITIONED_TO_STAGING`: A model version was transitioned to staging. - - * `MODEL_VERSION_TRANSITIONED_TO_PRODUCTION`: A model version was transitioned to production. - - * `MODEL_VERSION_TRANSITIONED_TO_ARCHIVED`: A model version was archived. - - * `TRANSITION_REQUEST_TO_STAGING_CREATED`: A user requested a model version be transitioned to - staging. - - * `TRANSITION_REQUEST_TO_PRODUCTION_CREATED`: A user requested a model version be transitioned - to production. 
- - * `TRANSITION_REQUEST_TO_ARCHIVED_CREATED`: A user requested a model version be archived.""" - - http_url_spec: Optional[HttpUrlSpec] = None - - job_spec: Optional[JobSpec] = None - - status: Optional[RegistryWebhookStatus] = None - - def as_dict(self) -> dict: - """Serializes the UpdateRegistryWebhook into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.description is not None: - body["description"] = self.description - if self.events: - body["events"] = [v.value for v in self.events] - if self.http_url_spec: - body["http_url_spec"] = self.http_url_spec.as_dict() - if self.id is not None: - body["id"] = self.id - if self.job_spec: - body["job_spec"] = self.job_spec.as_dict() - if self.status is not None: - body["status"] = self.status.value - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateRegistryWebhook into a shallow dictionary of its immediate attributes.""" - body = {} - if self.description is not None: - body["description"] = self.description - if self.events: - body["events"] = self.events - if self.http_url_spec: - body["http_url_spec"] = self.http_url_spec - if self.id is not None: - body["id"] = self.id - if self.job_spec: - body["job_spec"] = self.job_spec - if self.status is not None: - body["status"] = self.status - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateRegistryWebhook: - """Deserializes the UpdateRegistryWebhook from a dictionary.""" - return cls( - description=d.get("description", None), - events=_repeated_enum(d, "events", RegistryWebhookEvent), - http_url_spec=_from_dict(d, "http_url_spec", HttpUrlSpec), - id=d.get("id", None), - job_spec=_from_dict(d, "job_spec", JobSpec), - status=_enum(d, "status", RegistryWebhookStatus), - ) - - -@dataclass -class UpdateRun: - end_time: Optional[int] = None - """Unix timestamp in milliseconds of when the run ended.""" - - run_id: Optional[str] = None - """ID of the run to update. 
Must be provided.""" - - run_name: Optional[str] = None - """Updated name of the run.""" - - run_uuid: Optional[str] = None - """[Deprecated, use `run_id` instead] ID of the run to update. This field will be removed in a - future MLflow version.""" - - status: Optional[UpdateRunStatus] = None - """Updated status of the run.""" - - def as_dict(self) -> dict: - """Serializes the UpdateRun into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.end_time is not None: - body["end_time"] = self.end_time - if self.run_id is not None: - body["run_id"] = self.run_id - if self.run_name is not None: - body["run_name"] = self.run_name - if self.run_uuid is not None: - body["run_uuid"] = self.run_uuid - if self.status is not None: - body["status"] = self.status.value - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateRun into a shallow dictionary of its immediate attributes.""" - body = {} - if self.end_time is not None: - body["end_time"] = self.end_time - if self.run_id is not None: - body["run_id"] = self.run_id - if self.run_name is not None: - body["run_name"] = self.run_name - if self.run_uuid is not None: - body["run_uuid"] = self.run_uuid - if self.status is not None: - body["status"] = self.status - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateRun: - """Deserializes the UpdateRun from a dictionary.""" - return cls( - end_time=d.get("end_time", None), - run_id=d.get("run_id", None), - run_name=d.get("run_name", None), - run_uuid=d.get("run_uuid", None), - status=_enum(d, "status", UpdateRunStatus), - ) - - @dataclass class UpdateRunResponse: run_info: Optional[RunInfo] = None diff --git a/databricks/sdk/service/oauth2.py b/databricks/sdk/service/oauth2.py index a1d36a80c..4e762e763 100755 --- a/databricks/sdk/service/oauth2.py +++ b/databricks/sdk/service/oauth2.py @@ -14,75 +14,6 @@ # all definitions in this file are in alphabetical order -@dataclass -class 
CreateCustomAppIntegration: - confidential: Optional[bool] = None - """This field indicates whether an OAuth client secret is required to authenticate this client.""" - - name: Optional[str] = None - """Name of the custom OAuth app""" - - redirect_urls: Optional[List[str]] = None - """List of OAuth redirect urls""" - - scopes: Optional[List[str]] = None - """OAuth scopes granted to the application. Supported scopes: all-apis, sql, offline_access, - openid, profile, email.""" - - token_access_policy: Optional[TokenAccessPolicy] = None - """Token access policy""" - - user_authorized_scopes: Optional[List[str]] = None - """Scopes that will need to be consented by end user to mint the access token. If the user does not - authorize the access token will not be minted. Must be a subset of scopes.""" - - def as_dict(self) -> dict: - """Serializes the CreateCustomAppIntegration into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.confidential is not None: - body["confidential"] = self.confidential - if self.name is not None: - body["name"] = self.name - if self.redirect_urls: - body["redirect_urls"] = [v for v in self.redirect_urls] - if self.scopes: - body["scopes"] = [v for v in self.scopes] - if self.token_access_policy: - body["token_access_policy"] = self.token_access_policy.as_dict() - if self.user_authorized_scopes: - body["user_authorized_scopes"] = [v for v in self.user_authorized_scopes] - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreateCustomAppIntegration into a shallow dictionary of its immediate attributes.""" - body = {} - if self.confidential is not None: - body["confidential"] = self.confidential - if self.name is not None: - body["name"] = self.name - if self.redirect_urls: - body["redirect_urls"] = self.redirect_urls - if self.scopes: - body["scopes"] = self.scopes - if self.token_access_policy: - body["token_access_policy"] = self.token_access_policy - if self.user_authorized_scopes: - 
body["user_authorized_scopes"] = self.user_authorized_scopes - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateCustomAppIntegration: - """Deserializes the CreateCustomAppIntegration from a dictionary.""" - return cls( - confidential=d.get("confidential", None), - name=d.get("name", None), - redirect_urls=d.get("redirect_urls", None), - scopes=d.get("scopes", None), - token_access_policy=_from_dict(d, "token_access_policy", TokenAccessPolicy), - user_authorized_scopes=d.get("user_authorized_scopes", None), - ) - - @dataclass class CreateCustomAppIntegrationOutput: client_id: Optional[str] = None @@ -127,40 +58,6 @@ def from_dict(cls, d: Dict[str, Any]) -> CreateCustomAppIntegrationOutput: ) -@dataclass -class CreatePublishedAppIntegration: - app_id: Optional[str] = None - """App id of the OAuth published app integration. For example power-bi, tableau-deskop""" - - token_access_policy: Optional[TokenAccessPolicy] = None - """Token access policy""" - - def as_dict(self) -> dict: - """Serializes the CreatePublishedAppIntegration into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.app_id is not None: - body["app_id"] = self.app_id - if self.token_access_policy: - body["token_access_policy"] = self.token_access_policy.as_dict() - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreatePublishedAppIntegration into a shallow dictionary of its immediate attributes.""" - body = {} - if self.app_id is not None: - body["app_id"] = self.app_id - if self.token_access_policy: - body["token_access_policy"] = self.token_access_policy - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreatePublishedAppIntegration: - """Deserializes the CreatePublishedAppIntegration from a dictionary.""" - return cls( - app_id=d.get("app_id", None), token_access_policy=_from_dict(d, "token_access_policy", TokenAccessPolicy) - ) - - @dataclass class CreatePublishedAppIntegrationOutput: 
integration_id: Optional[str] = None @@ -186,39 +83,6 @@ def from_dict(cls, d: Dict[str, Any]) -> CreatePublishedAppIntegrationOutput: return cls(integration_id=d.get("integration_id", None)) -@dataclass -class CreateServicePrincipalSecretRequest: - lifetime: Optional[str] = None - """The lifetime of the secret in seconds. If this parameter is not provided, the secret will have a - default lifetime of 730 days (63072000s).""" - - service_principal_id: Optional[int] = None - """The service principal ID.""" - - def as_dict(self) -> dict: - """Serializes the CreateServicePrincipalSecretRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.lifetime is not None: - body["lifetime"] = self.lifetime - if self.service_principal_id is not None: - body["service_principal_id"] = self.service_principal_id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreateServicePrincipalSecretRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.lifetime is not None: - body["lifetime"] = self.lifetime - if self.service_principal_id is not None: - body["service_principal_id"] = self.service_principal_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateServicePrincipalSecretRequest: - """Deserializes the CreateServicePrincipalSecretRequest from a dictionary.""" - return cls(lifetime=d.get("lifetime", None), service_principal_id=d.get("service_principal_id", None)) - - @dataclass class CreateServicePrincipalSecretResponse: create_time: Optional[str] = None @@ -1032,66 +896,6 @@ def from_dict(cls, d: Dict[str, Any]) -> TokenAccessPolicy: ) -@dataclass -class UpdateCustomAppIntegration: - integration_id: Optional[str] = None - - redirect_urls: Optional[List[str]] = None - """List of OAuth redirect urls to be updated in the custom OAuth app integration""" - - scopes: Optional[List[str]] = None - """List of OAuth scopes to be updated in the custom OAuth app integration, 
similar to redirect URIs - this will fully replace the existing values instead of appending""" - - token_access_policy: Optional[TokenAccessPolicy] = None - """Token access policy to be updated in the custom OAuth app integration""" - - user_authorized_scopes: Optional[List[str]] = None - """Scopes that will need to be consented by end user to mint the access token. If the user does not - authorize the access token will not be minted. Must be a subset of scopes.""" - - def as_dict(self) -> dict: - """Serializes the UpdateCustomAppIntegration into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.integration_id is not None: - body["integration_id"] = self.integration_id - if self.redirect_urls: - body["redirect_urls"] = [v for v in self.redirect_urls] - if self.scopes: - body["scopes"] = [v for v in self.scopes] - if self.token_access_policy: - body["token_access_policy"] = self.token_access_policy.as_dict() - if self.user_authorized_scopes: - body["user_authorized_scopes"] = [v for v in self.user_authorized_scopes] - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateCustomAppIntegration into a shallow dictionary of its immediate attributes.""" - body = {} - if self.integration_id is not None: - body["integration_id"] = self.integration_id - if self.redirect_urls: - body["redirect_urls"] = self.redirect_urls - if self.scopes: - body["scopes"] = self.scopes - if self.token_access_policy: - body["token_access_policy"] = self.token_access_policy - if self.user_authorized_scopes: - body["user_authorized_scopes"] = self.user_authorized_scopes - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateCustomAppIntegration: - """Deserializes the UpdateCustomAppIntegration from a dictionary.""" - return cls( - integration_id=d.get("integration_id", None), - redirect_urls=d.get("redirect_urls", None), - scopes=d.get("scopes", None), - token_access_policy=_from_dict(d, "token_access_policy", 
TokenAccessPolicy), - user_authorized_scopes=d.get("user_authorized_scopes", None), - ) - - @dataclass class UpdateCustomAppIntegrationOutput: def as_dict(self) -> dict: @@ -1110,40 +914,6 @@ def from_dict(cls, d: Dict[str, Any]) -> UpdateCustomAppIntegrationOutput: return cls() -@dataclass -class UpdatePublishedAppIntegration: - integration_id: Optional[str] = None - - token_access_policy: Optional[TokenAccessPolicy] = None - """Token access policy to be updated in the published OAuth app integration""" - - def as_dict(self) -> dict: - """Serializes the UpdatePublishedAppIntegration into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.integration_id is not None: - body["integration_id"] = self.integration_id - if self.token_access_policy: - body["token_access_policy"] = self.token_access_policy.as_dict() - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdatePublishedAppIntegration into a shallow dictionary of its immediate attributes.""" - body = {} - if self.integration_id is not None: - body["integration_id"] = self.integration_id - if self.token_access_policy: - body["token_access_policy"] = self.token_access_policy - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdatePublishedAppIntegration: - """Deserializes the UpdatePublishedAppIntegration from a dictionary.""" - return cls( - integration_id=d.get("integration_id", None), - token_access_policy=_from_dict(d, "token_access_policy", TokenAccessPolicy), - ) - - @dataclass class UpdatePublishedAppIntegrationOutput: def as_dict(self) -> dict: @@ -1905,7 +1675,7 @@ class ServicePrincipalSecretsAPI: You can use the generated secrets to obtain OAuth access tokens for a service principal, which can then be used to access Databricks Accounts and Workspace APIs. For more information, see [Authentication using - OAuth tokens for service principals], + OAuth tokens for service principals]. 
In addition, the generated secrets can be used to configure the Databricks Terraform Provider to authenticate with the service principal. For more information, see [Databricks Terraform Provider]. @@ -1918,11 +1688,11 @@ def __init__(self, api_client): self._api = api_client def create( - self, service_principal_id: int, *, lifetime: Optional[str] = None + self, service_principal_id: str, *, lifetime: Optional[str] = None ) -> CreateServicePrincipalSecretResponse: """Create a secret for the given service principal. - :param service_principal_id: int + :param service_principal_id: str The service principal ID. :param lifetime: str (optional) The lifetime of the secret in seconds. If this parameter is not provided, the secret will have a @@ -1946,10 +1716,10 @@ def create( ) return CreateServicePrincipalSecretResponse.from_dict(res) - def delete(self, service_principal_id: int, secret_id: str): + def delete(self, service_principal_id: str, secret_id: str): """Delete a secret from the given service principal. - :param service_principal_id: int + :param service_principal_id: str The service principal ID. :param secret_id: str The secret ID. @@ -1965,12 +1735,15 @@ def delete(self, service_principal_id: int, secret_id: str): headers=headers, ) - def list(self, service_principal_id: int, *, page_token: Optional[str] = None) -> Iterator[SecretInfo]: + def list( + self, service_principal_id: str, *, page_size: Optional[int] = None, page_token: Optional[str] = None + ) -> Iterator[SecretInfo]: """List all secrets associated with the given service principal. This operation only returns information about the secrets themselves and does not include the secret values. - :param service_principal_id: int + :param service_principal_id: str The service principal ID. + :param page_size: int (optional) :param page_token: str (optional) An opaque page token which was the `next_page_token` in the response of the previous request to list the secrets for this service principal. 
Provide this token to retrieve the next page of secret @@ -1983,6 +1756,8 @@ def list(self, service_principal_id: int, *, page_token: Optional[str] = None) - """ query = {} + if page_size is not None: + query["page_size"] = page_size if page_token is not None: query["page_token"] = page_token headers = { @@ -2002,3 +1777,113 @@ def list(self, service_principal_id: int, *, page_token: Optional[str] = None) - if "next_page_token" not in json or not json["next_page_token"]: return query["page_token"] = json["next_page_token"] + + +class ServicePrincipalSecretsProxyAPI: + """These APIs enable administrators to manage service principal secrets at the workspace level. To use these + APIs, the service principal must be first added to the current workspace. + + You can use the generated secrets to obtain OAuth access tokens for a service principal, which can then be + used to access Databricks Accounts and Workspace APIs. For more information, see [Authentication using + OAuth tokens for service principals]. + + In addition, the generated secrets can be used to configure the Databricks Terraform Providerto + authenticate with the service principal. For more information, see [Databricks Terraform Provider]. + + [Authentication using OAuth tokens for service principals]: https://docs.databricks.com/dev-tools/authentication-oauth.html + [Databricks Terraform Provider]: https://github.com/databricks/terraform-provider-databricks/blob/master/docs/index.md#authenticating-with-service-principal + """ + + def __init__(self, api_client): + self._api = api_client + + def create( + self, service_principal_id: str, *, lifetime: Optional[str] = None + ) -> CreateServicePrincipalSecretResponse: + """Create a secret for the given service principal. + + :param service_principal_id: str + The service principal ID. + :param lifetime: str (optional) + The lifetime of the secret in seconds. If this parameter is not provided, the secret will have a + default lifetime of 730 days (63072000s). 
+ + :returns: :class:`CreateServicePrincipalSecretResponse` + """ + body = {} + if lifetime is not None: + body["lifetime"] = lifetime + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "POST", + f"/api/2.0/accounts/servicePrincipals/{service_principal_id}/credentials/secrets", + body=body, + headers=headers, + ) + return CreateServicePrincipalSecretResponse.from_dict(res) + + def delete(self, service_principal_id: str, secret_id: str): + """Delete a secret from the given service principal. + + :param service_principal_id: str + The service principal ID. + :param secret_id: str + The secret ID. + + + """ + + headers = {} + + self._api.do( + "DELETE", + f"/api/2.0/accounts/servicePrincipals/{service_principal_id}/credentials/secrets/{secret_id}", + headers=headers, + ) + + def list( + self, service_principal_id: str, *, page_size: Optional[int] = None, page_token: Optional[str] = None + ) -> Iterator[SecretInfo]: + """List all secrets associated with the given service principal. This operation only returns information + about the secrets themselves and does not include the secret values. + + :param service_principal_id: str + The service principal ID. + :param page_size: int (optional) + :param page_token: str (optional) + An opaque page token which was the `next_page_token` in the response of the previous request to list + the secrets for this service principal. Provide this token to retrieve the next page of secret + entries. When providing a `page_token`, all other parameters provided to the request must match the + previous request. To list all of the secrets for a service principal, it is necessary to continue + requesting pages of entries until the response contains no `next_page_token`. Note that the number + of entries returned must not be used to determine when the listing is complete. 
+ + :returns: Iterator over :class:`SecretInfo` + """ + + query = {} + if page_size is not None: + query["page_size"] = page_size + if page_token is not None: + query["page_token"] = page_token + headers = { + "Accept": "application/json", + } + + while True: + json = self._api.do( + "GET", + f"/api/2.0/accounts/servicePrincipals/{service_principal_id}/credentials/secrets", + query=query, + headers=headers, + ) + if "secrets" in json: + for v in json["secrets"]: + yield SecretInfo.from_dict(v) + if "next_page_token" not in json or not json["next_page_token"]: + return + query["page_token"] = json["next_page_token"] diff --git a/databricks/sdk/service/pipelines.py b/databricks/sdk/service/pipelines.py index d848d4557..74c9cdd13 100755 --- a/databricks/sdk/service/pipelines.py +++ b/databricks/sdk/service/pipelines.py @@ -21,273 +21,6 @@ # all definitions in this file are in alphabetical order -@dataclass -class CreatePipeline: - allow_duplicate_names: Optional[bool] = None - """If false, deployment will fail if name conflicts with that of another pipeline.""" - - budget_policy_id: Optional[str] = None - """Budget policy of this pipeline.""" - - catalog: Optional[str] = None - """A catalog in Unity Catalog to publish data from this pipeline to. If `target` is specified, - tables in this pipeline are published to a `target` schema inside `catalog` (for example, - `catalog`.`target`.`table`). If `target` is not specified, no data is published to Unity - Catalog.""" - - channel: Optional[str] = None - """DLT Release Channel that specifies which version to use.""" - - clusters: Optional[List[PipelineCluster]] = None - """Cluster settings for this pipeline deployment.""" - - configuration: Optional[Dict[str, str]] = None - """String-String configuration for this pipeline execution.""" - - continuous: Optional[bool] = None - """Whether the pipeline is continuous or triggered. 
This replaces `trigger`.""" - - deployment: Optional[PipelineDeployment] = None - """Deployment type of this pipeline.""" - - development: Optional[bool] = None - """Whether the pipeline is in Development mode. Defaults to false.""" - - dry_run: Optional[bool] = None - - edition: Optional[str] = None - """Pipeline product edition.""" - - environment: Optional[PipelinesEnvironment] = None - """Environment specification for this pipeline used to install dependencies.""" - - event_log: Optional[EventLogSpec] = None - """Event log configuration for this pipeline""" - - filters: Optional[Filters] = None - """Filters on which Pipeline packages to include in the deployed graph.""" - - gateway_definition: Optional[IngestionGatewayPipelineDefinition] = None - """The definition of a gateway pipeline to support change data capture.""" - - id: Optional[str] = None - """Unique identifier for this pipeline.""" - - ingestion_definition: Optional[IngestionPipelineDefinition] = None - """The configuration for a managed ingestion pipeline. These settings cannot be used with the - 'libraries', 'schema', 'target', or 'catalog' settings.""" - - libraries: Optional[List[PipelineLibrary]] = None - """Libraries or code needed by this deployment.""" - - name: Optional[str] = None - """Friendly identifier for this pipeline.""" - - notifications: Optional[List[Notifications]] = None - """List of notification settings for this pipeline.""" - - photon: Optional[bool] = None - """Whether Photon is enabled for this pipeline.""" - - restart_window: Optional[RestartWindow] = None - """Restart window of this pipeline.""" - - root_path: Optional[str] = None - """Root path for this pipeline. 
This is used as the root directory when editing the pipeline in the - Databricks user interface and it is added to sys.path when executing Python sources during - pipeline execution.""" - - run_as: Optional[RunAs] = None - - schema: Optional[str] = None - """The default schema (database) where tables are read from or published to.""" - - serverless: Optional[bool] = None - """Whether serverless compute is enabled for this pipeline.""" - - storage: Optional[str] = None - """DBFS root directory for storing checkpoints and tables.""" - - tags: Optional[Dict[str, str]] = None - """A map of tags associated with the pipeline. These are forwarded to the cluster as cluster tags, - and are therefore subject to the same limitations. A maximum of 25 tags can be added to the - pipeline.""" - - target: Optional[str] = None - """Target schema (database) to add tables in this pipeline to. Exactly one of `schema` or `target` - must be specified. To publish to Unity Catalog, also specify `catalog`. This legacy field is - deprecated for pipeline creation in favor of the `schema` field.""" - - trigger: Optional[PipelineTrigger] = None - """Which pipeline trigger to use. 
Deprecated: Use `continuous` instead.""" - - def as_dict(self) -> dict: - """Serializes the CreatePipeline into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.allow_duplicate_names is not None: - body["allow_duplicate_names"] = self.allow_duplicate_names - if self.budget_policy_id is not None: - body["budget_policy_id"] = self.budget_policy_id - if self.catalog is not None: - body["catalog"] = self.catalog - if self.channel is not None: - body["channel"] = self.channel - if self.clusters: - body["clusters"] = [v.as_dict() for v in self.clusters] - if self.configuration: - body["configuration"] = self.configuration - if self.continuous is not None: - body["continuous"] = self.continuous - if self.deployment: - body["deployment"] = self.deployment.as_dict() - if self.development is not None: - body["development"] = self.development - if self.dry_run is not None: - body["dry_run"] = self.dry_run - if self.edition is not None: - body["edition"] = self.edition - if self.environment: - body["environment"] = self.environment.as_dict() - if self.event_log: - body["event_log"] = self.event_log.as_dict() - if self.filters: - body["filters"] = self.filters.as_dict() - if self.gateway_definition: - body["gateway_definition"] = self.gateway_definition.as_dict() - if self.id is not None: - body["id"] = self.id - if self.ingestion_definition: - body["ingestion_definition"] = self.ingestion_definition.as_dict() - if self.libraries: - body["libraries"] = [v.as_dict() for v in self.libraries] - if self.name is not None: - body["name"] = self.name - if self.notifications: - body["notifications"] = [v.as_dict() for v in self.notifications] - if self.photon is not None: - body["photon"] = self.photon - if self.restart_window: - body["restart_window"] = self.restart_window.as_dict() - if self.root_path is not None: - body["root_path"] = self.root_path - if self.run_as: - body["run_as"] = self.run_as.as_dict() - if self.schema is not None: - body["schema"] 
= self.schema - if self.serverless is not None: - body["serverless"] = self.serverless - if self.storage is not None: - body["storage"] = self.storage - if self.tags: - body["tags"] = self.tags - if self.target is not None: - body["target"] = self.target - if self.trigger: - body["trigger"] = self.trigger.as_dict() - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreatePipeline into a shallow dictionary of its immediate attributes.""" - body = {} - if self.allow_duplicate_names is not None: - body["allow_duplicate_names"] = self.allow_duplicate_names - if self.budget_policy_id is not None: - body["budget_policy_id"] = self.budget_policy_id - if self.catalog is not None: - body["catalog"] = self.catalog - if self.channel is not None: - body["channel"] = self.channel - if self.clusters: - body["clusters"] = self.clusters - if self.configuration: - body["configuration"] = self.configuration - if self.continuous is not None: - body["continuous"] = self.continuous - if self.deployment: - body["deployment"] = self.deployment - if self.development is not None: - body["development"] = self.development - if self.dry_run is not None: - body["dry_run"] = self.dry_run - if self.edition is not None: - body["edition"] = self.edition - if self.environment: - body["environment"] = self.environment - if self.event_log: - body["event_log"] = self.event_log - if self.filters: - body["filters"] = self.filters - if self.gateway_definition: - body["gateway_definition"] = self.gateway_definition - if self.id is not None: - body["id"] = self.id - if self.ingestion_definition: - body["ingestion_definition"] = self.ingestion_definition - if self.libraries: - body["libraries"] = self.libraries - if self.name is not None: - body["name"] = self.name - if self.notifications: - body["notifications"] = self.notifications - if self.photon is not None: - body["photon"] = self.photon - if self.restart_window: - body["restart_window"] = self.restart_window - if self.root_path 
is not None: - body["root_path"] = self.root_path - if self.run_as: - body["run_as"] = self.run_as - if self.schema is not None: - body["schema"] = self.schema - if self.serverless is not None: - body["serverless"] = self.serverless - if self.storage is not None: - body["storage"] = self.storage - if self.tags: - body["tags"] = self.tags - if self.target is not None: - body["target"] = self.target - if self.trigger: - body["trigger"] = self.trigger - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreatePipeline: - """Deserializes the CreatePipeline from a dictionary.""" - return cls( - allow_duplicate_names=d.get("allow_duplicate_names", None), - budget_policy_id=d.get("budget_policy_id", None), - catalog=d.get("catalog", None), - channel=d.get("channel", None), - clusters=_repeated_dict(d, "clusters", PipelineCluster), - configuration=d.get("configuration", None), - continuous=d.get("continuous", None), - deployment=_from_dict(d, "deployment", PipelineDeployment), - development=d.get("development", None), - dry_run=d.get("dry_run", None), - edition=d.get("edition", None), - environment=_from_dict(d, "environment", PipelinesEnvironment), - event_log=_from_dict(d, "event_log", EventLogSpec), - filters=_from_dict(d, "filters", Filters), - gateway_definition=_from_dict(d, "gateway_definition", IngestionGatewayPipelineDefinition), - id=d.get("id", None), - ingestion_definition=_from_dict(d, "ingestion_definition", IngestionPipelineDefinition), - libraries=_repeated_dict(d, "libraries", PipelineLibrary), - name=d.get("name", None), - notifications=_repeated_dict(d, "notifications", Notifications), - photon=d.get("photon", None), - restart_window=_from_dict(d, "restart_window", RestartWindow), - root_path=d.get("root_path", None), - run_as=_from_dict(d, "run_as", RunAs), - schema=d.get("schema", None), - serverless=d.get("serverless", None), - storage=d.get("storage", None), - tags=d.get("tags", None), - target=d.get("target", None), - 
trigger=_from_dict(d, "trigger", PipelineTrigger), - ) - - @dataclass class CreatePipelineResponse: effective_settings: Optional[PipelineSpec] = None @@ -309,396 +42,119 @@ def as_shallow_dict(self) -> dict: """Serializes the CreatePipelineResponse into a shallow dictionary of its immediate attributes.""" body = {} if self.effective_settings: - body["effective_settings"] = self.effective_settings - if self.pipeline_id is not None: - body["pipeline_id"] = self.pipeline_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreatePipelineResponse: - """Deserializes the CreatePipelineResponse from a dictionary.""" - return cls( - effective_settings=_from_dict(d, "effective_settings", PipelineSpec), pipeline_id=d.get("pipeline_id", None) - ) - - -@dataclass -class CronTrigger: - quartz_cron_schedule: Optional[str] = None - - timezone_id: Optional[str] = None - - def as_dict(self) -> dict: - """Serializes the CronTrigger into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.quartz_cron_schedule is not None: - body["quartz_cron_schedule"] = self.quartz_cron_schedule - if self.timezone_id is not None: - body["timezone_id"] = self.timezone_id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CronTrigger into a shallow dictionary of its immediate attributes.""" - body = {} - if self.quartz_cron_schedule is not None: - body["quartz_cron_schedule"] = self.quartz_cron_schedule - if self.timezone_id is not None: - body["timezone_id"] = self.timezone_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CronTrigger: - """Deserializes the CronTrigger from a dictionary.""" - return cls(quartz_cron_schedule=d.get("quartz_cron_schedule", None), timezone_id=d.get("timezone_id", None)) - - -@dataclass -class DataPlaneId: - instance: Optional[str] = None - """The instance name of the data plane emitting an event.""" - - seq_no: Optional[int] = None - """A sequence number, unique and 
increasing within the data plane instance.""" - - def as_dict(self) -> dict: - """Serializes the DataPlaneId into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.instance is not None: - body["instance"] = self.instance - if self.seq_no is not None: - body["seq_no"] = self.seq_no - return body - - def as_shallow_dict(self) -> dict: - """Serializes the DataPlaneId into a shallow dictionary of its immediate attributes.""" - body = {} - if self.instance is not None: - body["instance"] = self.instance - if self.seq_no is not None: - body["seq_no"] = self.seq_no - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DataPlaneId: - """Deserializes the DataPlaneId from a dictionary.""" - return cls(instance=d.get("instance", None), seq_no=d.get("seq_no", None)) - - -class DayOfWeek(Enum): - """Days of week in which the restart is allowed to happen (within a five-hour window starting at - start_hour). If not specified all days of the week will be used.""" - - FRIDAY = "FRIDAY" - MONDAY = "MONDAY" - SATURDAY = "SATURDAY" - SUNDAY = "SUNDAY" - THURSDAY = "THURSDAY" - TUESDAY = "TUESDAY" - WEDNESDAY = "WEDNESDAY" - - -@dataclass -class DeletePipelineResponse: - def as_dict(self) -> dict: - """Serializes the DeletePipelineResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the DeletePipelineResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DeletePipelineResponse: - """Deserializes the DeletePipelineResponse from a dictionary.""" - return cls() - - -class DeploymentKind(Enum): - """The deployment method that manages the pipeline: - BUNDLE: The pipeline is managed by a - Databricks Asset Bundle.""" - - BUNDLE = "BUNDLE" - - -@dataclass -class EditPipeline: - allow_duplicate_names: Optional[bool] = None - """If false, deployment 
will fail if name has changed and conflicts the name of another pipeline.""" - - budget_policy_id: Optional[str] = None - """Budget policy of this pipeline.""" - - catalog: Optional[str] = None - """A catalog in Unity Catalog to publish data from this pipeline to. If `target` is specified, - tables in this pipeline are published to a `target` schema inside `catalog` (for example, - `catalog`.`target`.`table`). If `target` is not specified, no data is published to Unity - Catalog.""" - - channel: Optional[str] = None - """DLT Release Channel that specifies which version to use.""" - - clusters: Optional[List[PipelineCluster]] = None - """Cluster settings for this pipeline deployment.""" - - configuration: Optional[Dict[str, str]] = None - """String-String configuration for this pipeline execution.""" - - continuous: Optional[bool] = None - """Whether the pipeline is continuous or triggered. This replaces `trigger`.""" - - deployment: Optional[PipelineDeployment] = None - """Deployment type of this pipeline.""" - - development: Optional[bool] = None - """Whether the pipeline is in Development mode. Defaults to false.""" - - edition: Optional[str] = None - """Pipeline product edition.""" - - environment: Optional[PipelinesEnvironment] = None - """Environment specification for this pipeline used to install dependencies.""" - - event_log: Optional[EventLogSpec] = None - """Event log configuration for this pipeline""" - - expected_last_modified: Optional[int] = None - """If present, the last-modified time of the pipeline settings before the edit. 
If the settings - were modified after that time, then the request will fail with a conflict.""" - - filters: Optional[Filters] = None - """Filters on which Pipeline packages to include in the deployed graph.""" - - gateway_definition: Optional[IngestionGatewayPipelineDefinition] = None - """The definition of a gateway pipeline to support change data capture.""" - - id: Optional[str] = None - """Unique identifier for this pipeline.""" - - ingestion_definition: Optional[IngestionPipelineDefinition] = None - """The configuration for a managed ingestion pipeline. These settings cannot be used with the - 'libraries', 'schema', 'target', or 'catalog' settings.""" - - libraries: Optional[List[PipelineLibrary]] = None - """Libraries or code needed by this deployment.""" - - name: Optional[str] = None - """Friendly identifier for this pipeline.""" - - notifications: Optional[List[Notifications]] = None - """List of notification settings for this pipeline.""" - - photon: Optional[bool] = None - """Whether Photon is enabled for this pipeline.""" - - pipeline_id: Optional[str] = None - """Unique identifier for this pipeline.""" - - restart_window: Optional[RestartWindow] = None - """Restart window of this pipeline.""" - - root_path: Optional[str] = None - """Root path for this pipeline. This is used as the root directory when editing the pipeline in the - Databricks user interface and it is added to sys.path when executing Python sources during - pipeline execution.""" - - run_as: Optional[RunAs] = None - - schema: Optional[str] = None - """The default schema (database) where tables are read from or published to.""" - - serverless: Optional[bool] = None - """Whether serverless compute is enabled for this pipeline.""" - - storage: Optional[str] = None - """DBFS root directory for storing checkpoints and tables.""" - - tags: Optional[Dict[str, str]] = None - """A map of tags associated with the pipeline. 
These are forwarded to the cluster as cluster tags, - and are therefore subject to the same limitations. A maximum of 25 tags can be added to the - pipeline.""" - - target: Optional[str] = None - """Target schema (database) to add tables in this pipeline to. Exactly one of `schema` or `target` - must be specified. To publish to Unity Catalog, also specify `catalog`. This legacy field is - deprecated for pipeline creation in favor of the `schema` field.""" - - trigger: Optional[PipelineTrigger] = None - """Which pipeline trigger to use. Deprecated: Use `continuous` instead.""" - - def as_dict(self) -> dict: - """Serializes the EditPipeline into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.allow_duplicate_names is not None: - body["allow_duplicate_names"] = self.allow_duplicate_names - if self.budget_policy_id is not None: - body["budget_policy_id"] = self.budget_policy_id - if self.catalog is not None: - body["catalog"] = self.catalog - if self.channel is not None: - body["channel"] = self.channel - if self.clusters: - body["clusters"] = [v.as_dict() for v in self.clusters] - if self.configuration: - body["configuration"] = self.configuration - if self.continuous is not None: - body["continuous"] = self.continuous - if self.deployment: - body["deployment"] = self.deployment.as_dict() - if self.development is not None: - body["development"] = self.development - if self.edition is not None: - body["edition"] = self.edition - if self.environment: - body["environment"] = self.environment.as_dict() - if self.event_log: - body["event_log"] = self.event_log.as_dict() - if self.expected_last_modified is not None: - body["expected_last_modified"] = self.expected_last_modified - if self.filters: - body["filters"] = self.filters.as_dict() - if self.gateway_definition: - body["gateway_definition"] = self.gateway_definition.as_dict() - if self.id is not None: - body["id"] = self.id - if self.ingestion_definition: - body["ingestion_definition"] = 
self.ingestion_definition.as_dict() - if self.libraries: - body["libraries"] = [v.as_dict() for v in self.libraries] - if self.name is not None: - body["name"] = self.name - if self.notifications: - body["notifications"] = [v.as_dict() for v in self.notifications] - if self.photon is not None: - body["photon"] = self.photon - if self.pipeline_id is not None: - body["pipeline_id"] = self.pipeline_id - if self.restart_window: - body["restart_window"] = self.restart_window.as_dict() - if self.root_path is not None: - body["root_path"] = self.root_path - if self.run_as: - body["run_as"] = self.run_as.as_dict() - if self.schema is not None: - body["schema"] = self.schema - if self.serverless is not None: - body["serverless"] = self.serverless - if self.storage is not None: - body["storage"] = self.storage - if self.tags: - body["tags"] = self.tags - if self.target is not None: - body["target"] = self.target - if self.trigger: - body["trigger"] = self.trigger.as_dict() - return body - - def as_shallow_dict(self) -> dict: - """Serializes the EditPipeline into a shallow dictionary of its immediate attributes.""" - body = {} - if self.allow_duplicate_names is not None: - body["allow_duplicate_names"] = self.allow_duplicate_names - if self.budget_policy_id is not None: - body["budget_policy_id"] = self.budget_policy_id - if self.catalog is not None: - body["catalog"] = self.catalog - if self.channel is not None: - body["channel"] = self.channel - if self.clusters: - body["clusters"] = self.clusters - if self.configuration: - body["configuration"] = self.configuration - if self.continuous is not None: - body["continuous"] = self.continuous - if self.deployment: - body["deployment"] = self.deployment - if self.development is not None: - body["development"] = self.development - if self.edition is not None: - body["edition"] = self.edition - if self.environment: - body["environment"] = self.environment - if self.event_log: - body["event_log"] = self.event_log - if 
self.expected_last_modified is not None: - body["expected_last_modified"] = self.expected_last_modified - if self.filters: - body["filters"] = self.filters - if self.gateway_definition: - body["gateway_definition"] = self.gateway_definition - if self.id is not None: - body["id"] = self.id - if self.ingestion_definition: - body["ingestion_definition"] = self.ingestion_definition - if self.libraries: - body["libraries"] = self.libraries - if self.name is not None: - body["name"] = self.name - if self.notifications: - body["notifications"] = self.notifications - if self.photon is not None: - body["photon"] = self.photon + body["effective_settings"] = self.effective_settings if self.pipeline_id is not None: body["pipeline_id"] = self.pipeline_id - if self.restart_window: - body["restart_window"] = self.restart_window - if self.root_path is not None: - body["root_path"] = self.root_path - if self.run_as: - body["run_as"] = self.run_as - if self.schema is not None: - body["schema"] = self.schema - if self.serverless is not None: - body["serverless"] = self.serverless - if self.storage is not None: - body["storage"] = self.storage - if self.tags: - body["tags"] = self.tags - if self.target is not None: - body["target"] = self.target - if self.trigger: - body["trigger"] = self.trigger return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> EditPipeline: - """Deserializes the EditPipeline from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> CreatePipelineResponse: + """Deserializes the CreatePipelineResponse from a dictionary.""" return cls( - allow_duplicate_names=d.get("allow_duplicate_names", None), - budget_policy_id=d.get("budget_policy_id", None), - catalog=d.get("catalog", None), - channel=d.get("channel", None), - clusters=_repeated_dict(d, "clusters", PipelineCluster), - configuration=d.get("configuration", None), - continuous=d.get("continuous", None), - deployment=_from_dict(d, "deployment", PipelineDeployment), - 
development=d.get("development", None), - edition=d.get("edition", None), - environment=_from_dict(d, "environment", PipelinesEnvironment), - event_log=_from_dict(d, "event_log", EventLogSpec), - expected_last_modified=d.get("expected_last_modified", None), - filters=_from_dict(d, "filters", Filters), - gateway_definition=_from_dict(d, "gateway_definition", IngestionGatewayPipelineDefinition), - id=d.get("id", None), - ingestion_definition=_from_dict(d, "ingestion_definition", IngestionPipelineDefinition), - libraries=_repeated_dict(d, "libraries", PipelineLibrary), - name=d.get("name", None), - notifications=_repeated_dict(d, "notifications", Notifications), - photon=d.get("photon", None), - pipeline_id=d.get("pipeline_id", None), - restart_window=_from_dict(d, "restart_window", RestartWindow), - root_path=d.get("root_path", None), - run_as=_from_dict(d, "run_as", RunAs), - schema=d.get("schema", None), - serverless=d.get("serverless", None), - storage=d.get("storage", None), - tags=d.get("tags", None), - target=d.get("target", None), - trigger=_from_dict(d, "trigger", PipelineTrigger), + effective_settings=_from_dict(d, "effective_settings", PipelineSpec), pipeline_id=d.get("pipeline_id", None) ) +@dataclass +class CronTrigger: + quartz_cron_schedule: Optional[str] = None + + timezone_id: Optional[str] = None + + def as_dict(self) -> dict: + """Serializes the CronTrigger into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.quartz_cron_schedule is not None: + body["quartz_cron_schedule"] = self.quartz_cron_schedule + if self.timezone_id is not None: + body["timezone_id"] = self.timezone_id + return body + + def as_shallow_dict(self) -> dict: + """Serializes the CronTrigger into a shallow dictionary of its immediate attributes.""" + body = {} + if self.quartz_cron_schedule is not None: + body["quartz_cron_schedule"] = self.quartz_cron_schedule + if self.timezone_id is not None: + body["timezone_id"] = self.timezone_id + return body + 
+ @classmethod + def from_dict(cls, d: Dict[str, Any]) -> CronTrigger: + """Deserializes the CronTrigger from a dictionary.""" + return cls(quartz_cron_schedule=d.get("quartz_cron_schedule", None), timezone_id=d.get("timezone_id", None)) + + +@dataclass +class DataPlaneId: + instance: Optional[str] = None + """The instance name of the data plane emitting an event.""" + + seq_no: Optional[int] = None + """A sequence number, unique and increasing within the data plane instance.""" + + def as_dict(self) -> dict: + """Serializes the DataPlaneId into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.instance is not None: + body["instance"] = self.instance + if self.seq_no is not None: + body["seq_no"] = self.seq_no + return body + + def as_shallow_dict(self) -> dict: + """Serializes the DataPlaneId into a shallow dictionary of its immediate attributes.""" + body = {} + if self.instance is not None: + body["instance"] = self.instance + if self.seq_no is not None: + body["seq_no"] = self.seq_no + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> DataPlaneId: + """Deserializes the DataPlaneId from a dictionary.""" + return cls(instance=d.get("instance", None), seq_no=d.get("seq_no", None)) + + +class DayOfWeek(Enum): + """Days of week in which the restart is allowed to happen (within a five-hour window starting at + start_hour). 
If not specified all days of the week will be used.""" + + FRIDAY = "FRIDAY" + MONDAY = "MONDAY" + SATURDAY = "SATURDAY" + SUNDAY = "SUNDAY" + THURSDAY = "THURSDAY" + TUESDAY = "TUESDAY" + WEDNESDAY = "WEDNESDAY" + + +@dataclass +class DeletePipelineResponse: + def as_dict(self) -> dict: + """Serializes the DeletePipelineResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + return body + + def as_shallow_dict(self) -> dict: + """Serializes the DeletePipelineResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> DeletePipelineResponse: + """Deserializes the DeletePipelineResponse from a dictionary.""" + return cls() + + +class DeploymentKind(Enum): + """The deployment method that manages the pipeline: - BUNDLE: The pipeline is managed by a + Databricks Asset Bundle.""" + + BUNDLE = "BUNDLE" + + @dataclass class EditPipelineResponse: def as_dict(self) -> dict: @@ -1207,12 +663,73 @@ def from_dict(cls, d: Dict[str, Any]) -> IngestionPipelineDefinition: ) +@dataclass +class IngestionPipelineDefinitionTableSpecificConfigQueryBasedConnectorConfig: + """Configurations that are only applicable for query-based ingestion connectors.""" + + cursor_columns: Optional[List[str]] = None + """The names of the monotonically increasing columns in the source table that are used to enable + the table to be read and ingested incrementally through structured streaming. The columns are + allowed to have repeated values but have to be non-decreasing. If the source data is merged into + the destination (e.g., using SCD Type 1 or Type 2), these columns will implicitly define the + `sequence_by` behavior. You can still explicitly set `sequence_by` to override this default.""" + + deletion_condition: Optional[str] = None + """Specifies a SQL WHERE condition that specifies that the source row has been deleted. This is + sometimes referred to as "soft-deletes". 
For example: "Operation = 'DELETE'" or "is_deleted = + true". This field is orthogonal to `hard_deletion_sync_interval_in_seconds`, one for + soft-deletes and the other for hard-deletes. See also the + hard_deletion_sync_min_interval_in_seconds field for handling of "hard deletes" where the source + rows are physically removed from the table.""" + + hard_deletion_sync_min_interval_in_seconds: Optional[int] = None + """Specifies the minimum interval (in seconds) between snapshots on primary keys for detecting and + synchronizing hard deletions—i.e., rows that have been physically removed from the source + table. This interval acts as a lower bound. If ingestion runs less frequently than this value, + hard deletion synchronization will align with the actual ingestion frequency instead of + happening more often. If not set, hard deletion synchronization via snapshots is disabled. This + field is mutable and can be updated without triggering a full snapshot.""" + + def as_dict(self) -> dict: + """Serializes the IngestionPipelineDefinitionTableSpecificConfigQueryBasedConnectorConfig into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.cursor_columns: + body["cursor_columns"] = [v for v in self.cursor_columns] + if self.deletion_condition is not None: + body["deletion_condition"] = self.deletion_condition + if self.hard_deletion_sync_min_interval_in_seconds is not None: + body["hard_deletion_sync_min_interval_in_seconds"] = self.hard_deletion_sync_min_interval_in_seconds + return body + + def as_shallow_dict(self) -> dict: + """Serializes the IngestionPipelineDefinitionTableSpecificConfigQueryBasedConnectorConfig into a shallow dictionary of its immediate attributes.""" + body = {} + if self.cursor_columns: + body["cursor_columns"] = self.cursor_columns + if self.deletion_condition is not None: + body["deletion_condition"] = self.deletion_condition + if self.hard_deletion_sync_min_interval_in_seconds is not None: + 
body["hard_deletion_sync_min_interval_in_seconds"] = self.hard_deletion_sync_min_interval_in_seconds + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> IngestionPipelineDefinitionTableSpecificConfigQueryBasedConnectorConfig: + """Deserializes the IngestionPipelineDefinitionTableSpecificConfigQueryBasedConnectorConfig from a dictionary.""" + return cls( + cursor_columns=d.get("cursor_columns", None), + deletion_condition=d.get("deletion_condition", None), + hard_deletion_sync_min_interval_in_seconds=d.get("hard_deletion_sync_min_interval_in_seconds", None), + ) + + class IngestionSourceType(Enum): BIGQUERY = "BIGQUERY" + CONFLUENCE = "CONFLUENCE" DYNAMICS365 = "DYNAMICS365" GA4_RAW_DATA = "GA4_RAW_DATA" MANAGED_POSTGRESQL = "MANAGED_POSTGRESQL" + META_MARKETING = "META_MARKETING" MYSQL = "MYSQL" NETSUITE = "NETSUITE" ORACLE = "ORACLE" @@ -2308,40 +1825,6 @@ def from_dict(cls, d: Dict[str, Any]) -> PipelinePermissionsDescription: ) -@dataclass -class PipelinePermissionsRequest: - access_control_list: Optional[List[PipelineAccessControlRequest]] = None - - pipeline_id: Optional[str] = None - """The pipeline for which to get or manage permissions.""" - - def as_dict(self) -> dict: - """Serializes the PipelinePermissionsRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.access_control_list: - body["access_control_list"] = [v.as_dict() for v in self.access_control_list] - if self.pipeline_id is not None: - body["pipeline_id"] = self.pipeline_id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the PipelinePermissionsRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.access_control_list: - body["access_control_list"] = self.access_control_list - if self.pipeline_id is not None: - body["pipeline_id"] = self.pipeline_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> PipelinePermissionsRequest: - """Deserializes the 
PipelinePermissionsRequest from a dictionary.""" - return cls( - access_control_list=_repeated_dict(d, "access_control_list", PipelineAccessControlRequest), - pipeline_id=d.get("pipeline_id", None), - ) - - @dataclass class PipelineSpec: budget_policy_id: Optional[str] = None @@ -3093,76 +2576,6 @@ def from_dict(cls, d: Dict[str, Any]) -> StackFrame: ) -@dataclass -class StartUpdate: - cause: Optional[StartUpdateCause] = None - - full_refresh: Optional[bool] = None - """If true, this update will reset all tables before running.""" - - full_refresh_selection: Optional[List[str]] = None - """A list of tables to update with fullRefresh. If both refresh_selection and - full_refresh_selection are empty, this is a full graph update. Full Refresh on a table means - that the states of the table will be reset before the refresh.""" - - pipeline_id: Optional[str] = None - - refresh_selection: Optional[List[str]] = None - """A list of tables to update without fullRefresh. If both refresh_selection and - full_refresh_selection are empty, this is a full graph update. 
Full Refresh on a table means - that the states of the table will be reset before the refresh.""" - - validate_only: Optional[bool] = None - """If true, this update only validates the correctness of pipeline source code but does not - materialize or publish any datasets.""" - - def as_dict(self) -> dict: - """Serializes the StartUpdate into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.cause is not None: - body["cause"] = self.cause.value - if self.full_refresh is not None: - body["full_refresh"] = self.full_refresh - if self.full_refresh_selection: - body["full_refresh_selection"] = [v for v in self.full_refresh_selection] - if self.pipeline_id is not None: - body["pipeline_id"] = self.pipeline_id - if self.refresh_selection: - body["refresh_selection"] = [v for v in self.refresh_selection] - if self.validate_only is not None: - body["validate_only"] = self.validate_only - return body - - def as_shallow_dict(self) -> dict: - """Serializes the StartUpdate into a shallow dictionary of its immediate attributes.""" - body = {} - if self.cause is not None: - body["cause"] = self.cause - if self.full_refresh is not None: - body["full_refresh"] = self.full_refresh - if self.full_refresh_selection: - body["full_refresh_selection"] = self.full_refresh_selection - if self.pipeline_id is not None: - body["pipeline_id"] = self.pipeline_id - if self.refresh_selection: - body["refresh_selection"] = self.refresh_selection - if self.validate_only is not None: - body["validate_only"] = self.validate_only - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> StartUpdate: - """Deserializes the StartUpdate from a dictionary.""" - return cls( - cause=_enum(d, "cause", StartUpdateCause), - full_refresh=d.get("full_refresh", None), - full_refresh_selection=d.get("full_refresh_selection", None), - pipeline_id=d.get("pipeline_id", None), - refresh_selection=d.get("refresh_selection", None), - validate_only=d.get("validate_only", None), 
- ) - - class StartUpdateCause(Enum): """What triggered this update.""" @@ -3311,6 +2724,10 @@ class TableSpecificConfig: primary_keys: Optional[List[str]] = None """The primary key of the table used to apply changes.""" + query_based_connector_config: Optional[IngestionPipelineDefinitionTableSpecificConfigQueryBasedConnectorConfig] = ( + None + ) + salesforce_include_formula_fields: Optional[bool] = None """If true, formula fields defined in the table are included in the ingestion. This setting is only valid for the Salesforce connector""" @@ -3331,6 +2748,8 @@ def as_dict(self) -> dict: body["include_columns"] = [v for v in self.include_columns] if self.primary_keys: body["primary_keys"] = [v for v in self.primary_keys] + if self.query_based_connector_config: + body["query_based_connector_config"] = self.query_based_connector_config.as_dict() if self.salesforce_include_formula_fields is not None: body["salesforce_include_formula_fields"] = self.salesforce_include_formula_fields if self.scd_type is not None: @@ -3348,6 +2767,8 @@ def as_shallow_dict(self) -> dict: body["include_columns"] = self.include_columns if self.primary_keys: body["primary_keys"] = self.primary_keys + if self.query_based_connector_config: + body["query_based_connector_config"] = self.query_based_connector_config if self.salesforce_include_formula_fields is not None: body["salesforce_include_formula_fields"] = self.salesforce_include_formula_fields if self.scd_type is not None: @@ -3363,6 +2784,11 @@ def from_dict(cls, d: Dict[str, Any]) -> TableSpecificConfig: exclude_columns=d.get("exclude_columns", None), include_columns=d.get("include_columns", None), primary_keys=d.get("primary_keys", None), + query_based_connector_config=_from_dict( + d, + "query_based_connector_config", + IngestionPipelineDefinitionTableSpecificConfigQueryBasedConnectorConfig, + ), salesforce_include_formula_fields=d.get("salesforce_include_formula_fields", None), scd_type=_enum(d, "scd_type", 
TableSpecificConfigScdType), sequence_by=d.get("sequence_by", None), diff --git a/databricks/sdk/service/provisioning.py b/databricks/sdk/service/provisioning.py index dce0a171d..8e34b28f0 100755 --- a/databricks/sdk/service/provisioning.py +++ b/databricks/sdk/service/provisioning.py @@ -224,40 +224,6 @@ def from_dict(cls, d: Dict[str, Any]) -> CreateCredentialAwsCredentials: return cls(sts_role=_from_dict(d, "sts_role", CreateCredentialStsRole)) -@dataclass -class CreateCredentialRequest: - credentials_name: str - """The human-readable name of the credential configuration object.""" - - aws_credentials: CreateCredentialAwsCredentials - - def as_dict(self) -> dict: - """Serializes the CreateCredentialRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.aws_credentials: - body["aws_credentials"] = self.aws_credentials.as_dict() - if self.credentials_name is not None: - body["credentials_name"] = self.credentials_name - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreateCredentialRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.aws_credentials: - body["aws_credentials"] = self.aws_credentials - if self.credentials_name is not None: - body["credentials_name"] = self.credentials_name - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateCredentialRequest: - """Deserializes the CreateCredentialRequest from a dictionary.""" - return cls( - aws_credentials=_from_dict(d, "aws_credentials", CreateCredentialAwsCredentials), - credentials_name=d.get("credentials_name", None), - ) - - @dataclass class CreateCredentialStsRole: role_arn: Optional[str] = None @@ -283,47 +249,6 @@ def from_dict(cls, d: Dict[str, Any]) -> CreateCredentialStsRole: return cls(role_arn=d.get("role_arn", None)) -@dataclass -class CreateCustomerManagedKeyRequest: - use_cases: List[KeyUseCase] - """The cases that the key can be used for.""" - - aws_key_info: 
Optional[CreateAwsKeyInfo] = None - - gcp_key_info: Optional[CreateGcpKeyInfo] = None - - def as_dict(self) -> dict: - """Serializes the CreateCustomerManagedKeyRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.aws_key_info: - body["aws_key_info"] = self.aws_key_info.as_dict() - if self.gcp_key_info: - body["gcp_key_info"] = self.gcp_key_info.as_dict() - if self.use_cases: - body["use_cases"] = [v.value for v in self.use_cases] - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreateCustomerManagedKeyRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.aws_key_info: - body["aws_key_info"] = self.aws_key_info - if self.gcp_key_info: - body["gcp_key_info"] = self.gcp_key_info - if self.use_cases: - body["use_cases"] = self.use_cases - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateCustomerManagedKeyRequest: - """Deserializes the CreateCustomerManagedKeyRequest from a dictionary.""" - return cls( - aws_key_info=_from_dict(d, "aws_key_info", CreateAwsKeyInfo), - gcp_key_info=_from_dict(d, "gcp_key_info", CreateGcpKeyInfo), - use_cases=_repeated_enum(d, "use_cases", KeyUseCase), - ) - - @dataclass class CreateGcpKeyInfo: kms_key_id: str @@ -349,346 +274,6 @@ def from_dict(cls, d: Dict[str, Any]) -> CreateGcpKeyInfo: return cls(kms_key_id=d.get("kms_key_id", None)) -@dataclass -class CreateNetworkRequest: - network_name: str - """The human-readable name of the network configuration.""" - - gcp_network_info: Optional[GcpNetworkInfo] = None - - security_group_ids: Optional[List[str]] = None - """IDs of one to five security groups associated with this network. Security group IDs **cannot** - be used in multiple network configurations.""" - - subnet_ids: Optional[List[str]] = None - """IDs of at least two subnets associated with this network. 
Subnet IDs **cannot** be used in - multiple network configurations.""" - - vpc_endpoints: Optional[NetworkVpcEndpoints] = None - - vpc_id: Optional[str] = None - """The ID of the VPC associated with this network. VPC IDs can be used in multiple network - configurations.""" - - def as_dict(self) -> dict: - """Serializes the CreateNetworkRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.gcp_network_info: - body["gcp_network_info"] = self.gcp_network_info.as_dict() - if self.network_name is not None: - body["network_name"] = self.network_name - if self.security_group_ids: - body["security_group_ids"] = [v for v in self.security_group_ids] - if self.subnet_ids: - body["subnet_ids"] = [v for v in self.subnet_ids] - if self.vpc_endpoints: - body["vpc_endpoints"] = self.vpc_endpoints.as_dict() - if self.vpc_id is not None: - body["vpc_id"] = self.vpc_id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreateNetworkRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.gcp_network_info: - body["gcp_network_info"] = self.gcp_network_info - if self.network_name is not None: - body["network_name"] = self.network_name - if self.security_group_ids: - body["security_group_ids"] = self.security_group_ids - if self.subnet_ids: - body["subnet_ids"] = self.subnet_ids - if self.vpc_endpoints: - body["vpc_endpoints"] = self.vpc_endpoints - if self.vpc_id is not None: - body["vpc_id"] = self.vpc_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateNetworkRequest: - """Deserializes the CreateNetworkRequest from a dictionary.""" - return cls( - gcp_network_info=_from_dict(d, "gcp_network_info", GcpNetworkInfo), - network_name=d.get("network_name", None), - security_group_ids=d.get("security_group_ids", None), - subnet_ids=d.get("subnet_ids", None), - vpc_endpoints=_from_dict(d, "vpc_endpoints", NetworkVpcEndpoints), - vpc_id=d.get("vpc_id", None), - ) - - 
-@dataclass -class CreateStorageConfigurationRequest: - storage_configuration_name: str - """The human-readable name of the storage configuration.""" - - root_bucket_info: RootBucketInfo - - def as_dict(self) -> dict: - """Serializes the CreateStorageConfigurationRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.root_bucket_info: - body["root_bucket_info"] = self.root_bucket_info.as_dict() - if self.storage_configuration_name is not None: - body["storage_configuration_name"] = self.storage_configuration_name - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreateStorageConfigurationRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.root_bucket_info: - body["root_bucket_info"] = self.root_bucket_info - if self.storage_configuration_name is not None: - body["storage_configuration_name"] = self.storage_configuration_name - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateStorageConfigurationRequest: - """Deserializes the CreateStorageConfigurationRequest from a dictionary.""" - return cls( - root_bucket_info=_from_dict(d, "root_bucket_info", RootBucketInfo), - storage_configuration_name=d.get("storage_configuration_name", None), - ) - - -@dataclass -class CreateVpcEndpointRequest: - vpc_endpoint_name: str - """The human-readable name of the storage configuration.""" - - aws_vpc_endpoint_id: Optional[str] = None - """The ID of the VPC endpoint object in AWS.""" - - gcp_vpc_endpoint_info: Optional[GcpVpcEndpointInfo] = None - - region: Optional[str] = None - """The AWS region in which this VPC endpoint object exists.""" - - def as_dict(self) -> dict: - """Serializes the CreateVpcEndpointRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.aws_vpc_endpoint_id is not None: - body["aws_vpc_endpoint_id"] = self.aws_vpc_endpoint_id - if self.gcp_vpc_endpoint_info: - body["gcp_vpc_endpoint_info"] = 
self.gcp_vpc_endpoint_info.as_dict() - if self.region is not None: - body["region"] = self.region - if self.vpc_endpoint_name is not None: - body["vpc_endpoint_name"] = self.vpc_endpoint_name - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreateVpcEndpointRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.aws_vpc_endpoint_id is not None: - body["aws_vpc_endpoint_id"] = self.aws_vpc_endpoint_id - if self.gcp_vpc_endpoint_info: - body["gcp_vpc_endpoint_info"] = self.gcp_vpc_endpoint_info - if self.region is not None: - body["region"] = self.region - if self.vpc_endpoint_name is not None: - body["vpc_endpoint_name"] = self.vpc_endpoint_name - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateVpcEndpointRequest: - """Deserializes the CreateVpcEndpointRequest from a dictionary.""" - return cls( - aws_vpc_endpoint_id=d.get("aws_vpc_endpoint_id", None), - gcp_vpc_endpoint_info=_from_dict(d, "gcp_vpc_endpoint_info", GcpVpcEndpointInfo), - region=d.get("region", None), - vpc_endpoint_name=d.get("vpc_endpoint_name", None), - ) - - -@dataclass -class CreateWorkspaceRequest: - workspace_name: str - """The workspace's human-readable name.""" - - aws_region: Optional[str] = None - """The AWS region of the workspace's data plane.""" - - cloud: Optional[str] = None - """The cloud provider which the workspace uses. For Google Cloud workspaces, always set this field - to `gcp`.""" - - cloud_resource_container: Optional[CloudResourceContainer] = None - - credentials_id: Optional[str] = None - """ID of the workspace's credential configuration object.""" - - custom_tags: Optional[Dict[str, str]] = None - """The custom tags key-value pairing that is attached to this workspace. The key-value pair is a - string of utf-8 characters. The value can be an empty string, with maximum length of 255 - characters. 
The key can be of maximum length of 127 characters, and cannot be empty.""" - - deployment_name: Optional[str] = None - """The deployment name defines part of the subdomain for the workspace. The workspace URL for the - web application and REST APIs is `.cloud.databricks.com`. For - example, if the deployment name is `abcsales`, your workspace URL will be - `https://abcsales.cloud.databricks.com`. Hyphens are allowed. This property supports only the - set of characters that are allowed in a subdomain. - - To set this value, you must have a deployment name prefix. Contact your Databricks account team - to add an account deployment name prefix to your account. - - Workspace deployment names follow the account prefix and a hyphen. For example, if your - account's deployment prefix is `acme` and the workspace deployment name is `workspace-1`, the - JSON response for the `deployment_name` field becomes `acme-workspace-1`. The workspace URL - would be `acme-workspace-1.cloud.databricks.com`. - - You can also set the `deployment_name` to the reserved keyword `EMPTY` if you want the - deployment name to only include the deployment prefix. For example, if your account's deployment - prefix is `acme` and the workspace deployment name is `EMPTY`, the `deployment_name` becomes - `acme` only and the workspace URL is `acme.cloud.databricks.com`. - - This value must be unique across all non-deleted deployments across all AWS regions. - - If a new workspace omits this property, the server generates a unique deployment name for you - with the pattern `dbc-xxxxxxxx-xxxx`.""" - - gcp_managed_network_config: Optional[GcpManagedNetworkConfig] = None - - gke_config: Optional[GkeConfig] = None - - is_no_public_ip_enabled: Optional[bool] = None - """Whether no public IP is enabled for the workspace.""" - - location: Optional[str] = None - """The Google Cloud region of the workspace data plane in your Google account. 
For example, - `us-east4`.""" - - managed_services_customer_managed_key_id: Optional[str] = None - """The ID of the workspace's managed services encryption key configuration object. This is used to - help protect and control access to the workspace's notebooks, secrets, Databricks SQL queries, - and query history. The provided key configuration object property `use_cases` must contain - `MANAGED_SERVICES`.""" - - network_id: Optional[str] = None - - pricing_tier: Optional[PricingTier] = None - - private_access_settings_id: Optional[str] = None - """ID of the workspace's private access settings object. Only used for PrivateLink. This ID must be - specified for customers using [AWS PrivateLink] for either front-end (user-to-workspace - connection), back-end (data plane to control plane connection), or both connection types. - - Before configuring PrivateLink, read the [Databricks article about PrivateLink].", - - [AWS PrivateLink]: https://aws.amazon.com/privatelink/ - [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html""" - - storage_configuration_id: Optional[str] = None - """The ID of the workspace's storage configuration object.""" - - storage_customer_managed_key_id: Optional[str] = None - """The ID of the workspace's storage encryption key configuration object. This is used to encrypt - the workspace's root S3 bucket (root DBFS and system data) and, optionally, cluster EBS volumes. 
- The provided key configuration object property `use_cases` must contain `STORAGE`.""" - - def as_dict(self) -> dict: - """Serializes the CreateWorkspaceRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.aws_region is not None: - body["aws_region"] = self.aws_region - if self.cloud is not None: - body["cloud"] = self.cloud - if self.cloud_resource_container: - body["cloud_resource_container"] = self.cloud_resource_container.as_dict() - if self.credentials_id is not None: - body["credentials_id"] = self.credentials_id - if self.custom_tags: - body["custom_tags"] = self.custom_tags - if self.deployment_name is not None: - body["deployment_name"] = self.deployment_name - if self.gcp_managed_network_config: - body["gcp_managed_network_config"] = self.gcp_managed_network_config.as_dict() - if self.gke_config: - body["gke_config"] = self.gke_config.as_dict() - if self.is_no_public_ip_enabled is not None: - body["is_no_public_ip_enabled"] = self.is_no_public_ip_enabled - if self.location is not None: - body["location"] = self.location - if self.managed_services_customer_managed_key_id is not None: - body["managed_services_customer_managed_key_id"] = self.managed_services_customer_managed_key_id - if self.network_id is not None: - body["network_id"] = self.network_id - if self.pricing_tier is not None: - body["pricing_tier"] = self.pricing_tier.value - if self.private_access_settings_id is not None: - body["private_access_settings_id"] = self.private_access_settings_id - if self.storage_configuration_id is not None: - body["storage_configuration_id"] = self.storage_configuration_id - if self.storage_customer_managed_key_id is not None: - body["storage_customer_managed_key_id"] = self.storage_customer_managed_key_id - if self.workspace_name is not None: - body["workspace_name"] = self.workspace_name - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreateWorkspaceRequest into a shallow dictionary of its 
immediate attributes.""" - body = {} - if self.aws_region is not None: - body["aws_region"] = self.aws_region - if self.cloud is not None: - body["cloud"] = self.cloud - if self.cloud_resource_container: - body["cloud_resource_container"] = self.cloud_resource_container - if self.credentials_id is not None: - body["credentials_id"] = self.credentials_id - if self.custom_tags: - body["custom_tags"] = self.custom_tags - if self.deployment_name is not None: - body["deployment_name"] = self.deployment_name - if self.gcp_managed_network_config: - body["gcp_managed_network_config"] = self.gcp_managed_network_config - if self.gke_config: - body["gke_config"] = self.gke_config - if self.is_no_public_ip_enabled is not None: - body["is_no_public_ip_enabled"] = self.is_no_public_ip_enabled - if self.location is not None: - body["location"] = self.location - if self.managed_services_customer_managed_key_id is not None: - body["managed_services_customer_managed_key_id"] = self.managed_services_customer_managed_key_id - if self.network_id is not None: - body["network_id"] = self.network_id - if self.pricing_tier is not None: - body["pricing_tier"] = self.pricing_tier - if self.private_access_settings_id is not None: - body["private_access_settings_id"] = self.private_access_settings_id - if self.storage_configuration_id is not None: - body["storage_configuration_id"] = self.storage_configuration_id - if self.storage_customer_managed_key_id is not None: - body["storage_customer_managed_key_id"] = self.storage_customer_managed_key_id - if self.workspace_name is not None: - body["workspace_name"] = self.workspace_name - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateWorkspaceRequest: - """Deserializes the CreateWorkspaceRequest from a dictionary.""" - return cls( - aws_region=d.get("aws_region", None), - cloud=d.get("cloud", None), - cloud_resource_container=_from_dict(d, "cloud_resource_container", CloudResourceContainer), - 
credentials_id=d.get("credentials_id", None), - custom_tags=d.get("custom_tags", None), - deployment_name=d.get("deployment_name", None), - gcp_managed_network_config=_from_dict(d, "gcp_managed_network_config", GcpManagedNetworkConfig), - gke_config=_from_dict(d, "gke_config", GkeConfig), - is_no_public_ip_enabled=d.get("is_no_public_ip_enabled", None), - location=d.get("location", None), - managed_services_customer_managed_key_id=d.get("managed_services_customer_managed_key_id", None), - network_id=d.get("network_id", None), - pricing_tier=_enum(d, "pricing_tier", PricingTier), - private_access_settings_id=d.get("private_access_settings_id", None), - storage_configuration_id=d.get("storage_configuration_id", None), - storage_customer_managed_key_id=d.get("storage_customer_managed_key_id", None), - workspace_name=d.get("workspace_name", None), - ) - - @dataclass class Credential: account_id: Optional[str] = None @@ -1689,194 +1274,6 @@ def from_dict(cls, d: Dict[str, Any]) -> UpdateResponse: return cls() -@dataclass -class UpdateWorkspaceRequest: - aws_region: Optional[str] = None - """The AWS region of the workspace's data plane (for example, `us-west-2`). This parameter is - available only for updating failed workspaces.""" - - credentials_id: Optional[str] = None - """ID of the workspace's credential configuration object. This parameter is available for updating - both failed and running workspaces.""" - - custom_tags: Optional[Dict[str, str]] = None - """The custom tags key-value pairing that is attached to this workspace. The key-value pair is a - string of utf-8 characters. The value can be an empty string, with maximum length of 255 - characters. The key can be of maximum length of 127 characters, and cannot be empty.""" - - managed_services_customer_managed_key_id: Optional[str] = None - """The ID of the workspace's managed services encryption key configuration object. 
This parameter - is available only for updating failed workspaces.""" - - network_connectivity_config_id: Optional[str] = None - - network_id: Optional[str] = None - """The ID of the workspace's network configuration object. Used only if you already use a - customer-managed VPC. For failed workspaces only, you can switch from a Databricks-managed VPC - to a customer-managed VPC by updating the workspace to add a network configuration ID.""" - - private_access_settings_id: Optional[str] = None - """The ID of the workspace's private access settings configuration object. This parameter is - available only for updating failed workspaces.""" - - storage_configuration_id: Optional[str] = None - """The ID of the workspace's storage configuration object. This parameter is available only for - updating failed workspaces.""" - - storage_customer_managed_key_id: Optional[str] = None - """The ID of the key configuration object for workspace storage. This parameter is available for - updating both failed and running workspaces.""" - - workspace_id: Optional[int] = None - """Workspace ID.""" - - def as_dict(self) -> dict: - """Serializes the UpdateWorkspaceRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.aws_region is not None: - body["aws_region"] = self.aws_region - if self.credentials_id is not None: - body["credentials_id"] = self.credentials_id - if self.custom_tags: - body["custom_tags"] = self.custom_tags - if self.managed_services_customer_managed_key_id is not None: - body["managed_services_customer_managed_key_id"] = self.managed_services_customer_managed_key_id - if self.network_connectivity_config_id is not None: - body["network_connectivity_config_id"] = self.network_connectivity_config_id - if self.network_id is not None: - body["network_id"] = self.network_id - if self.private_access_settings_id is not None: - body["private_access_settings_id"] = self.private_access_settings_id - if self.storage_configuration_id is not 
None: - body["storage_configuration_id"] = self.storage_configuration_id - if self.storage_customer_managed_key_id is not None: - body["storage_customer_managed_key_id"] = self.storage_customer_managed_key_id - if self.workspace_id is not None: - body["workspace_id"] = self.workspace_id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateWorkspaceRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.aws_region is not None: - body["aws_region"] = self.aws_region - if self.credentials_id is not None: - body["credentials_id"] = self.credentials_id - if self.custom_tags: - body["custom_tags"] = self.custom_tags - if self.managed_services_customer_managed_key_id is not None: - body["managed_services_customer_managed_key_id"] = self.managed_services_customer_managed_key_id - if self.network_connectivity_config_id is not None: - body["network_connectivity_config_id"] = self.network_connectivity_config_id - if self.network_id is not None: - body["network_id"] = self.network_id - if self.private_access_settings_id is not None: - body["private_access_settings_id"] = self.private_access_settings_id - if self.storage_configuration_id is not None: - body["storage_configuration_id"] = self.storage_configuration_id - if self.storage_customer_managed_key_id is not None: - body["storage_customer_managed_key_id"] = self.storage_customer_managed_key_id - if self.workspace_id is not None: - body["workspace_id"] = self.workspace_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateWorkspaceRequest: - """Deserializes the UpdateWorkspaceRequest from a dictionary.""" - return cls( - aws_region=d.get("aws_region", None), - credentials_id=d.get("credentials_id", None), - custom_tags=d.get("custom_tags", None), - managed_services_customer_managed_key_id=d.get("managed_services_customer_managed_key_id", None), - network_connectivity_config_id=d.get("network_connectivity_config_id", None), - 
network_id=d.get("network_id", None), - private_access_settings_id=d.get("private_access_settings_id", None), - storage_configuration_id=d.get("storage_configuration_id", None), - storage_customer_managed_key_id=d.get("storage_customer_managed_key_id", None), - workspace_id=d.get("workspace_id", None), - ) - - -@dataclass -class UpsertPrivateAccessSettingsRequest: - private_access_settings_name: str - """The human-readable name of the private access settings object.""" - - region: str - """The cloud region for workspaces associated with this private access settings object.""" - - allowed_vpc_endpoint_ids: Optional[List[str]] = None - """An array of Databricks VPC endpoint IDs. This is the Databricks ID that is returned when - registering the VPC endpoint configuration in your Databricks account. This is not the ID of the - VPC endpoint in AWS. - - Only used when `private_access_level` is set to `ENDPOINT`. This is an allow list of VPC - endpoints that in your account that can connect to your workspace over AWS PrivateLink. - - If hybrid access to your workspace is enabled by setting `public_access_enabled` to `true`, this - control only works for PrivateLink connections. To control how your workspace is accessed via - public internet, see [IP access lists]. - - [IP access lists]: https://docs.databricks.com/security/network/ip-access-list.html""" - - private_access_level: Optional[PrivateAccessLevel] = None - - private_access_settings_id: Optional[str] = None - """Databricks Account API private access settings ID.""" - - public_access_enabled: Optional[bool] = None - """Determines if the workspace can be accessed over public internet. For fully private workspaces, - you can optionally specify `false`, but only if you implement both the front-end and the - back-end PrivateLink connections. 
Otherwise, specify `true`, which means that public access is - enabled.""" - - def as_dict(self) -> dict: - """Serializes the UpsertPrivateAccessSettingsRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.allowed_vpc_endpoint_ids: - body["allowed_vpc_endpoint_ids"] = [v for v in self.allowed_vpc_endpoint_ids] - if self.private_access_level is not None: - body["private_access_level"] = self.private_access_level.value - if self.private_access_settings_id is not None: - body["private_access_settings_id"] = self.private_access_settings_id - if self.private_access_settings_name is not None: - body["private_access_settings_name"] = self.private_access_settings_name - if self.public_access_enabled is not None: - body["public_access_enabled"] = self.public_access_enabled - if self.region is not None: - body["region"] = self.region - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpsertPrivateAccessSettingsRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.allowed_vpc_endpoint_ids: - body["allowed_vpc_endpoint_ids"] = self.allowed_vpc_endpoint_ids - if self.private_access_level is not None: - body["private_access_level"] = self.private_access_level - if self.private_access_settings_id is not None: - body["private_access_settings_id"] = self.private_access_settings_id - if self.private_access_settings_name is not None: - body["private_access_settings_name"] = self.private_access_settings_name - if self.public_access_enabled is not None: - body["public_access_enabled"] = self.public_access_enabled - if self.region is not None: - body["region"] = self.region - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpsertPrivateAccessSettingsRequest: - """Deserializes the UpsertPrivateAccessSettingsRequest from a dictionary.""" - return cls( - allowed_vpc_endpoint_ids=d.get("allowed_vpc_endpoint_ids", None), - private_access_level=_enum(d, 
"private_access_level", PrivateAccessLevel), - private_access_settings_id=d.get("private_access_settings_id", None), - private_access_settings_name=d.get("private_access_settings_name", None), - public_access_enabled=d.get("public_access_enabled", None), - region=d.get("region", None), - ) - - @dataclass class VpcEndpoint: account_id: Optional[str] = None diff --git a/databricks/sdk/service/serving.py b/databricks/sdk/service/serving.py index bc98f9ecf..3df700055 100755 --- a/databricks/sdk/service/serving.py +++ b/databricks/sdk/service/serving.py @@ -299,12 +299,12 @@ def from_dict(cls, d: Dict[str, Any]) -> AiGatewayInferenceTableConfig: @dataclass class AiGatewayRateLimit: - calls: int - """Used to specify how many calls are allowed for a key within the renewal_period.""" - renewal_period: AiGatewayRateLimitRenewalPeriod """Renewal period field for a rate limit. Currently, only 'minute' is supported.""" + calls: Optional[int] = None + """Used to specify how many calls are allowed for a key within the renewal_period.""" + key: Optional[AiGatewayRateLimitKey] = None """Key field for a rate limit. Currently, 'user', 'user_group, 'service_principal', and 'endpoint' are supported, with 'endpoint' being the default if not specified.""" @@ -853,152 +853,6 @@ def from_dict(cls, d: Dict[str, Any]) -> CohereConfig: ) -@dataclass -class CreatePtEndpointRequest: - name: str - """The name of the serving endpoint. This field is required and must be unique across a Databricks - workspace. 
An endpoint name can consist of alphanumeric characters, dashes, and underscores.""" - - config: PtEndpointCoreConfig - """The core config of the serving endpoint.""" - - ai_gateway: Optional[AiGatewayConfig] = None - """The AI Gateway configuration for the serving endpoint.""" - - budget_policy_id: Optional[str] = None - """The budget policy associated with the endpoint.""" - - tags: Optional[List[EndpointTag]] = None - """Tags to be attached to the serving endpoint and automatically propagated to billing logs.""" - - def as_dict(self) -> dict: - """Serializes the CreatePtEndpointRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.ai_gateway: - body["ai_gateway"] = self.ai_gateway.as_dict() - if self.budget_policy_id is not None: - body["budget_policy_id"] = self.budget_policy_id - if self.config: - body["config"] = self.config.as_dict() - if self.name is not None: - body["name"] = self.name - if self.tags: - body["tags"] = [v.as_dict() for v in self.tags] - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreatePtEndpointRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.ai_gateway: - body["ai_gateway"] = self.ai_gateway - if self.budget_policy_id is not None: - body["budget_policy_id"] = self.budget_policy_id - if self.config: - body["config"] = self.config - if self.name is not None: - body["name"] = self.name - if self.tags: - body["tags"] = self.tags - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreatePtEndpointRequest: - """Deserializes the CreatePtEndpointRequest from a dictionary.""" - return cls( - ai_gateway=_from_dict(d, "ai_gateway", AiGatewayConfig), - budget_policy_id=d.get("budget_policy_id", None), - config=_from_dict(d, "config", PtEndpointCoreConfig), - name=d.get("name", None), - tags=_repeated_dict(d, "tags", EndpointTag), - ) - - -@dataclass -class CreateServingEndpoint: - name: str - """The name of the serving 
endpoint. This field is required and must be unique across a Databricks - workspace. An endpoint name can consist of alphanumeric characters, dashes, and underscores.""" - - ai_gateway: Optional[AiGatewayConfig] = None - """The AI Gateway configuration for the serving endpoint. NOTE: External model, provisioned - throughput, and pay-per-token endpoints are fully supported; agent endpoints currently only - support inference tables.""" - - budget_policy_id: Optional[str] = None - """The budget policy to be applied to the serving endpoint.""" - - config: Optional[EndpointCoreConfigInput] = None - """The core config of the serving endpoint.""" - - description: Optional[str] = None - - rate_limits: Optional[List[RateLimit]] = None - """Rate limits to be applied to the serving endpoint. NOTE: this field is deprecated, please use AI - Gateway to manage rate limits.""" - - route_optimized: Optional[bool] = None - """Enable route optimization for the serving endpoint.""" - - tags: Optional[List[EndpointTag]] = None - """Tags to be attached to the serving endpoint and automatically propagated to billing logs.""" - - def as_dict(self) -> dict: - """Serializes the CreateServingEndpoint into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.ai_gateway: - body["ai_gateway"] = self.ai_gateway.as_dict() - if self.budget_policy_id is not None: - body["budget_policy_id"] = self.budget_policy_id - if self.config: - body["config"] = self.config.as_dict() - if self.description is not None: - body["description"] = self.description - if self.name is not None: - body["name"] = self.name - if self.rate_limits: - body["rate_limits"] = [v.as_dict() for v in self.rate_limits] - if self.route_optimized is not None: - body["route_optimized"] = self.route_optimized - if self.tags: - body["tags"] = [v.as_dict() for v in self.tags] - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreateServingEndpoint into a shallow dictionary of its immediate 
attributes.""" - body = {} - if self.ai_gateway: - body["ai_gateway"] = self.ai_gateway - if self.budget_policy_id is not None: - body["budget_policy_id"] = self.budget_policy_id - if self.config: - body["config"] = self.config - if self.description is not None: - body["description"] = self.description - if self.name is not None: - body["name"] = self.name - if self.rate_limits: - body["rate_limits"] = self.rate_limits - if self.route_optimized is not None: - body["route_optimized"] = self.route_optimized - if self.tags: - body["tags"] = self.tags - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateServingEndpoint: - """Deserializes the CreateServingEndpoint from a dictionary.""" - return cls( - ai_gateway=_from_dict(d, "ai_gateway", AiGatewayConfig), - budget_policy_id=d.get("budget_policy_id", None), - config=_from_dict(d, "config", EndpointCoreConfigInput), - description=d.get("description", None), - name=d.get("name", None), - rate_limits=_repeated_dict(d, "rate_limits", RateLimit), - route_optimized=d.get("route_optimized", None), - tags=_repeated_dict(d, "tags", EndpointTag), - ) - - @dataclass class CustomProviderConfig: """Configs needed to create a custom provider model route.""" @@ -1601,76 +1455,6 @@ def from_dict(cls, d: Dict[str, Any]) -> ExportMetricsResponse: return cls(contents=d.get("contents", None)) -@dataclass -class ExternalFunctionRequest: - """Simple Proto message for testing""" - - connection_name: str - """The connection name to use. This is required to identify the external connection.""" - - method: ExternalFunctionRequestHttpMethod - """The HTTP method to use (e.g., 'GET', 'POST').""" - - path: str - """The relative path for the API endpoint. This is required.""" - - headers: Optional[str] = None - """Additional headers for the request. 
If not provided, only auth headers from connections would be - passed.""" - - json: Optional[str] = None - """The JSON payload to send in the request body.""" - - params: Optional[str] = None - """Query parameters for the request.""" - - def as_dict(self) -> dict: - """Serializes the ExternalFunctionRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.connection_name is not None: - body["connection_name"] = self.connection_name - if self.headers is not None: - body["headers"] = self.headers - if self.json is not None: - body["json"] = self.json - if self.method is not None: - body["method"] = self.method.value - if self.params is not None: - body["params"] = self.params - if self.path is not None: - body["path"] = self.path - return body - - def as_shallow_dict(self) -> dict: - """Serializes the ExternalFunctionRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.connection_name is not None: - body["connection_name"] = self.connection_name - if self.headers is not None: - body["headers"] = self.headers - if self.json is not None: - body["json"] = self.json - if self.method is not None: - body["method"] = self.method - if self.params is not None: - body["params"] = self.params - if self.path is not None: - body["path"] = self.path - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ExternalFunctionRequest: - """Deserializes the ExternalFunctionRequest from a dictionary.""" - return cls( - connection_name=d.get("connection_name", None), - headers=d.get("headers", None), - json=d.get("json", None), - method=_enum(d, "method", ExternalFunctionRequestHttpMethod), - params=d.get("params", None), - path=d.get("path", None), - ) - - class ExternalFunctionRequestHttpMethod(Enum): DELETE = "DELETE" @@ -2291,49 +2075,6 @@ def from_dict(cls, d: Dict[str, Any]) -> PaLmConfig: ) -@dataclass -class PatchServingEndpointTags: - add_tags: Optional[List[EndpointTag]] = None - """List of 
endpoint tags to add""" - - delete_tags: Optional[List[str]] = None - """List of tag keys to delete""" - - name: Optional[str] = None - """The name of the serving endpoint who's tags to patch. This field is required.""" - - def as_dict(self) -> dict: - """Serializes the PatchServingEndpointTags into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.add_tags: - body["add_tags"] = [v.as_dict() for v in self.add_tags] - if self.delete_tags: - body["delete_tags"] = [v for v in self.delete_tags] - if self.name is not None: - body["name"] = self.name - return body - - def as_shallow_dict(self) -> dict: - """Serializes the PatchServingEndpointTags into a shallow dictionary of its immediate attributes.""" - body = {} - if self.add_tags: - body["add_tags"] = self.add_tags - if self.delete_tags: - body["delete_tags"] = self.delete_tags - if self.name is not None: - body["name"] = self.name - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> PatchServingEndpointTags: - """Deserializes the PatchServingEndpointTags from a dictionary.""" - return cls( - add_tags=_repeated_dict(d, "add_tags", EndpointTag), - delete_tags=d.get("delete_tags", None), - name=d.get("name", None), - ) - - @dataclass class PayloadTable: name: Optional[str] = None @@ -2460,77 +2201,6 @@ def from_dict(cls, d: Dict[str, Any]) -> PtServedModel: ) -@dataclass -class PutAiGatewayRequest: - fallback_config: Optional[FallbackConfig] = None - """Configuration for traffic fallback which auto fallbacks to other served entities if the request - to a served entity fails with certain error codes, to increase availability.""" - - guardrails: Optional[AiGatewayGuardrails] = None - """Configuration for AI Guardrails to prevent unwanted data and unsafe data in requests and - responses.""" - - inference_table_config: Optional[AiGatewayInferenceTableConfig] = None - """Configuration for payload logging using inference tables. 
Use these tables to monitor and audit - data being sent to and received from model APIs and to improve model quality.""" - - name: Optional[str] = None - """The name of the serving endpoint whose AI Gateway is being updated. This field is required.""" - - rate_limits: Optional[List[AiGatewayRateLimit]] = None - """Configuration for rate limits which can be set to limit endpoint traffic.""" - - usage_tracking_config: Optional[AiGatewayUsageTrackingConfig] = None - """Configuration to enable usage tracking using system tables. These tables allow you to monitor - operational usage on endpoints and their associated costs.""" - - def as_dict(self) -> dict: - """Serializes the PutAiGatewayRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.fallback_config: - body["fallback_config"] = self.fallback_config.as_dict() - if self.guardrails: - body["guardrails"] = self.guardrails.as_dict() - if self.inference_table_config: - body["inference_table_config"] = self.inference_table_config.as_dict() - if self.name is not None: - body["name"] = self.name - if self.rate_limits: - body["rate_limits"] = [v.as_dict() for v in self.rate_limits] - if self.usage_tracking_config: - body["usage_tracking_config"] = self.usage_tracking_config.as_dict() - return body - - def as_shallow_dict(self) -> dict: - """Serializes the PutAiGatewayRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.fallback_config: - body["fallback_config"] = self.fallback_config - if self.guardrails: - body["guardrails"] = self.guardrails - if self.inference_table_config: - body["inference_table_config"] = self.inference_table_config - if self.name is not None: - body["name"] = self.name - if self.rate_limits: - body["rate_limits"] = self.rate_limits - if self.usage_tracking_config: - body["usage_tracking_config"] = self.usage_tracking_config - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> PutAiGatewayRequest: - 
"""Deserializes the PutAiGatewayRequest from a dictionary.""" - return cls( - fallback_config=_from_dict(d, "fallback_config", FallbackConfig), - guardrails=_from_dict(d, "guardrails", AiGatewayGuardrails), - inference_table_config=_from_dict(d, "inference_table_config", AiGatewayInferenceTableConfig), - name=d.get("name", None), - rate_limits=_repeated_dict(d, "rate_limits", AiGatewayRateLimit), - usage_tracking_config=_from_dict(d, "usage_tracking_config", AiGatewayUsageTrackingConfig), - ) - - @dataclass class PutAiGatewayResponse: fallback_config: Optional[FallbackConfig] = None @@ -2594,38 +2264,6 @@ def from_dict(cls, d: Dict[str, Any]) -> PutAiGatewayResponse: ) -@dataclass -class PutRequest: - name: Optional[str] = None - """The name of the serving endpoint whose rate limits are being updated. This field is required.""" - - rate_limits: Optional[List[RateLimit]] = None - """The list of endpoint rate limits.""" - - def as_dict(self) -> dict: - """Serializes the PutRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.name is not None: - body["name"] = self.name - if self.rate_limits: - body["rate_limits"] = [v.as_dict() for v in self.rate_limits] - return body - - def as_shallow_dict(self) -> dict: - """Serializes the PutRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.name is not None: - body["name"] = self.name - if self.rate_limits: - body["rate_limits"] = self.rate_limits - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> PutRequest: - """Deserializes the PutRequest from a dictionary.""" - return cls(name=d.get("name", None), rate_limits=_repeated_dict(d, "rate_limits", RateLimit)) - - @dataclass class PutResponse: rate_limits: Optional[List[RateLimit]] = None @@ -2651,153 +2289,6 @@ def from_dict(cls, d: Dict[str, Any]) -> PutResponse: return cls(rate_limits=_repeated_dict(d, "rate_limits", RateLimit)) -@dataclass -class QueryEndpointInput: - 
dataframe_records: Optional[List[Any]] = None - """Pandas Dataframe input in the records orientation.""" - - dataframe_split: Optional[DataframeSplitInput] = None - """Pandas Dataframe input in the split orientation.""" - - extra_params: Optional[Dict[str, str]] = None - """The extra parameters field used ONLY for __completions, chat,__ and __embeddings external & - foundation model__ serving endpoints. This is a map of strings and should only be used with - other external/foundation model query fields.""" - - input: Optional[Any] = None - """The input string (or array of strings) field used ONLY for __embeddings external & foundation - model__ serving endpoints and is the only field (along with extra_params if needed) used by - embeddings queries.""" - - inputs: Optional[Any] = None - """Tensor-based input in columnar format.""" - - instances: Optional[List[Any]] = None - """Tensor-based input in row format.""" - - max_tokens: Optional[int] = None - """The max tokens field used ONLY for __completions__ and __chat external & foundation model__ - serving endpoints. This is an integer and should only be used with other chat/completions query - fields.""" - - messages: Optional[List[ChatMessage]] = None - """The messages field used ONLY for __chat external & foundation model__ serving endpoints. This is - a map of strings and should only be used with other chat query fields.""" - - n: Optional[int] = None - """The n (number of candidates) field used ONLY for __completions__ and __chat external & - foundation model__ serving endpoints. This is an integer between 1 and 5 with a default of 1 and - should only be used with other chat/completions query fields.""" - - name: Optional[str] = None - """The name of the serving endpoint. 
This field is required.""" - - prompt: Optional[Any] = None - """The prompt string (or array of strings) field used ONLY for __completions external & foundation - model__ serving endpoints and should only be used with other completions query fields.""" - - stop: Optional[List[str]] = None - """The stop sequences field used ONLY for __completions__ and __chat external & foundation model__ - serving endpoints. This is a list of strings and should only be used with other chat/completions - query fields.""" - - stream: Optional[bool] = None - """The stream field used ONLY for __completions__ and __chat external & foundation model__ serving - endpoints. This is a boolean defaulting to false and should only be used with other - chat/completions query fields.""" - - temperature: Optional[float] = None - """The temperature field used ONLY for __completions__ and __chat external & foundation model__ - serving endpoints. This is a float between 0.0 and 2.0 with a default of 1.0 and should only be - used with other chat/completions query fields.""" - - def as_dict(self) -> dict: - """Serializes the QueryEndpointInput into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.dataframe_records: - body["dataframe_records"] = [v for v in self.dataframe_records] - if self.dataframe_split: - body["dataframe_split"] = self.dataframe_split.as_dict() - if self.extra_params: - body["extra_params"] = self.extra_params - if self.input: - body["input"] = self.input - if self.inputs: - body["inputs"] = self.inputs - if self.instances: - body["instances"] = [v for v in self.instances] - if self.max_tokens is not None: - body["max_tokens"] = self.max_tokens - if self.messages: - body["messages"] = [v.as_dict() for v in self.messages] - if self.n is not None: - body["n"] = self.n - if self.name is not None: - body["name"] = self.name - if self.prompt: - body["prompt"] = self.prompt - if self.stop: - body["stop"] = [v for v in self.stop] - if self.stream is not None: 
- body["stream"] = self.stream - if self.temperature is not None: - body["temperature"] = self.temperature - return body - - def as_shallow_dict(self) -> dict: - """Serializes the QueryEndpointInput into a shallow dictionary of its immediate attributes.""" - body = {} - if self.dataframe_records: - body["dataframe_records"] = self.dataframe_records - if self.dataframe_split: - body["dataframe_split"] = self.dataframe_split - if self.extra_params: - body["extra_params"] = self.extra_params - if self.input: - body["input"] = self.input - if self.inputs: - body["inputs"] = self.inputs - if self.instances: - body["instances"] = self.instances - if self.max_tokens is not None: - body["max_tokens"] = self.max_tokens - if self.messages: - body["messages"] = self.messages - if self.n is not None: - body["n"] = self.n - if self.name is not None: - body["name"] = self.name - if self.prompt: - body["prompt"] = self.prompt - if self.stop: - body["stop"] = self.stop - if self.stream is not None: - body["stream"] = self.stream - if self.temperature is not None: - body["temperature"] = self.temperature - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> QueryEndpointInput: - """Deserializes the QueryEndpointInput from a dictionary.""" - return cls( - dataframe_records=d.get("dataframe_records", None), - dataframe_split=_from_dict(d, "dataframe_split", DataframeSplitInput), - extra_params=d.get("extra_params", None), - input=d.get("input", None), - inputs=d.get("inputs", None), - instances=d.get("instances", None), - max_tokens=d.get("max_tokens", None), - messages=_repeated_dict(d, "messages", ChatMessage), - n=d.get("n", None), - name=d.get("name", None), - prompt=d.get("prompt", None), - stop=d.get("stop", None), - stream=d.get("stream", None), - temperature=d.get("temperature", None), - ) - - @dataclass class QueryEndpointResponse: choices: Optional[List[V1ResponseChoiceElement]] = None @@ -4314,40 +3805,6 @@ def from_dict(cls, d: Dict[str, Any]) -> 
ServingEndpointPermissionsDescription: ) -@dataclass -class ServingEndpointPermissionsRequest: - access_control_list: Optional[List[ServingEndpointAccessControlRequest]] = None - - serving_endpoint_id: Optional[str] = None - """The serving endpoint for which to get or manage permissions.""" - - def as_dict(self) -> dict: - """Serializes the ServingEndpointPermissionsRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.access_control_list: - body["access_control_list"] = [v.as_dict() for v in self.access_control_list] - if self.serving_endpoint_id is not None: - body["serving_endpoint_id"] = self.serving_endpoint_id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the ServingEndpointPermissionsRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.access_control_list: - body["access_control_list"] = self.access_control_list - if self.serving_endpoint_id is not None: - body["serving_endpoint_id"] = self.serving_endpoint_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ServingEndpointPermissionsRequest: - """Deserializes the ServingEndpointPermissionsRequest from a dictionary.""" - return cls( - access_control_list=_repeated_dict(d, "access_control_list", ServingEndpointAccessControlRequest), - serving_endpoint_id=d.get("serving_endpoint_id", None), - ) - - class ServingModelWorkloadType(Enum): """Please keep this in sync with with workload types in InferenceEndpointEntities.scala""" @@ -4383,37 +3840,6 @@ def from_dict(cls, d: Dict[str, Any]) -> TrafficConfig: return cls(routes=_repeated_dict(d, "routes", Route)) -@dataclass -class UpdateProvisionedThroughputEndpointConfigRequest: - config: PtEndpointCoreConfig - - name: Optional[str] = None - """The name of the pt endpoint to update. 
This field is required.""" - - def as_dict(self) -> dict: - """Serializes the UpdateProvisionedThroughputEndpointConfigRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.config: - body["config"] = self.config.as_dict() - if self.name is not None: - body["name"] = self.name - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateProvisionedThroughputEndpointConfigRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.config: - body["config"] = self.config - if self.name is not None: - body["name"] = self.name - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateProvisionedThroughputEndpointConfigRequest: - """Deserializes the UpdateProvisionedThroughputEndpointConfigRequest from a dictionary.""" - return cls(config=_from_dict(d, "config", PtEndpointCoreConfig), name=d.get("name", None)) - - @dataclass class V1ResponseChoiceElement: finish_reason: Optional[str] = None @@ -5074,6 +4500,7 @@ def query( "Accept": "application/json", "Content-Type": "application/json", } + response_headers = [ "served-model-name", ] @@ -5386,6 +4813,7 @@ def auth(r: requests.PreparedRequest) -> requests.PreparedRequest: "Accept": "application/json", "Content-Type": "application/json", } + response_headers = [ "served-model-name", ] diff --git a/databricks/sdk/service/settings.py b/databricks/sdk/service/settings.py index e8746a745..2d379c696 100755 --- a/databricks/sdk/service/settings.py +++ b/databricks/sdk/service/settings.py @@ -731,49 +731,6 @@ def from_dict(cls, d: Dict[str, Any]) -> Config: ) -@dataclass -class CreateIpAccessList: - """Details required to configure a block list or allow list.""" - - label: str - """Label for the IP access list. 
This **cannot** be empty.""" - - list_type: ListType - - ip_addresses: Optional[List[str]] = None - - def as_dict(self) -> dict: - """Serializes the CreateIpAccessList into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.ip_addresses: - body["ip_addresses"] = [v for v in self.ip_addresses] - if self.label is not None: - body["label"] = self.label - if self.list_type is not None: - body["list_type"] = self.list_type.value - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreateIpAccessList into a shallow dictionary of its immediate attributes.""" - body = {} - if self.ip_addresses: - body["ip_addresses"] = self.ip_addresses - if self.label is not None: - body["label"] = self.label - if self.list_type is not None: - body["list_type"] = self.list_type - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateIpAccessList: - """Deserializes the CreateIpAccessList from a dictionary.""" - return cls( - ip_addresses=d.get("ip_addresses", None), - label=d.get("label", None), - list_type=_enum(d, "list_type", ListType), - ) - - @dataclass class CreateIpAccessListResponse: """An IP access list was successfully created.""" @@ -837,83 +794,6 @@ def from_dict(cls, d: Dict[str, Any]) -> CreateNetworkConnectivityConfiguration: return cls(name=d.get("name", None), region=d.get("region", None)) -@dataclass -class CreateNotificationDestinationRequest: - config: Optional[Config] = None - """The configuration for the notification destination. 
Must wrap EXACTLY one of the nested configs.""" - - display_name: Optional[str] = None - """The display name for the notification destination.""" - - def as_dict(self) -> dict: - """Serializes the CreateNotificationDestinationRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.config: - body["config"] = self.config.as_dict() - if self.display_name is not None: - body["display_name"] = self.display_name - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreateNotificationDestinationRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.config: - body["config"] = self.config - if self.display_name is not None: - body["display_name"] = self.display_name - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateNotificationDestinationRequest: - """Deserializes the CreateNotificationDestinationRequest from a dictionary.""" - return cls(config=_from_dict(d, "config", Config), display_name=d.get("display_name", None)) - - -@dataclass -class CreateOboTokenRequest: - """Configuration details for creating on-behalf tokens.""" - - application_id: str - """Application ID of the service principal.""" - - comment: Optional[str] = None - """Comment that describes the purpose of the token.""" - - lifetime_seconds: Optional[int] = None - """The number of seconds before the token expires.""" - - def as_dict(self) -> dict: - """Serializes the CreateOboTokenRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.application_id is not None: - body["application_id"] = self.application_id - if self.comment is not None: - body["comment"] = self.comment - if self.lifetime_seconds is not None: - body["lifetime_seconds"] = self.lifetime_seconds - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreateOboTokenRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.application_id is not 
None: - body["application_id"] = self.application_id - if self.comment is not None: - body["comment"] = self.comment - if self.lifetime_seconds is not None: - body["lifetime_seconds"] = self.lifetime_seconds - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateOboTokenRequest: - """Deserializes the CreateOboTokenRequest from a dictionary.""" - return cls( - application_id=d.get("application_id", None), - comment=d.get("comment", None), - lifetime_seconds=d.get("lifetime_seconds", None), - ) - - @dataclass class CreateOboTokenResponse: """An on-behalf token was successfully created for the service principal.""" @@ -1020,40 +900,6 @@ def from_dict(cls, d: Dict[str, Any]) -> CreatePrivateEndpointRule: ) -@dataclass -class CreateTokenRequest: - comment: Optional[str] = None - """Optional description to attach to the token.""" - - lifetime_seconds: Optional[int] = None - """The lifetime of the token, in seconds. - - If the lifetime is not specified, this token remains valid indefinitely.""" - - def as_dict(self) -> dict: - """Serializes the CreateTokenRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.comment is not None: - body["comment"] = self.comment - if self.lifetime_seconds is not None: - body["lifetime_seconds"] = self.lifetime_seconds - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreateTokenRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.comment is not None: - body["comment"] = self.comment - if self.lifetime_seconds is not None: - body["lifetime_seconds"] = self.lifetime_seconds - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateTokenRequest: - """Deserializes the CreateTokenRequest from a dictionary.""" - return cls(comment=d.get("comment", None), lifetime_seconds=d.get("lifetime_seconds", None)) - - @dataclass class CreateTokenResponse: token_info: Optional[PublicTokenInfo] = None @@ -1445,6 
+1291,56 @@ def from_dict(cls, d: Dict[str, Any]) -> DefaultNamespaceSetting: ) +@dataclass +class DefaultWarehouseId: + string_val: StringMessage + + etag: Optional[str] = None + """etag used for versioning. The response is at least as fresh as the eTag provided. This is used + for optimistic concurrency control as a way to help prevent simultaneous writes of a setting + overwriting each other. It is strongly suggested that systems make use of the etag in the read + -> update pattern to perform setting updates in order to avoid race conditions. That is, get an + etag from a GET request, and pass it with the PATCH request to identify the setting version you + are updating.""" + + setting_name: Optional[str] = None + """Name of the corresponding setting. This field is populated in the response, but it will not be + respected even if it's set in the request body. The setting name in the path parameter will be + respected instead. Setting name is required to be 'default' if the setting only has one instance + per workspace.""" + + def as_dict(self) -> dict: + """Serializes the DefaultWarehouseId into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.etag is not None: + body["etag"] = self.etag + if self.setting_name is not None: + body["setting_name"] = self.setting_name + if self.string_val: + body["string_val"] = self.string_val.as_dict() + return body + + def as_shallow_dict(self) -> dict: + """Serializes the DefaultWarehouseId into a shallow dictionary of its immediate attributes.""" + body = {} + if self.etag is not None: + body["etag"] = self.etag + if self.setting_name is not None: + body["setting_name"] = self.setting_name + if self.string_val: + body["string_val"] = self.string_val + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> DefaultWarehouseId: + """Deserializes the DefaultWarehouseId from a dictionary.""" + return cls( + etag=d.get("etag", None), + setting_name=d.get("setting_name", None), + 
string_val=_from_dict(d, "string_val", StringMessage), + ) + + @dataclass class DeleteAccountIpAccessEnableResponse: """The etag is returned.""" @@ -1605,6 +1501,38 @@ def from_dict(cls, d: Dict[str, Any]) -> DeleteDefaultNamespaceSettingResponse: return cls(etag=d.get("etag", None)) +@dataclass +class DeleteDefaultWarehouseIdResponse: + """The etag is returned.""" + + etag: str + """etag used for versioning. The response is at least as fresh as the eTag provided. This is used + for optimistic concurrency control as a way to help prevent simultaneous writes of a setting + overwriting each other. It is strongly suggested that systems make use of the etag in the read + -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get + an etag from a GET request, and pass it with the DELETE request to identify the rule set version + you are deleting.""" + + def as_dict(self) -> dict: + """Serializes the DeleteDefaultWarehouseIdResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.etag is not None: + body["etag"] = self.etag + return body + + def as_shallow_dict(self) -> dict: + """Serializes the DeleteDefaultWarehouseIdResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.etag is not None: + body["etag"] = self.etag + return body + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> DeleteDefaultWarehouseIdResponse: + """Deserializes the DeleteDefaultWarehouseIdResponse from a dictionary.""" + return cls(etag=d.get("etag", None)) + + @dataclass class DeleteDisableLegacyAccessResponse: """The etag is returned.""" @@ -2914,51 +2842,6 @@ def from_dict(cls, d: Dict[str, Any]) -> ExchangeToken: ) -@dataclass -class ExchangeTokenRequest: - """Exchange a token with the IdP""" - - partition_id: PartitionId - """The partition of Credentials store""" - - token_type: List[TokenType] - """A list of token types being requested""" - - scopes: List[str] - """Array of scopes 
for the token request.""" - - def as_dict(self) -> dict: - """Serializes the ExchangeTokenRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.partition_id: - body["partitionId"] = self.partition_id.as_dict() - if self.scopes: - body["scopes"] = [v for v in self.scopes] - if self.token_type: - body["tokenType"] = [v.value for v in self.token_type] - return body - - def as_shallow_dict(self) -> dict: - """Serializes the ExchangeTokenRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.partition_id: - body["partitionId"] = self.partition_id - if self.scopes: - body["scopes"] = self.scopes - if self.token_type: - body["tokenType"] = self.token_type - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ExchangeTokenRequest: - """Deserializes the ExchangeTokenRequest from a dictionary.""" - return cls( - partition_id=_from_dict(d, "partitionId", PartitionId), - scopes=d.get("scopes", None), - token_type=_repeated_enum(d, "tokenType", TokenType), - ) - - @dataclass class ExchangeTokenResponse: """Exhanged tokens were successfully returned.""" @@ -4597,65 +4480,6 @@ def from_dict(cls, d: Dict[str, Any]) -> PublicTokenInfo: ) -@dataclass -class ReplaceIpAccessList: - """Details required to replace an IP access list.""" - - label: str - """Label for the IP access list. 
This **cannot** be empty.""" - - list_type: ListType - - enabled: bool - """Specifies whether this IP access list is enabled.""" - - ip_access_list_id: Optional[str] = None - """The ID for the corresponding IP access list""" - - ip_addresses: Optional[List[str]] = None - - def as_dict(self) -> dict: - """Serializes the ReplaceIpAccessList into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.enabled is not None: - body["enabled"] = self.enabled - if self.ip_access_list_id is not None: - body["ip_access_list_id"] = self.ip_access_list_id - if self.ip_addresses: - body["ip_addresses"] = [v for v in self.ip_addresses] - if self.label is not None: - body["label"] = self.label - if self.list_type is not None: - body["list_type"] = self.list_type.value - return body - - def as_shallow_dict(self) -> dict: - """Serializes the ReplaceIpAccessList into a shallow dictionary of its immediate attributes.""" - body = {} - if self.enabled is not None: - body["enabled"] = self.enabled - if self.ip_access_list_id is not None: - body["ip_access_list_id"] = self.ip_access_list_id - if self.ip_addresses: - body["ip_addresses"] = self.ip_addresses - if self.label is not None: - body["label"] = self.label - if self.list_type is not None: - body["list_type"] = self.list_type - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ReplaceIpAccessList: - """Deserializes the ReplaceIpAccessList from a dictionary.""" - return cls( - enabled=d.get("enabled", None), - ip_access_list_id=d.get("ip_access_list_id", None), - ip_addresses=d.get("ip_addresses", None), - label=d.get("label", None), - list_type=_enum(d, "list_type", ListType), - ) - - @dataclass class ReplaceResponse: def as_dict(self) -> dict: @@ -4754,31 +4578,6 @@ def from_dict(cls, d: Dict[str, Any]) -> RestrictWorkspaceAdminsSetting: ) -@dataclass -class RevokeTokenRequest: - token_id: str - """The ID of the token to be revoked.""" - - def as_dict(self) -> dict: - """Serializes the 
RevokeTokenRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.token_id is not None: - body["token_id"] = self.token_id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the RevokeTokenRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.token_id is not None: - body["token_id"] = self.token_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> RevokeTokenRequest: - """Deserializes the RevokeTokenRequest from a dictionary.""" - return cls(token_id=d.get("token_id", None)) - - @dataclass class RevokeTokenResponse: def as_dict(self) -> dict: @@ -5240,1180 +5039,16 @@ def from_dict(cls, d: Dict[str, Any]) -> TokenPermissionsDescription: ) -@dataclass -class TokenPermissionsRequest: - access_control_list: Optional[List[TokenAccessControlRequest]] = None - - def as_dict(self) -> dict: - """Serializes the TokenPermissionsRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.access_control_list: - body["access_control_list"] = [v.as_dict() for v in self.access_control_list] - return body - - def as_shallow_dict(self) -> dict: - """Serializes the TokenPermissionsRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.access_control_list: - body["access_control_list"] = self.access_control_list - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> TokenPermissionsRequest: - """Deserializes the TokenPermissionsRequest from a dictionary.""" - return cls(access_control_list=_repeated_dict(d, "access_control_list", TokenAccessControlRequest)) - - -class TokenType(Enum): - """The type of token request. 
As of now, only `AZURE_ACTIVE_DIRECTORY_TOKEN` is supported.""" - - ARCLIGHT_AZURE_EXCHANGE_TOKEN = "ARCLIGHT_AZURE_EXCHANGE_TOKEN" - ARCLIGHT_AZURE_EXCHANGE_TOKEN_WITH_USER_DELEGATION_KEY = "ARCLIGHT_AZURE_EXCHANGE_TOKEN_WITH_USER_DELEGATION_KEY" - ARCLIGHT_MULTI_TENANT_AZURE_EXCHANGE_TOKEN = "ARCLIGHT_MULTI_TENANT_AZURE_EXCHANGE_TOKEN" - ARCLIGHT_MULTI_TENANT_AZURE_EXCHANGE_TOKEN_WITH_USER_DELEGATION_KEY = ( - "ARCLIGHT_MULTI_TENANT_AZURE_EXCHANGE_TOKEN_WITH_USER_DELEGATION_KEY" - ) - AZURE_ACTIVE_DIRECTORY_TOKEN = "AZURE_ACTIVE_DIRECTORY_TOKEN" - - -@dataclass -class UpdateAccountIpAccessEnableRequest: - """Details required to update a setting.""" - - allow_missing: bool - """This should always be set to true for Settings API. Added for AIP compliance.""" - - setting: AccountIpAccessEnable - - field_mask: str - """The field mask must be a single string, with multiple fields separated by commas (no spaces). - The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields - (e.g., `author.given_name`). Specification of elements in sequence or map fields is not allowed, - as only the entire collection field can be specified. Field names must exactly match the - resource field names. - - A field mask of `*` indicates full replacement. 
It’s recommended to always explicitly list the - fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the - API changes in the future.""" - - def as_dict(self) -> dict: - """Serializes the UpdateAccountIpAccessEnableRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting.as_dict() - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateAccountIpAccessEnableRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateAccountIpAccessEnableRequest: - """Deserializes the UpdateAccountIpAccessEnableRequest from a dictionary.""" - return cls( - allow_missing=d.get("allow_missing", None), - field_mask=d.get("field_mask", None), - setting=_from_dict(d, "setting", AccountIpAccessEnable), - ) - - -@dataclass -class UpdateAibiDashboardEmbeddingAccessPolicySettingRequest: - """Details required to update a setting.""" - - allow_missing: bool - """This should always be set to true for Settings API. Added for AIP compliance.""" - - setting: AibiDashboardEmbeddingAccessPolicySetting - - field_mask: str - """The field mask must be a single string, with multiple fields separated by commas (no spaces). - The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields - (e.g., `author.given_name`). Specification of elements in sequence or map fields is not allowed, - as only the entire collection field can be specified. 
Field names must exactly match the - resource field names. - - A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the - fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the - API changes in the future.""" - - def as_dict(self) -> dict: - """Serializes the UpdateAibiDashboardEmbeddingAccessPolicySettingRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting.as_dict() - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateAibiDashboardEmbeddingAccessPolicySettingRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateAibiDashboardEmbeddingAccessPolicySettingRequest: - """Deserializes the UpdateAibiDashboardEmbeddingAccessPolicySettingRequest from a dictionary.""" - return cls( - allow_missing=d.get("allow_missing", None), - field_mask=d.get("field_mask", None), - setting=_from_dict(d, "setting", AibiDashboardEmbeddingAccessPolicySetting), - ) - - -@dataclass -class UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest: - """Details required to update a setting.""" - - allow_missing: bool - """This should always be set to true for Settings API. Added for AIP compliance.""" - - setting: AibiDashboardEmbeddingApprovedDomainsSetting - - field_mask: str - """The field mask must be a single string, with multiple fields separated by commas (no spaces). 
- The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields - (e.g., `author.given_name`). Specification of elements in sequence or map fields is not allowed, - as only the entire collection field can be specified. Field names must exactly match the - resource field names. - - A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the - fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the - API changes in the future.""" - - def as_dict(self) -> dict: - """Serializes the UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting.as_dict() - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest: - """Deserializes the UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest from a dictionary.""" - return cls( - allow_missing=d.get("allow_missing", None), - field_mask=d.get("field_mask", None), - setting=_from_dict(d, "setting", AibiDashboardEmbeddingApprovedDomainsSetting), - ) - - -@dataclass -class UpdateAutomaticClusterUpdateSettingRequest: - """Details required to update a setting.""" - - allow_missing: bool - """This should always be set to true for Settings API. 
Added for AIP compliance.""" - - setting: AutomaticClusterUpdateSetting - - field_mask: str - """The field mask must be a single string, with multiple fields separated by commas (no spaces). - The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields - (e.g., `author.given_name`). Specification of elements in sequence or map fields is not allowed, - as only the entire collection field can be specified. Field names must exactly match the - resource field names. - - A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the - fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the - API changes in the future.""" - - def as_dict(self) -> dict: - """Serializes the UpdateAutomaticClusterUpdateSettingRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting.as_dict() - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateAutomaticClusterUpdateSettingRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateAutomaticClusterUpdateSettingRequest: - """Deserializes the UpdateAutomaticClusterUpdateSettingRequest from a dictionary.""" - return cls( - allow_missing=d.get("allow_missing", None), - field_mask=d.get("field_mask", None), - setting=_from_dict(d, "setting", AutomaticClusterUpdateSetting), - ) - - -@dataclass -class UpdateComplianceSecurityProfileSettingRequest: - """Details required to update a setting.""" - 
- allow_missing: bool - """This should always be set to true for Settings API. Added for AIP compliance.""" - - setting: ComplianceSecurityProfileSetting - - field_mask: str - """The field mask must be a single string, with multiple fields separated by commas (no spaces). - The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields - (e.g., `author.given_name`). Specification of elements in sequence or map fields is not allowed, - as only the entire collection field can be specified. Field names must exactly match the - resource field names. - - A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the - fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the - API changes in the future.""" - - def as_dict(self) -> dict: - """Serializes the UpdateComplianceSecurityProfileSettingRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting.as_dict() - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateComplianceSecurityProfileSettingRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateComplianceSecurityProfileSettingRequest: - """Deserializes the UpdateComplianceSecurityProfileSettingRequest from a dictionary.""" - return cls( - allow_missing=d.get("allow_missing", None), - field_mask=d.get("field_mask", None), - setting=_from_dict(d, "setting", ComplianceSecurityProfileSetting), - ) - - -@dataclass 
-class UpdateCspEnablementAccountSettingRequest: - """Details required to update a setting.""" - - allow_missing: bool - """This should always be set to true for Settings API. Added for AIP compliance.""" - - setting: CspEnablementAccountSetting - - field_mask: str - """The field mask must be a single string, with multiple fields separated by commas (no spaces). - The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields - (e.g., `author.given_name`). Specification of elements in sequence or map fields is not allowed, - as only the entire collection field can be specified. Field names must exactly match the - resource field names. - - A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the - fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the - API changes in the future.""" - - def as_dict(self) -> dict: - """Serializes the UpdateCspEnablementAccountSettingRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting.as_dict() - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateCspEnablementAccountSettingRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateCspEnablementAccountSettingRequest: - """Deserializes the UpdateCspEnablementAccountSettingRequest from a dictionary.""" - return cls( - allow_missing=d.get("allow_missing", None), - field_mask=d.get("field_mask", None), - 
setting=_from_dict(d, "setting", CspEnablementAccountSetting), - ) - - -@dataclass -class UpdateDashboardEmailSubscriptionsRequest: - """Details required to update a setting.""" - - allow_missing: bool - """This should always be set to true for Settings API. Added for AIP compliance.""" - - setting: DashboardEmailSubscriptions - - field_mask: str - """The field mask must be a single string, with multiple fields separated by commas (no spaces). - The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields - (e.g., `author.given_name`). Specification of elements in sequence or map fields is not allowed, - as only the entire collection field can be specified. Field names must exactly match the - resource field names. - - A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the - fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the - API changes in the future.""" - - def as_dict(self) -> dict: - """Serializes the UpdateDashboardEmailSubscriptionsRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting.as_dict() - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateDashboardEmailSubscriptionsRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateDashboardEmailSubscriptionsRequest: - """Deserializes the UpdateDashboardEmailSubscriptionsRequest from a dictionary.""" - return cls( - 
allow_missing=d.get("allow_missing", None), - field_mask=d.get("field_mask", None), - setting=_from_dict(d, "setting", DashboardEmailSubscriptions), - ) - - -@dataclass -class UpdateDefaultNamespaceSettingRequest: - """Details required to update a setting.""" - - allow_missing: bool - """This should always be set to true for Settings API. Added for AIP compliance.""" - - setting: DefaultNamespaceSetting - - field_mask: str - """The field mask must be a single string, with multiple fields separated by commas (no spaces). - The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields - (e.g., `author.given_name`). Specification of elements in sequence or map fields is not allowed, - as only the entire collection field can be specified. Field names must exactly match the - resource field names. - - A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the - fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the - API changes in the future.""" - - def as_dict(self) -> dict: - """Serializes the UpdateDefaultNamespaceSettingRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting.as_dict() - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateDefaultNamespaceSettingRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateDefaultNamespaceSettingRequest: - """Deserializes the UpdateDefaultNamespaceSettingRequest 
from a dictionary.""" - return cls( - allow_missing=d.get("allow_missing", None), - field_mask=d.get("field_mask", None), - setting=_from_dict(d, "setting", DefaultNamespaceSetting), - ) - - -@dataclass -class UpdateDisableLegacyAccessRequest: - """Details required to update a setting.""" - - allow_missing: bool - """This should always be set to true for Settings API. Added for AIP compliance.""" - - setting: DisableLegacyAccess - - field_mask: str - """The field mask must be a single string, with multiple fields separated by commas (no spaces). - The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields - (e.g., `author.given_name`). Specification of elements in sequence or map fields is not allowed, - as only the entire collection field can be specified. Field names must exactly match the - resource field names. - - A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the - fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the - API changes in the future.""" - - def as_dict(self) -> dict: - """Serializes the UpdateDisableLegacyAccessRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting.as_dict() - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateDisableLegacyAccessRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateDisableLegacyAccessRequest: - """Deserializes the 
UpdateDisableLegacyAccessRequest from a dictionary.""" - return cls( - allow_missing=d.get("allow_missing", None), - field_mask=d.get("field_mask", None), - setting=_from_dict(d, "setting", DisableLegacyAccess), - ) - - -@dataclass -class UpdateDisableLegacyDbfsRequest: - """Details required to update a setting.""" - - allow_missing: bool - """This should always be set to true for Settings API. Added for AIP compliance.""" - - setting: DisableLegacyDbfs - - field_mask: str - """The field mask must be a single string, with multiple fields separated by commas (no spaces). - The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields - (e.g., `author.given_name`). Specification of elements in sequence or map fields is not allowed, - as only the entire collection field can be specified. Field names must exactly match the - resource field names. - - A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the - fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the - API changes in the future.""" - - def as_dict(self) -> dict: - """Serializes the UpdateDisableLegacyDbfsRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting.as_dict() - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateDisableLegacyDbfsRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateDisableLegacyDbfsRequest: - """Deserializes the 
UpdateDisableLegacyDbfsRequest from a dictionary.""" - return cls( - allow_missing=d.get("allow_missing", None), - field_mask=d.get("field_mask", None), - setting=_from_dict(d, "setting", DisableLegacyDbfs), - ) - - -@dataclass -class UpdateDisableLegacyFeaturesRequest: - """Details required to update a setting.""" - - allow_missing: bool - """This should always be set to true for Settings API. Added for AIP compliance.""" - - setting: DisableLegacyFeatures - - field_mask: str - """The field mask must be a single string, with multiple fields separated by commas (no spaces). - The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields - (e.g., `author.given_name`). Specification of elements in sequence or map fields is not allowed, - as only the entire collection field can be specified. Field names must exactly match the - resource field names. - - A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the - fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the - API changes in the future.""" - - def as_dict(self) -> dict: - """Serializes the UpdateDisableLegacyFeaturesRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting.as_dict() - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateDisableLegacyFeaturesRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateDisableLegacyFeaturesRequest: - 
"""Deserializes the UpdateDisableLegacyFeaturesRequest from a dictionary.""" - return cls( - allow_missing=d.get("allow_missing", None), - field_mask=d.get("field_mask", None), - setting=_from_dict(d, "setting", DisableLegacyFeatures), - ) - - -@dataclass -class UpdateEnableExportNotebookRequest: - """Details required to update a setting.""" - - allow_missing: bool - """This should always be set to true for Settings API. Added for AIP compliance.""" - - setting: EnableExportNotebook - - field_mask: str - """The field mask must be a single string, with multiple fields separated by commas (no spaces). - The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields - (e.g., `author.given_name`). Specification of elements in sequence or map fields is not allowed, - as only the entire collection field can be specified. Field names must exactly match the - resource field names. - - A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the - fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the - API changes in the future.""" - - def as_dict(self) -> dict: - """Serializes the UpdateEnableExportNotebookRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting.as_dict() - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateEnableExportNotebookRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> 
UpdateEnableExportNotebookRequest: - """Deserializes the UpdateEnableExportNotebookRequest from a dictionary.""" - return cls( - allow_missing=d.get("allow_missing", None), - field_mask=d.get("field_mask", None), - setting=_from_dict(d, "setting", EnableExportNotebook), - ) - - -@dataclass -class UpdateEnableNotebookTableClipboardRequest: - """Details required to update a setting.""" - - allow_missing: bool - """This should always be set to true for Settings API. Added for AIP compliance.""" - - setting: EnableNotebookTableClipboard - - field_mask: str - """The field mask must be a single string, with multiple fields separated by commas (no spaces). - The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields - (e.g., `author.given_name`). Specification of elements in sequence or map fields is not allowed, - as only the entire collection field can be specified. Field names must exactly match the - resource field names. - - A field mask of `*` indicates full replacement. 
It’s recommended to always explicitly list the - fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the - API changes in the future.""" - - def as_dict(self) -> dict: - """Serializes the UpdateEnableNotebookTableClipboardRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting.as_dict() - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateEnableNotebookTableClipboardRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateEnableNotebookTableClipboardRequest: - """Deserializes the UpdateEnableNotebookTableClipboardRequest from a dictionary.""" - return cls( - allow_missing=d.get("allow_missing", None), - field_mask=d.get("field_mask", None), - setting=_from_dict(d, "setting", EnableNotebookTableClipboard), - ) - - -@dataclass -class UpdateEnableResultsDownloadingRequest: - """Details required to update a setting.""" - - allow_missing: bool - """This should always be set to true for Settings API. Added for AIP compliance.""" - - setting: EnableResultsDownloading - - field_mask: str - """The field mask must be a single string, with multiple fields separated by commas (no spaces). - The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields - (e.g., `author.given_name`). Specification of elements in sequence or map fields is not allowed, - as only the entire collection field can be specified. 
Field names must exactly match the - resource field names. - - A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the - fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the - API changes in the future.""" - - def as_dict(self) -> dict: - """Serializes the UpdateEnableResultsDownloadingRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting.as_dict() - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateEnableResultsDownloadingRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateEnableResultsDownloadingRequest: - """Deserializes the UpdateEnableResultsDownloadingRequest from a dictionary.""" - return cls( - allow_missing=d.get("allow_missing", None), - field_mask=d.get("field_mask", None), - setting=_from_dict(d, "setting", EnableResultsDownloading), - ) - - -@dataclass -class UpdateEnhancedSecurityMonitoringSettingRequest: - """Details required to update a setting.""" - - allow_missing: bool - """This should always be set to true for Settings API. Added for AIP compliance.""" - - setting: EnhancedSecurityMonitoringSetting - - field_mask: str - """The field mask must be a single string, with multiple fields separated by commas (no spaces). - The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields - (e.g., `author.given_name`). 
Specification of elements in sequence or map fields is not allowed, - as only the entire collection field can be specified. Field names must exactly match the - resource field names. - - A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the - fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the - API changes in the future.""" - - def as_dict(self) -> dict: - """Serializes the UpdateEnhancedSecurityMonitoringSettingRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting.as_dict() - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateEnhancedSecurityMonitoringSettingRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateEnhancedSecurityMonitoringSettingRequest: - """Deserializes the UpdateEnhancedSecurityMonitoringSettingRequest from a dictionary.""" - return cls( - allow_missing=d.get("allow_missing", None), - field_mask=d.get("field_mask", None), - setting=_from_dict(d, "setting", EnhancedSecurityMonitoringSetting), - ) - - -@dataclass -class UpdateEsmEnablementAccountSettingRequest: - """Details required to update a setting.""" - - allow_missing: bool - """This should always be set to true for Settings API. Added for AIP compliance.""" - - setting: EsmEnablementAccountSetting - - field_mask: str - """The field mask must be a single string, with multiple fields separated by commas (no spaces). 
- The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields - (e.g., `author.given_name`). Specification of elements in sequence or map fields is not allowed, - as only the entire collection field can be specified. Field names must exactly match the - resource field names. - - A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the - fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the - API changes in the future.""" - - def as_dict(self) -> dict: - """Serializes the UpdateEsmEnablementAccountSettingRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting.as_dict() - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateEsmEnablementAccountSettingRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateEsmEnablementAccountSettingRequest: - """Deserializes the UpdateEsmEnablementAccountSettingRequest from a dictionary.""" - return cls( - allow_missing=d.get("allow_missing", None), - field_mask=d.get("field_mask", None), - setting=_from_dict(d, "setting", EsmEnablementAccountSetting), - ) - - -@dataclass -class UpdateIpAccessList: - """Details required to update an IP access list.""" - - enabled: Optional[bool] = None - """Specifies whether this IP access list is enabled.""" - - ip_access_list_id: Optional[str] = None - """The ID for the corresponding IP access list""" - - ip_addresses: 
Optional[List[str]] = None - - label: Optional[str] = None - """Label for the IP access list. This **cannot** be empty.""" - - list_type: Optional[ListType] = None - - def as_dict(self) -> dict: - """Serializes the UpdateIpAccessList into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.enabled is not None: - body["enabled"] = self.enabled - if self.ip_access_list_id is not None: - body["ip_access_list_id"] = self.ip_access_list_id - if self.ip_addresses: - body["ip_addresses"] = [v for v in self.ip_addresses] - if self.label is not None: - body["label"] = self.label - if self.list_type is not None: - body["list_type"] = self.list_type.value - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateIpAccessList into a shallow dictionary of its immediate attributes.""" - body = {} - if self.enabled is not None: - body["enabled"] = self.enabled - if self.ip_access_list_id is not None: - body["ip_access_list_id"] = self.ip_access_list_id - if self.ip_addresses: - body["ip_addresses"] = self.ip_addresses - if self.label is not None: - body["label"] = self.label - if self.list_type is not None: - body["list_type"] = self.list_type - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateIpAccessList: - """Deserializes the UpdateIpAccessList from a dictionary.""" - return cls( - enabled=d.get("enabled", None), - ip_access_list_id=d.get("ip_access_list_id", None), - ip_addresses=d.get("ip_addresses", None), - label=d.get("label", None), - list_type=_enum(d, "list_type", ListType), - ) - - -@dataclass -class UpdateLlmProxyPartnerPoweredAccountRequest: - """Details required to update a setting.""" - - allow_missing: bool - """This should always be set to true for Settings API. Added for AIP compliance.""" - - setting: LlmProxyPartnerPoweredAccount - - field_mask: str - """The field mask must be a single string, with multiple fields separated by commas (no spaces). 
- The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields - (e.g., `author.given_name`). Specification of elements in sequence or map fields is not allowed, - as only the entire collection field can be specified. Field names must exactly match the - resource field names. - - A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the - fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the - API changes in the future.""" - - def as_dict(self) -> dict: - """Serializes the UpdateLlmProxyPartnerPoweredAccountRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting.as_dict() - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateLlmProxyPartnerPoweredAccountRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateLlmProxyPartnerPoweredAccountRequest: - """Deserializes the UpdateLlmProxyPartnerPoweredAccountRequest from a dictionary.""" - return cls( - allow_missing=d.get("allow_missing", None), - field_mask=d.get("field_mask", None), - setting=_from_dict(d, "setting", LlmProxyPartnerPoweredAccount), - ) - - -@dataclass -class UpdateLlmProxyPartnerPoweredEnforceRequest: - """Details required to update a setting.""" - - allow_missing: bool - """This should always be set to true for Settings API. 
Added for AIP compliance.""" - - setting: LlmProxyPartnerPoweredEnforce - - field_mask: str - """The field mask must be a single string, with multiple fields separated by commas (no spaces). - The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields - (e.g., `author.given_name`). Specification of elements in sequence or map fields is not allowed, - as only the entire collection field can be specified. Field names must exactly match the - resource field names. - - A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the - fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the - API changes in the future.""" - - def as_dict(self) -> dict: - """Serializes the UpdateLlmProxyPartnerPoweredEnforceRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting.as_dict() - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateLlmProxyPartnerPoweredEnforceRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateLlmProxyPartnerPoweredEnforceRequest: - """Deserializes the UpdateLlmProxyPartnerPoweredEnforceRequest from a dictionary.""" - return cls( - allow_missing=d.get("allow_missing", None), - field_mask=d.get("field_mask", None), - setting=_from_dict(d, "setting", LlmProxyPartnerPoweredEnforce), - ) - - -@dataclass -class UpdateLlmProxyPartnerPoweredWorkspaceRequest: - """Details required to update a setting.""" - 
- allow_missing: bool - """This should always be set to true for Settings API. Added for AIP compliance.""" - - setting: LlmProxyPartnerPoweredWorkspace - - field_mask: str - """The field mask must be a single string, with multiple fields separated by commas (no spaces). - The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields - (e.g., `author.given_name`). Specification of elements in sequence or map fields is not allowed, - as only the entire collection field can be specified. Field names must exactly match the - resource field names. - - A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the - fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the - API changes in the future.""" - - def as_dict(self) -> dict: - """Serializes the UpdateLlmProxyPartnerPoweredWorkspaceRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting.as_dict() - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateLlmProxyPartnerPoweredWorkspaceRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateLlmProxyPartnerPoweredWorkspaceRequest: - """Deserializes the UpdateLlmProxyPartnerPoweredWorkspaceRequest from a dictionary.""" - return cls( - allow_missing=d.get("allow_missing", None), - field_mask=d.get("field_mask", None), - setting=_from_dict(d, "setting", LlmProxyPartnerPoweredWorkspace), - ) - - -@dataclass -class 
UpdateNotificationDestinationRequest: - config: Optional[Config] = None - """The configuration for the notification destination. Must wrap EXACTLY one of the nested configs.""" - - display_name: Optional[str] = None - """The display name for the notification destination.""" - - id: Optional[str] = None - """UUID identifying notification destination.""" - - def as_dict(self) -> dict: - """Serializes the UpdateNotificationDestinationRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.config: - body["config"] = self.config.as_dict() - if self.display_name is not None: - body["display_name"] = self.display_name - if self.id is not None: - body["id"] = self.id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateNotificationDestinationRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.config: - body["config"] = self.config - if self.display_name is not None: - body["display_name"] = self.display_name - if self.id is not None: - body["id"] = self.id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateNotificationDestinationRequest: - """Deserializes the UpdateNotificationDestinationRequest from a dictionary.""" - return cls( - config=_from_dict(d, "config", Config), display_name=d.get("display_name", None), id=d.get("id", None) - ) - - -@dataclass -class UpdatePersonalComputeSettingRequest: - """Details required to update a setting.""" - - allow_missing: bool - """This should always be set to true for Settings API. Added for AIP compliance.""" - - setting: PersonalComputeSetting - - field_mask: str - """The field mask must be a single string, with multiple fields separated by commas (no spaces). - The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields - (e.g., `author.given_name`). Specification of elements in sequence or map fields is not allowed, - as only the entire collection field can be specified. 
Field names must exactly match the - resource field names. - - A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the - fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the - API changes in the future.""" - - def as_dict(self) -> dict: - """Serializes the UpdatePersonalComputeSettingRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting.as_dict() - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdatePersonalComputeSettingRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting - return body +class TokenType(Enum): + """The type of token request. 
As of now, only `AZURE_ACTIVE_DIRECTORY_TOKEN` is supported.""" - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdatePersonalComputeSettingRequest: - """Deserializes the UpdatePersonalComputeSettingRequest from a dictionary.""" - return cls( - allow_missing=d.get("allow_missing", None), - field_mask=d.get("field_mask", None), - setting=_from_dict(d, "setting", PersonalComputeSetting), - ) + ARCLIGHT_AZURE_EXCHANGE_TOKEN = "ARCLIGHT_AZURE_EXCHANGE_TOKEN" + ARCLIGHT_AZURE_EXCHANGE_TOKEN_WITH_USER_DELEGATION_KEY = "ARCLIGHT_AZURE_EXCHANGE_TOKEN_WITH_USER_DELEGATION_KEY" + ARCLIGHT_MULTI_TENANT_AZURE_EXCHANGE_TOKEN = "ARCLIGHT_MULTI_TENANT_AZURE_EXCHANGE_TOKEN" + ARCLIGHT_MULTI_TENANT_AZURE_EXCHANGE_TOKEN_WITH_USER_DELEGATION_KEY = ( + "ARCLIGHT_MULTI_TENANT_AZURE_EXCHANGE_TOKEN_WITH_USER_DELEGATION_KEY" + ) + AZURE_ACTIVE_DIRECTORY_TOKEN = "AZURE_ACTIVE_DIRECTORY_TOKEN" @dataclass @@ -6490,110 +5125,6 @@ def from_dict(cls, d: Dict[str, Any]) -> UpdateResponse: return cls() -@dataclass -class UpdateRestrictWorkspaceAdminsSettingRequest: - """Details required to update a setting.""" - - allow_missing: bool - """This should always be set to true for Settings API. Added for AIP compliance.""" - - setting: RestrictWorkspaceAdminsSetting - - field_mask: str - """The field mask must be a single string, with multiple fields separated by commas (no spaces). - The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields - (e.g., `author.given_name`). Specification of elements in sequence or map fields is not allowed, - as only the entire collection field can be specified. Field names must exactly match the - resource field names. - - A field mask of `*` indicates full replacement. 
It’s recommended to always explicitly list the - fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the - API changes in the future.""" - - def as_dict(self) -> dict: - """Serializes the UpdateRestrictWorkspaceAdminsSettingRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting.as_dict() - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateRestrictWorkspaceAdminsSettingRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateRestrictWorkspaceAdminsSettingRequest: - """Deserializes the UpdateRestrictWorkspaceAdminsSettingRequest from a dictionary.""" - return cls( - allow_missing=d.get("allow_missing", None), - field_mask=d.get("field_mask", None), - setting=_from_dict(d, "setting", RestrictWorkspaceAdminsSetting), - ) - - -@dataclass -class UpdateSqlResultsDownloadRequest: - """Details required to update a setting.""" - - allow_missing: bool - """This should always be set to true for Settings API. Added for AIP compliance.""" - - setting: SqlResultsDownload - - field_mask: str - """The field mask must be a single string, with multiple fields separated by commas (no spaces). - The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields - (e.g., `author.given_name`). Specification of elements in sequence or map fields is not allowed, - as only the entire collection field can be specified. 
Field names must exactly match the - resource field names. - - A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the - fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the - API changes in the future.""" - - def as_dict(self) -> dict: - """Serializes the UpdateSqlResultsDownloadRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting.as_dict() - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateSqlResultsDownloadRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.allow_missing is not None: - body["allow_missing"] = self.allow_missing - if self.field_mask is not None: - body["field_mask"] = self.field_mask - if self.setting: - body["setting"] = self.setting - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateSqlResultsDownloadRequest: - """Deserializes the UpdateSqlResultsDownloadRequest from a dictionary.""" - return cls( - allow_missing=d.get("allow_missing", None), - field_mask=d.get("field_mask", None), - setting=_from_dict(d, "setting", SqlResultsDownload), - ) - - WorkspaceConf = Dict[str, str] @@ -7584,6 +6115,100 @@ def update(self, allow_missing: bool, setting: DefaultNamespaceSetting, field_ma return DefaultNamespaceSetting.from_dict(res) +class DefaultWarehouseIdAPI: + """Warehouse to be selected by default for users in this workspace. Covers SQL workloads only and can be + overridden by users.""" + + def __init__(self, api_client): + self._api = api_client + + def delete(self, *, etag: Optional[str] = None) -> DeleteDefaultWarehouseIdResponse: + """Reverts the Default Warehouse Id setting to its default value. 
+ + :param etag: str (optional) + etag used for versioning. The response is at least as fresh as the eTag provided. This is used for + optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting + each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern + to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET + request, and pass it with the DELETE request to identify the rule set version you are deleting. + + :returns: :class:`DeleteDefaultWarehouseIdResponse` + """ + + query = {} + if etag is not None: + query["etag"] = etag + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "DELETE", "/api/2.0/settings/types/default_warehouse_id/names/default", query=query, headers=headers + ) + return DeleteDefaultWarehouseIdResponse.from_dict(res) + + def get(self, *, etag: Optional[str] = None) -> DefaultWarehouseId: + """Gets the Default Warehouse Id setting. + + :param etag: str (optional) + etag used for versioning. The response is at least as fresh as the eTag provided. This is used for + optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting + each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern + to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET + request, and pass it with the DELETE request to identify the rule set version you are deleting. 
+ + :returns: :class:`DefaultWarehouseId` + """ + + query = {} + if etag is not None: + query["etag"] = etag + headers = { + "Accept": "application/json", + } + + res = self._api.do( + "GET", "/api/2.0/settings/types/default_warehouse_id/names/default", query=query, headers=headers + ) + return DefaultWarehouseId.from_dict(res) + + def update(self, allow_missing: bool, setting: DefaultWarehouseId, field_mask: str) -> DefaultWarehouseId: + """Updates the Default Warehouse Id setting. + + :param allow_missing: bool + This should always be set to true for Settings API. Added for AIP compliance. + :param setting: :class:`DefaultWarehouseId` + :param field_mask: str + The field mask must be a single string, with multiple fields separated by commas (no spaces). The + field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g., + `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only + the entire collection field can be specified. Field names must exactly match the resource field + names. + + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the + fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API + changes in the future. 
+ + :returns: :class:`DefaultWarehouseId` + """ + body = {} + if allow_missing is not None: + body["allow_missing"] = allow_missing + if field_mask is not None: + body["field_mask"] = field_mask + if setting is not None: + body["setting"] = setting.as_dict() + headers = { + "Accept": "application/json", + "Content-Type": "application/json", + } + + res = self._api.do( + "PATCH", "/api/2.0/settings/types/default_warehouse_id/names/default", body=body, headers=headers + ) + return DefaultWarehouseId.from_dict(res) + + class DisableLegacyAccessAPI: """'Disabling legacy access' has the following impacts: @@ -9527,6 +8152,7 @@ def __init__(self, api_client): self._compliance_security_profile = ComplianceSecurityProfileAPI(self._api) self._dashboard_email_subscriptions = DashboardEmailSubscriptionsAPI(self._api) self._default_namespace = DefaultNamespaceAPI(self._api) + self._default_warehouse_id = DefaultWarehouseIdAPI(self._api) self._disable_legacy_access = DisableLegacyAccessAPI(self._api) self._disable_legacy_dbfs = DisableLegacyDbfsAPI(self._api) self._enable_export_notebook = EnableExportNotebookAPI(self._api) @@ -9567,6 +8193,11 @@ def default_namespace(self) -> DefaultNamespaceAPI: """The default namespace setting API allows users to configure the default namespace for a Databricks workspace.""" return self._default_namespace + @property + def default_warehouse_id(self) -> DefaultWarehouseIdAPI: + """Warehouse to be selected by default for users in this workspace.""" + return self._default_warehouse_id + @property def disable_legacy_access(self) -> DisableLegacyAccessAPI: """'Disabling legacy access' has the following impacts: 1.""" diff --git a/databricks/sdk/service/sharing.py b/databricks/sdk/service/sharing.py index 04ef7b94e..3cbb98dc9 100755 --- a/databricks/sdk/service/sharing.py +++ b/databricks/sdk/service/sharing.py @@ -55,191 +55,6 @@ class ColumnTypeName(Enum): VARIANT = "VARIANT" -@dataclass -class CreateProvider: - name: str - """The name of the 
Provider.""" - - authentication_type: AuthenticationType - - comment: Optional[str] = None - """Description about the provider.""" - - recipient_profile_str: Optional[str] = None - """This field is required when the __authentication_type__ is **TOKEN**, - **OAUTH_CLIENT_CREDENTIALS** or not provided.""" - - def as_dict(self) -> dict: - """Serializes the CreateProvider into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.authentication_type is not None: - body["authentication_type"] = self.authentication_type.value - if self.comment is not None: - body["comment"] = self.comment - if self.name is not None: - body["name"] = self.name - if self.recipient_profile_str is not None: - body["recipient_profile_str"] = self.recipient_profile_str - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreateProvider into a shallow dictionary of its immediate attributes.""" - body = {} - if self.authentication_type is not None: - body["authentication_type"] = self.authentication_type - if self.comment is not None: - body["comment"] = self.comment - if self.name is not None: - body["name"] = self.name - if self.recipient_profile_str is not None: - body["recipient_profile_str"] = self.recipient_profile_str - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateProvider: - """Deserializes the CreateProvider from a dictionary.""" - return cls( - authentication_type=_enum(d, "authentication_type", AuthenticationType), - comment=d.get("comment", None), - name=d.get("name", None), - recipient_profile_str=d.get("recipient_profile_str", None), - ) - - -@dataclass -class CreateRecipient: - name: str - """Name of Recipient.""" - - authentication_type: AuthenticationType - - comment: Optional[str] = None - """Description about the recipient.""" - - data_recipient_global_metastore_id: Optional[str] = None - """The global Unity Catalog metastore id provided by the data recipient. 
This field is only present - when the __authentication_type__ is **DATABRICKS**. The identifier is of format - __cloud__:__region__:__metastore-uuid__.""" - - expiration_time: Optional[int] = None - """Expiration timestamp of the token, in epoch milliseconds.""" - - ip_access_list: Optional[IpAccessList] = None - """IP Access List""" - - owner: Optional[str] = None - """Username of the recipient owner.""" - - properties_kvpairs: Optional[SecurablePropertiesKvPairs] = None - """Recipient properties as map of string key-value pairs. When provided in update request, the - specified properties will override the existing properties. To add and remove properties, one - would need to perform a read-modify-write.""" - - sharing_code: Optional[str] = None - """The one-time sharing code provided by the data recipient. This field is only present when the - __authentication_type__ is **DATABRICKS**.""" - - def as_dict(self) -> dict: - """Serializes the CreateRecipient into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.authentication_type is not None: - body["authentication_type"] = self.authentication_type.value - if self.comment is not None: - body["comment"] = self.comment - if self.data_recipient_global_metastore_id is not None: - body["data_recipient_global_metastore_id"] = self.data_recipient_global_metastore_id - if self.expiration_time is not None: - body["expiration_time"] = self.expiration_time - if self.ip_access_list: - body["ip_access_list"] = self.ip_access_list.as_dict() - if self.name is not None: - body["name"] = self.name - if self.owner is not None: - body["owner"] = self.owner - if self.properties_kvpairs: - body["properties_kvpairs"] = self.properties_kvpairs.as_dict() - if self.sharing_code is not None: - body["sharing_code"] = self.sharing_code - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreateRecipient into a shallow dictionary of its immediate attributes.""" - body = {} - if 
self.authentication_type is not None: - body["authentication_type"] = self.authentication_type - if self.comment is not None: - body["comment"] = self.comment - if self.data_recipient_global_metastore_id is not None: - body["data_recipient_global_metastore_id"] = self.data_recipient_global_metastore_id - if self.expiration_time is not None: - body["expiration_time"] = self.expiration_time - if self.ip_access_list: - body["ip_access_list"] = self.ip_access_list - if self.name is not None: - body["name"] = self.name - if self.owner is not None: - body["owner"] = self.owner - if self.properties_kvpairs: - body["properties_kvpairs"] = self.properties_kvpairs - if self.sharing_code is not None: - body["sharing_code"] = self.sharing_code - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateRecipient: - """Deserializes the CreateRecipient from a dictionary.""" - return cls( - authentication_type=_enum(d, "authentication_type", AuthenticationType), - comment=d.get("comment", None), - data_recipient_global_metastore_id=d.get("data_recipient_global_metastore_id", None), - expiration_time=d.get("expiration_time", None), - ip_access_list=_from_dict(d, "ip_access_list", IpAccessList), - name=d.get("name", None), - owner=d.get("owner", None), - properties_kvpairs=_from_dict(d, "properties_kvpairs", SecurablePropertiesKvPairs), - sharing_code=d.get("sharing_code", None), - ) - - -@dataclass -class CreateShare: - name: str - """Name of the share.""" - - comment: Optional[str] = None - """User-provided free-form text description.""" - - storage_root: Optional[str] = None - """Storage root URL for the share.""" - - def as_dict(self) -> dict: - """Serializes the CreateShare into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.comment is not None: - body["comment"] = self.comment - if self.name is not None: - body["name"] = self.name - if self.storage_root is not None: - body["storage_root"] = self.storage_root - return body - 
- def as_shallow_dict(self) -> dict: - """Serializes the CreateShare into a shallow dictionary of its immediate attributes.""" - body = {} - if self.comment is not None: - body["comment"] = self.comment - if self.name is not None: - body["name"] = self.name - if self.storage_root is not None: - body["storage_root"] = self.storage_root - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateShare: - """Deserializes the CreateShare from a dictionary.""" - return cls(comment=d.get("comment", None), name=d.get("name", None), storage_root=d.get("storage_root", None)) - - @dataclass class DeleteResponse: def as_dict(self) -> dict: @@ -1316,7 +1131,8 @@ class PermissionsChange: """The set of privileges to add.""" principal: Optional[str] = None - """The principal whose privileges we are changing.""" + """The principal whose privileges we are changing. Only one of principal or principal_id should be + specified, never both at the same time.""" remove: Optional[List[str]] = None """The set of privileges to remove.""" @@ -1974,42 +1790,6 @@ def from_dict(cls, d: Dict[str, Any]) -> RetrieveTokenResponse: ) -@dataclass -class RotateRecipientToken: - existing_token_expire_in_seconds: int - """The expiration time of the bearer token in ISO 8601 format. This will set the expiration_time of - existing token only to a smaller timestamp, it cannot extend the expiration_time. 
Use 0 to - expire the existing token immediately, negative number will return an error.""" - - name: Optional[str] = None - """The name of the Recipient.""" - - def as_dict(self) -> dict: - """Serializes the RotateRecipientToken into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.existing_token_expire_in_seconds is not None: - body["existing_token_expire_in_seconds"] = self.existing_token_expire_in_seconds - if self.name is not None: - body["name"] = self.name - return body - - def as_shallow_dict(self) -> dict: - """Serializes the RotateRecipientToken into a shallow dictionary of its immediate attributes.""" - body = {} - if self.existing_token_expire_in_seconds is not None: - body["existing_token_expire_in_seconds"] = self.existing_token_expire_in_seconds - if self.name is not None: - body["name"] = self.name - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> RotateRecipientToken: - """Deserializes the RotateRecipientToken from a dictionary.""" - return cls( - existing_token_expire_in_seconds=d.get("existing_token_expire_in_seconds", None), name=d.get("name", None) - ) - - @dataclass class SecurablePropertiesKvPairs: """An object with __properties__ containing map of key-value properties attached to the securable.""" @@ -2544,6 +2324,7 @@ def from_dict(cls, d: Dict[str, Any]) -> TableInternalAttributes: class TableInternalAttributesSharedTableType(Enum): + DELTA_ICEBERG_TABLE = "DELTA_ICEBERG_TABLE" DIRECTORY_BASED_TABLE = "DIRECTORY_BASED_TABLE" FILE_BASED_TABLE = "FILE_BASED_TABLE" FOREIGN_TABLE = "FOREIGN_TABLE" @@ -2552,253 +2333,6 @@ class TableInternalAttributesSharedTableType(Enum): VIEW = "VIEW" -@dataclass -class UpdateProvider: - comment: Optional[str] = None - """Description about the provider.""" - - name: Optional[str] = None - """Name of the provider.""" - - new_name: Optional[str] = None - """New name for the provider.""" - - owner: Optional[str] = None - """Username of Provider owner.""" - - 
recipient_profile_str: Optional[str] = None - """This field is required when the __authentication_type__ is **TOKEN**, - **OAUTH_CLIENT_CREDENTIALS** or not provided.""" - - def as_dict(self) -> dict: - """Serializes the UpdateProvider into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.comment is not None: - body["comment"] = self.comment - if self.name is not None: - body["name"] = self.name - if self.new_name is not None: - body["new_name"] = self.new_name - if self.owner is not None: - body["owner"] = self.owner - if self.recipient_profile_str is not None: - body["recipient_profile_str"] = self.recipient_profile_str - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateProvider into a shallow dictionary of its immediate attributes.""" - body = {} - if self.comment is not None: - body["comment"] = self.comment - if self.name is not None: - body["name"] = self.name - if self.new_name is not None: - body["new_name"] = self.new_name - if self.owner is not None: - body["owner"] = self.owner - if self.recipient_profile_str is not None: - body["recipient_profile_str"] = self.recipient_profile_str - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateProvider: - """Deserializes the UpdateProvider from a dictionary.""" - return cls( - comment=d.get("comment", None), - name=d.get("name", None), - new_name=d.get("new_name", None), - owner=d.get("owner", None), - recipient_profile_str=d.get("recipient_profile_str", None), - ) - - -@dataclass -class UpdateRecipient: - comment: Optional[str] = None - """Description about the recipient.""" - - expiration_time: Optional[int] = None - """Expiration timestamp of the token, in epoch milliseconds.""" - - ip_access_list: Optional[IpAccessList] = None - """IP Access List""" - - name: Optional[str] = None - """Name of the recipient.""" - - new_name: Optional[str] = None - """New name for the recipient. 
.""" - - owner: Optional[str] = None - """Username of the recipient owner.""" - - properties_kvpairs: Optional[SecurablePropertiesKvPairs] = None - """Recipient properties as map of string key-value pairs. When provided in update request, the - specified properties will override the existing properties. To add and remove properties, one - would need to perform a read-modify-write.""" - - def as_dict(self) -> dict: - """Serializes the UpdateRecipient into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.comment is not None: - body["comment"] = self.comment - if self.expiration_time is not None: - body["expiration_time"] = self.expiration_time - if self.ip_access_list: - body["ip_access_list"] = self.ip_access_list.as_dict() - if self.name is not None: - body["name"] = self.name - if self.new_name is not None: - body["new_name"] = self.new_name - if self.owner is not None: - body["owner"] = self.owner - if self.properties_kvpairs: - body["properties_kvpairs"] = self.properties_kvpairs.as_dict() - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateRecipient into a shallow dictionary of its immediate attributes.""" - body = {} - if self.comment is not None: - body["comment"] = self.comment - if self.expiration_time is not None: - body["expiration_time"] = self.expiration_time - if self.ip_access_list: - body["ip_access_list"] = self.ip_access_list - if self.name is not None: - body["name"] = self.name - if self.new_name is not None: - body["new_name"] = self.new_name - if self.owner is not None: - body["owner"] = self.owner - if self.properties_kvpairs: - body["properties_kvpairs"] = self.properties_kvpairs - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateRecipient: - """Deserializes the UpdateRecipient from a dictionary.""" - return cls( - comment=d.get("comment", None), - expiration_time=d.get("expiration_time", None), - ip_access_list=_from_dict(d, "ip_access_list", IpAccessList), 
- name=d.get("name", None), - new_name=d.get("new_name", None), - owner=d.get("owner", None), - properties_kvpairs=_from_dict(d, "properties_kvpairs", SecurablePropertiesKvPairs), - ) - - -@dataclass -class UpdateShare: - comment: Optional[str] = None - """User-provided free-form text description.""" - - name: Optional[str] = None - """The name of the share.""" - - new_name: Optional[str] = None - """New name for the share.""" - - owner: Optional[str] = None - """Username of current owner of share.""" - - storage_root: Optional[str] = None - """Storage root URL for the share.""" - - updates: Optional[List[SharedDataObjectUpdate]] = None - """Array of shared data object updates.""" - - def as_dict(self) -> dict: - """Serializes the UpdateShare into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.comment is not None: - body["comment"] = self.comment - if self.name is not None: - body["name"] = self.name - if self.new_name is not None: - body["new_name"] = self.new_name - if self.owner is not None: - body["owner"] = self.owner - if self.storage_root is not None: - body["storage_root"] = self.storage_root - if self.updates: - body["updates"] = [v.as_dict() for v in self.updates] - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateShare into a shallow dictionary of its immediate attributes.""" - body = {} - if self.comment is not None: - body["comment"] = self.comment - if self.name is not None: - body["name"] = self.name - if self.new_name is not None: - body["new_name"] = self.new_name - if self.owner is not None: - body["owner"] = self.owner - if self.storage_root is not None: - body["storage_root"] = self.storage_root - if self.updates: - body["updates"] = self.updates - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateShare: - """Deserializes the UpdateShare from a dictionary.""" - return cls( - comment=d.get("comment", None), - name=d.get("name", None), - 
new_name=d.get("new_name", None), - owner=d.get("owner", None), - storage_root=d.get("storage_root", None), - updates=_repeated_dict(d, "updates", SharedDataObjectUpdate), - ) - - -@dataclass -class UpdateSharePermissions: - changes: Optional[List[PermissionsChange]] = None - """Array of permissions change objects.""" - - name: Optional[str] = None - """The name of the share.""" - - omit_permissions_list: Optional[bool] = None - """Optional. Whether to return the latest permissions list of the share in the response.""" - - def as_dict(self) -> dict: - """Serializes the UpdateSharePermissions into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.changes: - body["changes"] = [v.as_dict() for v in self.changes] - if self.name is not None: - body["name"] = self.name - if self.omit_permissions_list is not None: - body["omit_permissions_list"] = self.omit_permissions_list - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateSharePermissions into a shallow dictionary of its immediate attributes.""" - body = {} - if self.changes: - body["changes"] = self.changes - if self.name is not None: - body["name"] = self.name - if self.omit_permissions_list is not None: - body["omit_permissions_list"] = self.omit_permissions_list - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateSharePermissions: - """Deserializes the UpdateSharePermissions from a dictionary.""" - return cls( - changes=_repeated_dict(d, "changes", PermissionsChange), - name=d.get("name", None), - omit_permissions_list=d.get("omit_permissions_list", None), - ) - - @dataclass class UpdateSharePermissionsResponse: privilege_assignments: Optional[List[PrivilegeAssignment]] = None diff --git a/databricks/sdk/service/sql.py b/databricks/sdk/service/sql.py index d8b9e5089..6c213b7ca 100755 --- a/databricks/sdk/service/sql.py +++ b/databricks/sdk/service/sql.py @@ -665,10 +665,9 @@ class AlertV2: """Text of the query to be run.""" 
run_as_user_name: Optional[str] = None - """The run as username or application ID of service principal. This field is set to "Unavailable" - if the user has been deleted. On Create and Update, this field can be set to application ID of - an active service principal. Setting this field requires the servicePrincipal/user role. If not - specified it'll default to be request user.""" + """The run as username or application ID of service principal. On Create and Update, this field can + be set to application ID of an active service principal. Setting this field requires the + servicePrincipal/user role.""" schedule: Optional[CronSchedule] = None @@ -1374,101 +1373,6 @@ class ComparisonOperator(Enum): NOT_EQUAL = "NOT_EQUAL" -@dataclass -class CreateAlert: - name: str - """Name of the alert.""" - - options: AlertOptions - """Alert configuration options.""" - - query_id: str - """Query ID.""" - - parent: Optional[str] = None - """The identifier of the workspace folder containing the object.""" - - rearm: Optional[int] = None - """Number of seconds after being triggered before the alert rearms itself and can be triggered - again. 
If `null`, alert will never be triggered again.""" - - def as_dict(self) -> dict: - """Serializes the CreateAlert into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.name is not None: - body["name"] = self.name - if self.options: - body["options"] = self.options.as_dict() - if self.parent is not None: - body["parent"] = self.parent - if self.query_id is not None: - body["query_id"] = self.query_id - if self.rearm is not None: - body["rearm"] = self.rearm - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreateAlert into a shallow dictionary of its immediate attributes.""" - body = {} - if self.name is not None: - body["name"] = self.name - if self.options: - body["options"] = self.options - if self.parent is not None: - body["parent"] = self.parent - if self.query_id is not None: - body["query_id"] = self.query_id - if self.rearm is not None: - body["rearm"] = self.rearm - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateAlert: - """Deserializes the CreateAlert from a dictionary.""" - return cls( - name=d.get("name", None), - options=_from_dict(d, "options", AlertOptions), - parent=d.get("parent", None), - query_id=d.get("query_id", None), - rearm=d.get("rearm", None), - ) - - -@dataclass -class CreateAlertRequest: - alert: Optional[CreateAlertRequestAlert] = None - - auto_resolve_display_name: Optional[bool] = None - """If true, automatically resolve alert display name conflicts. 
Otherwise, fail the request if the - alert's display name conflicts with an existing alert's display name.""" - - def as_dict(self) -> dict: - """Serializes the CreateAlertRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.alert: - body["alert"] = self.alert.as_dict() - if self.auto_resolve_display_name is not None: - body["auto_resolve_display_name"] = self.auto_resolve_display_name - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreateAlertRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.alert: - body["alert"] = self.alert - if self.auto_resolve_display_name is not None: - body["auto_resolve_display_name"] = self.auto_resolve_display_name - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateAlertRequest: - """Deserializes the CreateAlertRequest from a dictionary.""" - return cls( - alert=_from_dict(d, "alert", CreateAlertRequestAlert), - auto_resolve_display_name=d.get("auto_resolve_display_name", None), - ) - - @dataclass class CreateAlertRequestAlert: condition: Optional[AlertCondition] = None @@ -1558,41 +1462,6 @@ def from_dict(cls, d: Dict[str, Any]) -> CreateAlertRequestAlert: ) -@dataclass -class CreateQueryRequest: - auto_resolve_display_name: Optional[bool] = None - """If true, automatically resolve query display name conflicts. 
Otherwise, fail the request if the - query's display name conflicts with an existing query's display name.""" - - query: Optional[CreateQueryRequestQuery] = None - - def as_dict(self) -> dict: - """Serializes the CreateQueryRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.auto_resolve_display_name is not None: - body["auto_resolve_display_name"] = self.auto_resolve_display_name - if self.query: - body["query"] = self.query.as_dict() - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreateQueryRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.auto_resolve_display_name is not None: - body["auto_resolve_display_name"] = self.auto_resolve_display_name - if self.query: - body["query"] = self.query - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateQueryRequest: - """Deserializes the CreateQueryRequest from a dictionary.""" - return cls( - auto_resolve_display_name=d.get("auto_resolve_display_name", None), - query=_from_dict(d, "query", CreateQueryRequestQuery), - ) - - @dataclass class CreateQueryRequestQuery: apply_auto_limit: Optional[bool] = None @@ -1699,92 +1568,6 @@ def from_dict(cls, d: Dict[str, Any]) -> CreateQueryRequestQuery: ) -@dataclass -class CreateQueryVisualizationsLegacyRequest: - """Add visualization to a query""" - - options: Any - """The options object varies widely from one visualization type to the next and is unsupported. - Databricks does not recommend modifying visualization settings in JSON.""" - - query_id: str - """The identifier returned by :method:queries/create""" - - type: str - """The type of visualization: chart, table, pivot table, and so on.""" - - description: Optional[str] = None - """A short description of this visualization. 
This is not displayed in the UI.""" - - name: Optional[str] = None - """The name of the visualization that appears on dashboards and the query screen.""" - - def as_dict(self) -> dict: - """Serializes the CreateQueryVisualizationsLegacyRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.description is not None: - body["description"] = self.description - if self.name is not None: - body["name"] = self.name - if self.options: - body["options"] = self.options - if self.query_id is not None: - body["query_id"] = self.query_id - if self.type is not None: - body["type"] = self.type - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreateQueryVisualizationsLegacyRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.description is not None: - body["description"] = self.description - if self.name is not None: - body["name"] = self.name - if self.options: - body["options"] = self.options - if self.query_id is not None: - body["query_id"] = self.query_id - if self.type is not None: - body["type"] = self.type - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateQueryVisualizationsLegacyRequest: - """Deserializes the CreateQueryVisualizationsLegacyRequest from a dictionary.""" - return cls( - description=d.get("description", None), - name=d.get("name", None), - options=d.get("options", None), - query_id=d.get("query_id", None), - type=d.get("type", None), - ) - - -@dataclass -class CreateVisualizationRequest: - visualization: Optional[CreateVisualizationRequestVisualization] = None - - def as_dict(self) -> dict: - """Serializes the CreateVisualizationRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.visualization: - body["visualization"] = self.visualization.as_dict() - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreateVisualizationRequest into a shallow dictionary of its immediate 
attributes.""" - body = {} - if self.visualization: - body["visualization"] = self.visualization - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateVisualizationRequest: - """Deserializes the CreateVisualizationRequest from a dictionary.""" - return cls(visualization=_from_dict(d, "visualization", CreateVisualizationRequestVisualization)) - - @dataclass class CreateVisualizationRequestVisualization: display_name: Optional[str] = None @@ -1846,156 +1629,6 @@ def from_dict(cls, d: Dict[str, Any]) -> CreateVisualizationRequestVisualization ) -@dataclass -class CreateWarehouseRequest: - auto_stop_mins: Optional[int] = None - """The amount of time in minutes that a SQL warehouse must be idle (i.e., no RUNNING queries) - before it is automatically stopped. - - Supported values: - Must be >= 0 mins for serverless warehouses - Must be == 0 or >= 10 mins for - non-serverless warehouses - 0 indicates no autostop. - - Defaults to 120 mins""" - - channel: Optional[Channel] = None - """Channel Details""" - - cluster_size: Optional[str] = None - """Size of the clusters allocated for this warehouse. Increasing the size of a spark cluster allows - you to run larger queries on it. If you want to increase the number of concurrent queries, - please tune max_num_clusters. - - Supported values: - 2X-Small - X-Small - Small - Medium - Large - X-Large - 2X-Large - 3X-Large - - 4X-Large""" - - creator_name: Optional[str] = None - """warehouse creator name""" - - enable_photon: Optional[bool] = None - """Configures whether the warehouse should use Photon optimized clusters. - - Defaults to false.""" - - enable_serverless_compute: Optional[bool] = None - """Configures whether the warehouse should use serverless compute""" - - instance_profile_arn: Optional[str] = None - """Deprecated. 
Instance profile used to pass IAM role to the cluster""" - - max_num_clusters: Optional[int] = None - """Maximum number of clusters that the autoscaler will create to handle concurrent queries. - - Supported values: - Must be >= min_num_clusters - Must be <= 30. - - Defaults to min_clusters if unset.""" - - min_num_clusters: Optional[int] = None - """Minimum number of available clusters that will be maintained for this SQL warehouse. Increasing - this will ensure that a larger number of clusters are always running and therefore may reduce - the cold start time for new queries. This is similar to reserved vs. revocable cores in a - resource manager. - - Supported values: - Must be > 0 - Must be <= min(max_num_clusters, 30) - - Defaults to 1""" - - name: Optional[str] = None - """Logical name for the cluster. - - Supported values: - Must be unique within an org. - Must be less than 100 characters.""" - - spot_instance_policy: Optional[SpotInstancePolicy] = None - - tags: Optional[EndpointTags] = None - """A set of key-value pairs that will be tagged on all resources (e.g., AWS instances and EBS - volumes) associated with this SQL warehouse. 
- - Supported values: - Number of tags < 45.""" - - warehouse_type: Optional[CreateWarehouseRequestWarehouseType] = None - - def as_dict(self) -> dict: - """Serializes the CreateWarehouseRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.auto_stop_mins is not None: - body["auto_stop_mins"] = self.auto_stop_mins - if self.channel: - body["channel"] = self.channel.as_dict() - if self.cluster_size is not None: - body["cluster_size"] = self.cluster_size - if self.creator_name is not None: - body["creator_name"] = self.creator_name - if self.enable_photon is not None: - body["enable_photon"] = self.enable_photon - if self.enable_serverless_compute is not None: - body["enable_serverless_compute"] = self.enable_serverless_compute - if self.instance_profile_arn is not None: - body["instance_profile_arn"] = self.instance_profile_arn - if self.max_num_clusters is not None: - body["max_num_clusters"] = self.max_num_clusters - if self.min_num_clusters is not None: - body["min_num_clusters"] = self.min_num_clusters - if self.name is not None: - body["name"] = self.name - if self.spot_instance_policy is not None: - body["spot_instance_policy"] = self.spot_instance_policy.value - if self.tags: - body["tags"] = self.tags.as_dict() - if self.warehouse_type is not None: - body["warehouse_type"] = self.warehouse_type.value - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreateWarehouseRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.auto_stop_mins is not None: - body["auto_stop_mins"] = self.auto_stop_mins - if self.channel: - body["channel"] = self.channel - if self.cluster_size is not None: - body["cluster_size"] = self.cluster_size - if self.creator_name is not None: - body["creator_name"] = self.creator_name - if self.enable_photon is not None: - body["enable_photon"] = self.enable_photon - if self.enable_serverless_compute is not None: - body["enable_serverless_compute"] = 
self.enable_serverless_compute - if self.instance_profile_arn is not None: - body["instance_profile_arn"] = self.instance_profile_arn - if self.max_num_clusters is not None: - body["max_num_clusters"] = self.max_num_clusters - if self.min_num_clusters is not None: - body["min_num_clusters"] = self.min_num_clusters - if self.name is not None: - body["name"] = self.name - if self.spot_instance_policy is not None: - body["spot_instance_policy"] = self.spot_instance_policy - if self.tags: - body["tags"] = self.tags - if self.warehouse_type is not None: - body["warehouse_type"] = self.warehouse_type - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateWarehouseRequest: - """Deserializes the CreateWarehouseRequest from a dictionary.""" - return cls( - auto_stop_mins=d.get("auto_stop_mins", None), - channel=_from_dict(d, "channel", Channel), - cluster_size=d.get("cluster_size", None), - creator_name=d.get("creator_name", None), - enable_photon=d.get("enable_photon", None), - enable_serverless_compute=d.get("enable_serverless_compute", None), - instance_profile_arn=d.get("instance_profile_arn", None), - max_num_clusters=d.get("max_num_clusters", None), - min_num_clusters=d.get("min_num_clusters", None), - name=d.get("name", None), - spot_instance_policy=_enum(d, "spot_instance_policy", SpotInstancePolicy), - tags=_from_dict(d, "tags", EndpointTags), - warehouse_type=_enum(d, "warehouse_type", CreateWarehouseRequestWarehouseType), - ) - - class CreateWarehouseRequestWarehouseType(Enum): """Warehouse type: `PRO` or `CLASSIC`. 
If you want to use serverless compute, you must set to `PRO` and also set the field `enable_serverless_compute` to `true`.""" @@ -2017,76 +1650,17 @@ def as_dict(self) -> dict: body["id"] = self.id return body - def as_shallow_dict(self) -> dict: - """Serializes the CreateWarehouseResponse into a shallow dictionary of its immediate attributes.""" - body = {} - if self.id is not None: - body["id"] = self.id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateWarehouseResponse: - """Deserializes the CreateWarehouseResponse from a dictionary.""" - return cls(id=d.get("id", None)) - - -@dataclass -class CreateWidget: - dashboard_id: str - """Dashboard ID returned by :method:dashboards/create.""" - - options: WidgetOptions - - width: int - """Width of a widget""" - - text: Optional[str] = None - """If this is a textbox widget, the application displays this text. This field is ignored if the - widget contains a visualization in the `visualization` field.""" - - visualization_id: Optional[str] = None - """Query Vizualization ID returned by :method:queryvisualizations/create.""" - - def as_dict(self) -> dict: - """Serializes the CreateWidget into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.dashboard_id is not None: - body["dashboard_id"] = self.dashboard_id - if self.options: - body["options"] = self.options.as_dict() - if self.text is not None: - body["text"] = self.text - if self.visualization_id is not None: - body["visualization_id"] = self.visualization_id - if self.width is not None: - body["width"] = self.width - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreateWidget into a shallow dictionary of its immediate attributes.""" - body = {} - if self.dashboard_id is not None: - body["dashboard_id"] = self.dashboard_id - if self.options: - body["options"] = self.options - if self.text is not None: - body["text"] = self.text - if self.visualization_id is not None: - 
body["visualization_id"] = self.visualization_id - if self.width is not None: - body["width"] = self.width - return body - + def as_shallow_dict(self) -> dict: + """Serializes the CreateWarehouseResponse into a shallow dictionary of its immediate attributes.""" + body = {} + if self.id is not None: + body["id"] = self.id + return body + @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateWidget: - """Deserializes the CreateWidget from a dictionary.""" - return cls( - dashboard_id=d.get("dashboard_id", None), - options=_from_dict(d, "options", WidgetOptions), - text=d.get("text", None), - visualization_id=d.get("visualization_id", None), - width=d.get("width", None), - ) + def from_dict(cls, d: Dict[str, Any]) -> CreateWarehouseResponse: + """Deserializes the CreateWarehouseResponse from a dictionary.""" + return cls(id=d.get("id", None)) @dataclass @@ -2296,56 +1870,6 @@ def from_dict(cls, d: Dict[str, Any]) -> Dashboard: ) -@dataclass -class DashboardEditContent: - dashboard_id: Optional[str] = None - - name: Optional[str] = None - """The title of this dashboard that appears in list views and at the top of the dashboard page.""" - - run_as_role: Optional[RunAsRole] = None - """Sets the **Run as** role for the object. 
Must be set to one of `"viewer"` (signifying "run as - viewer" behavior) or `"owner"` (signifying "run as owner" behavior)""" - - tags: Optional[List[str]] = None - - def as_dict(self) -> dict: - """Serializes the DashboardEditContent into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.dashboard_id is not None: - body["dashboard_id"] = self.dashboard_id - if self.name is not None: - body["name"] = self.name - if self.run_as_role is not None: - body["run_as_role"] = self.run_as_role.value - if self.tags: - body["tags"] = [v for v in self.tags] - return body - - def as_shallow_dict(self) -> dict: - """Serializes the DashboardEditContent into a shallow dictionary of its immediate attributes.""" - body = {} - if self.dashboard_id is not None: - body["dashboard_id"] = self.dashboard_id - if self.name is not None: - body["name"] = self.name - if self.run_as_role is not None: - body["run_as_role"] = self.run_as_role - if self.tags: - body["tags"] = self.tags - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DashboardEditContent: - """Deserializes the DashboardEditContent from a dictionary.""" - return cls( - dashboard_id=d.get("dashboard_id", None), - name=d.get("name", None), - run_as_role=_enum(d, "run_as_role", RunAsRole), - tags=d.get("tags", None), - ) - - @dataclass class DashboardOptions: moved_to_trash_at: Optional[str] = None @@ -2372,73 +1896,6 @@ def from_dict(cls, d: Dict[str, Any]) -> DashboardOptions: return cls(moved_to_trash_at=d.get("moved_to_trash_at", None)) -@dataclass -class DashboardPostContent: - name: str - """The title of this dashboard that appears in list views and at the top of the dashboard page.""" - - dashboard_filters_enabled: Optional[bool] = None - """Indicates whether the dashboard filters are enabled""" - - is_favorite: Optional[bool] = None - """Indicates whether this dashboard object should appear in the current user's favorites list.""" - - parent: Optional[str] = None - """The 
identifier of the workspace folder containing the object.""" - - run_as_role: Optional[RunAsRole] = None - """Sets the **Run as** role for the object. Must be set to one of `"viewer"` (signifying "run as - viewer" behavior) or `"owner"` (signifying "run as owner" behavior)""" - - tags: Optional[List[str]] = None - - def as_dict(self) -> dict: - """Serializes the DashboardPostContent into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.dashboard_filters_enabled is not None: - body["dashboard_filters_enabled"] = self.dashboard_filters_enabled - if self.is_favorite is not None: - body["is_favorite"] = self.is_favorite - if self.name is not None: - body["name"] = self.name - if self.parent is not None: - body["parent"] = self.parent - if self.run_as_role is not None: - body["run_as_role"] = self.run_as_role.value - if self.tags: - body["tags"] = [v for v in self.tags] - return body - - def as_shallow_dict(self) -> dict: - """Serializes the DashboardPostContent into a shallow dictionary of its immediate attributes.""" - body = {} - if self.dashboard_filters_enabled is not None: - body["dashboard_filters_enabled"] = self.dashboard_filters_enabled - if self.is_favorite is not None: - body["is_favorite"] = self.is_favorite - if self.name is not None: - body["name"] = self.name - if self.parent is not None: - body["parent"] = self.parent - if self.run_as_role is not None: - body["run_as_role"] = self.run_as_role - if self.tags: - body["tags"] = self.tags - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DashboardPostContent: - """Deserializes the DashboardPostContent from a dictionary.""" - return cls( - dashboard_filters_enabled=d.get("dashboard_filters_enabled", None), - is_favorite=d.get("is_favorite", None), - name=d.get("name", None), - parent=d.get("parent", None), - run_as_role=_enum(d, "run_as_role", RunAsRole), - tags=d.get("tags", None), - ) - - @dataclass class DataSource: """A JSON object representing a 
DBSQL data source / SQL warehouse.""" @@ -2692,265 +2149,49 @@ def from_dict(cls, d: Dict[str, Any]) -> DateValue: class DateValueDynamicDate(Enum): NOW = "NOW" - YESTERDAY = "YESTERDAY" - - -@dataclass -class DeleteResponse: - def as_dict(self) -> dict: - """Serializes the DeleteResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the DeleteResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DeleteResponse: - """Deserializes the DeleteResponse from a dictionary.""" - return cls() - - -@dataclass -class DeleteWarehouseResponse: - def as_dict(self) -> dict: - """Serializes the DeleteWarehouseResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - def as_shallow_dict(self) -> dict: - """Serializes the DeleteWarehouseResponse into a shallow dictionary of its immediate attributes.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DeleteWarehouseResponse: - """Deserializes the DeleteWarehouseResponse from a dictionary.""" - return cls() - - -class Disposition(Enum): - - EXTERNAL_LINKS = "EXTERNAL_LINKS" - INLINE = "INLINE" - - -@dataclass -class EditAlert: - name: str - """Name of the alert.""" - - options: AlertOptions - """Alert configuration options.""" - - query_id: str - """Query ID.""" - - alert_id: Optional[str] = None - - rearm: Optional[int] = None - """Number of seconds after being triggered before the alert rearms itself and can be triggered - again. 
If `null`, alert will never be triggered again.""" - - def as_dict(self) -> dict: - """Serializes the EditAlert into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.alert_id is not None: - body["alert_id"] = self.alert_id - if self.name is not None: - body["name"] = self.name - if self.options: - body["options"] = self.options.as_dict() - if self.query_id is not None: - body["query_id"] = self.query_id - if self.rearm is not None: - body["rearm"] = self.rearm - return body - - def as_shallow_dict(self) -> dict: - """Serializes the EditAlert into a shallow dictionary of its immediate attributes.""" - body = {} - if self.alert_id is not None: - body["alert_id"] = self.alert_id - if self.name is not None: - body["name"] = self.name - if self.options: - body["options"] = self.options - if self.query_id is not None: - body["query_id"] = self.query_id - if self.rearm is not None: - body["rearm"] = self.rearm - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> EditAlert: - """Deserializes the EditAlert from a dictionary.""" - return cls( - alert_id=d.get("alert_id", None), - name=d.get("name", None), - options=_from_dict(d, "options", AlertOptions), - query_id=d.get("query_id", None), - rearm=d.get("rearm", None), - ) - - -@dataclass -class EditWarehouseRequest: - auto_stop_mins: Optional[int] = None - """The amount of time in minutes that a SQL warehouse must be idle (i.e., no RUNNING queries) - before it is automatically stopped. - - Supported values: - Must be == 0 or >= 10 mins - 0 indicates no autostop. - - Defaults to 120 mins""" - - channel: Optional[Channel] = None - """Channel Details""" - - cluster_size: Optional[str] = None - """Size of the clusters allocated for this warehouse. Increasing the size of a spark cluster allows - you to run larger queries on it. If you want to increase the number of concurrent queries, - please tune max_num_clusters. 
- - Supported values: - 2X-Small - X-Small - Small - Medium - Large - X-Large - 2X-Large - 3X-Large - - 4X-Large""" - - creator_name: Optional[str] = None - """warehouse creator name""" - - enable_photon: Optional[bool] = None - """Configures whether the warehouse should use Photon optimized clusters. - - Defaults to false.""" - - enable_serverless_compute: Optional[bool] = None - """Configures whether the warehouse should use serverless compute.""" - - id: Optional[str] = None - """Required. Id of the warehouse to configure.""" - - instance_profile_arn: Optional[str] = None - """Deprecated. Instance profile used to pass IAM role to the cluster""" - - max_num_clusters: Optional[int] = None - """Maximum number of clusters that the autoscaler will create to handle concurrent queries. - - Supported values: - Must be >= min_num_clusters - Must be <= 30. - - Defaults to min_clusters if unset.""" + YESTERDAY = "YESTERDAY" - min_num_clusters: Optional[int] = None - """Minimum number of available clusters that will be maintained for this SQL warehouse. Increasing - this will ensure that a larger number of clusters are always running and therefore may reduce - the cold start time for new queries. This is similar to reserved vs. revocable cores in a - resource manager. - - Supported values: - Must be > 0 - Must be <= min(max_num_clusters, 30) - - Defaults to 1""" - name: Optional[str] = None - """Logical name for the cluster. - - Supported values: - Must be unique within an org. 
- Must be less than 100 characters.""" +@dataclass +class DeleteResponse: + def as_dict(self) -> dict: + """Serializes the DeleteResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + return body - spot_instance_policy: Optional[SpotInstancePolicy] = None + def as_shallow_dict(self) -> dict: + """Serializes the DeleteResponse into a shallow dictionary of its immediate attributes.""" + body = {} + return body - tags: Optional[EndpointTags] = None - """A set of key-value pairs that will be tagged on all resources (e.g., AWS instances and EBS - volumes) associated with this SQL warehouse. - - Supported values: - Number of tags < 45.""" + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> DeleteResponse: + """Deserializes the DeleteResponse from a dictionary.""" + return cls() - warehouse_type: Optional[EditWarehouseRequestWarehouseType] = None +@dataclass +class DeleteWarehouseResponse: def as_dict(self) -> dict: - """Serializes the EditWarehouseRequest into a dictionary suitable for use as a JSON request body.""" + """Serializes the DeleteWarehouseResponse into a dictionary suitable for use as a JSON request body.""" body = {} - if self.auto_stop_mins is not None: - body["auto_stop_mins"] = self.auto_stop_mins - if self.channel: - body["channel"] = self.channel.as_dict() - if self.cluster_size is not None: - body["cluster_size"] = self.cluster_size - if self.creator_name is not None: - body["creator_name"] = self.creator_name - if self.enable_photon is not None: - body["enable_photon"] = self.enable_photon - if self.enable_serverless_compute is not None: - body["enable_serverless_compute"] = self.enable_serverless_compute - if self.id is not None: - body["id"] = self.id - if self.instance_profile_arn is not None: - body["instance_profile_arn"] = self.instance_profile_arn - if self.max_num_clusters is not None: - body["max_num_clusters"] = self.max_num_clusters - if self.min_num_clusters is not None: - body["min_num_clusters"] = 
self.min_num_clusters - if self.name is not None: - body["name"] = self.name - if self.spot_instance_policy is not None: - body["spot_instance_policy"] = self.spot_instance_policy.value - if self.tags: - body["tags"] = self.tags.as_dict() - if self.warehouse_type is not None: - body["warehouse_type"] = self.warehouse_type.value return body def as_shallow_dict(self) -> dict: - """Serializes the EditWarehouseRequest into a shallow dictionary of its immediate attributes.""" + """Serializes the DeleteWarehouseResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.auto_stop_mins is not None: - body["auto_stop_mins"] = self.auto_stop_mins - if self.channel: - body["channel"] = self.channel - if self.cluster_size is not None: - body["cluster_size"] = self.cluster_size - if self.creator_name is not None: - body["creator_name"] = self.creator_name - if self.enable_photon is not None: - body["enable_photon"] = self.enable_photon - if self.enable_serverless_compute is not None: - body["enable_serverless_compute"] = self.enable_serverless_compute - if self.id is not None: - body["id"] = self.id - if self.instance_profile_arn is not None: - body["instance_profile_arn"] = self.instance_profile_arn - if self.max_num_clusters is not None: - body["max_num_clusters"] = self.max_num_clusters - if self.min_num_clusters is not None: - body["min_num_clusters"] = self.min_num_clusters - if self.name is not None: - body["name"] = self.name - if self.spot_instance_policy is not None: - body["spot_instance_policy"] = self.spot_instance_policy - if self.tags: - body["tags"] = self.tags - if self.warehouse_type is not None: - body["warehouse_type"] = self.warehouse_type return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> EditWarehouseRequest: - """Deserializes the EditWarehouseRequest from a dictionary.""" - return cls( - auto_stop_mins=d.get("auto_stop_mins", None), - channel=_from_dict(d, "channel", Channel), - 
cluster_size=d.get("cluster_size", None), - creator_name=d.get("creator_name", None), - enable_photon=d.get("enable_photon", None), - enable_serverless_compute=d.get("enable_serverless_compute", None), - id=d.get("id", None), - instance_profile_arn=d.get("instance_profile_arn", None), - max_num_clusters=d.get("max_num_clusters", None), - min_num_clusters=d.get("min_num_clusters", None), - name=d.get("name", None), - spot_instance_policy=_enum(d, "spot_instance_policy", SpotInstancePolicy), - tags=_from_dict(d, "tags", EndpointTags), - warehouse_type=_enum(d, "warehouse_type", EditWarehouseRequestWarehouseType), - ) + def from_dict(cls, d: Dict[str, Any]) -> DeleteWarehouseResponse: + """Deserializes the DeleteWarehouseResponse from a dictionary.""" + return cls() + + +class Disposition(Enum): + + EXTERNAL_LINKS = "EXTERNAL_LINKS" + INLINE = "INLINE" class EditWarehouseRequestWarehouseType(Enum): @@ -3402,196 +2643,6 @@ def from_dict(cls, d: Dict[str, Any]) -> EnumValue: ) -@dataclass -class ExecuteStatementRequest: - statement: str - """The SQL statement to execute. The statement can optionally be parameterized, see `parameters`. - The maximum query text size is 16 MiB.""" - - warehouse_id: str - """Warehouse upon which to execute a statement. See also [What are SQL warehouses?] - - [What are SQL warehouses?]: https://docs.databricks.com/sql/admin/warehouse-type.html""" - - byte_limit: Optional[int] = None - """Applies the given byte limit to the statement's result size. Byte counts are based on internal - data representations and might not match the final size in the requested `format`. If the result - was truncated due to the byte limit, then `truncated` in the response is set to `true`. When - using `EXTERNAL_LINKS` disposition, a default `byte_limit` of 100 GiB is applied if `byte_limit` - is not explcitly set.""" - - catalog: Optional[str] = None - """Sets default catalog for statement execution, similar to [`USE CATALOG`] in SQL. 
- - [`USE CATALOG`]: https://docs.databricks.com/sql/language-manual/sql-ref-syntax-ddl-use-catalog.html""" - - disposition: Optional[Disposition] = None - - format: Optional[Format] = None - """Statement execution supports three result formats: `JSON_ARRAY` (default), `ARROW_STREAM`, and - `CSV`. - - Important: The formats `ARROW_STREAM` and `CSV` are supported only with `EXTERNAL_LINKS` - disposition. `JSON_ARRAY` is supported in `INLINE` and `EXTERNAL_LINKS` disposition. - - When specifying `format=JSON_ARRAY`, result data will be formatted as an array of arrays of - values, where each value is either the *string representation* of a value, or `null`. For - example, the output of `SELECT concat('id-', id) AS strCol, id AS intCol, null AS nullCol FROM - range(3)` would look like this: - - ``` [ [ "id-1", "1", null ], [ "id-2", "2", null ], [ "id-3", "3", null ], ] ``` - - When specifying `format=JSON_ARRAY` and `disposition=EXTERNAL_LINKS`, each chunk in the result - contains compact JSON with no indentation or extra whitespace. - - When specifying `format=ARROW_STREAM` and `disposition=EXTERNAL_LINKS`, each chunk in the result - will be formatted as Apache Arrow Stream. See the [Apache Arrow streaming format]. - - When specifying `format=CSV` and `disposition=EXTERNAL_LINKS`, each chunk in the result will be - a CSV according to [RFC 4180] standard. All the columns values will have *string representation* - similar to the `JSON_ARRAY` format, and `null` values will be encoded as “null”. Only the - first chunk in the result would contain a header row with column names. 
For example, the output - of `SELECT concat('id-', id) AS strCol, id AS intCol, null as nullCol FROM range(3)` would look - like this: - - ``` strCol,intCol,nullCol id-1,1,null id-2,2,null id-3,3,null ``` - - [Apache Arrow streaming format]: https://arrow.apache.org/docs/format/Columnar.html#ipc-streaming-format - [RFC 4180]: https://www.rfc-editor.org/rfc/rfc4180""" - - on_wait_timeout: Optional[ExecuteStatementRequestOnWaitTimeout] = None - """When `wait_timeout > 0s`, the call will block up to the specified time. If the statement - execution doesn't finish within this time, `on_wait_timeout` determines whether the execution - should continue or be canceled. When set to `CONTINUE`, the statement execution continues - asynchronously and the call returns a statement ID which can be used for polling with - :method:statementexecution/getStatement. When set to `CANCEL`, the statement execution is - canceled and the call returns with a `CANCELED` state.""" - - parameters: Optional[List[StatementParameterListItem]] = None - """A list of parameters to pass into a SQL statement containing parameter markers. A parameter - consists of a name, a value, and optionally a type. To represent a NULL value, the `value` field - may be omitted or set to `null` explicitly. If the `type` field is omitted, the value is - interpreted as a string. - - If the type is given, parameters will be checked for type correctness according to the given - type. A value is correct if the provided string can be converted to the requested type using the - `cast` function. The exact semantics are described in the section [`cast` function] of the SQL - language reference. 
- - For example, the following statement contains two parameters, `my_name` and `my_date`: - - SELECT * FROM my_table WHERE name = :my_name AND date = :my_date - - The parameters can be passed in the request body as follows: - - { ..., "statement": "SELECT * FROM my_table WHERE name = :my_name AND date = :my_date", - "parameters": [ { "name": "my_name", "value": "the name" }, { "name": "my_date", "value": - "2020-01-01", "type": "DATE" } ] } - - Currently, positional parameters denoted by a `?` marker are not supported by the Databricks SQL - Statement Execution API. - - Also see the section [Parameter markers] of the SQL language reference. - - [Parameter markers]: https://docs.databricks.com/sql/language-manual/sql-ref-parameter-marker.html - [`cast` function]: https://docs.databricks.com/sql/language-manual/functions/cast.html""" - - row_limit: Optional[int] = None - """Applies the given row limit to the statement's result set, but unlike the `LIMIT` clause in SQL, - it also sets the `truncated` field in the response to indicate whether the result was trimmed - due to the limit or not.""" - - schema: Optional[str] = None - """Sets default schema for statement execution, similar to [`USE SCHEMA`] in SQL. - - [`USE SCHEMA`]: https://docs.databricks.com/sql/language-manual/sql-ref-syntax-ddl-use-schema.html""" - - wait_timeout: Optional[str] = None - """The time in seconds the call will wait for the statement's result set as `Ns`, where `N` can be - set to 0 or to a value between 5 and 50. - - When set to `0s`, the statement will execute in asynchronous mode and the call will not wait for - the execution to finish. In this case, the call returns directly with `PENDING` state and a - statement ID which can be used for polling with :method:statementexecution/getStatement. - - When set between 5 and 50 seconds, the call will behave synchronously up to this timeout and - wait for the statement execution to finish. 
If the execution finishes within this time, the call - returns immediately with a manifest and result data (or a `FAILED` state in case of an execution - error). If the statement takes longer to execute, `on_wait_timeout` determines what should - happen after the timeout is reached.""" - - def as_dict(self) -> dict: - """Serializes the ExecuteStatementRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.byte_limit is not None: - body["byte_limit"] = self.byte_limit - if self.catalog is not None: - body["catalog"] = self.catalog - if self.disposition is not None: - body["disposition"] = self.disposition.value - if self.format is not None: - body["format"] = self.format.value - if self.on_wait_timeout is not None: - body["on_wait_timeout"] = self.on_wait_timeout.value - if self.parameters: - body["parameters"] = [v.as_dict() for v in self.parameters] - if self.row_limit is not None: - body["row_limit"] = self.row_limit - if self.schema is not None: - body["schema"] = self.schema - if self.statement is not None: - body["statement"] = self.statement - if self.wait_timeout is not None: - body["wait_timeout"] = self.wait_timeout - if self.warehouse_id is not None: - body["warehouse_id"] = self.warehouse_id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the ExecuteStatementRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.byte_limit is not None: - body["byte_limit"] = self.byte_limit - if self.catalog is not None: - body["catalog"] = self.catalog - if self.disposition is not None: - body["disposition"] = self.disposition - if self.format is not None: - body["format"] = self.format - if self.on_wait_timeout is not None: - body["on_wait_timeout"] = self.on_wait_timeout - if self.parameters: - body["parameters"] = self.parameters - if self.row_limit is not None: - body["row_limit"] = self.row_limit - if self.schema is not None: - body["schema"] = self.schema - if self.statement 
is not None: - body["statement"] = self.statement - if self.wait_timeout is not None: - body["wait_timeout"] = self.wait_timeout - if self.warehouse_id is not None: - body["warehouse_id"] = self.warehouse_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ExecuteStatementRequest: - """Deserializes the ExecuteStatementRequest from a dictionary.""" - return cls( - byte_limit=d.get("byte_limit", None), - catalog=d.get("catalog", None), - disposition=_enum(d, "disposition", Disposition), - format=_enum(d, "format", Format), - on_wait_timeout=_enum(d, "on_wait_timeout", ExecuteStatementRequestOnWaitTimeout), - parameters=_repeated_dict(d, "parameters", StatementParameterListItem), - row_limit=d.get("row_limit", None), - schema=d.get("schema", None), - statement=d.get("statement", None), - wait_timeout=d.get("wait_timeout", None), - warehouse_id=d.get("warehouse_id", None), - ) - - class ExecuteStatementRequestOnWaitTimeout(Enum): """When `wait_timeout > 0s`, the call will block up to the specified time. 
If the statement execution doesn't finish within this time, `on_wait_timeout` determines whether the execution @@ -5566,132 +4617,45 @@ def from_dict(cls, d: Dict[str, Any]) -> Query: @dataclass -class QueryBackedValue: - multi_values_options: Optional[MultiValuesOptions] = None - """If specified, allows multiple values to be selected for this parameter.""" - - query_id: Optional[str] = None - """UUID of the query that provides the parameter values.""" - - values: Optional[List[str]] = None - """List of selected query parameter values.""" - - def as_dict(self) -> dict: - """Serializes the QueryBackedValue into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.multi_values_options: - body["multi_values_options"] = self.multi_values_options.as_dict() - if self.query_id is not None: - body["query_id"] = self.query_id - if self.values: - body["values"] = [v for v in self.values] - return body - - def as_shallow_dict(self) -> dict: - """Serializes the QueryBackedValue into a shallow dictionary of its immediate attributes.""" - body = {} - if self.multi_values_options: - body["multi_values_options"] = self.multi_values_options - if self.query_id is not None: - body["query_id"] = self.query_id - if self.values: - body["values"] = self.values - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> QueryBackedValue: - """Deserializes the QueryBackedValue from a dictionary.""" - return cls( - multi_values_options=_from_dict(d, "multi_values_options", MultiValuesOptions), - query_id=d.get("query_id", None), - values=d.get("values", None), - ) - - -@dataclass -class QueryEditContent: - data_source_id: Optional[str] = None - """Data source ID maps to the ID of the data source used by the resource and is distinct from the - warehouse ID. 
[Learn more] - - [Learn more]: https://docs.databricks.com/api/workspace/datasources/list""" - - description: Optional[str] = None - """General description that conveys additional information about this query such as usage notes.""" - - name: Optional[str] = None - """The title of this query that appears in list views, widget headings, and on the query page.""" - - options: Optional[Any] = None - """Exclusively used for storing a list parameter definitions. A parameter is an object with - `title`, `name`, `type`, and `value` properties. The `value` field here is the default value. It - can be overridden at runtime.""" - - query: Optional[str] = None - """The text of the query to be run.""" +class QueryBackedValue: + multi_values_options: Optional[MultiValuesOptions] = None + """If specified, allows multiple values to be selected for this parameter.""" query_id: Optional[str] = None + """UUID of the query that provides the parameter values.""" - run_as_role: Optional[RunAsRole] = None - """Sets the **Run as** role for the object. 
Must be set to one of `"viewer"` (signifying "run as - viewer" behavior) or `"owner"` (signifying "run as owner" behavior)""" - - tags: Optional[List[str]] = None + values: Optional[List[str]] = None + """List of selected query parameter values.""" def as_dict(self) -> dict: - """Serializes the QueryEditContent into a dictionary suitable for use as a JSON request body.""" + """Serializes the QueryBackedValue into a dictionary suitable for use as a JSON request body.""" body = {} - if self.data_source_id is not None: - body["data_source_id"] = self.data_source_id - if self.description is not None: - body["description"] = self.description - if self.name is not None: - body["name"] = self.name - if self.options: - body["options"] = self.options - if self.query is not None: - body["query"] = self.query + if self.multi_values_options: + body["multi_values_options"] = self.multi_values_options.as_dict() if self.query_id is not None: body["query_id"] = self.query_id - if self.run_as_role is not None: - body["run_as_role"] = self.run_as_role.value - if self.tags: - body["tags"] = [v for v in self.tags] + if self.values: + body["values"] = [v for v in self.values] return body def as_shallow_dict(self) -> dict: - """Serializes the QueryEditContent into a shallow dictionary of its immediate attributes.""" + """Serializes the QueryBackedValue into a shallow dictionary of its immediate attributes.""" body = {} - if self.data_source_id is not None: - body["data_source_id"] = self.data_source_id - if self.description is not None: - body["description"] = self.description - if self.name is not None: - body["name"] = self.name - if self.options: - body["options"] = self.options - if self.query is not None: - body["query"] = self.query + if self.multi_values_options: + body["multi_values_options"] = self.multi_values_options if self.query_id is not None: body["query_id"] = self.query_id - if self.run_as_role is not None: - body["run_as_role"] = self.run_as_role - if self.tags: - 
body["tags"] = self.tags + if self.values: + body["values"] = self.values return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> QueryEditContent: - """Deserializes the QueryEditContent from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> QueryBackedValue: + """Deserializes the QueryBackedValue from a dictionary.""" return cls( - data_source_id=d.get("data_source_id", None), - description=d.get("description", None), - name=d.get("name", None), - options=d.get("options", None), - query=d.get("query", None), + multi_values_options=_from_dict(d, "multi_values_options", MultiValuesOptions), query_id=d.get("query_id", None), - run_as_role=_enum(d, "run_as_role", RunAsRole), - tags=d.get("tags", None), + values=d.get("values", None), ) @@ -6050,6 +5014,9 @@ class QueryMetrics: photon_total_time_ms: Optional[int] = None """Total execution time for all individual Photon query engine tasks in the query, in milliseconds.""" + projected_remaining_task_total_time_ms: Optional[int] = None + """projected remaining work to be done aggregated across all stages in the query, in milliseconds""" + provisioning_queue_start_timestamp: Optional[int] = None """Timestamp of when the query was enqueued waiting for a cluster to be provisioned for the warehouse. 
This field is optional and will not appear if the query skipped the provisioning @@ -6079,6 +5046,10 @@ class QueryMetrics: read_remote_bytes: Optional[int] = None """Size of persistent data read from cloud object storage on your cloud tenant, in bytes.""" + remaining_task_count: Optional[int] = None + """number of remaining tasks to complete this is based on the current status and could be bigger or + smaller in the future based on future updates""" + result_fetch_time_ms: Optional[int] = None """Time spent fetching the query results after the execution finished, in milliseconds.""" @@ -6091,6 +5062,10 @@ class QueryMetrics: rows_read_count: Optional[int] = None """Total number of rows read by the query.""" + runnable_tasks: Optional[int] = None + """number of remaining tasks to complete, calculated by autoscaler StatementAnalysis.scala + deprecated: use remaining_task_count instead""" + spill_to_disk_bytes: Optional[int] = None """Size of data temporarily written to disk while executing the query, in bytes.""" @@ -6104,6 +5079,11 @@ class QueryMetrics: total_time_ms: Optional[int] = None """Total execution time of the query from the client’s point of view, in milliseconds.""" + work_to_be_done: Optional[int] = None + """remaining work to be done across all stages in the query, calculated by autoscaler + StatementAnalysis.scala, in milliseconds deprecated: using + projected_remaining_task_total_time_ms instead""" + write_remote_bytes: Optional[int] = None """Size pf persistent data written to cloud object storage in your cloud tenant, in bytes.""" @@ -6120,6 +5100,8 @@ def as_dict(self) -> dict: body["overloading_queue_start_timestamp"] = self.overloading_queue_start_timestamp if self.photon_total_time_ms is not None: body["photon_total_time_ms"] = self.photon_total_time_ms + if self.projected_remaining_task_total_time_ms is not None: + body["projected_remaining_task_total_time_ms"] = self.projected_remaining_task_total_time_ms if 
self.provisioning_queue_start_timestamp is not None: body["provisioning_queue_start_timestamp"] = self.provisioning_queue_start_timestamp if self.pruned_bytes is not None: @@ -6138,6 +5120,8 @@ def as_dict(self) -> dict: body["read_partitions_count"] = self.read_partitions_count if self.read_remote_bytes is not None: body["read_remote_bytes"] = self.read_remote_bytes + if self.remaining_task_count is not None: + body["remaining_task_count"] = self.remaining_task_count if self.result_fetch_time_ms is not None: body["result_fetch_time_ms"] = self.result_fetch_time_ms if self.result_from_cache is not None: @@ -6146,6 +5130,8 @@ def as_dict(self) -> dict: body["rows_produced_count"] = self.rows_produced_count if self.rows_read_count is not None: body["rows_read_count"] = self.rows_read_count + if self.runnable_tasks is not None: + body["runnable_tasks"] = self.runnable_tasks if self.spill_to_disk_bytes is not None: body["spill_to_disk_bytes"] = self.spill_to_disk_bytes if self.task_time_over_time_range: @@ -6154,6 +5140,8 @@ def as_dict(self) -> dict: body["task_total_time_ms"] = self.task_total_time_ms if self.total_time_ms is not None: body["total_time_ms"] = self.total_time_ms + if self.work_to_be_done is not None: + body["work_to_be_done"] = self.work_to_be_done if self.write_remote_bytes is not None: body["write_remote_bytes"] = self.write_remote_bytes return body @@ -6171,6 +5159,8 @@ def as_shallow_dict(self) -> dict: body["overloading_queue_start_timestamp"] = self.overloading_queue_start_timestamp if self.photon_total_time_ms is not None: body["photon_total_time_ms"] = self.photon_total_time_ms + if self.projected_remaining_task_total_time_ms is not None: + body["projected_remaining_task_total_time_ms"] = self.projected_remaining_task_total_time_ms if self.provisioning_queue_start_timestamp is not None: body["provisioning_queue_start_timestamp"] = self.provisioning_queue_start_timestamp if self.pruned_bytes is not None: @@ -6189,6 +5179,8 @@ def 
as_shallow_dict(self) -> dict: body["read_partitions_count"] = self.read_partitions_count if self.read_remote_bytes is not None: body["read_remote_bytes"] = self.read_remote_bytes + if self.remaining_task_count is not None: + body["remaining_task_count"] = self.remaining_task_count if self.result_fetch_time_ms is not None: body["result_fetch_time_ms"] = self.result_fetch_time_ms if self.result_from_cache is not None: @@ -6197,6 +5189,8 @@ def as_shallow_dict(self) -> dict: body["rows_produced_count"] = self.rows_produced_count if self.rows_read_count is not None: body["rows_read_count"] = self.rows_read_count + if self.runnable_tasks is not None: + body["runnable_tasks"] = self.runnable_tasks if self.spill_to_disk_bytes is not None: body["spill_to_disk_bytes"] = self.spill_to_disk_bytes if self.task_time_over_time_range: @@ -6205,6 +5199,8 @@ def as_shallow_dict(self) -> dict: body["task_total_time_ms"] = self.task_total_time_ms if self.total_time_ms is not None: body["total_time_ms"] = self.total_time_ms + if self.work_to_be_done is not None: + body["work_to_be_done"] = self.work_to_be_done if self.write_remote_bytes is not None: body["write_remote_bytes"] = self.write_remote_bytes return body @@ -6218,6 +5214,7 @@ def from_dict(cls, d: Dict[str, Any]) -> QueryMetrics: network_sent_bytes=d.get("network_sent_bytes", None), overloading_queue_start_timestamp=d.get("overloading_queue_start_timestamp", None), photon_total_time_ms=d.get("photon_total_time_ms", None), + projected_remaining_task_total_time_ms=d.get("projected_remaining_task_total_time_ms", None), provisioning_queue_start_timestamp=d.get("provisioning_queue_start_timestamp", None), pruned_bytes=d.get("pruned_bytes", None), pruned_files_count=d.get("pruned_files_count", None), @@ -6227,14 +5224,17 @@ def from_dict(cls, d: Dict[str, Any]) -> QueryMetrics: read_files_count=d.get("read_files_count", None), read_partitions_count=d.get("read_partitions_count", None), read_remote_bytes=d.get("read_remote_bytes", 
None), + remaining_task_count=d.get("remaining_task_count", None), result_fetch_time_ms=d.get("result_fetch_time_ms", None), result_from_cache=d.get("result_from_cache", None), rows_produced_count=d.get("rows_produced_count", None), rows_read_count=d.get("rows_read_count", None), + runnable_tasks=d.get("runnable_tasks", None), spill_to_disk_bytes=d.get("spill_to_disk_bytes", None), task_time_over_time_range=_from_dict(d, "task_time_over_time_range", TaskTimeOverRange), task_total_time_ms=d.get("task_total_time_ms", None), total_time_ms=d.get("total_time_ms", None), + work_to_be_done=d.get("work_to_be_done", None), write_remote_bytes=d.get("write_remote_bytes", None), ) @@ -6374,94 +5374,6 @@ def from_dict(cls, d: Dict[str, Any]) -> QueryParameter: ) -@dataclass -class QueryPostContent: - data_source_id: Optional[str] = None - """Data source ID maps to the ID of the data source used by the resource and is distinct from the - warehouse ID. [Learn more] - - [Learn more]: https://docs.databricks.com/api/workspace/datasources/list""" - - description: Optional[str] = None - """General description that conveys additional information about this query such as usage notes.""" - - name: Optional[str] = None - """The title of this query that appears in list views, widget headings, and on the query page.""" - - options: Optional[Any] = None - """Exclusively used for storing a list parameter definitions. A parameter is an object with - `title`, `name`, `type`, and `value` properties. The `value` field here is the default value. It - can be overridden at runtime.""" - - parent: Optional[str] = None - """The identifier of the workspace folder containing the object.""" - - query: Optional[str] = None - """The text of the query to be run.""" - - run_as_role: Optional[RunAsRole] = None - """Sets the **Run as** role for the object. 
Must be set to one of `"viewer"` (signifying "run as - viewer" behavior) or `"owner"` (signifying "run as owner" behavior)""" - - tags: Optional[List[str]] = None - - def as_dict(self) -> dict: - """Serializes the QueryPostContent into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.data_source_id is not None: - body["data_source_id"] = self.data_source_id - if self.description is not None: - body["description"] = self.description - if self.name is not None: - body["name"] = self.name - if self.options: - body["options"] = self.options - if self.parent is not None: - body["parent"] = self.parent - if self.query is not None: - body["query"] = self.query - if self.run_as_role is not None: - body["run_as_role"] = self.run_as_role.value - if self.tags: - body["tags"] = [v for v in self.tags] - return body - - def as_shallow_dict(self) -> dict: - """Serializes the QueryPostContent into a shallow dictionary of its immediate attributes.""" - body = {} - if self.data_source_id is not None: - body["data_source_id"] = self.data_source_id - if self.description is not None: - body["description"] = self.description - if self.name is not None: - body["name"] = self.name - if self.options: - body["options"] = self.options - if self.parent is not None: - body["parent"] = self.parent - if self.query is not None: - body["query"] = self.query - if self.run_as_role is not None: - body["run_as_role"] = self.run_as_role - if self.tags: - body["tags"] = self.tags - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> QueryPostContent: - """Deserializes the QueryPostContent from a dictionary.""" - return cls( - data_source_id=d.get("data_source_id", None), - description=d.get("description", None), - name=d.get("name", None), - options=d.get("options", None), - parent=d.get("parent", None), - query=d.get("query", None), - run_as_role=_enum(d, "run_as_role", RunAsRole), - tags=d.get("tags", None), - ) - - class QueryStatementType(Enum): 
ALTER = "ALTER" @@ -6816,50 +5728,6 @@ class ServiceErrorCode(Enum): WORKSPACE_TEMPORARILY_UNAVAILABLE = "WORKSPACE_TEMPORARILY_UNAVAILABLE" -@dataclass -class SetRequest: - """Set object ACL""" - - access_control_list: Optional[List[AccessControl]] = None - - object_id: Optional[str] = None - """Object ID. The ACL for the object with this UUID is overwritten by this request's POST content.""" - - object_type: Optional[ObjectTypePlural] = None - """The type of object permission to set.""" - - def as_dict(self) -> dict: - """Serializes the SetRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.access_control_list: - body["access_control_list"] = [v.as_dict() for v in self.access_control_list] - if self.object_id is not None: - body["objectId"] = self.object_id - if self.object_type is not None: - body["objectType"] = self.object_type.value - return body - - def as_shallow_dict(self) -> dict: - """Serializes the SetRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.access_control_list: - body["access_control_list"] = self.access_control_list - if self.object_id is not None: - body["objectId"] = self.object_id - if self.object_type is not None: - body["objectType"] = self.object_type - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> SetRequest: - """Deserializes the SetRequest from a dictionary.""" - return cls( - access_control_list=_repeated_dict(d, "access_control_list", AccessControl), - object_id=d.get("objectId", None), - object_type=_enum(d, "objectType", ObjectTypePlural), - ) - - @dataclass class SetResponse: access_control_list: Optional[List[AccessControl]] = None @@ -6871,130 +5739,34 @@ class SetResponse: """A singular noun object type.""" def as_dict(self) -> dict: - """Serializes the SetResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.access_control_list: - body["access_control_list"] = [v.as_dict() for v in 
self.access_control_list] - if self.object_id is not None: - body["object_id"] = self.object_id - if self.object_type is not None: - body["object_type"] = self.object_type.value - return body - - def as_shallow_dict(self) -> dict: - """Serializes the SetResponse into a shallow dictionary of its immediate attributes.""" - body = {} - if self.access_control_list: - body["access_control_list"] = self.access_control_list - if self.object_id is not None: - body["object_id"] = self.object_id - if self.object_type is not None: - body["object_type"] = self.object_type - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> SetResponse: - """Deserializes the SetResponse from a dictionary.""" - return cls( - access_control_list=_repeated_dict(d, "access_control_list", AccessControl), - object_id=d.get("object_id", None), - object_type=_enum(d, "object_type", ObjectType), - ) - - -@dataclass -class SetWorkspaceWarehouseConfigRequest: - channel: Optional[Channel] = None - """Optional: Channel selection details""" - - config_param: Optional[RepeatedEndpointConfPairs] = None - """Deprecated: Use sql_configuration_parameters""" - - data_access_config: Optional[List[EndpointConfPair]] = None - """Spark confs for external hive metastore configuration JSON serialized size must be less than <= - 512K""" - - enabled_warehouse_types: Optional[List[WarehouseTypePair]] = None - """List of Warehouse Types allowed in this workspace (limits allowed value of the type field in - CreateWarehouse and EditWarehouse). Note: Some types cannot be disabled, they don't need to be - specified in SetWorkspaceWarehouseConfig. Note: Disabling a type may cause existing warehouses - to be converted to another type. 
Used by frontend to save specific type availability in the - warehouse create and edit form UI.""" - - global_param: Optional[RepeatedEndpointConfPairs] = None - """Deprecated: Use sql_configuration_parameters""" - - google_service_account: Optional[str] = None - """GCP only: Google Service Account used to pass to cluster to access Google Cloud Storage""" - - instance_profile_arn: Optional[str] = None - """AWS Only: Instance profile used to pass IAM role to the cluster""" - - security_policy: Optional[SetWorkspaceWarehouseConfigRequestSecurityPolicy] = None - """Security policy for warehouses""" - - sql_configuration_parameters: Optional[RepeatedEndpointConfPairs] = None - """SQL configuration parameters""" - - def as_dict(self) -> dict: - """Serializes the SetWorkspaceWarehouseConfigRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.channel: - body["channel"] = self.channel.as_dict() - if self.config_param: - body["config_param"] = self.config_param.as_dict() - if self.data_access_config: - body["data_access_config"] = [v.as_dict() for v in self.data_access_config] - if self.enabled_warehouse_types: - body["enabled_warehouse_types"] = [v.as_dict() for v in self.enabled_warehouse_types] - if self.global_param: - body["global_param"] = self.global_param.as_dict() - if self.google_service_account is not None: - body["google_service_account"] = self.google_service_account - if self.instance_profile_arn is not None: - body["instance_profile_arn"] = self.instance_profile_arn - if self.security_policy is not None: - body["security_policy"] = self.security_policy.value - if self.sql_configuration_parameters: - body["sql_configuration_parameters"] = self.sql_configuration_parameters.as_dict() + """Serializes the SetResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.access_control_list: + body["access_control_list"] = [v.as_dict() for v in self.access_control_list] + if self.object_id is not 
None: + body["object_id"] = self.object_id + if self.object_type is not None: + body["object_type"] = self.object_type.value return body def as_shallow_dict(self) -> dict: - """Serializes the SetWorkspaceWarehouseConfigRequest into a shallow dictionary of its immediate attributes.""" + """Serializes the SetResponse into a shallow dictionary of its immediate attributes.""" body = {} - if self.channel: - body["channel"] = self.channel - if self.config_param: - body["config_param"] = self.config_param - if self.data_access_config: - body["data_access_config"] = self.data_access_config - if self.enabled_warehouse_types: - body["enabled_warehouse_types"] = self.enabled_warehouse_types - if self.global_param: - body["global_param"] = self.global_param - if self.google_service_account is not None: - body["google_service_account"] = self.google_service_account - if self.instance_profile_arn is not None: - body["instance_profile_arn"] = self.instance_profile_arn - if self.security_policy is not None: - body["security_policy"] = self.security_policy - if self.sql_configuration_parameters: - body["sql_configuration_parameters"] = self.sql_configuration_parameters + if self.access_control_list: + body["access_control_list"] = self.access_control_list + if self.object_id is not None: + body["object_id"] = self.object_id + if self.object_type is not None: + body["object_type"] = self.object_type return body @classmethod - def from_dict(cls, d: Dict[str, Any]) -> SetWorkspaceWarehouseConfigRequest: - """Deserializes the SetWorkspaceWarehouseConfigRequest from a dictionary.""" + def from_dict(cls, d: Dict[str, Any]) -> SetResponse: + """Deserializes the SetResponse from a dictionary.""" return cls( - channel=_from_dict(d, "channel", Channel), - config_param=_from_dict(d, "config_param", RepeatedEndpointConfPairs), - data_access_config=_repeated_dict(d, "data_access_config", EndpointConfPair), - enabled_warehouse_types=_repeated_dict(d, "enabled_warehouse_types", 
WarehouseTypePair), - global_param=_from_dict(d, "global_param", RepeatedEndpointConfPairs), - google_service_account=d.get("google_service_account", None), - instance_profile_arn=d.get("instance_profile_arn", None), - security_policy=_enum(d, "security_policy", SetWorkspaceWarehouseConfigRequestSecurityPolicy), - sql_configuration_parameters=_from_dict(d, "sql_configuration_parameters", RepeatedEndpointConfPairs), + access_control_list=_repeated_dict(d, "access_control_list", AccessControl), + object_id=d.get("object_id", None), + object_type=_enum(d, "object_type", ObjectType), ) @@ -7531,107 +6303,6 @@ def from_dict(cls, d: Dict[str, Any]) -> TransferOwnershipObjectId: return cls(new_owner=d.get("new_owner", None)) -@dataclass -class TransferOwnershipRequest: - new_owner: Optional[str] = None - """Email address for the new owner, who must exist in the workspace.""" - - object_id: Optional[TransferOwnershipObjectId] = None - """The ID of the object on which to change ownership.""" - - object_type: Optional[OwnableObjectType] = None - """The type of object on which to change ownership.""" - - def as_dict(self) -> dict: - """Serializes the TransferOwnershipRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.new_owner is not None: - body["new_owner"] = self.new_owner - if self.object_id: - body["objectId"] = self.object_id.as_dict() - if self.object_type is not None: - body["objectType"] = self.object_type.value - return body - - def as_shallow_dict(self) -> dict: - """Serializes the TransferOwnershipRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.new_owner is not None: - body["new_owner"] = self.new_owner - if self.object_id: - body["objectId"] = self.object_id - if self.object_type is not None: - body["objectType"] = self.object_type - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> TransferOwnershipRequest: - """Deserializes the TransferOwnershipRequest from a 
dictionary.""" - return cls( - new_owner=d.get("new_owner", None), - object_id=_from_dict(d, "objectId", TransferOwnershipObjectId), - object_type=_enum(d, "objectType", OwnableObjectType), - ) - - -@dataclass -class UpdateAlertRequest: - update_mask: str - """The field mask must be a single string, with multiple fields separated by commas (no spaces). - The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields - (e.g., `author.given_name`). Specification of elements in sequence or map fields is not allowed, - as only the entire collection field can be specified. Field names must exactly match the - resource field names. - - A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the - fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the - API changes in the future.""" - - alert: Optional[UpdateAlertRequestAlert] = None - - auto_resolve_display_name: Optional[bool] = None - """If true, automatically resolve alert display name conflicts. 
Otherwise, fail the request if the - alert's display name conflicts with an existing alert's display name.""" - - id: Optional[str] = None - - def as_dict(self) -> dict: - """Serializes the UpdateAlertRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.alert: - body["alert"] = self.alert.as_dict() - if self.auto_resolve_display_name is not None: - body["auto_resolve_display_name"] = self.auto_resolve_display_name - if self.id is not None: - body["id"] = self.id - if self.update_mask is not None: - body["update_mask"] = self.update_mask - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateAlertRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.alert: - body["alert"] = self.alert - if self.auto_resolve_display_name is not None: - body["auto_resolve_display_name"] = self.auto_resolve_display_name - if self.id is not None: - body["id"] = self.id - if self.update_mask is not None: - body["update_mask"] = self.update_mask - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateAlertRequest: - """Deserializes the UpdateAlertRequest from a dictionary.""" - return cls( - alert=_from_dict(d, "alert", UpdateAlertRequestAlert), - auto_resolve_display_name=d.get("auto_resolve_display_name", None), - id=d.get("id", None), - update_mask=d.get("update_mask", None), - ) - - @dataclass class UpdateAlertRequestAlert: condition: Optional[AlertCondition] = None @@ -7721,64 +6392,6 @@ def from_dict(cls, d: Dict[str, Any]) -> UpdateAlertRequestAlert: ) -@dataclass -class UpdateQueryRequest: - update_mask: str - """The field mask must be a single string, with multiple fields separated by commas (no spaces). - The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields - (e.g., `author.given_name`). Specification of elements in sequence or map fields is not allowed, - as only the entire collection field can be specified. 
Field names must exactly match the - resource field names. - - A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the - fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the - API changes in the future.""" - - auto_resolve_display_name: Optional[bool] = None - """If true, automatically resolve alert display name conflicts. Otherwise, fail the request if the - alert's display name conflicts with an existing alert's display name.""" - - id: Optional[str] = None - - query: Optional[UpdateQueryRequestQuery] = None - - def as_dict(self) -> dict: - """Serializes the UpdateQueryRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.auto_resolve_display_name is not None: - body["auto_resolve_display_name"] = self.auto_resolve_display_name - if self.id is not None: - body["id"] = self.id - if self.query: - body["query"] = self.query.as_dict() - if self.update_mask is not None: - body["update_mask"] = self.update_mask - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateQueryRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.auto_resolve_display_name is not None: - body["auto_resolve_display_name"] = self.auto_resolve_display_name - if self.id is not None: - body["id"] = self.id - if self.query: - body["query"] = self.query - if self.update_mask is not None: - body["update_mask"] = self.update_mask - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateQueryRequest: - """Deserializes the UpdateQueryRequest from a dictionary.""" - return cls( - auto_resolve_display_name=d.get("auto_resolve_display_name", None), - id=d.get("id", None), - query=_from_dict(d, "query", UpdateQueryRequestQuery), - update_mask=d.get("update_mask", None), - ) - - @dataclass class UpdateQueryRequestQuery: apply_auto_limit: Optional[bool] = None @@ -7903,55 +6516,6 @@ def from_dict(cls, d: 
Dict[str, Any]) -> UpdateResponse: return cls() -@dataclass -class UpdateVisualizationRequest: - update_mask: str - """The field mask must be a single string, with multiple fields separated by commas (no spaces). - The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields - (e.g., `author.given_name`). Specification of elements in sequence or map fields is not allowed, - as only the entire collection field can be specified. Field names must exactly match the - resource field names. - - A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the - fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the - API changes in the future.""" - - id: Optional[str] = None - - visualization: Optional[UpdateVisualizationRequestVisualization] = None - - def as_dict(self) -> dict: - """Serializes the UpdateVisualizationRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.id is not None: - body["id"] = self.id - if self.update_mask is not None: - body["update_mask"] = self.update_mask - if self.visualization: - body["visualization"] = self.visualization.as_dict() - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateVisualizationRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.id is not None: - body["id"] = self.id - if self.update_mask is not None: - body["update_mask"] = self.update_mask - if self.visualization: - body["visualization"] = self.visualization - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateVisualizationRequest: - """Deserializes the UpdateVisualizationRequest from a dictionary.""" - return cls( - id=d.get("id", None), - update_mask=d.get("update_mask", None), - visualization=_from_dict(d, "visualization", UpdateVisualizationRequestVisualization), - ) - - @dataclass class UpdateVisualizationRequestVisualization: display_name: 
Optional[str] = None @@ -8005,73 +6569,6 @@ def from_dict(cls, d: Dict[str, Any]) -> UpdateVisualizationRequestVisualization ) -@dataclass -class UpdateWidgetRequest: - dashboard_id: str - """Dashboard ID returned by :method:dashboards/create.""" - - options: WidgetOptions - - width: int - """Width of a widget""" - - id: Optional[str] = None - """Widget ID returned by :method:dashboardwidgets/create""" - - text: Optional[str] = None - """If this is a textbox widget, the application displays this text. This field is ignored if the - widget contains a visualization in the `visualization` field.""" - - visualization_id: Optional[str] = None - """Query Vizualization ID returned by :method:queryvisualizations/create.""" - - def as_dict(self) -> dict: - """Serializes the UpdateWidgetRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.dashboard_id is not None: - body["dashboard_id"] = self.dashboard_id - if self.id is not None: - body["id"] = self.id - if self.options: - body["options"] = self.options.as_dict() - if self.text is not None: - body["text"] = self.text - if self.visualization_id is not None: - body["visualization_id"] = self.visualization_id - if self.width is not None: - body["width"] = self.width - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateWidgetRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.dashboard_id is not None: - body["dashboard_id"] = self.dashboard_id - if self.id is not None: - body["id"] = self.id - if self.options: - body["options"] = self.options - if self.text is not None: - body["text"] = self.text - if self.visualization_id is not None: - body["visualization_id"] = self.visualization_id - if self.width is not None: - body["width"] = self.width - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateWidgetRequest: - """Deserializes the UpdateWidgetRequest from a dictionary.""" - return cls( - 
dashboard_id=d.get("dashboard_id", None), - id=d.get("id", None), - options=_from_dict(d, "options", WidgetOptions), - text=d.get("text", None), - visualization_id=d.get("visualization_id", None), - width=d.get("width", None), - ) - - @dataclass class User: email: Optional[str] = None @@ -8425,40 +6922,6 @@ def from_dict(cls, d: Dict[str, Any]) -> WarehousePermissionsDescription: ) -@dataclass -class WarehousePermissionsRequest: - access_control_list: Optional[List[WarehouseAccessControlRequest]] = None - - warehouse_id: Optional[str] = None - """The SQL warehouse for which to get or manage permissions.""" - - def as_dict(self) -> dict: - """Serializes the WarehousePermissionsRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.access_control_list: - body["access_control_list"] = [v.as_dict() for v in self.access_control_list] - if self.warehouse_id is not None: - body["warehouse_id"] = self.warehouse_id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the WarehousePermissionsRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.access_control_list: - body["access_control_list"] = self.access_control_list - if self.warehouse_id is not None: - body["warehouse_id"] = self.warehouse_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> WarehousePermissionsRequest: - """Deserializes the WarehousePermissionsRequest from a dictionary.""" - return cls( - access_control_list=_repeated_dict(d, "access_control_list", WarehouseAccessControlRequest), - warehouse_id=d.get("warehouse_id", None), - ) - - @dataclass class WarehouseTypePair: enabled: Optional[bool] = None @@ -9228,56 +7691,6 @@ class DashboardsAPI: def __init__(self, api_client): self._api = api_client - def create( - self, - name: str, - *, - dashboard_filters_enabled: Optional[bool] = None, - is_favorite: Optional[bool] = None, - parent: Optional[str] = None, - run_as_role: Optional[RunAsRole] = 
None, - tags: Optional[List[str]] = None, - ) -> Dashboard: - """Creates a new dashboard object. Only the name parameter is required in the POST request JSON body. - Other fields can be included when duplicating dashboards with this API. Databricks does not recommend - designing dashboards exclusively using this API.', - - :param name: str - The title of this dashboard that appears in list views and at the top of the dashboard page. - :param dashboard_filters_enabled: bool (optional) - Indicates whether the dashboard filters are enabled - :param is_favorite: bool (optional) - Indicates whether this dashboard object should appear in the current user's favorites list. - :param parent: str (optional) - The identifier of the workspace folder containing the object. - :param run_as_role: :class:`RunAsRole` (optional) - Sets the **Run as** role for the object. Must be set to one of `"viewer"` (signifying "run as - viewer" behavior) or `"owner"` (signifying "run as owner" behavior) - :param tags: List[str] (optional) - - :returns: :class:`Dashboard` - """ - body = {} - if dashboard_filters_enabled is not None: - body["dashboard_filters_enabled"] = dashboard_filters_enabled - if is_favorite is not None: - body["is_favorite"] = is_favorite - if name is not None: - body["name"] = name - if parent is not None: - body["parent"] = parent - if run_as_role is not None: - body["run_as_role"] = run_as_role.value - if tags is not None: - body["tags"] = [v for v in tags] - headers = { - "Accept": "application/json", - "Content-Type": "application/json", - } - - res = self._api.do("POST", "/api/2.0/preview/sql/dashboards", body=body, headers=headers) - return Dashboard.from_dict(res) - def delete(self, dashboard_id: str): """Moves a dashboard to the trash. Trashed dashboards do not appear in list views or searches, and cannot be shared. 
diff --git a/databricks/sdk/service/vectorsearch.py b/databricks/sdk/service/vectorsearch.py index a79a64db5..fb8ea8f94 100755 --- a/databricks/sdk/service/vectorsearch.py +++ b/databricks/sdk/service/vectorsearch.py @@ -44,115 +44,6 @@ def from_dict(cls, d: Dict[str, Any]) -> ColumnInfo: return cls(name=d.get("name", None)) -@dataclass -class CreateEndpoint: - name: str - """Name of the vector search endpoint""" - - endpoint_type: EndpointType - """Type of endpoint""" - - budget_policy_id: Optional[str] = None - """The budget policy id to be applied""" - - def as_dict(self) -> dict: - """Serializes the CreateEndpoint into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.budget_policy_id is not None: - body["budget_policy_id"] = self.budget_policy_id - if self.endpoint_type is not None: - body["endpoint_type"] = self.endpoint_type.value - if self.name is not None: - body["name"] = self.name - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreateEndpoint into a shallow dictionary of its immediate attributes.""" - body = {} - if self.budget_policy_id is not None: - body["budget_policy_id"] = self.budget_policy_id - if self.endpoint_type is not None: - body["endpoint_type"] = self.endpoint_type - if self.name is not None: - body["name"] = self.name - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateEndpoint: - """Deserializes the CreateEndpoint from a dictionary.""" - return cls( - budget_policy_id=d.get("budget_policy_id", None), - endpoint_type=_enum(d, "endpoint_type", EndpointType), - name=d.get("name", None), - ) - - -@dataclass -class CreateVectorIndexRequest: - name: str - """Name of the index""" - - endpoint_name: str - """Name of the endpoint to be used for serving the index""" - - primary_key: str - """Primary key of the index""" - - index_type: VectorIndexType - - delta_sync_index_spec: Optional[DeltaSyncVectorIndexSpecRequest] = None - """Specification for Delta Sync Index. 
Required if `index_type` is `DELTA_SYNC`.""" - - direct_access_index_spec: Optional[DirectAccessVectorIndexSpec] = None - """Specification for Direct Vector Access Index. Required if `index_type` is `DIRECT_ACCESS`.""" - - def as_dict(self) -> dict: - """Serializes the CreateVectorIndexRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.delta_sync_index_spec: - body["delta_sync_index_spec"] = self.delta_sync_index_spec.as_dict() - if self.direct_access_index_spec: - body["direct_access_index_spec"] = self.direct_access_index_spec.as_dict() - if self.endpoint_name is not None: - body["endpoint_name"] = self.endpoint_name - if self.index_type is not None: - body["index_type"] = self.index_type.value - if self.name is not None: - body["name"] = self.name - if self.primary_key is not None: - body["primary_key"] = self.primary_key - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreateVectorIndexRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.delta_sync_index_spec: - body["delta_sync_index_spec"] = self.delta_sync_index_spec - if self.direct_access_index_spec: - body["direct_access_index_spec"] = self.direct_access_index_spec - if self.endpoint_name is not None: - body["endpoint_name"] = self.endpoint_name - if self.index_type is not None: - body["index_type"] = self.index_type - if self.name is not None: - body["name"] = self.name - if self.primary_key is not None: - body["primary_key"] = self.primary_key - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateVectorIndexRequest: - """Deserializes the CreateVectorIndexRequest from a dictionary.""" - return cls( - delta_sync_index_spec=_from_dict(d, "delta_sync_index_spec", DeltaSyncVectorIndexSpecRequest), - direct_access_index_spec=_from_dict(d, "direct_access_index_spec", DirectAccessVectorIndexSpec), - endpoint_name=d.get("endpoint_name", None), - index_type=_enum(d, "index_type", 
VectorIndexType), - name=d.get("name", None), - primary_key=d.get("primary_key", None), - ) - - @dataclass class CustomTag: key: str @@ -889,38 +780,6 @@ def from_dict(cls, d: Dict[str, Any]) -> MiniVectorIndex: ) -@dataclass -class PatchEndpointBudgetPolicyRequest: - budget_policy_id: str - """The budget policy id to be applied""" - - endpoint_name: Optional[str] = None - """Name of the vector search endpoint""" - - def as_dict(self) -> dict: - """Serializes the PatchEndpointBudgetPolicyRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.budget_policy_id is not None: - body["budget_policy_id"] = self.budget_policy_id - if self.endpoint_name is not None: - body["endpoint_name"] = self.endpoint_name - return body - - def as_shallow_dict(self) -> dict: - """Serializes the PatchEndpointBudgetPolicyRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.budget_policy_id is not None: - body["budget_policy_id"] = self.budget_policy_id - if self.endpoint_name is not None: - body["endpoint_name"] = self.endpoint_name - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> PatchEndpointBudgetPolicyRequest: - """Deserializes the PatchEndpointBudgetPolicyRequest from a dictionary.""" - return cls(budget_policy_id=d.get("budget_policy_id", None), endpoint_name=d.get("endpoint_name", None)) - - @dataclass class PatchEndpointBudgetPolicyResponse: effective_budget_policy_id: Optional[str] = None @@ -957,149 +816,6 @@ class PipelineType(Enum): TRIGGERED = "TRIGGERED" -@dataclass -class QueryVectorIndexNextPageRequest: - """Request payload for getting next page of results.""" - - endpoint_name: Optional[str] = None - """Name of the endpoint.""" - - index_name: Optional[str] = None - """Name of the vector index to query.""" - - page_token: Optional[str] = None - """Page token returned from previous `QueryVectorIndex` or `QueryVectorIndexNextPage` API.""" - - def as_dict(self) -> dict: - 
"""Serializes the QueryVectorIndexNextPageRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.endpoint_name is not None: - body["endpoint_name"] = self.endpoint_name - if self.index_name is not None: - body["index_name"] = self.index_name - if self.page_token is not None: - body["page_token"] = self.page_token - return body - - def as_shallow_dict(self) -> dict: - """Serializes the QueryVectorIndexNextPageRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.endpoint_name is not None: - body["endpoint_name"] = self.endpoint_name - if self.index_name is not None: - body["index_name"] = self.index_name - if self.page_token is not None: - body["page_token"] = self.page_token - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> QueryVectorIndexNextPageRequest: - """Deserializes the QueryVectorIndexNextPageRequest from a dictionary.""" - return cls( - endpoint_name=d.get("endpoint_name", None), - index_name=d.get("index_name", None), - page_token=d.get("page_token", None), - ) - - -@dataclass -class QueryVectorIndexRequest: - columns: List[str] - """List of column names to include in the response.""" - - columns_to_rerank: Optional[List[str]] = None - """Column names used to retrieve data to send to the reranker.""" - - filters_json: Optional[str] = None - """JSON string representing query filters. - - Example filters: - - - `{"id <": 5}`: Filter for id less than 5. - `{"id >": 5}`: Filter for id greater than 5. - - `{"id <=": 5}`: Filter for id less than equal to 5. - `{"id >=": 5}`: Filter for id greater than - equal to 5. - `{"id": 5}`: Filter for id equal to 5.""" - - index_name: Optional[str] = None - """Name of the vector index to query.""" - - num_results: Optional[int] = None - """Number of results to return. Defaults to 10.""" - - query_text: Optional[str] = None - """Query text. 
Required for Delta Sync Index using model endpoint.""" - - query_type: Optional[str] = None - """The query type to use. Choices are `ANN` and `HYBRID`. Defaults to `ANN`.""" - - query_vector: Optional[List[float]] = None - """Query vector. Required for Direct Vector Access Index and Delta Sync Index using self-managed - vectors.""" - - score_threshold: Optional[float] = None - """Threshold for the approximate nearest neighbor search. Defaults to 0.0.""" - - def as_dict(self) -> dict: - """Serializes the QueryVectorIndexRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.columns: - body["columns"] = [v for v in self.columns] - if self.columns_to_rerank: - body["columns_to_rerank"] = [v for v in self.columns_to_rerank] - if self.filters_json is not None: - body["filters_json"] = self.filters_json - if self.index_name is not None: - body["index_name"] = self.index_name - if self.num_results is not None: - body["num_results"] = self.num_results - if self.query_text is not None: - body["query_text"] = self.query_text - if self.query_type is not None: - body["query_type"] = self.query_type - if self.query_vector: - body["query_vector"] = [v for v in self.query_vector] - if self.score_threshold is not None: - body["score_threshold"] = self.score_threshold - return body - - def as_shallow_dict(self) -> dict: - """Serializes the QueryVectorIndexRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.columns: - body["columns"] = self.columns - if self.columns_to_rerank: - body["columns_to_rerank"] = self.columns_to_rerank - if self.filters_json is not None: - body["filters_json"] = self.filters_json - if self.index_name is not None: - body["index_name"] = self.index_name - if self.num_results is not None: - body["num_results"] = self.num_results - if self.query_text is not None: - body["query_text"] = self.query_text - if self.query_type is not None: - body["query_type"] = self.query_type - if 
self.query_vector: - body["query_vector"] = self.query_vector - if self.score_threshold is not None: - body["score_threshold"] = self.score_threshold - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> QueryVectorIndexRequest: - """Deserializes the QueryVectorIndexRequest from a dictionary.""" - return cls( - columns=d.get("columns", None), - columns_to_rerank=d.get("columns_to_rerank", None), - filters_json=d.get("filters_json", None), - index_name=d.get("index_name", None), - num_results=d.get("num_results", None), - query_text=d.get("query_text", None), - query_type=d.get("query_type", None), - query_vector=d.get("query_vector", None), - score_threshold=d.get("score_threshold", None), - ) - - @dataclass class QueryVectorIndexResponse: manifest: Optional[ResultManifest] = None @@ -1213,49 +929,6 @@ def from_dict(cls, d: Dict[str, Any]) -> ResultManifest: return cls(column_count=d.get("column_count", None), columns=_repeated_dict(d, "columns", ColumnInfo)) -@dataclass -class ScanVectorIndexRequest: - index_name: Optional[str] = None - """Name of the vector index to scan.""" - - last_primary_key: Optional[str] = None - """Primary key of the last entry returned in the previous scan.""" - - num_results: Optional[int] = None - """Number of results to return. 
Defaults to 10.""" - - def as_dict(self) -> dict: - """Serializes the ScanVectorIndexRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.index_name is not None: - body["index_name"] = self.index_name - if self.last_primary_key is not None: - body["last_primary_key"] = self.last_primary_key - if self.num_results is not None: - body["num_results"] = self.num_results - return body - - def as_shallow_dict(self) -> dict: - """Serializes the ScanVectorIndexRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.index_name is not None: - body["index_name"] = self.index_name - if self.last_primary_key is not None: - body["last_primary_key"] = self.last_primary_key - if self.num_results is not None: - body["num_results"] = self.num_results - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> ScanVectorIndexRequest: - """Deserializes the ScanVectorIndexRequest from a dictionary.""" - return cls( - index_name=d.get("index_name", None), - last_primary_key=d.get("last_primary_key", None), - num_results=d.get("num_results", None), - ) - - @dataclass class ScanVectorIndexResponse: """Response to a scan vector index request.""" @@ -1333,38 +1006,6 @@ def from_dict(cls, d: Dict[str, Any]) -> SyncIndexResponse: return cls() -@dataclass -class UpdateEndpointCustomTagsRequest: - custom_tags: List[CustomTag] - """The new custom tags for the vector search endpoint""" - - endpoint_name: Optional[str] = None - """Name of the vector search endpoint""" - - def as_dict(self) -> dict: - """Serializes the UpdateEndpointCustomTagsRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.custom_tags: - body["custom_tags"] = [v.as_dict() for v in self.custom_tags] - if self.endpoint_name is not None: - body["endpoint_name"] = self.endpoint_name - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateEndpointCustomTagsRequest into a shallow 
dictionary of its immediate attributes.""" - body = {} - if self.custom_tags: - body["custom_tags"] = self.custom_tags - if self.endpoint_name is not None: - body["endpoint_name"] = self.endpoint_name - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateEndpointCustomTagsRequest: - """Deserializes the UpdateEndpointCustomTagsRequest from a dictionary.""" - return cls(custom_tags=_repeated_dict(d, "custom_tags", CustomTag), endpoint_name=d.get("endpoint_name", None)) - - @dataclass class UpdateEndpointCustomTagsResponse: custom_tags: Optional[List[CustomTag]] = None @@ -1438,38 +1079,6 @@ class UpsertDataStatus(Enum): SUCCESS = "SUCCESS" -@dataclass -class UpsertDataVectorIndexRequest: - inputs_json: str - """JSON string representing the data to be upserted.""" - - index_name: Optional[str] = None - """Name of the vector index where data is to be upserted. Must be a Direct Vector Access Index.""" - - def as_dict(self) -> dict: - """Serializes the UpsertDataVectorIndexRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.index_name is not None: - body["index_name"] = self.index_name - if self.inputs_json is not None: - body["inputs_json"] = self.inputs_json - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpsertDataVectorIndexRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.index_name is not None: - body["index_name"] = self.index_name - if self.inputs_json is not None: - body["inputs_json"] = self.inputs_json - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpsertDataVectorIndexRequest: - """Deserializes the UpsertDataVectorIndexRequest from a dictionary.""" - return cls(index_name=d.get("index_name", None), inputs_json=d.get("inputs_json", None)) - - @dataclass class UpsertDataVectorIndexResponse: result: Optional[UpsertDataResult] = None diff --git a/databricks/sdk/service/workspace.py 
b/databricks/sdk/service/workspace.py index c529ebcf8..cab860d9c 100755 --- a/databricks/sdk/service/workspace.py +++ b/databricks/sdk/service/workspace.py @@ -17,6 +17,9 @@ @dataclass class AclItem: + """An item representing an ACL rule applied to the given principal (user or group) on the + associated scope point.""" + principal: str """The principal in which the permission is applied.""" @@ -48,6 +51,7 @@ def from_dict(cls, d: Dict[str, Any]) -> AclItem: class AclPermission(Enum): + """The ACL permission levels for Secret ACLs applied to secret scopes.""" MANAGE = "MANAGE" READ = "READ" @@ -56,6 +60,8 @@ class AclPermission(Enum): @dataclass class AzureKeyVaultSecretScopeMetadata: + """The metadata of the Azure KeyVault for a secret scope of type `AZURE_KEYVAULT`""" + resource_id: str """The resource id of the azure KeyVault that user wants to associate the scope with.""" @@ -86,58 +92,6 @@ def from_dict(cls, d: Dict[str, Any]) -> AzureKeyVaultSecretScopeMetadata: return cls(dns_name=d.get("dns_name", None), resource_id=d.get("resource_id", None)) -@dataclass -class CreateCredentialsRequest: - git_provider: str - """Git provider. This field is case-insensitive. The available Git providers are `gitHub`, - `bitbucketCloud`, `gitLab`, `azureDevOpsServices`, `gitHubEnterprise`, `bitbucketServer`, - `gitLabEnterpriseEdition` and `awsCodeCommit`.""" - - git_username: Optional[str] = None - """The username or email provided with your Git provider account, depending on which provider you - are using. For GitHub, GitHub Enterprise Server, or Azure DevOps Services, either email or - username may be used. For GitLab, GitLab Enterprise Edition, email must be used. For AWS - CodeCommit, BitBucket or BitBucket Server, username must be used. 
For all other providers please - see your provider's Personal Access Token authentication documentation to see what is supported.""" - - personal_access_token: Optional[str] = None - """The personal access token used to authenticate to the corresponding Git provider. For certain - providers, support may exist for other types of scoped access tokens. [Learn more]. - - [Learn more]: https://docs.databricks.com/repos/get-access-tokens-from-git-provider.html""" - - def as_dict(self) -> dict: - """Serializes the CreateCredentialsRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.git_provider is not None: - body["git_provider"] = self.git_provider - if self.git_username is not None: - body["git_username"] = self.git_username - if self.personal_access_token is not None: - body["personal_access_token"] = self.personal_access_token - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreateCredentialsRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.git_provider is not None: - body["git_provider"] = self.git_provider - if self.git_username is not None: - body["git_username"] = self.git_username - if self.personal_access_token is not None: - body["personal_access_token"] = self.personal_access_token - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateCredentialsRequest: - """Deserializes the CreateCredentialsRequest from a dictionary.""" - return cls( - git_provider=d.get("git_provider", None), - git_username=d.get("git_username", None), - personal_access_token=d.get("personal_access_token", None), - ) - - @dataclass class CreateCredentialsResponse: credential_id: int @@ -150,6 +104,12 @@ class CreateCredentialsResponse: """The username or email provided with your Git provider account and associated with the credential.""" + is_default_for_provider: Optional[bool] = None + """if the credential is the default for the given provider""" + + name: 
Optional[str] = None + """the name of the git credential, used for identification and ease of lookup""" + def as_dict(self) -> dict: """Serializes the CreateCredentialsResponse into a dictionary suitable for use as a JSON request body.""" body = {} @@ -159,6 +119,10 @@ def as_dict(self) -> dict: body["git_provider"] = self.git_provider if self.git_username is not None: body["git_username"] = self.git_username + if self.is_default_for_provider is not None: + body["is_default_for_provider"] = self.is_default_for_provider + if self.name is not None: + body["name"] = self.name return body def as_shallow_dict(self) -> dict: @@ -170,6 +134,10 @@ def as_shallow_dict(self) -> dict: body["git_provider"] = self.git_provider if self.git_username is not None: body["git_username"] = self.git_username + if self.is_default_for_provider is not None: + body["is_default_for_provider"] = self.is_default_for_provider + if self.name is not None: + body["name"] = self.name return body @classmethod @@ -179,61 +147,8 @@ def from_dict(cls, d: Dict[str, Any]) -> CreateCredentialsResponse: credential_id=d.get("credential_id", None), git_provider=d.get("git_provider", None), git_username=d.get("git_username", None), - ) - - -@dataclass -class CreateRepoRequest: - url: str - """URL of the Git repository to be linked.""" - - provider: str - """Git provider. This field is case-insensitive. The available Git providers are `gitHub`, - `bitbucketCloud`, `gitLab`, `azureDevOpsServices`, `gitHubEnterprise`, `bitbucketServer`, - `gitLabEnterpriseEdition` and `awsCodeCommit`.""" - - path: Optional[str] = None - """Desired path for the repo in the workspace. Almost any path in the workspace can be chosen. If - repo is created in `/Repos`, path must be in the format `/Repos/{folder}/{repo-name}`.""" - - sparse_checkout: Optional[SparseCheckout] = None - """If specified, the repo will be created with sparse checkout enabled. 
You cannot enable/disable - sparse checkout after the repo is created.""" - - def as_dict(self) -> dict: - """Serializes the CreateRepoRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.path is not None: - body["path"] = self.path - if self.provider is not None: - body["provider"] = self.provider - if self.sparse_checkout: - body["sparse_checkout"] = self.sparse_checkout.as_dict() - if self.url is not None: - body["url"] = self.url - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreateRepoRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.path is not None: - body["path"] = self.path - if self.provider is not None: - body["provider"] = self.provider - if self.sparse_checkout: - body["sparse_checkout"] = self.sparse_checkout - if self.url is not None: - body["url"] = self.url - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateRepoRequest: - """Deserializes the CreateRepoRequest from a dictionary.""" - return cls( - path=d.get("path", None), - provider=d.get("provider", None), - sparse_checkout=_from_dict(d, "sparse_checkout", SparseCheckout), - url=d.get("url", None), + is_default_for_provider=d.get("is_default_for_provider", None), + name=d.get("name", None), ) @@ -312,57 +227,6 @@ def from_dict(cls, d: Dict[str, Any]) -> CreateRepoResponse: ) -@dataclass -class CreateScope: - scope: str - """Scope name requested by the user. Scope names are unique.""" - - backend_azure_keyvault: Optional[AzureKeyVaultSecretScopeMetadata] = None - """The metadata for the secret scope if the type is `AZURE_KEYVAULT`""" - - initial_manage_principal: Optional[str] = None - """The principal that is initially granted `MANAGE` permission to the created scope.""" - - scope_backend_type: Optional[ScopeBackendType] = None - """The backend type the scope will be created with. 
If not specified, will default to `DATABRICKS`""" - - def as_dict(self) -> dict: - """Serializes the CreateScope into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.backend_azure_keyvault: - body["backend_azure_keyvault"] = self.backend_azure_keyvault.as_dict() - if self.initial_manage_principal is not None: - body["initial_manage_principal"] = self.initial_manage_principal - if self.scope is not None: - body["scope"] = self.scope - if self.scope_backend_type is not None: - body["scope_backend_type"] = self.scope_backend_type.value - return body - - def as_shallow_dict(self) -> dict: - """Serializes the CreateScope into a shallow dictionary of its immediate attributes.""" - body = {} - if self.backend_azure_keyvault: - body["backend_azure_keyvault"] = self.backend_azure_keyvault - if self.initial_manage_principal is not None: - body["initial_manage_principal"] = self.initial_manage_principal - if self.scope is not None: - body["scope"] = self.scope - if self.scope_backend_type is not None: - body["scope_backend_type"] = self.scope_backend_type - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> CreateScope: - """Deserializes the CreateScope from a dictionary.""" - return cls( - backend_azure_keyvault=_from_dict(d, "backend_azure_keyvault", AzureKeyVaultSecretScopeMetadata), - initial_manage_principal=d.get("initial_manage_principal", None), - scope=d.get("scope", None), - scope_backend_type=_enum(d, "scope_backend_type", ScopeBackendType), - ) - - @dataclass class CreateScopeResponse: def as_dict(self) -> dict: @@ -393,6 +257,12 @@ class CredentialInfo: """The username or email provided with your Git provider account and associated with the credential.""" + is_default_for_provider: Optional[bool] = None + """if the credential is the default for the given provider""" + + name: Optional[str] = None + """the name of the git credential, used for identification and ease of lookup""" + def as_dict(self) -> dict: 
"""Serializes the CredentialInfo into a dictionary suitable for use as a JSON request body.""" body = {} @@ -402,6 +272,10 @@ def as_dict(self) -> dict: body["git_provider"] = self.git_provider if self.git_username is not None: body["git_username"] = self.git_username + if self.is_default_for_provider is not None: + body["is_default_for_provider"] = self.is_default_for_provider + if self.name is not None: + body["name"] = self.name return body def as_shallow_dict(self) -> dict: @@ -413,6 +287,10 @@ def as_shallow_dict(self) -> dict: body["git_provider"] = self.git_provider if self.git_username is not None: body["git_username"] = self.git_username + if self.is_default_for_provider is not None: + body["is_default_for_provider"] = self.is_default_for_provider + if self.name is not None: + body["name"] = self.name return body @classmethod @@ -422,75 +300,11 @@ def from_dict(cls, d: Dict[str, Any]) -> CredentialInfo: credential_id=d.get("credential_id", None), git_provider=d.get("git_provider", None), git_username=d.get("git_username", None), + is_default_for_provider=d.get("is_default_for_provider", None), + name=d.get("name", None), ) -@dataclass -class Delete: - path: str - """The absolute path of the notebook or directory.""" - - recursive: Optional[bool] = None - """The flag that specifies whether to delete the object recursively. It is `false` by default. - Please note this deleting directory is not atomic. 
If it fails in the middle, some of objects - under this directory may be deleted and cannot be undone.""" - - def as_dict(self) -> dict: - """Serializes the Delete into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.path is not None: - body["path"] = self.path - if self.recursive is not None: - body["recursive"] = self.recursive - return body - - def as_shallow_dict(self) -> dict: - """Serializes the Delete into a shallow dictionary of its immediate attributes.""" - body = {} - if self.path is not None: - body["path"] = self.path - if self.recursive is not None: - body["recursive"] = self.recursive - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> Delete: - """Deserializes the Delete from a dictionary.""" - return cls(path=d.get("path", None), recursive=d.get("recursive", None)) - - -@dataclass -class DeleteAcl: - scope: str - """The name of the scope to remove permissions from.""" - - principal: str - """The principal to remove an existing ACL from.""" - - def as_dict(self) -> dict: - """Serializes the DeleteAcl into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.principal is not None: - body["principal"] = self.principal - if self.scope is not None: - body["scope"] = self.scope - return body - - def as_shallow_dict(self) -> dict: - """Serializes the DeleteAcl into a shallow dictionary of its immediate attributes.""" - body = {} - if self.principal is not None: - body["principal"] = self.principal - if self.scope is not None: - body["scope"] = self.scope - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DeleteAcl: - """Deserializes the DeleteAcl from a dictionary.""" - return cls(principal=d.get("principal", None), scope=d.get("scope", None)) - - @dataclass class DeleteAclResponse: def as_dict(self) -> dict: @@ -563,31 +377,6 @@ def from_dict(cls, d: Dict[str, Any]) -> DeleteResponse: return cls() -@dataclass -class DeleteScope: - scope: str - """Name of 
the scope to delete.""" - - def as_dict(self) -> dict: - """Serializes the DeleteScope into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.scope is not None: - body["scope"] = self.scope - return body - - def as_shallow_dict(self) -> dict: - """Serializes the DeleteScope into a shallow dictionary of its immediate attributes.""" - body = {} - if self.scope is not None: - body["scope"] = self.scope - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DeleteScope: - """Deserializes the DeleteScope from a dictionary.""" - return cls(scope=d.get("scope", None)) - - @dataclass class DeleteScopeResponse: def as_dict(self) -> dict: @@ -606,38 +395,6 @@ def from_dict(cls, d: Dict[str, Any]) -> DeleteScopeResponse: return cls() -@dataclass -class DeleteSecret: - scope: str - """The name of the scope that contains the secret to delete.""" - - key: str - """Name of the secret to delete.""" - - def as_dict(self) -> dict: - """Serializes the DeleteSecret into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.key is not None: - body["key"] = self.key - if self.scope is not None: - body["scope"] = self.scope - return body - - def as_shallow_dict(self) -> dict: - """Serializes the DeleteSecret into a shallow dictionary of its immediate attributes.""" - body = {} - if self.key is not None: - body["key"] = self.key - if self.scope is not None: - body["scope"] = self.scope - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> DeleteSecret: - """Deserializes the DeleteSecret from a dictionary.""" - return cls(key=d.get("key", None), scope=d.get("scope", None)) - - @dataclass class DeleteSecretResponse: def as_dict(self) -> dict: @@ -716,6 +473,12 @@ class GetCredentialsResponse: """The username or email provided with your Git provider account and associated with the credential.""" + is_default_for_provider: Optional[bool] = None + """if the credential is the default for the given 
provider""" + + name: Optional[str] = None + """the name of the git credential, used for identification and ease of lookup""" + def as_dict(self) -> dict: """Serializes the GetCredentialsResponse into a dictionary suitable for use as a JSON request body.""" body = {} @@ -725,6 +488,10 @@ def as_dict(self) -> dict: body["git_provider"] = self.git_provider if self.git_username is not None: body["git_username"] = self.git_username + if self.is_default_for_provider is not None: + body["is_default_for_provider"] = self.is_default_for_provider + if self.name is not None: + body["name"] = self.name return body def as_shallow_dict(self) -> dict: @@ -736,6 +503,10 @@ def as_shallow_dict(self) -> dict: body["git_provider"] = self.git_provider if self.git_username is not None: body["git_username"] = self.git_username + if self.is_default_for_provider is not None: + body["is_default_for_provider"] = self.is_default_for_provider + if self.name is not None: + body["name"] = self.name return body @classmethod @@ -745,6 +516,8 @@ def from_dict(cls, d: Dict[str, Any]) -> GetCredentialsResponse: credential_id=d.get("credential_id", None), git_provider=d.get("git_provider", None), git_username=d.get("git_username", None), + is_default_for_provider=d.get("is_default_for_provider", None), + name=d.get("name", None), ) @@ -905,80 +678,6 @@ def from_dict(cls, d: Dict[str, Any]) -> GetWorkspaceObjectPermissionLevelsRespo return cls(permission_levels=_repeated_dict(d, "permission_levels", WorkspaceObjectPermissionsDescription)) -@dataclass -class Import: - path: str - """The absolute path of the object or directory. Importing a directory is only supported for the - `DBC` and `SOURCE` formats.""" - - content: Optional[str] = None - """The base64-encoded content. This has a limit of 10 MB. - - If the limit (10MB) is exceeded, exception with error code **MAX_NOTEBOOK_SIZE_EXCEEDED** is - thrown. 
This parameter might be absent, and instead a posted file is used.""" - - format: Optional[ImportFormat] = None - """This specifies the format of the file to be imported. - - The value is case sensitive. - - - `AUTO`: The item is imported depending on an analysis of the item's extension and the header - content provided in the request. If the item is imported as a notebook, then the item's - extension is automatically removed. - `SOURCE`: The notebook or directory is imported as source - code. - `HTML`: The notebook is imported as an HTML file. - `JUPYTER`: The notebook is imported - as a Jupyter/IPython Notebook file. - `DBC`: The notebook is imported in Databricks archive - format. Required for directories. - `R_MARKDOWN`: The notebook is imported from R Markdown - format.""" - - language: Optional[Language] = None - """The language of the object. This value is set only if the object type is `NOTEBOOK`.""" - - overwrite: Optional[bool] = None - """The flag that specifies whether to overwrite existing object. It is `false` by default. 
For - `DBC` format, `overwrite` is not supported since it may contain a directory.""" - - def as_dict(self) -> dict: - """Serializes the Import into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.content is not None: - body["content"] = self.content - if self.format is not None: - body["format"] = self.format.value - if self.language is not None: - body["language"] = self.language.value - if self.overwrite is not None: - body["overwrite"] = self.overwrite - if self.path is not None: - body["path"] = self.path - return body - - def as_shallow_dict(self) -> dict: - """Serializes the Import into a shallow dictionary of its immediate attributes.""" - body = {} - if self.content is not None: - body["content"] = self.content - if self.format is not None: - body["format"] = self.format - if self.language is not None: - body["language"] = self.language - if self.overwrite is not None: - body["overwrite"] = self.overwrite - if self.path is not None: - body["path"] = self.path - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> Import: - """Deserializes the Import from a dictionary.""" - return cls( - content=d.get("content", None), - format=_enum(d, "format", ImportFormat), - language=_enum(d, "language", Language), - overwrite=d.get("overwrite", None), - path=d.get("path", None), - ) - - class ImportFormat(Enum): """The format for workspace import and export.""" @@ -1176,32 +875,6 @@ def from_dict(cls, d: Dict[str, Any]) -> ListSecretsResponse: return cls(secrets=_repeated_dict(d, "secrets", SecretMetadata)) -@dataclass -class Mkdirs: - path: str - """The absolute path of the directory. If the parent directories do not exist, it will also create - them. 
If the directory already exists, this command will do nothing and succeed.""" - - def as_dict(self) -> dict: - """Serializes the Mkdirs into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.path is not None: - body["path"] = self.path - return body - - def as_shallow_dict(self) -> dict: - """Serializes the Mkdirs into a shallow dictionary of its immediate attributes.""" - body = {} - if self.path is not None: - body["path"] = self.path - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> Mkdirs: - """Deserializes the Mkdirs from a dictionary.""" - return cls(path=d.get("path", None)) - - @dataclass class MkdirsResponse: def as_dict(self) -> dict: @@ -1320,49 +993,6 @@ class ObjectType(Enum): REPO = "REPO" -@dataclass -class PutAcl: - scope: str - """The name of the scope to apply permissions to.""" - - principal: str - """The principal in which the permission is applied.""" - - permission: AclPermission - """The permission level applied to the principal.""" - - def as_dict(self) -> dict: - """Serializes the PutAcl into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.permission is not None: - body["permission"] = self.permission.value - if self.principal is not None: - body["principal"] = self.principal - if self.scope is not None: - body["scope"] = self.scope - return body - - def as_shallow_dict(self) -> dict: - """Serializes the PutAcl into a shallow dictionary of its immediate attributes.""" - body = {} - if self.permission is not None: - body["permission"] = self.permission - if self.principal is not None: - body["principal"] = self.principal - if self.scope is not None: - body["scope"] = self.scope - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> PutAcl: - """Deserializes the PutAcl from a dictionary.""" - return cls( - permission=_enum(d, "permission", AclPermission), - principal=d.get("principal", None), - scope=d.get("scope", None), - ) - - @dataclass 
class PutAclResponse: def as_dict(self) -> dict: @@ -1381,57 +1011,6 @@ def from_dict(cls, d: Dict[str, Any]) -> PutAclResponse: return cls() -@dataclass -class PutSecret: - scope: str - """The name of the scope to which the secret will be associated with.""" - - key: str - """A unique name to identify the secret.""" - - bytes_value: Optional[str] = None - """If specified, value will be stored as bytes.""" - - string_value: Optional[str] = None - """If specified, note that the value will be stored in UTF-8 (MB4) form.""" - - def as_dict(self) -> dict: - """Serializes the PutSecret into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.bytes_value is not None: - body["bytes_value"] = self.bytes_value - if self.key is not None: - body["key"] = self.key - if self.scope is not None: - body["scope"] = self.scope - if self.string_value is not None: - body["string_value"] = self.string_value - return body - - def as_shallow_dict(self) -> dict: - """Serializes the PutSecret into a shallow dictionary of its immediate attributes.""" - body = {} - if self.bytes_value is not None: - body["bytes_value"] = self.bytes_value - if self.key is not None: - body["key"] = self.key - if self.scope is not None: - body["scope"] = self.scope - if self.string_value is not None: - body["string_value"] = self.string_value - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> PutSecret: - """Deserializes the PutSecret from a dictionary.""" - return cls( - bytes_value=d.get("bytes_value", None), - key=d.get("key", None), - scope=d.get("scope", None), - string_value=d.get("string_value", None), - ) - - @dataclass class PutSecretResponse: def as_dict(self) -> dict: @@ -1757,41 +1336,9 @@ def from_dict(cls, d: Dict[str, Any]) -> RepoPermissionsDescription: ) -@dataclass -class RepoPermissionsRequest: - access_control_list: Optional[List[RepoAccessControlRequest]] = None - - repo_id: Optional[str] = None - """The repo for which to get or manage 
permissions.""" - - def as_dict(self) -> dict: - """Serializes the RepoPermissionsRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.access_control_list: - body["access_control_list"] = [v.as_dict() for v in self.access_control_list] - if self.repo_id is not None: - body["repo_id"] = self.repo_id - return body - - def as_shallow_dict(self) -> dict: - """Serializes the RepoPermissionsRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.access_control_list: - body["access_control_list"] = self.access_control_list - if self.repo_id is not None: - body["repo_id"] = self.repo_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> RepoPermissionsRequest: - """Deserializes the RepoPermissionsRequest from a dictionary.""" - return cls( - access_control_list=_repeated_dict(d, "access_control_list", RepoAccessControlRequest), - repo_id=d.get("repo_id", None), - ) - - class ScopeBackendType(Enum): + """The types of secret scope backends in the Secret Manager. Azure KeyVault backed secret scopes + will be supported in a later release.""" AZURE_KEYVAULT = "AZURE_KEYVAULT" DATABRICKS = "DATABRICKS" @@ -1799,6 +1346,9 @@ class ScopeBackendType(Enum): @dataclass class SecretMetadata: + """The metadata about a secret. Returned when listing secrets. Does not contain the actual secret + value.""" + key: Optional[str] = None """A unique name to identify the secret.""" @@ -1831,11 +1381,15 @@ def from_dict(cls, d: Dict[str, Any]) -> SecretMetadata: @dataclass class SecretScope: + """An organizational resource for storing secrets. 
Secret scopes can be different types + (Databricks-managed, Azure KeyVault backed, etc), and ACLs can be applied to control permissions + for all secrets within a scope.""" + backend_type: Optional[ScopeBackendType] = None """The type of secret scope backend.""" keyvault_metadata: Optional[AzureKeyVaultSecretScopeMetadata] = None - """The metadata for the secret scope if the type is `AZURE_KEYVAULT`""" + """The metadata for the secret scope if the type is ``AZURE_KEYVAULT``""" name: Optional[str] = None """A unique name to identify the secret scope.""" @@ -1930,66 +1484,6 @@ def from_dict(cls, d: Dict[str, Any]) -> SparseCheckoutUpdate: return cls(patterns=d.get("patterns", None)) -@dataclass -class UpdateCredentialsRequest: - git_provider: str - """Git provider. This field is case-insensitive. The available Git providers are `gitHub`, - `bitbucketCloud`, `gitLab`, `azureDevOpsServices`, `gitHubEnterprise`, `bitbucketServer`, - `gitLabEnterpriseEdition` and `awsCodeCommit`.""" - - credential_id: Optional[int] = None - """The ID for the corresponding credential to access.""" - - git_username: Optional[str] = None - """The username or email provided with your Git provider account, depending on which provider you - are using. For GitHub, GitHub Enterprise Server, or Azure DevOps Services, either email or - username may be used. For GitLab, GitLab Enterprise Edition, email must be used. For AWS - CodeCommit, BitBucket or BitBucket Server, username must be used. For all other providers please - see your provider's Personal Access Token authentication documentation to see what is supported.""" - - personal_access_token: Optional[str] = None - """The personal access token used to authenticate to the corresponding Git provider. For certain - providers, support may exist for other types of scoped access tokens. [Learn more]. 
- - [Learn more]: https://docs.databricks.com/repos/get-access-tokens-from-git-provider.html""" - - def as_dict(self) -> dict: - """Serializes the UpdateCredentialsRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.credential_id is not None: - body["credential_id"] = self.credential_id - if self.git_provider is not None: - body["git_provider"] = self.git_provider - if self.git_username is not None: - body["git_username"] = self.git_username - if self.personal_access_token is not None: - body["personal_access_token"] = self.personal_access_token - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateCredentialsRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.credential_id is not None: - body["credential_id"] = self.credential_id - if self.git_provider is not None: - body["git_provider"] = self.git_provider - if self.git_username is not None: - body["git_username"] = self.git_username - if self.personal_access_token is not None: - body["personal_access_token"] = self.personal_access_token - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateCredentialsRequest: - """Deserializes the UpdateCredentialsRequest from a dictionary.""" - return cls( - credential_id=d.get("credential_id", None), - git_provider=d.get("git_provider", None), - git_username=d.get("git_username", None), - personal_access_token=d.get("personal_access_token", None), - ) - - @dataclass class UpdateCredentialsResponse: def as_dict(self) -> dict: @@ -2008,60 +1502,6 @@ def from_dict(cls, d: Dict[str, Any]) -> UpdateCredentialsResponse: return cls() -@dataclass -class UpdateRepoRequest: - branch: Optional[str] = None - """Branch that the local version of the repo is checked out to.""" - - repo_id: Optional[int] = None - """ID of the Git folder (repo) object in the workspace.""" - - sparse_checkout: Optional[SparseCheckoutUpdate] = None - """If specified, update the sparse 
checkout settings. The update will fail if sparse checkout is - not enabled for the repo.""" - - tag: Optional[str] = None - """Tag that the local version of the repo is checked out to. Updating the repo to a tag puts the - repo in a detached HEAD state. Before committing new changes, you must update the repo to a - branch instead of the detached HEAD.""" - - def as_dict(self) -> dict: - """Serializes the UpdateRepoRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.branch is not None: - body["branch"] = self.branch - if self.repo_id is not None: - body["repo_id"] = self.repo_id - if self.sparse_checkout: - body["sparse_checkout"] = self.sparse_checkout.as_dict() - if self.tag is not None: - body["tag"] = self.tag - return body - - def as_shallow_dict(self) -> dict: - """Serializes the UpdateRepoRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.branch is not None: - body["branch"] = self.branch - if self.repo_id is not None: - body["repo_id"] = self.repo_id - if self.sparse_checkout: - body["sparse_checkout"] = self.sparse_checkout - if self.tag is not None: - body["tag"] = self.tag - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> UpdateRepoRequest: - """Deserializes the UpdateRepoRequest from a dictionary.""" - return cls( - branch=d.get("branch", None), - repo_id=d.get("repo_id", None), - sparse_checkout=_from_dict(d, "sparse_checkout", SparseCheckoutUpdate), - tag=d.get("tag", None), - ) - - @dataclass class UpdateRepoResponse: def as_dict(self) -> dict: @@ -2311,48 +1751,6 @@ def from_dict(cls, d: Dict[str, Any]) -> WorkspaceObjectPermissionsDescription: ) -@dataclass -class WorkspaceObjectPermissionsRequest: - access_control_list: Optional[List[WorkspaceObjectAccessControlRequest]] = None - - workspace_object_id: Optional[str] = None - """The workspace object for which to get or manage permissions.""" - - workspace_object_type: Optional[str] = None - """The 
workspace object type for which to get or manage permissions.""" - - def as_dict(self) -> dict: - """Serializes the WorkspaceObjectPermissionsRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.access_control_list: - body["access_control_list"] = [v.as_dict() for v in self.access_control_list] - if self.workspace_object_id is not None: - body["workspace_object_id"] = self.workspace_object_id - if self.workspace_object_type is not None: - body["workspace_object_type"] = self.workspace_object_type - return body - - def as_shallow_dict(self) -> dict: - """Serializes the WorkspaceObjectPermissionsRequest into a shallow dictionary of its immediate attributes.""" - body = {} - if self.access_control_list: - body["access_control_list"] = self.access_control_list - if self.workspace_object_id is not None: - body["workspace_object_id"] = self.workspace_object_id - if self.workspace_object_type is not None: - body["workspace_object_type"] = self.workspace_object_type - return body - - @classmethod - def from_dict(cls, d: Dict[str, Any]) -> WorkspaceObjectPermissionsRequest: - """Deserializes the WorkspaceObjectPermissionsRequest from a dictionary.""" - return cls( - access_control_list=_repeated_dict(d, "access_control_list", WorkspaceObjectAccessControlRequest), - workspace_object_id=d.get("workspace_object_id", None), - workspace_object_type=d.get("workspace_object_type", None), - ) - - class GitCredentialsAPI: """Registers personal access token for Databricks to do operations on behalf of the user. 
@@ -2364,7 +1762,13 @@ def __init__(self, api_client): self._api = api_client def create( - self, git_provider: str, *, git_username: Optional[str] = None, personal_access_token: Optional[str] = None + self, + git_provider: str, + *, + git_username: Optional[str] = None, + is_default_for_provider: Optional[bool] = None, + name: Optional[str] = None, + personal_access_token: Optional[str] = None, ) -> CreateCredentialsResponse: """Creates a Git credential entry for the user. Only one Git credential per user is supported, so any attempts to create credentials if an entry already exists will fail. Use the PATCH endpoint to update @@ -2380,6 +1784,10 @@ def create( be used. For GitLab, GitLab Enterprise Edition, email must be used. For AWS CodeCommit, BitBucket or BitBucket Server, username must be used. For all other providers please see your provider's Personal Access Token authentication documentation to see what is supported. + :param is_default_for_provider: bool (optional) + if the credential is the default for the given provider + :param name: str (optional) + the name of the git credential, used for identification and ease of lookup :param personal_access_token: str (optional) The personal access token used to authenticate to the corresponding Git provider. For certain providers, support may exist for other types of scoped access tokens. [Learn more]. 
@@ -2393,6 +1801,10 @@ def create( body["git_provider"] = git_provider if git_username is not None: body["git_username"] = git_username + if is_default_for_provider is not None: + body["is_default_for_provider"] = is_default_for_provider + if name is not None: + body["name"] = name if personal_access_token is not None: body["personal_access_token"] = personal_access_token headers = { @@ -2455,6 +1867,8 @@ def update( git_provider: str, *, git_username: Optional[str] = None, + is_default_for_provider: Optional[bool] = None, + name: Optional[str] = None, personal_access_token: Optional[str] = None, ): """Updates the specified Git credential. @@ -2471,6 +1885,10 @@ def update( be used. For GitLab, GitLab Enterprise Edition, email must be used. For AWS CodeCommit, BitBucket or BitBucket Server, username must be used. For all other providers please see your provider's Personal Access Token authentication documentation to see what is supported. + :param is_default_for_provider: bool (optional) + if the credential is the default for the given provider + :param name: str (optional) + the name of the git credential, used for identification and ease of lookup :param personal_access_token: str (optional) The personal access token used to authenticate to the corresponding Git provider. For certain providers, support may exist for other types of scoped access tokens. [Learn more]. 
@@ -2484,6 +1902,10 @@ def update( body["git_provider"] = git_provider if git_username is not None: body["git_username"] = git_username + if is_default_for_provider is not None: + body["is_default_for_provider"] = is_default_for_provider + if name is not None: + body["name"] = name if personal_access_token is not None: body["personal_access_token"] = personal_access_token headers = { @@ -2749,17 +2171,47 @@ def create_scope( initial_manage_principal: Optional[str] = None, scope_backend_type: Optional[ScopeBackendType] = None, ): - """The scope name must consist of alphanumeric characters, dashes, underscores, and periods, and may not + """Creates a new secret scope. + + The scope name must consist of alphanumeric characters, dashes, underscores, and periods, and may not exceed 128 characters. + Example request: + + .. code:: + + { "scope": "my-simple-databricks-scope", "initial_manage_principal": "users" "scope_backend_type": + "databricks|azure_keyvault", # below is only required if scope type is azure_keyvault + "backend_azure_keyvault": { "resource_id": + "/subscriptions/xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx/resourceGroups/xxxx/providers/Microsoft.KeyVault/vaults/xxxx", + "tenant_id": "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx", "dns_name": "https://xxxx.vault.azure.net/", } } + + If ``initial_manage_principal`` is specified, the initial ACL applied to the scope is applied to the + supplied principal (user or group) with ``MANAGE`` permissions. The only supported principal for this + option is the group ``users``, which contains all users in the workspace. If + ``initial_manage_principal`` is not specified, the initial ACL with ``MANAGE`` permission applied to + the scope is assigned to the API request issuer's user identity. + + If ``scope_backend_type`` is ``azure_keyvault``, a secret scope is created with secrets from a given + Azure KeyVault. The caller must provide the keyvault_resource_id and the tenant_id for the key vault. 
+ If ``scope_backend_type`` is ``databricks`` or is unspecified, an empty secret scope is created and + stored in Databricks's own storage. + + Throws ``RESOURCE_ALREADY_EXISTS`` if a scope with the given name already exists. Throws + ``RESOURCE_LIMIT_EXCEEDED`` if maximum number of scopes in the workspace is exceeded. Throws + ``INVALID_PARAMETER_VALUE`` if the scope name is invalid. Throws ``BAD_REQUEST`` if request violated + constraints. Throws ``CUSTOMER_UNAUTHORIZED`` if normal user attempts to create a scope with name + reserved for databricks internal usage. Throws ``UNAUTHENTICATED`` if unable to verify user access + permission on Azure KeyVault + :param scope: str Scope name requested by the user. Scope names are unique. :param backend_azure_keyvault: :class:`AzureKeyVaultSecretScopeMetadata` (optional) - The metadata for the secret scope if the type is `AZURE_KEYVAULT` + The metadata for the secret scope if the type is ``AZURE_KEYVAULT`` :param initial_manage_principal: str (optional) - The principal that is initially granted `MANAGE` permission to the created scope. + The principal that is initially granted ``MANAGE`` permission to the created scope. :param scope_backend_type: :class:`ScopeBackendType` (optional) - The backend type the scope will be created with. If not specified, will default to `DATABRICKS` + The backend type the scope will be created with. If not specified, will default to ``DATABRICKS`` """ @@ -2773,7 +2225,6 @@ def create_scope( if scope_backend_type is not None: body["scope_backend_type"] = scope_backend_type.value headers = { - "Accept": "application/json", "Content-Type": "application/json", } @@ -2782,9 +2233,17 @@ def create_scope( def delete_acl(self, scope: str, principal: str): """Deletes the given ACL on the given scope. - Users must have the `MANAGE` permission to invoke this API. Throws `RESOURCE_DOES_NOT_EXIST` if no - such secret scope, principal, or ACL exists. 
Throws `PERMISSION_DENIED` if the user does not have - permission to make this API call. + Users must have the ``MANAGE`` permission to invoke this API. + + Example request: + + .. code:: + + { "scope": "my-secret-scope", "principal": "data-scientists" } + + Throws ``RESOURCE_DOES_NOT_EXIST`` if no such secret scope, principal, or ACL exists. Throws + ``PERMISSION_DENIED`` if the user does not have permission to make this API call. Throws + ``INVALID_PARAMETER_VALUE`` if the permission or principal is invalid. :param scope: str The name of the scope to remove permissions from. @@ -2799,7 +2258,6 @@ def delete_acl(self, scope: str, principal: str): if scope is not None: body["scope"] = scope headers = { - "Accept": "application/json", "Content-Type": "application/json", } @@ -2808,8 +2266,15 @@ def delete_acl(self, scope: str, principal: str): def delete_scope(self, scope: str): """Deletes a secret scope. - Throws `RESOURCE_DOES_NOT_EXIST` if the scope does not exist. Throws `PERMISSION_DENIED` if the user - does not have permission to make this API call. + Example request: + + .. code:: + + { "scope": "my-secret-scope" } + + Throws ``RESOURCE_DOES_NOT_EXIST`` if the scope does not exist. Throws ``PERMISSION_DENIED`` if the + user does not have permission to make this API call. Throws ``BAD_REQUEST`` if system user attempts to + delete internal secret scope. :param scope: str Name of the scope to delete. @@ -2820,18 +2285,25 @@ def delete_scope(self, scope: str): if scope is not None: body["scope"] = scope headers = { - "Accept": "application/json", "Content-Type": "application/json", } self._api.do("POST", "/api/2.0/secrets/scopes/delete", body=body, headers=headers) def delete_secret(self, scope: str, key: str): - """Deletes the secret stored in this secret scope. You must have `WRITE` or `MANAGE` permission on the - secret scope. + """Deletes the secret stored in this secret scope. You must have ``WRITE`` or ``MANAGE`` permission on + the Secret Scope. 
- Throws `RESOURCE_DOES_NOT_EXIST` if no such secret scope or secret exists. Throws `PERMISSION_DENIED` - if the user does not have permission to make this API call. + Example request: + + .. code:: + + { "scope": "my-secret-scope", "key": "my-secret-key" } + + Throws ``RESOURCE_DOES_NOT_EXIST`` if no such secret scope or secret exists. Throws + ``PERMISSION_DENIED`` if the user does not have permission to make this API call. Throws + ``BAD_REQUEST`` if system user attempts to delete an internal secret, or request is made against Azure + KeyVault backed scope. :param scope: str The name of the scope that contains the secret to delete. @@ -2853,11 +2325,19 @@ def delete_secret(self, scope: str, key: str): self._api.do("POST", "/api/2.0/secrets/delete", body=body, headers=headers) def get_acl(self, scope: str, principal: str) -> AclItem: - """Gets the details about the given ACL, such as the group and permission. Users must have the `MANAGE` - permission to invoke this API. + """Describes the details about the given ACL, such as the group and permission. - Throws `RESOURCE_DOES_NOT_EXIST` if no such secret scope exists. Throws `PERMISSION_DENIED` if the - user does not have permission to make this API call. + Users must have the ``MANAGE`` permission to invoke this API. + + Example response: + + .. code:: + + { "principal": "data-scientists", "permission": "READ" } + + Throws ``RESOURCE_DOES_NOT_EXIST`` if no such secret scope exists. Throws ``PERMISSION_DENIED`` if the + user does not have permission to make this API call. Throws ``INVALID_PARAMETER_VALUE`` if the + permission or principal is invalid. :param scope: str The name of the scope to fetch ACL information from. @@ -2880,20 +2360,35 @@ def get_acl(self, scope: str, principal: str) -> AclItem: return AclItem.from_dict(res) def get_secret(self, scope: str, key: str) -> GetSecretResponse: - """Gets the bytes representation of a secret value for the specified scope and key. 
+ """Gets a secret for a given key and scope. This API can only be called from the DBUtils interface. Users + need the READ permission to make this call. + + Example response: - Users need the READ permission to make this call. + .. code:: + + { "key": "my-string-key", "value": } Note that the secret value returned is in bytes. The interpretation of the bytes is determined by the caller in DBUtils and the type the data is decoded into. - Throws ``PERMISSION_DENIED`` if the user does not have permission to make this API call. Throws - ``RESOURCE_DOES_NOT_EXIST`` if no such secret or secret scope exists. + Throws ``RESOURCE_DOES_NOT_EXIST`` if no such secret or secret scope exists. Throws + ``PERMISSION_DENIED`` if the user does not have permission to make this API call. + + Note: This is explicitly an undocumented API. It also doesn't need to be supported for the /preview + prefix, because it's not a customer-facing API (i.e. only used for DBUtils SecretUtils to fetch + secrets). + + Throws ``RESOURCE_DOES_NOT_EXIST`` if no such secret scope or secret exists. Throws ``BAD_REQUEST`` if + normal user calls get secret outside of a notebook. AKV specific errors: Throws + ``INVALID_PARAMETER_VALUE`` if secret name is not alphanumeric or too long. Throws + ``PERMISSION_DENIED`` if secret manager cannot access AKV with 403 error Throws ``MALFORMED_REQUEST`` + if secret manager cannot access AKV with any other 4xx error :param scope: str - The name of the scope to fetch secret information from. + The name of the scope that contains the secret. :param key: str - The key to fetch secret for. + Name of the secret to fetch value information. :returns: :class:`GetSecretResponse` """ @@ -2911,9 +2406,18 @@ def get_secret(self, scope: str, key: str) -> GetSecretResponse: return GetSecretResponse.from_dict(res) def list_acls(self, scope: str) -> Iterator[AclItem]: - """List the ACLs for a given secret scope. Users must have the `MANAGE` permission to invoke this API. 
+ """Lists the ACLs set on the given scope. + + Users must have the ``MANAGE`` permission to invoke this API. + + Example response: - Throws `RESOURCE_DOES_NOT_EXIST` if no such secret scope exists. Throws `PERMISSION_DENIED` if the + .. code:: + + { "acls": [{ "principal": "admins", "permission": "MANAGE" },{ "principal": "data-scientists", + "permission": "READ" }] } + + Throws ``RESOURCE_DOES_NOT_EXIST`` if no such secret scope exists. Throws ``PERMISSION_DENIED`` if the user does not have permission to make this API call. :param scope: str @@ -2936,7 +2440,14 @@ def list_acls(self, scope: str) -> Iterator[AclItem]: def list_scopes(self) -> Iterator[SecretScope]: """Lists all secret scopes available in the workspace. - Throws `PERMISSION_DENIED` if the user does not have permission to make this API call. + Example response: + + .. code:: + + { "scopes": [{ "name": "my-databricks-scope", "backend_type": "DATABRICKS" },{ "name": "mount-points", + "backend_type": "DATABRICKS" }] } + + Throws ``PERMISSION_DENIED`` if the user does not have permission to make this API call. :returns: Iterator over :class:`SecretScope` @@ -2954,9 +2465,17 @@ def list_secrets(self, scope: str) -> Iterator[SecretMetadata]: """Lists the secret keys that are stored at this scope. This is a metadata-only operation; secret data cannot be retrieved using this API. Users need the READ permission to make this call. - The lastUpdatedTimestamp returned is in milliseconds since epoch. Throws `RESOURCE_DOES_NOT_EXIST` if - no such secret scope exists. Throws `PERMISSION_DENIED` if the user does not have permission to make - this API call. + Example response: + + .. code:: + + { "secrets": [ { "key": "my-string-key"", "last_updated_timestamp": "1520467595000" }, { "key": + "my-byte-key", "last_updated_timestamp": "1520467595000" }, ] } + + The lastUpdatedTimestamp returned is in milliseconds since epoch. + + Throws ``RESOURCE_DOES_NOT_EXIST`` if no such secret scope exists. 
Throws ``PERMISSION_DENIED`` if the + user does not have permission to make this API call. :param scope: str The name of the scope to list secrets within. @@ -2976,14 +2495,12 @@ def list_secrets(self, scope: str) -> Iterator[SecretMetadata]: return parsed if parsed is not None else [] def put_acl(self, scope: str, principal: str, permission: AclPermission): - """Creates or overwrites the Access Control List (ACL) associated with the given principal (user or - group) on the specified scope point. + """Creates or overwrites the ACL associated with the given principal (user or group) on the specified + scope point. In general, a user or group will use the most powerful permission available to them, and + permissions are ordered as follows: - In general, a user or group will use the most powerful permission available to them, and permissions - are ordered as follows: - - * `MANAGE` - Allowed to change ACLs, and read and write to this secret scope. * `WRITE` - Allowed to - read and write to this secret scope. * `READ` - Allowed to read this secret scope and list what + * ``MANAGE`` - Allowed to change ACLs, and read and write to this secret scope. * ``WRITE`` - Allowed + to read and write to this secret scope. * ``READ`` - Allowed to read this secret scope and list what secrets are available. Note that in general, secret values can only be read from within a command on a cluster (for example, @@ -2991,15 +2508,21 @@ def put_acl(self, scope: str, principal: str, permission: AclPermission): However, the user's permission will be applied based on who is executing the command, and they must have at least READ permission. - Users must have the `MANAGE` permission to invoke this API. + Users must have the ``MANAGE`` permission to invoke this API. + + Example request: + + .. 
code:: + + { "scope": "my-secret-scope", "principal": "data-scientists", "permission": "READ" } The principal is a user or group name corresponding to an existing Databricks principal to be granted or revoked access. - Throws `RESOURCE_DOES_NOT_EXIST` if no such secret scope exists. Throws `RESOURCE_ALREADY_EXISTS` if a - permission for the principal already exists. Throws `INVALID_PARAMETER_VALUE` if the permission or - principal is invalid. Throws `PERMISSION_DENIED` if the user does not have permission to make this API - call. + Throws ``RESOURCE_DOES_NOT_EXIST`` if no such secret scope exists. Throws ``RESOURCE_ALREADY_EXISTS`` + if a permission for the principal already exists. Throws ``INVALID_PARAMETER_VALUE`` if the permission + or principal is invalid. Throws ``PERMISSION_DENIED`` if the user does not have permission to make + this API call. :param scope: str The name of the scope to apply permissions to. @@ -3018,7 +2541,6 @@ def put_acl(self, scope: str, principal: str, permission: AclPermission): if scope is not None: body["scope"] = scope headers = { - "Accept": "application/json", "Content-Type": "application/json", } @@ -3029,19 +2551,27 @@ def put_secret( ): """Inserts a secret under the provided scope with the given name. If a secret already exists with the same name, this command overwrites the existing secret's value. The server encrypts the secret using - the secret scope's encryption settings before storing it. + the secret scope's encryption settings before storing it. You must have ``WRITE`` or ``MANAGE`` + permission on the secret scope. + + The secret key must consist of alphanumeric characters, dashes, underscores, and periods, and cannot + exceed 128 characters. The maximum allowed secret value size is 128 KB. The maximum number of secrets + in a given scope is 1000. - You must have `WRITE` or `MANAGE` permission on the secret scope. 
The secret key must consist of - alphanumeric characters, dashes, underscores, and periods, and cannot exceed 128 characters. The - maximum allowed secret value size is 128 KB. The maximum number of secrets in a given scope is 1000. + Example request: + + .. code:: + + { "scope": "my-databricks-scope", "key": "my-string-key", "string_value": "foobar" } The input fields "string_value" or "bytes_value" specify the type of the secret, which will determine the value returned when the secret value is requested. Exactly one must be specified. - Throws `RESOURCE_DOES_NOT_EXIST` if no such secret scope exists. Throws `RESOURCE_LIMIT_EXCEEDED` if - maximum number of secrets in scope is exceeded. Throws `INVALID_PARAMETER_VALUE` if the key name or - value length is invalid. Throws `PERMISSION_DENIED` if the user does not have permission to make this - API call. + Throws ``RESOURCE_DOES_NOT_EXIST`` if no such secret scope exists. Throws ``RESOURCE_LIMIT_EXCEEDED`` + if maximum number of secrets in scope is exceeded. Throws ``INVALID_PARAMETER_VALUE`` if the request + parameters are invalid. Throws ``PERMISSION_DENIED`` if the user does not have permission to make this + API call. Throws ``MALFORMED_REQUEST`` if request is incorrectly formatted or conflicting. Throws + ``BAD_REQUEST`` if request is made against Azure KeyVault backed scope. :param scope: str The name of the scope to which the secret will be associated with. 
@@ -3064,7 +2594,6 @@ def put_secret( if string_value is not None: body["string_value"] = string_value headers = { - "Accept": "application/json", "Content-Type": "application/json", } diff --git a/docs/account/billing/billable_usage.rst b/docs/account/billing/billable_usage.rst index 2851dec74..baacbee84 100644 --- a/docs/account/billing/billable_usage.rst +++ b/docs/account/billing/billable_usage.rst @@ -21,16 +21,22 @@ resp = a.billable_usage.download(start_month="2024-08", end_month="2024-09") Returns billable usage logs in CSV format for the specified account and date range. For the data - schema, see [CSV file schema]. Note that this method might take multiple minutes to complete. + schema, see: + + - AWS: [CSV file schema]. - GCP: [CSV file schema]. + + Note that this method might take multiple minutes to complete. **Warning**: Depending on the queried date range, the number of workspaces in the account, the size of the response and the internet speed of the caller, this API may hit a timeout after a few minutes. If you experience this, try to mitigate by calling the API with narrower date ranges. - [CSV file schema]: https://docs.databricks.com/administration-guide/account-settings/usage-analysis.html#schema + [CSV file schema]: https://docs.gcp.databricks.com/administration-guide/account-settings/usage-analysis.html#csv-file-schema :param start_month: str - Format: `YYYY-MM`. First month to return billable usage logs for. This field is required. + Format specification for month in the format `YYYY-MM`. This is used to specify billable usage + `start_month` and `end_month` properties. **Note**: Billable usage logs are unavailable before March + 2019 (`2019-03`). :param end_month: str Format: `YYYY-MM`. Last month to return billable usage logs for. This field is required. 
:param personal_data: bool (optional) diff --git a/docs/account/iam/service_principals.rst b/docs/account/iam/service_principals.rst index 6ec4fb814..78816845f 100644 --- a/docs/account/iam/service_principals.rst +++ b/docs/account/iam/service_principals.rst @@ -23,10 +23,7 @@ a = AccountClient() - sp_create = a.service_principals.create(active=True, display_name=f"sdk-{time.time_ns()}") - - # cleanup - a.service_principals.delete(id=sp_create.id) + spn = a.service_principals.create(display_name=f"sdk-{time.time_ns()}") Creates a new service principal in the Databricks account. diff --git a/docs/account/iam/workspace_assignment.rst b/docs/account/iam/workspace_assignment.rst index 133b16f3d..2a8043172 100644 --- a/docs/account/iam/workspace_assignment.rst +++ b/docs/account/iam/workspace_assignment.rst @@ -43,9 +43,9 @@ a = AccountClient() - workspace_id = os.environ["TEST_WORKSPACE_ID"] + workspace_id = os.environ["DUMMY_WORKSPACE_ID"] - all = a.workspace_assignment.list(list=workspace_id) + all = a.workspace_assignment.list(workspace_id=workspace_id) Get the permission assignments for the specified Databricks account and Databricks workspace. @@ -74,9 +74,9 @@ spn_id = spn.id - workspace_id = os.environ["DUMMY_WORKSPACE_ID"] + workspace_id = os.environ["TEST_WORKSPACE_ID"] - _ = a.workspace_assignment.update( + a.workspace_assignment.update( workspace_id=workspace_id, principal_id=spn_id, permissions=[iam.WorkspacePermission.USER], diff --git a/docs/account/oauth2/service_principal_secrets.rst b/docs/account/oauth2/service_principal_secrets.rst index 0317229c6..e95b779f0 100644 --- a/docs/account/oauth2/service_principal_secrets.rst +++ b/docs/account/oauth2/service_principal_secrets.rst @@ -8,7 +8,7 @@ You can use the generated secrets to obtain OAuth access tokens for a service principal, which can then be used to access Databricks Accounts and Workspace APIs. 
For more information, see [Authentication using - OAuth tokens for service principals], + OAuth tokens for service principals]. In addition, the generated secrets can be used to configure the Databricks Terraform Provider to authenticate with the service principal. For more information, see [Databricks Terraform Provider]. @@ -17,11 +17,11 @@ [Databricks Terraform Provider]: https://github.com/databricks/terraform-provider-databricks/blob/master/docs/index.md#authenticating-with-service-principal - .. py:method:: create(service_principal_id: int [, lifetime: Optional[str]]) -> CreateServicePrincipalSecretResponse + .. py:method:: create(service_principal_id: str [, lifetime: Optional[str]]) -> CreateServicePrincipalSecretResponse Create a secret for the given service principal. - :param service_principal_id: int + :param service_principal_id: str The service principal ID. :param lifetime: str (optional) The lifetime of the secret in seconds. If this parameter is not provided, the secret will have a @@ -30,11 +30,11 @@ :returns: :class:`CreateServicePrincipalSecretResponse` - .. py:method:: delete(service_principal_id: int, secret_id: str) + .. py:method:: delete(service_principal_id: str, secret_id: str) Delete a secret from the given service principal. - :param service_principal_id: int + :param service_principal_id: str The service principal ID. :param secret_id: str The secret ID. @@ -42,13 +42,14 @@ - .. py:method:: list(service_principal_id: int [, page_token: Optional[str]]) -> Iterator[SecretInfo] + .. py:method:: list(service_principal_id: str [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[SecretInfo] List all secrets associated with the given service principal. This operation only returns information about the secrets themselves and does not include the secret values. - :param service_principal_id: int + :param service_principal_id: str The service principal ID. 
+ :param page_size: int (optional) :param page_token: str (optional) An opaque page token which was the `next_page_token` in the response of the previous request to list the secrets for this service principal. Provide this token to retrieve the next page of secret diff --git a/docs/account/provisioning/credentials.rst b/docs/account/provisioning/credentials.rst index acb958c8c..e0103ea36 100644 --- a/docs/account/provisioning/credentials.rst +++ b/docs/account/provisioning/credentials.rst @@ -24,15 +24,15 @@ a = AccountClient() - role = a.credentials.create( + creds = a.credentials.create( credentials_name=f"sdk-{time.time_ns()}", aws_credentials=provisioning.CreateCredentialAwsCredentials( - sts_role=provisioning.CreateCredentialStsRole(role_arn=os.environ["TEST_CROSSACCOUNT_ARN"]) + sts_role=provisioning.CreateCredentialStsRole(role_arn=os.environ["TEST_LOGDELIVERY_ARN"]) ), ) # cleanup - a.credentials.delete(credentials_id=role.credentials_id) + a.credentials.delete(credentials_id=creds.credentials_id) Creates a Databricks credential configuration that represents cloud cross-account credentials for a specified account. Databricks uses this to set up network infrastructure properly to host Databricks diff --git a/docs/account/provisioning/storage.rst b/docs/account/provisioning/storage.rst index a72721a6d..1da53fb45 100644 --- a/docs/account/provisioning/storage.rst +++ b/docs/account/provisioning/storage.rst @@ -16,7 +16,6 @@ .. 
code-block:: - import os import time from databricks.sdk import AccountClient @@ -24,13 +23,13 @@ a = AccountClient() - storage = a.storage.create( + bucket = a.storage.create( storage_configuration_name=f"sdk-{time.time_ns()}", - root_bucket_info=provisioning.RootBucketInfo(bucket_name=os.environ["TEST_ROOT_BUCKET"]), + root_bucket_info=provisioning.RootBucketInfo(bucket_name=f"sdk-{time.time_ns()}"), ) # cleanup - a.storage.delete(storage_configuration_id=storage.storage_configuration_id) + a.storage.delete(storage_configuration_id=bucket.storage_configuration_id) Creates new storage configuration for an account, specified by ID. Uploads a storage configuration object that represents the root AWS S3 bucket in your account. Databricks stores related workspace diff --git a/docs/dbdataclasses/aibuilder.rst b/docs/dbdataclasses/aibuilder.rst index eb914574b..164dd8164 100644 --- a/docs/dbdataclasses/aibuilder.rst +++ b/docs/dbdataclasses/aibuilder.rst @@ -4,14 +4,6 @@ AI Builder These dataclasses are used in the SDK to represent API requests and responses for services in the ``databricks.sdk.service.aibuilder`` module. .. py:currentmodule:: databricks.sdk.service.aibuilder -.. autoclass:: CancelCustomLlmOptimizationRunRequest - :members: - :undoc-members: - -.. autoclass:: CreateCustomLlmRequest - :members: - :undoc-members: - .. autoclass:: CustomLlm :members: :undoc-members: @@ -20,10 +12,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: StartCustomLlmOptimizationRunRequest - :members: - :undoc-members: - .. py:class:: State States of Custom LLM optimization lifecycle. @@ -49,7 +37,3 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. autoclass:: Table :members: :undoc-members: - -.. 
autoclass:: UpdateCustomLlmRequest - :members: - :undoc-members: diff --git a/docs/dbdataclasses/apps.rst b/docs/dbdataclasses/apps.rst index bbd625c62..bfed63efb 100644 --- a/docs/dbdataclasses/apps.rst +++ b/docs/dbdataclasses/apps.rst @@ -72,14 +72,19 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: AppPermissionsRequest +.. autoclass:: AppResource :members: :undoc-members: -.. autoclass:: AppResource +.. autoclass:: AppResourceDatabase :members: :undoc-members: +.. py:class:: AppResourceDatabaseDatabasePermission + + .. py:attribute:: CAN_CONNECT_AND_CREATE + :value: "CAN_CONNECT_AND_CREATE" + .. autoclass:: AppResourceJob :members: :undoc-members: @@ -218,11 +223,3 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. autoclass:: ListAppsResponse :members: :undoc-members: - -.. autoclass:: StartAppRequest - :members: - :undoc-members: - -.. autoclass:: StopAppRequest - :members: - :undoc-members: diff --git a/docs/dbdataclasses/billing.rst b/docs/dbdataclasses/billing.rst index 3c0c350e7..60f015a7b 100644 --- a/docs/dbdataclasses/billing.rst +++ b/docs/dbdataclasses/billing.rst @@ -61,10 +61,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: CreateBillingUsageDashboardRequest - :members: - :undoc-members: - .. autoclass:: CreateBillingUsageDashboardResponse :members: :undoc-members: @@ -81,18 +77,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: CreateBudgetConfigurationRequest - :members: - :undoc-members: - .. autoclass:: CreateBudgetConfigurationResponse :members: :undoc-members: -.. autoclass:: CreateBudgetPolicyRequest - :members: - :undoc-members: - .. 
autoclass:: CreateLogDeliveryConfigurationParams :members: :undoc-members: @@ -208,18 +196,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: UpdateBudgetConfigurationRequest - :members: - :undoc-members: - .. autoclass:: UpdateBudgetConfigurationResponse :members: :undoc-members: -.. autoclass:: UpdateLogDeliveryConfigurationStatusRequest - :members: - :undoc-members: - .. py:class:: UsageDashboardType .. py:attribute:: USAGE_DASHBOARD_TYPE_GLOBAL @@ -228,10 +208,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: USAGE_DASHBOARD_TYPE_WORKSPACE :value: "USAGE_DASHBOARD_TYPE_WORKSPACE" -.. autoclass:: WrappedCreateLogDeliveryConfiguration - :members: - :undoc-members: - .. autoclass:: WrappedLogDeliveryConfiguration :members: :undoc-members: diff --git a/docs/dbdataclasses/catalog.rst b/docs/dbdataclasses/catalog.rst index 219b0a228..626db6121 100644 --- a/docs/dbdataclasses/catalog.rst +++ b/docs/dbdataclasses/catalog.rst @@ -4,18 +4,6 @@ Unity Catalog These dataclasses are used in the SDK to represent API requests and responses for services in the ``databricks.sdk.service.catalog`` module. .. py:currentmodule:: databricks.sdk.service.catalog -.. autoclass:: AccountsCreateMetastore - :members: - :undoc-members: - -.. autoclass:: AccountsCreateMetastoreAssignment - :members: - :undoc-members: - -.. autoclass:: AccountsCreateStorageCredential - :members: - :undoc-members: - .. autoclass:: AccountsMetastoreAssignment :members: :undoc-members: @@ -28,18 +16,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: AccountsUpdateMetastore - :members: - :undoc-members: - -.. autoclass:: AccountsUpdateMetastoreAssignment - :members: - :undoc-members: - -.. autoclass:: AccountsUpdateStorageCredential - :members: - :undoc-members: - .. 
autoclass:: ArtifactAllowlistInfo :members: :undoc-members: @@ -251,7 +227,7 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: ConnectionType - Next Id: 36 + Next Id: 37 .. py:attribute:: BIGQUERY :value: "BIGQUERY" @@ -317,22 +293,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: CreateCatalog - :members: - :undoc-members: - -.. autoclass:: CreateConnection - :members: - :undoc-members: - -.. autoclass:: CreateCredentialRequest - :members: - :undoc-members: - -.. autoclass:: CreateExternalLocation - :members: - :undoc-members: - .. autoclass:: CreateFunction :members: :undoc-members: @@ -344,10 +304,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: S :value: "S" -.. autoclass:: CreateFunctionRequest - :members: - :undoc-members: - .. py:class:: CreateFunctionRoutineBody Function language. When **EXTERNAL** is used, the language of the routine function should be specified in the __external_language__ field, and the __return_params__ of the function cannot be used (as **TABLE** return type is not supported), and the __sql_data_access__ field must be **NO_SQL**. @@ -386,14 +342,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: CreateMonitor - :members: - :undoc-members: - -.. autoclass:: CreateRegisteredModelRequest - :members: - :undoc-members: - .. autoclass:: CreateRequestExternalLineage :members: :undoc-members: @@ -402,22 +350,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: CreateSchema - :members: - :undoc-members: - .. autoclass:: CreateStorageCredential :members: :undoc-members: -.. autoclass:: CreateTableConstraint - :members: - :undoc-members: - -.. autoclass:: CreateVolumeRequestContent - :members: - :undoc-members: - .. 
autoclass:: CredentialDependency :members: :undoc-members: @@ -603,6 +539,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: DeleteTableConstraintResponse + :members: + :undoc-members: + .. autoclass:: DeltaRuntimePropertiesKvPairs :members: :undoc-members: @@ -664,15 +604,15 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: INHERIT :value: "INHERIT" -.. autoclass:: EnableRequest +.. autoclass:: EnableResponse :members: :undoc-members: -.. autoclass:: EnableResponse +.. autoclass:: EncryptionDetails :members: :undoc-members: -.. autoclass:: EncryptionDetails +.. autoclass:: EnvironmentSettings :members: :undoc-members: @@ -830,14 +770,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: GenerateTemporaryServiceCredentialRequest - :members: - :undoc-members: - -.. autoclass:: GenerateTemporaryTableCredentialRequest - :members: - :undoc-members: - .. autoclass:: GenerateTemporaryTableCredentialResponse :members: :undoc-members: @@ -1412,10 +1344,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: RegenerateDashboardRequest - :members: - :undoc-members: - .. autoclass:: RegenerateDashboardResponse :members: :undoc-members: @@ -1434,8 +1362,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: SecurableKind - Latest kind: TABLE_DELTA_ICEBERG_DELTASHARING = 252; Next id:253 - .. py:attribute:: TABLE_DB_STORAGE :value: "TABLE_DB_STORAGE" @@ -1675,14 +1601,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: VOLUME :value: "VOLUME" -.. autoclass:: SetArtifactAllowlist - :members: - :undoc-members: - -.. autoclass:: SetRegisteredModelAliasRequest - :members: - :undoc-members: - .. 
autoclass:: SseEncryptionDetails :members: :undoc-members: @@ -1714,6 +1632,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: CONFLUENT :value: "CONFLUENT" + .. py:attribute:: DATABRICKS + :value: "DATABRICKS" + .. py:attribute:: GOOGLE_BIGQUERY :value: "GOOGLE_BIGQUERY" @@ -1849,30 +1770,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: UpdateCatalog - :members: - :undoc-members: - .. autoclass:: UpdateCatalogWorkspaceBindingsResponse :members: :undoc-members: -.. autoclass:: UpdateConnection - :members: - :undoc-members: - -.. autoclass:: UpdateCredentialRequest - :members: - :undoc-members: - -.. autoclass:: UpdateExternalLocation - :members: - :undoc-members: - -.. autoclass:: UpdateFunction - :members: - :undoc-members: - .. autoclass:: UpdateMetastore :members: :undoc-members: @@ -1881,26 +1782,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: UpdateModelVersionRequest - :members: - :undoc-members: - -.. autoclass:: UpdateMonitor - :members: - :undoc-members: - -.. autoclass:: UpdatePermissions - :members: - :undoc-members: - .. autoclass:: UpdatePermissionsResponse :members: :undoc-members: -.. autoclass:: UpdateRegisteredModelRequest - :members: - :undoc-members: - .. autoclass:: UpdateRequestExternalLineage :members: :undoc-members: @@ -1909,38 +1794,14 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: UpdateSchema - :members: - :undoc-members: - .. autoclass:: UpdateStorageCredential :members: :undoc-members: -.. autoclass:: UpdateTableRequest - :members: - :undoc-members: - -.. autoclass:: UpdateVolumeRequestContent - :members: - :undoc-members: - -.. autoclass:: UpdateWorkspaceBindings - :members: - :undoc-members: - -.. 
autoclass:: UpdateWorkspaceBindingsParameters - :members: - :undoc-members: - .. autoclass:: UpdateWorkspaceBindingsResponse :members: :undoc-members: -.. autoclass:: ValidateCredentialRequest - :members: - :undoc-members: - .. autoclass:: ValidateCredentialResponse :members: :undoc-members: @@ -1958,10 +1819,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: SKIP :value: "SKIP" -.. autoclass:: ValidateStorageCredential - :members: - :undoc-members: - .. autoclass:: ValidateStorageCredentialResponse :members: :undoc-members: diff --git a/docs/dbdataclasses/cleanrooms.rst b/docs/dbdataclasses/cleanrooms.rst index 812ac1eae..f771b4c40 100644 --- a/docs/dbdataclasses/cleanrooms.rst +++ b/docs/dbdataclasses/cleanrooms.rst @@ -171,7 +171,3 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. autoclass:: ListCleanRoomsResponse :members: :undoc-members: - -.. autoclass:: UpdateCleanRoomRequest - :members: - :undoc-members: diff --git a/docs/dbdataclasses/compute.rst b/docs/dbdataclasses/compute.rst index 9562320c2..22b9cf41a 100644 --- a/docs/dbdataclasses/compute.rst +++ b/docs/dbdataclasses/compute.rst @@ -4,10 +4,6 @@ Compute These dataclasses are used in the SDK to represent API requests and responses for services in the ``databricks.sdk.service.compute`` module. .. py:currentmodule:: databricks.sdk.service.compute -.. autoclass:: AddInstanceProfile - :members: - :undoc-members: - .. autoclass:: AddResponse :members: :undoc-members: @@ -55,18 +51,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: SPOT_WITH_FALLBACK_AZURE :value: "SPOT_WITH_FALLBACK_AZURE" -.. autoclass:: CancelCommand - :members: - :undoc-members: - .. autoclass:: CancelResponse :members: :undoc-members: -.. autoclass:: ChangeClusterOwner - :members: - :undoc-members: - .. 
autoclass:: ChangeClusterOwnerResponse :members: :undoc-members: @@ -148,10 +136,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: ClusterPermissionsRequest - :members: - :undoc-members: - .. autoclass:: ClusterPolicyAccessControlRequest :members: :undoc-members: @@ -179,10 +163,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: ClusterPolicyPermissionsRequest - :members: - :undoc-members: - .. autoclass:: ClusterSettingsChange :members: :undoc-members: @@ -220,10 +200,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: Command - :members: - :undoc-members: - .. py:class:: CommandStatus .. py:attribute:: CANCELLED @@ -263,30 +239,14 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: CreateCluster - :members: - :undoc-members: - .. autoclass:: CreateClusterResponse :members: :undoc-members: -.. autoclass:: CreateContext - :members: - :undoc-members: - -.. autoclass:: CreateInstancePool - :members: - :undoc-members: - .. autoclass:: CreateInstancePoolResponse :members: :undoc-members: -.. autoclass:: CreatePolicy - :members: - :undoc-members: - .. autoclass:: CreatePolicyResponse :members: :undoc-members: @@ -357,26 +317,14 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: DeleteCluster - :members: - :undoc-members: - .. autoclass:: DeleteClusterResponse :members: :undoc-members: -.. autoclass:: DeleteInstancePool - :members: - :undoc-members: - .. autoclass:: DeleteInstancePoolResponse :members: :undoc-members: -.. autoclass:: DeletePolicy - :members: - :undoc-members: - .. 
autoclass:: DeletePolicyResponse :members: :undoc-members: @@ -385,10 +333,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: DestroyContext - :members: - :undoc-members: - .. autoclass:: DestroyResponse :members: :undoc-members: @@ -439,26 +383,14 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: THROUGHPUT_OPTIMIZED_HDD :value: "THROUGHPUT_OPTIMIZED_HDD" -.. autoclass:: EditCluster - :members: - :undoc-members: - .. autoclass:: EditClusterResponse :members: :undoc-members: -.. autoclass:: EditInstancePool - :members: - :undoc-members: - .. autoclass:: EditInstancePoolResponse :members: :undoc-members: -.. autoclass:: EditPolicy - :members: - :undoc-members: - .. autoclass:: EditPolicyResponse :members: :undoc-members: @@ -467,10 +399,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: EnforceClusterComplianceRequest - :members: - :undoc-members: - .. autoclass:: EnforceClusterComplianceResponse :members: :undoc-members: @@ -652,10 +580,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: GlobalInitScriptCreateRequest - :members: - :undoc-members: - .. autoclass:: GlobalInitScriptDetails :members: :undoc-members: @@ -664,10 +588,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: GlobalInitScriptUpdateRequest - :members: - :undoc-members: - .. autoclass:: InitScriptEventDetails :members: :undoc-members: @@ -705,10 +625,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: InstallLibraries - :members: - :undoc-members: - .. 
autoclass:: InstallLibrariesResponse :members: :undoc-members: @@ -779,10 +695,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: InstancePoolPermissionsRequest - :members: - :undoc-members: - .. py:class:: InstancePoolState The state of a Cluster. The current allowable state transitions are as follows: @@ -978,18 +890,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: PermanentDeleteCluster - :members: - :undoc-members: - .. autoclass:: PermanentDeleteClusterResponse :members: :undoc-members: -.. autoclass:: PinCluster - :members: - :undoc-members: - .. autoclass:: PinClusterResponse :members: :undoc-members: @@ -1010,26 +914,14 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: RemoveInstanceProfile - :members: - :undoc-members: - .. autoclass:: RemoveResponse :members: :undoc-members: -.. autoclass:: ResizeCluster - :members: - :undoc-members: - .. autoclass:: ResizeClusterResponse :members: :undoc-members: -.. autoclass:: RestartCluster - :members: - :undoc-members: - .. autoclass:: RestartClusterResponse :members: :undoc-members: @@ -1082,10 +974,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: StartCluster - :members: - :undoc-members: - .. autoclass:: StartClusterResponse :members: :undoc-members: @@ -1313,6 +1201,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: DOCKER_INVALID_OS_EXCEPTION :value: "DOCKER_INVALID_OS_EXCEPTION" + .. py:attribute:: DRIVER_DNS_RESOLUTION_FAILURE + :value: "DRIVER_DNS_RESOLUTION_FAILURE" + .. py:attribute:: DRIVER_EVICTION :value: "DRIVER_EVICTION" @@ -1662,26 +1553,14 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. 
py:attribute:: SUCCESS :value: "SUCCESS" -.. autoclass:: UninstallLibraries - :members: - :undoc-members: - .. autoclass:: UninstallLibrariesResponse :members: :undoc-members: -.. autoclass:: UnpinCluster - :members: - :undoc-members: - .. autoclass:: UnpinClusterResponse :members: :undoc-members: -.. autoclass:: UpdateCluster - :members: - :undoc-members: - .. autoclass:: UpdateClusterResource :members: :undoc-members: diff --git a/docs/dbdataclasses/dashboards.rst b/docs/dbdataclasses/dashboards.rst index 87d116dad..19cc5815d 100644 --- a/docs/dbdataclasses/dashboards.rst +++ b/docs/dbdataclasses/dashboards.rst @@ -37,10 +37,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: GenieCreateConversationMessageRequest - :members: - :undoc-members: - .. autoclass:: GenieGetMessageQueryResultResponse :members: :undoc-members: @@ -69,10 +65,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: GenieStartConversationMessageRequest - :members: - :undoc-members: - .. autoclass:: GenieStartConversationResponse :members: :undoc-members: @@ -300,14 +292,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: SUBMITTED :value: "SUBMITTED" -.. autoclass:: MigrateDashboardRequest - :members: - :undoc-members: - -.. autoclass:: PublishRequest - :members: - :undoc-members: - .. autoclass:: PublishedDashboard :members: :undoc-members: diff --git a/docs/dbdataclasses/database.rst b/docs/dbdataclasses/database.rst index 008025d7d..bc7607faf 100644 --- a/docs/dbdataclasses/database.rst +++ b/docs/dbdataclasses/database.rst @@ -77,10 +77,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: GenerateDatabaseCredentialRequest - :members: - :undoc-members: - .. 
autoclass:: ListDatabaseInstanceRolesResponse :members: :undoc-members: diff --git a/docs/dbdataclasses/files.rst b/docs/dbdataclasses/files.rst index 2b0d9845d..42be15a7b 100644 --- a/docs/dbdataclasses/files.rst +++ b/docs/dbdataclasses/files.rst @@ -4,26 +4,14 @@ File Management These dataclasses are used in the SDK to represent API requests and responses for services in the ``databricks.sdk.service.files`` module. .. py:currentmodule:: databricks.sdk.service.files -.. autoclass:: AddBlock - :members: - :undoc-members: - .. autoclass:: AddBlockResponse :members: :undoc-members: -.. autoclass:: Close - :members: - :undoc-members: - .. autoclass:: CloseResponse :members: :undoc-members: -.. autoclass:: Create - :members: - :undoc-members: - .. autoclass:: CreateDirectoryResponse :members: :undoc-members: @@ -32,10 +20,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: Delete - :members: - :undoc-members: - .. autoclass:: DeleteDirectoryResponse :members: :undoc-members: @@ -72,26 +56,14 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: MkDirs - :members: - :undoc-members: - .. autoclass:: MkDirsResponse :members: :undoc-members: -.. autoclass:: Move - :members: - :undoc-members: - .. autoclass:: MoveResponse :members: :undoc-members: -.. autoclass:: Put - :members: - :undoc-members: - .. autoclass:: PutResponse :members: :undoc-members: diff --git a/docs/dbdataclasses/iam.rst b/docs/dbdataclasses/iam.rst index a471503a7..96abd3743 100644 --- a/docs/dbdataclasses/iam.rst +++ b/docs/dbdataclasses/iam.rst @@ -94,10 +94,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: MigratePermissionsRequest - :members: - :undoc-members: - .. 
autoclass:: MigratePermissionsResponse :members: :undoc-members: @@ -110,10 +106,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: PartialUpdate - :members: - :undoc-members: - .. autoclass:: PasswordAccessControlRequest :members: :undoc-members: @@ -141,10 +133,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: PasswordPermissionsRequest - :members: - :undoc-members: - .. autoclass:: Patch :members: :undoc-members: @@ -295,26 +283,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: URN_IETF_PARAMS_SCIM_SCHEMAS_CORE_2_0_SERVICE_PRINCIPAL :value: "URN_IETF_PARAMS_SCIM_SCHEMAS_CORE_2_0_SERVICE_PRINCIPAL" -.. autoclass:: SetObjectPermissions - :members: - :undoc-members: - -.. autoclass:: UpdateObjectPermissions - :members: - :undoc-members: - .. autoclass:: UpdateResponse :members: :undoc-members: -.. autoclass:: UpdateRuleSetRequest - :members: - :undoc-members: - -.. autoclass:: UpdateWorkspaceAssignments - :members: - :undoc-members: - .. autoclass:: User :members: :undoc-members: diff --git a/docs/dbdataclasses/jobs.rst b/docs/dbdataclasses/jobs.rst index b1365d27c..91038c684 100644 --- a/docs/dbdataclasses/jobs.rst +++ b/docs/dbdataclasses/jobs.rst @@ -20,18 +20,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: CancelAllRuns - :members: - :undoc-members: - .. autoclass:: CancelAllRunsResponse :members: :undoc-members: -.. autoclass:: CancelRun - :members: - :undoc-members: - .. autoclass:: CancelRunResponse :members: :undoc-members: @@ -176,10 +168,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: CreateJob - :members: - :undoc-members: - .. 
autoclass:: CreateResponse :members: :undoc-members: @@ -254,18 +242,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: DeleteJob - :members: - :undoc-members: - .. autoclass:: DeleteResponse :members: :undoc-members: -.. autoclass:: DeleteRun - :members: - :undoc-members: - .. autoclass:: DeleteRunResponse :members: :undoc-members: @@ -274,10 +254,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: EnforcePolicyComplianceRequest - :members: - :undoc-members: - .. autoclass:: EnforcePolicyComplianceResponse :members: :undoc-members: @@ -454,10 +430,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: JobPermissionsRequest - :members: - :undoc-members: - .. autoclass:: JobRunAs :members: :undoc-members: @@ -632,18 +604,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: REPAIR :value: "REPAIR" -.. autoclass:: RepairRun - :members: - :undoc-members: - .. autoclass:: RepairRunResponse :members: :undoc-members: -.. autoclass:: ResetJob - :members: - :undoc-members: - .. autoclass:: ResetResponse :members: :undoc-members: @@ -779,10 +743,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: WAITING :value: "WAITING" -.. autoclass:: RunNow - :members: - :undoc-members: - .. autoclass:: RunNowResponse :members: :undoc-members: @@ -972,10 +932,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: IMPORT :value: "IMPORT" -.. autoclass:: SubmitRun - :members: - :undoc-members: - .. autoclass:: SubmitRunResponse :members: :undoc-members: @@ -1154,10 +1110,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: TABLE :value: "TABLE" -.. 
autoclass:: UpdateJob - :members: - :undoc-members: - .. autoclass:: UpdateResponse :members: :undoc-members: diff --git a/docs/dbdataclasses/marketplace.rst b/docs/dbdataclasses/marketplace.rst index 02e48c381..f243184c1 100644 --- a/docs/dbdataclasses/marketplace.rst +++ b/docs/dbdataclasses/marketplace.rst @@ -4,10 +4,6 @@ Marketplace These dataclasses are used in the SDK to represent API requests and responses for services in the ``databricks.sdk.service.marketplace`` module. .. py:currentmodule:: databricks.sdk.service.marketplace -.. autoclass:: AddExchangeForListingRequest - :members: - :undoc-members: - .. autoclass:: AddExchangeForListingResponse :members: :undoc-members: @@ -127,54 +123,26 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: PAID :value: "PAID" -.. autoclass:: CreateExchangeFilterRequest - :members: - :undoc-members: - .. autoclass:: CreateExchangeFilterResponse :members: :undoc-members: -.. autoclass:: CreateExchangeRequest - :members: - :undoc-members: - .. autoclass:: CreateExchangeResponse :members: :undoc-members: -.. autoclass:: CreateFileRequest - :members: - :undoc-members: - .. autoclass:: CreateFileResponse :members: :undoc-members: -.. autoclass:: CreateInstallationRequest - :members: - :undoc-members: - -.. autoclass:: CreateListingRequest - :members: - :undoc-members: - .. autoclass:: CreateListingResponse :members: :undoc-members: -.. autoclass:: CreatePersonalizationRequest - :members: - :undoc-members: - .. autoclass:: CreatePersonalizationRequestResponse :members: :undoc-members: -.. autoclass:: CreateProviderRequest - :members: - :undoc-members: - .. autoclass:: CreateProviderResponse :members: :undoc-members: @@ -535,58 +503,30 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: UpdateExchangeFilterRequest - :members: - :undoc-members: - .. 
autoclass:: UpdateExchangeFilterResponse :members: :undoc-members: -.. autoclass:: UpdateExchangeRequest - :members: - :undoc-members: - .. autoclass:: UpdateExchangeResponse :members: :undoc-members: -.. autoclass:: UpdateInstallationRequest - :members: - :undoc-members: - .. autoclass:: UpdateInstallationResponse :members: :undoc-members: -.. autoclass:: UpdateListingRequest - :members: - :undoc-members: - .. autoclass:: UpdateListingResponse :members: :undoc-members: -.. autoclass:: UpdatePersonalizationRequestRequest - :members: - :undoc-members: - .. autoclass:: UpdatePersonalizationRequestResponse :members: :undoc-members: -.. autoclass:: UpdateProviderAnalyticsDashboardRequest - :members: - :undoc-members: - .. autoclass:: UpdateProviderAnalyticsDashboardResponse :members: :undoc-members: -.. autoclass:: UpdateProviderRequest - :members: - :undoc-members: - .. autoclass:: UpdateProviderResponse :members: :undoc-members: diff --git a/docs/dbdataclasses/ml.rst b/docs/dbdataclasses/ml.rst index a1db3ebcc..55d11035a 100644 --- a/docs/dbdataclasses/ml.rst +++ b/docs/dbdataclasses/ml.rst @@ -62,10 +62,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: SYSTEM_TRANSITION :value: "SYSTEM_TRANSITION" -.. autoclass:: ApproveTransitionRequest - :members: - :undoc-members: - .. autoclass:: ApproveTransitionRequestResponse :members: :undoc-members: @@ -98,70 +94,34 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: CreateComment - :members: - :undoc-members: - .. autoclass:: CreateCommentResponse :members: :undoc-members: -.. autoclass:: CreateExperiment - :members: - :undoc-members: - .. autoclass:: CreateExperimentResponse :members: :undoc-members: -.. autoclass:: CreateForecastingExperimentRequest - :members: - :undoc-members: - .. autoclass:: CreateForecastingExperimentResponse :members: :undoc-members: -.. 
autoclass:: CreateLoggedModelRequest - :members: - :undoc-members: - .. autoclass:: CreateLoggedModelResponse :members: :undoc-members: -.. autoclass:: CreateModelRequest - :members: - :undoc-members: - .. autoclass:: CreateModelResponse :members: :undoc-members: -.. autoclass:: CreateModelVersionRequest - :members: - :undoc-members: - .. autoclass:: CreateModelVersionResponse :members: :undoc-members: -.. autoclass:: CreateRegistryWebhook - :members: - :undoc-members: - -.. autoclass:: CreateRun - :members: - :undoc-members: - .. autoclass:: CreateRunResponse :members: :undoc-members: -.. autoclass:: CreateTransitionRequest - :members: - :undoc-members: - .. autoclass:: CreateTransitionRequestResponse :members: :undoc-members: @@ -182,10 +142,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: DeleteExperiment - :members: - :undoc-members: - .. autoclass:: DeleteExperimentResponse :members: :undoc-members: @@ -214,26 +170,14 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: DeleteRun - :members: - :undoc-members: - .. autoclass:: DeleteRunResponse :members: :undoc-members: -.. autoclass:: DeleteRuns - :members: - :undoc-members: - .. autoclass:: DeleteRunsResponse :members: :undoc-members: -.. autoclass:: DeleteTag - :members: - :undoc-members: - .. autoclass:: DeleteTagResponse :members: :undoc-members: @@ -283,10 +227,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: ExperimentPermissionsRequest - :members: - :undoc-members: - .. autoclass:: ExperimentTag :members: :undoc-members: @@ -323,10 +263,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: FinalizeLoggedModelRequest - :members: - :undoc-members: - .. 
autoclass:: FinalizeLoggedModelResponse :members: :undoc-members: @@ -364,10 +300,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: GetLatestVersionsRequest - :members: - :undoc-members: - .. autoclass:: GetLatestVersionsResponse :members: :undoc-members: @@ -448,58 +380,30 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: LogBatch - :members: - :undoc-members: - .. autoclass:: LogBatchResponse :members: :undoc-members: -.. autoclass:: LogInputs - :members: - :undoc-members: - .. autoclass:: LogInputsResponse :members: :undoc-members: -.. autoclass:: LogLoggedModelParamsRequest - :members: - :undoc-members: - .. autoclass:: LogLoggedModelParamsRequestResponse :members: :undoc-members: -.. autoclass:: LogMetric - :members: - :undoc-members: - .. autoclass:: LogMetricResponse :members: :undoc-members: -.. autoclass:: LogModel - :members: - :undoc-members: - .. autoclass:: LogModelResponse :members: :undoc-members: -.. autoclass:: LogOutputsRequest - :members: - :undoc-members: - .. autoclass:: LogOutputsResponse :members: :undoc-members: -.. autoclass:: LogParam - :members: - :undoc-members: - .. autoclass:: LogParamResponse :members: :undoc-members: @@ -650,10 +554,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: TRIGGERED :value: "TRIGGERED" -.. autoclass:: PublishTableRequest - :members: - :undoc-members: - .. autoclass:: PublishTableResponse :members: :undoc-members: @@ -697,10 +597,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: RegisteredModelPermissionsRequest - :members: - :undoc-members: - .. py:class:: RegistryEmailSubscriptionType .. note:: Experimental: This entity may change or be removed in a future release without warning. 
Email subscription types for registry notifications: - `ALL_EVENTS`: Subscribed to all events. - `DEFAULT`: Default subscription type. - `SUBSCRIBED`: Subscribed to notifications. - `UNSUBSCRIBED`: Not subscribed to notifications. @@ -774,42 +670,22 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: TEST_MODE :value: "TEST_MODE" -.. autoclass:: RejectTransitionRequest - :members: - :undoc-members: - .. autoclass:: RejectTransitionRequestResponse :members: :undoc-members: -.. autoclass:: RenameModelRequest - :members: - :undoc-members: - .. autoclass:: RenameModelResponse :members: :undoc-members: -.. autoclass:: RestoreExperiment - :members: - :undoc-members: - .. autoclass:: RestoreExperimentResponse :members: :undoc-members: -.. autoclass:: RestoreRun - :members: - :undoc-members: - .. autoclass:: RestoreRunResponse :members: :undoc-members: -.. autoclass:: RestoreRuns - :members: - :undoc-members: - .. autoclass:: RestoreRunsResponse :members: :undoc-members: @@ -853,10 +729,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: SearchExperiments - :members: - :undoc-members: - .. autoclass:: SearchExperimentsResponse :members: :undoc-members: @@ -869,10 +741,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: SearchLoggedModelsRequest - :members: - :undoc-members: - .. autoclass:: SearchLoggedModelsResponse :members: :undoc-members: @@ -885,50 +753,26 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: SearchRuns - :members: - :undoc-members: - .. autoclass:: SearchRunsResponse :members: :undoc-members: -.. autoclass:: SetExperimentTag - :members: - :undoc-members: - .. autoclass:: SetExperimentTagResponse :members: :undoc-members: -.. 
autoclass:: SetLoggedModelTagsRequest - :members: - :undoc-members: - .. autoclass:: SetLoggedModelTagsResponse :members: :undoc-members: -.. autoclass:: SetModelTagRequest - :members: - :undoc-members: - .. autoclass:: SetModelTagResponse :members: :undoc-members: -.. autoclass:: SetModelVersionTagRequest - :members: - :undoc-members: - .. autoclass:: SetModelVersionTagResponse :members: :undoc-members: -.. autoclass:: SetTag - :members: - :undoc-members: - .. autoclass:: SetTagResponse :members: :undoc-members: @@ -948,18 +792,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: READY :value: "READY" -.. autoclass:: TestRegistryWebhookRequest - :members: - :undoc-members: - .. autoclass:: TestRegistryWebhookResponse :members: :undoc-members: -.. autoclass:: TransitionModelVersionStageDatabricks - :members: - :undoc-members: - .. autoclass:: TransitionRequest :members: :undoc-members: @@ -968,46 +804,22 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: UpdateComment - :members: - :undoc-members: - .. autoclass:: UpdateCommentResponse :members: :undoc-members: -.. autoclass:: UpdateExperiment - :members: - :undoc-members: - .. autoclass:: UpdateExperimentResponse :members: :undoc-members: -.. autoclass:: UpdateModelRequest - :members: - :undoc-members: - .. autoclass:: UpdateModelResponse :members: :undoc-members: -.. autoclass:: UpdateModelVersionRequest - :members: - :undoc-members: - .. autoclass:: UpdateModelVersionResponse :members: :undoc-members: -.. autoclass:: UpdateRegistryWebhook - :members: - :undoc-members: - -.. autoclass:: UpdateRun - :members: - :undoc-members: - .. 
autoclass:: UpdateRunResponse :members: :undoc-members: diff --git a/docs/dbdataclasses/oauth2.rst b/docs/dbdataclasses/oauth2.rst index 4097add9e..00c961155 100644 --- a/docs/dbdataclasses/oauth2.rst +++ b/docs/dbdataclasses/oauth2.rst @@ -4,26 +4,14 @@ OAuth These dataclasses are used in the SDK to represent API requests and responses for services in the ``databricks.sdk.service.oauth2`` module. .. py:currentmodule:: databricks.sdk.service.oauth2 -.. autoclass:: CreateCustomAppIntegration - :members: - :undoc-members: - .. autoclass:: CreateCustomAppIntegrationOutput :members: :undoc-members: -.. autoclass:: CreatePublishedAppIntegration - :members: - :undoc-members: - .. autoclass:: CreatePublishedAppIntegrationOutput :members: :undoc-members: -.. autoclass:: CreateServicePrincipalSecretRequest - :members: - :undoc-members: - .. autoclass:: CreateServicePrincipalSecretResponse :members: :undoc-members: @@ -88,18 +76,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: UpdateCustomAppIntegration - :members: - :undoc-members: - .. autoclass:: UpdateCustomAppIntegrationOutput :members: :undoc-members: -.. autoclass:: UpdatePublishedAppIntegration - :members: - :undoc-members: - .. autoclass:: UpdatePublishedAppIntegrationOutput :members: :undoc-members: diff --git a/docs/dbdataclasses/pipelines.rst b/docs/dbdataclasses/pipelines.rst index 5fa26e596..436967f5c 100644 --- a/docs/dbdataclasses/pipelines.rst +++ b/docs/dbdataclasses/pipelines.rst @@ -4,10 +4,6 @@ Delta Live Tables These dataclasses are used in the SDK to represent API requests and responses for services in the ``databricks.sdk.service.pipelines`` module. .. py:currentmodule:: databricks.sdk.service.pipelines -.. autoclass:: CreatePipeline - :members: - :undoc-members: - .. 
autoclass:: CreatePipelineResponse :members: :undoc-members: @@ -56,10 +52,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: BUNDLE :value: "BUNDLE" -.. autoclass:: EditPipeline - :members: - :undoc-members: - .. autoclass:: EditPipelineResponse :members: :undoc-members: @@ -130,11 +122,18 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: IngestionPipelineDefinitionTableSpecificConfigQueryBasedConnectorConfig + :members: + :undoc-members: + .. py:class:: IngestionSourceType .. py:attribute:: BIGQUERY :value: "BIGQUERY" + .. py:attribute:: CONFLUENCE + :value: "CONFLUENCE" + .. py:attribute:: DYNAMICS365 :value: "DYNAMICS365" @@ -144,6 +143,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: MANAGED_POSTGRESQL :value: "MANAGED_POSTGRESQL" + .. py:attribute:: META_MARKETING + :value: "META_MARKETING" + .. py:attribute:: MYSQL :value: "MYSQL" @@ -285,10 +287,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: PipelinePermissionsRequest - :members: - :undoc-members: - .. autoclass:: PipelineSpec :members: :undoc-members: @@ -374,10 +372,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: StartUpdate - :members: - :undoc-members: - .. py:class:: StartUpdateCause What triggered this update. diff --git a/docs/dbdataclasses/provisioning.rst b/docs/dbdataclasses/provisioning.rst index 4c909d488..69e237d5f 100644 --- a/docs/dbdataclasses/provisioning.rst +++ b/docs/dbdataclasses/provisioning.rst @@ -28,38 +28,14 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: CreateCredentialRequest - :members: - :undoc-members: - .. 
autoclass:: CreateCredentialStsRole :members: :undoc-members: -.. autoclass:: CreateCustomerManagedKeyRequest - :members: - :undoc-members: - .. autoclass:: CreateGcpKeyInfo :members: :undoc-members: -.. autoclass:: CreateNetworkRequest - :members: - :undoc-members: - -.. autoclass:: CreateStorageConfigurationRequest - :members: - :undoc-members: - -.. autoclass:: CreateVpcEndpointRequest - :members: - :undoc-members: - -.. autoclass:: CreateWorkspaceRequest - :members: - :undoc-members: - .. autoclass:: Credential :members: :undoc-members: @@ -225,14 +201,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: UpdateWorkspaceRequest - :members: - :undoc-members: - -.. autoclass:: UpsertPrivateAccessSettingsRequest - :members: - :undoc-members: - .. autoclass:: VpcEndpoint :members: :undoc-members: diff --git a/docs/dbdataclasses/serving.rst b/docs/dbdataclasses/serving.rst index 6c1bc106d..b242ee2b2 100644 --- a/docs/dbdataclasses/serving.rst +++ b/docs/dbdataclasses/serving.rst @@ -130,14 +130,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: CreatePtEndpointRequest - :members: - :undoc-members: - -.. autoclass:: CreateServingEndpoint - :members: - :undoc-members: - .. autoclass:: CustomProviderConfig :members: :undoc-members: @@ -223,10 +215,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: ExternalFunctionRequest - :members: - :undoc-members: - .. py:class:: ExternalFunctionRequestHttpMethod .. py:attribute:: DELETE @@ -321,10 +309,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: PatchServingEndpointTags - :members: - :undoc-members: - .. 
autoclass:: PayloadTable :members: :undoc-members: @@ -337,26 +321,14 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: PutAiGatewayRequest - :members: - :undoc-members: - .. autoclass:: PutAiGatewayResponse :members: :undoc-members: -.. autoclass:: PutRequest - :members: - :undoc-members: - .. autoclass:: PutResponse :members: :undoc-members: -.. autoclass:: QueryEndpointInput - :members: - :undoc-members: - .. autoclass:: QueryEndpointResponse :members: :undoc-members: @@ -515,10 +487,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: ServingEndpointPermissionsRequest - :members: - :undoc-members: - .. py:class:: ServingModelWorkloadType Please keep this in sync with with workload types in InferenceEndpointEntities.scala @@ -542,10 +510,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: UpdateProvisionedThroughputEndpointConfigRequest - :members: - :undoc-members: - .. autoclass:: V1ResponseChoiceElement :members: :undoc-members: diff --git a/docs/dbdataclasses/settings.rst b/docs/dbdataclasses/settings.rst index f120095b6..59383f091 100644 --- a/docs/dbdataclasses/settings.rst +++ b/docs/dbdataclasses/settings.rst @@ -168,10 +168,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: CreateIpAccessList - :members: - :undoc-members: - .. autoclass:: CreateIpAccessListResponse :members: :undoc-members: @@ -180,14 +176,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: CreateNotificationDestinationRequest - :members: - :undoc-members: - -.. autoclass:: CreateOboTokenRequest - :members: - :undoc-members: - .. 
autoclass:: CreateOboTokenResponse :members: :undoc-members: @@ -196,10 +184,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: CreateTokenRequest - :members: - :undoc-members: - .. autoclass:: CreateTokenResponse :members: :undoc-members: @@ -241,6 +225,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: DefaultWarehouseId + :members: + :undoc-members: + .. autoclass:: DeleteAccountIpAccessEnableResponse :members: :undoc-members: @@ -261,6 +249,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: DeleteDefaultWarehouseIdResponse + :members: + :undoc-members: + .. autoclass:: DeleteDisableLegacyAccessResponse :members: :undoc-members: @@ -506,10 +498,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: ExchangeTokenRequest - :members: - :undoc-members: - .. autoclass:: ExchangeTokenResponse :members: :undoc-members: @@ -708,10 +696,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: ReplaceIpAccessList - :members: - :undoc-members: - .. autoclass:: ReplaceResponse :members: :undoc-members: @@ -732,10 +716,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: RevokeTokenRequest - :members: - :undoc-members: - .. autoclass:: RevokeTokenResponse :members: :undoc-members: @@ -787,10 +767,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: TokenPermissionsRequest - :members: - :undoc-members: - .. py:class:: TokenType The type of token request. As of now, only `AZURE_ACTIVE_DIRECTORY_TOKEN` is supported. 
@@ -810,94 +786,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: AZURE_ACTIVE_DIRECTORY_TOKEN :value: "AZURE_ACTIVE_DIRECTORY_TOKEN" -.. autoclass:: UpdateAccountIpAccessEnableRequest - :members: - :undoc-members: - -.. autoclass:: UpdateAibiDashboardEmbeddingAccessPolicySettingRequest - :members: - :undoc-members: - -.. autoclass:: UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest - :members: - :undoc-members: - -.. autoclass:: UpdateAutomaticClusterUpdateSettingRequest - :members: - :undoc-members: - -.. autoclass:: UpdateComplianceSecurityProfileSettingRequest - :members: - :undoc-members: - -.. autoclass:: UpdateCspEnablementAccountSettingRequest - :members: - :undoc-members: - -.. autoclass:: UpdateDashboardEmailSubscriptionsRequest - :members: - :undoc-members: - -.. autoclass:: UpdateDefaultNamespaceSettingRequest - :members: - :undoc-members: - -.. autoclass:: UpdateDisableLegacyAccessRequest - :members: - :undoc-members: - -.. autoclass:: UpdateDisableLegacyDbfsRequest - :members: - :undoc-members: - -.. autoclass:: UpdateDisableLegacyFeaturesRequest - :members: - :undoc-members: - -.. autoclass:: UpdateEnableExportNotebookRequest - :members: - :undoc-members: - -.. autoclass:: UpdateEnableNotebookTableClipboardRequest - :members: - :undoc-members: - -.. autoclass:: UpdateEnableResultsDownloadingRequest - :members: - :undoc-members: - -.. autoclass:: UpdateEnhancedSecurityMonitoringSettingRequest - :members: - :undoc-members: - -.. autoclass:: UpdateEsmEnablementAccountSettingRequest - :members: - :undoc-members: - -.. autoclass:: UpdateIpAccessList - :members: - :undoc-members: - -.. autoclass:: UpdateLlmProxyPartnerPoweredAccountRequest - :members: - :undoc-members: - -.. autoclass:: UpdateLlmProxyPartnerPoweredEnforceRequest - :members: - :undoc-members: - -.. autoclass:: UpdateLlmProxyPartnerPoweredWorkspaceRequest - :members: - :undoc-members: - -.. 
autoclass:: UpdateNotificationDestinationRequest - :members: - :undoc-members: - -.. autoclass:: UpdatePersonalComputeSettingRequest - :members: - :undoc-members: - .. autoclass:: UpdatePrivateEndpointRule :members: :undoc-members: @@ -906,14 +794,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: UpdateRestrictWorkspaceAdminsSettingRequest - :members: - :undoc-members: - -.. autoclass:: UpdateSqlResultsDownloadRequest - :members: - :undoc-members: - .. autoclass:: WorkspaceNetworkOption :members: :undoc-members: diff --git a/docs/dbdataclasses/sharing.rst b/docs/dbdataclasses/sharing.rst index cd1cc8b92..b94fbee44 100644 --- a/docs/dbdataclasses/sharing.rst +++ b/docs/dbdataclasses/sharing.rst @@ -90,18 +90,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: VARIANT :value: "VARIANT" -.. autoclass:: CreateProvider - :members: - :undoc-members: - -.. autoclass:: CreateRecipient - :members: - :undoc-members: - -.. autoclass:: CreateShare - :members: - :undoc-members: - .. autoclass:: DeleteResponse :members: :undoc-members: @@ -394,10 +382,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: RotateRecipientToken - :members: - :undoc-members: - .. autoclass:: SecurablePropertiesKvPairs :members: :undoc-members: @@ -497,6 +481,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: TableInternalAttributesSharedTableType + .. py:attribute:: DELTA_ICEBERG_TABLE + :value: "DELTA_ICEBERG_TABLE" + .. py:attribute:: DIRECTORY_BASED_TABLE :value: "DIRECTORY_BASED_TABLE" @@ -515,22 +502,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: VIEW :value: "VIEW" -.. autoclass:: UpdateProvider - :members: - :undoc-members: - -.. 
autoclass:: UpdateRecipient - :members: - :undoc-members: - -.. autoclass:: UpdateShare - :members: - :undoc-members: - -.. autoclass:: UpdateSharePermissions - :members: - :undoc-members: - .. autoclass:: UpdateSharePermissionsResponse :members: :undoc-members: diff --git a/docs/dbdataclasses/sql.rst b/docs/dbdataclasses/sql.rst index 22ec2a6ca..8afa33192 100644 --- a/docs/dbdataclasses/sql.rst +++ b/docs/dbdataclasses/sql.rst @@ -282,42 +282,18 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: NOT_EQUAL :value: "NOT_EQUAL" -.. autoclass:: CreateAlert - :members: - :undoc-members: - -.. autoclass:: CreateAlertRequest - :members: - :undoc-members: - .. autoclass:: CreateAlertRequestAlert :members: :undoc-members: -.. autoclass:: CreateQueryRequest - :members: - :undoc-members: - .. autoclass:: CreateQueryRequestQuery :members: :undoc-members: -.. autoclass:: CreateQueryVisualizationsLegacyRequest - :members: - :undoc-members: - -.. autoclass:: CreateVisualizationRequest - :members: - :undoc-members: - .. autoclass:: CreateVisualizationRequestVisualization :members: :undoc-members: -.. autoclass:: CreateWarehouseRequest - :members: - :undoc-members: - .. py:class:: CreateWarehouseRequestWarehouseType Warehouse type: `PRO` or `CLASSIC`. If you want to use serverless compute, you must set to `PRO` and also set the field `enable_serverless_compute` to `true`. @@ -335,10 +311,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: CreateWidget - :members: - :undoc-members: - .. autoclass:: CronSchedule :members: :undoc-members: @@ -347,18 +319,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: DashboardEditContent - :members: - :undoc-members: - .. autoclass:: DashboardOptions :members: :undoc-members: -.. 
autoclass:: DashboardPostContent - :members: - :undoc-members: - .. autoclass:: DataSource :members: :undoc-members: @@ -463,14 +427,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: INLINE :value: "INLINE" -.. autoclass:: EditAlert - :members: - :undoc-members: - -.. autoclass:: EditWarehouseRequest - :members: - :undoc-members: - .. py:class:: EditWarehouseRequestWarehouseType Warehouse type: `PRO` or `CLASSIC`. If you want to use serverless compute, you must set to `PRO` and also set the field `enable_serverless_compute` to `true`. @@ -529,10 +485,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: ExecuteStatementRequest - :members: - :undoc-members: - .. py:class:: ExecuteStatementRequestOnWaitTimeout When `wait_timeout > 0s`, the call will block up to the specified time. If the statement execution doesn't finish within this time, `on_wait_timeout` determines whether the execution should continue or be canceled. When set to `CONTINUE`, the statement execution continues asynchronously and the call returns a statement ID which can be used for polling with :method:statementexecution/getStatement. When set to `CANCEL`, the statement execution is canceled and the call returns with a `CANCELED` state. @@ -805,10 +757,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: QueryEditContent - :members: - :undoc-members: - .. autoclass:: QueryFilter :members: :undoc-members: @@ -833,10 +781,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: QueryPostContent - :members: - :undoc-members: - .. py:class:: QueryStatementType .. py:attribute:: ALTER @@ -1025,18 +969,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. 
py:attribute:: WORKSPACE_TEMPORARILY_UNAVAILABLE :value: "WORKSPACE_TEMPORARILY_UNAVAILABLE" -.. autoclass:: SetRequest - :members: - :undoc-members: - .. autoclass:: SetResponse :members: :undoc-members: -.. autoclass:: SetWorkspaceWarehouseConfigRequest - :members: - :undoc-members: - .. py:class:: SetWorkspaceWarehouseConfigRequestSecurityPolicy Security policy for warehouses @@ -1437,22 +1373,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: TransferOwnershipRequest - :members: - :undoc-members: - -.. autoclass:: UpdateAlertRequest - :members: - :undoc-members: - .. autoclass:: UpdateAlertRequestAlert :members: :undoc-members: -.. autoclass:: UpdateQueryRequest - :members: - :undoc-members: - .. autoclass:: UpdateQueryRequestQuery :members: :undoc-members: @@ -1461,18 +1385,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: UpdateVisualizationRequest - :members: - :undoc-members: - .. autoclass:: UpdateVisualizationRequestVisualization :members: :undoc-members: -.. autoclass:: UpdateWidgetRequest - :members: - :undoc-members: - .. autoclass:: User :members: :undoc-members: @@ -1520,10 +1436,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: WarehousePermissionsRequest - :members: - :undoc-members: - .. autoclass:: WarehouseTypePair :members: :undoc-members: diff --git a/docs/dbdataclasses/vectorsearch.rst b/docs/dbdataclasses/vectorsearch.rst index 5433f2673..d68e083d9 100644 --- a/docs/dbdataclasses/vectorsearch.rst +++ b/docs/dbdataclasses/vectorsearch.rst @@ -8,14 +8,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: CreateEndpoint - :members: - :undoc-members: - -.. autoclass:: CreateVectorIndexRequest - :members: - :undoc-members: - .. 
autoclass:: CustomTag :members: :undoc-members: @@ -115,10 +107,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: PatchEndpointBudgetPolicyRequest - :members: - :undoc-members: - .. autoclass:: PatchEndpointBudgetPolicyResponse :members: :undoc-members: @@ -133,14 +121,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: TRIGGERED :value: "TRIGGERED" -.. autoclass:: QueryVectorIndexNextPageRequest - :members: - :undoc-members: - -.. autoclass:: QueryVectorIndexRequest - :members: - :undoc-members: - .. autoclass:: QueryVectorIndexResponse :members: :undoc-members: @@ -153,10 +133,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: ScanVectorIndexRequest - :members: - :undoc-members: - .. autoclass:: ScanVectorIndexResponse :members: :undoc-members: @@ -169,10 +145,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: UpdateEndpointCustomTagsRequest - :members: - :undoc-members: - .. autoclass:: UpdateEndpointCustomTagsResponse :members: :undoc-members: @@ -192,10 +164,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: SUCCESS :value: "SUCCESS" -.. autoclass:: UpsertDataVectorIndexRequest - :members: - :undoc-members: - .. autoclass:: UpsertDataVectorIndexResponse :members: :undoc-members: diff --git a/docs/dbdataclasses/workspace.rst b/docs/dbdataclasses/workspace.rst index e20f4ac7d..db35f519d 100644 --- a/docs/dbdataclasses/workspace.rst +++ b/docs/dbdataclasses/workspace.rst @@ -10,6 +10,8 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: AclPermission + The ACL permission levels for Secret ACLs applied to secret scopes. + .. 
py:attribute:: MANAGE :value: "MANAGE" @@ -23,26 +25,14 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: CreateCredentialsRequest - :members: - :undoc-members: - .. autoclass:: CreateCredentialsResponse :members: :undoc-members: -.. autoclass:: CreateRepoRequest - :members: - :undoc-members: - .. autoclass:: CreateRepoResponse :members: :undoc-members: -.. autoclass:: CreateScope - :members: - :undoc-members: - .. autoclass:: CreateScopeResponse :members: :undoc-members: @@ -51,14 +41,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: Delete - :members: - :undoc-members: - -.. autoclass:: DeleteAcl - :members: - :undoc-members: - .. autoclass:: DeleteAclResponse :members: :undoc-members: @@ -75,18 +57,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: DeleteScope - :members: - :undoc-members: - .. autoclass:: DeleteScopeResponse :members: :undoc-members: -.. autoclass:: DeleteSecret - :members: - :undoc-members: - .. autoclass:: DeleteSecretResponse :members: :undoc-members: @@ -140,10 +114,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: Import - :members: - :undoc-members: - .. py:class:: ImportFormat The format for workspace import and export. @@ -213,10 +183,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: Mkdirs - :members: - :undoc-members: - .. autoclass:: MkdirsResponse :members: :undoc-members: @@ -247,18 +213,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: REPO :value: "REPO" -.. autoclass:: PutAcl - :members: - :undoc-members: - .. autoclass:: PutAclResponse :members: :undoc-members: -.. 
autoclass:: PutSecret - :members: - :undoc-members: - .. autoclass:: PutSecretResponse :members: :undoc-members: @@ -303,12 +261,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: RepoPermissionsRequest - :members: - :undoc-members: - .. py:class:: ScopeBackendType + The types of secret scope backends in the Secret Manager. Azure KeyVault backed secret scopes will be supported in a later release. + .. py:attribute:: AZURE_KEYVAULT :value: "AZURE_KEYVAULT" @@ -331,18 +287,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: UpdateCredentialsRequest - :members: - :undoc-members: - .. autoclass:: UpdateCredentialsResponse :members: :undoc-members: -.. autoclass:: UpdateRepoRequest - :members: - :undoc-members: - .. autoclass:: UpdateRepoResponse :members: :undoc-members: @@ -382,7 +330,3 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. autoclass:: WorkspaceObjectPermissionsDescription :members: :undoc-members: - -.. autoclass:: WorkspaceObjectPermissionsRequest - :members: - :undoc-members: diff --git a/docs/workspace/catalog/catalogs.rst b/docs/workspace/catalog/catalogs.rst index 9a18ede8a..c486ab0d1 100644 --- a/docs/workspace/catalog/catalogs.rst +++ b/docs/workspace/catalog/catalogs.rst @@ -24,10 +24,10 @@ w = WorkspaceClient() - created = w.catalogs.create(name=f"sdk-{time.time_ns()}") + created_catalog = w.catalogs.create(name=f"sdk-{time.time_ns()}") # cleanup - w.catalogs.delete(name=created.name, force=True) + w.catalogs.delete(name=created_catalog.name, force=True) Creates a new catalog instance in the parent metastore if the caller is a metastore admin or has the **CREATE_CATALOG** privilege. 
diff --git a/docs/workspace/catalog/connections.rst b/docs/workspace/catalog/connections.rst index ec7f39be2..75da50675 100644 --- a/docs/workspace/catalog/connections.rst +++ b/docs/workspace/catalog/connections.rst @@ -13,7 +13,7 @@ objects based on cloud storage. Users may create different types of connections with each connection having a unique set of configuration options to support credential management and other settings. - .. py:method:: create(name: str, connection_type: ConnectionType, options: Dict[str, str] [, comment: Optional[str], properties: Optional[Dict[str, str]], read_only: Optional[bool]]) -> ConnectionInfo + .. py:method:: create(name: str, connection_type: ConnectionType, options: Dict[str, str] [, comment: Optional[str], environment_settings: Optional[EnvironmentSettings], properties: Optional[Dict[str, str]], read_only: Optional[bool]]) -> ConnectionInfo Usage: @@ -54,6 +54,8 @@ A map of key-value properties attached to the securable. :param comment: str (optional) User-provided free-form text description. + :param environment_settings: :class:`EnvironmentSettings` (optional) + [Create,Update:OPT] Connection environment settings as EnvironmentSettings object. :param properties: Dict[str,str] (optional) A map of key-value properties attached to the securable. :param read_only: bool (optional) @@ -146,7 +148,7 @@ :returns: Iterator over :class:`ConnectionInfo` - .. py:method:: update(name: str, options: Dict[str, str] [, new_name: Optional[str], owner: Optional[str]]) -> ConnectionInfo + .. py:method:: update(name: str, options: Dict[str, str] [, environment_settings: Optional[EnvironmentSettings], new_name: Optional[str], owner: Optional[str]]) -> ConnectionInfo Usage: @@ -189,6 +191,8 @@ Name of the connection. :param options: Dict[str,str] A map of key-value properties attached to the securable. 
+ :param environment_settings: :class:`EnvironmentSettings` (optional) + [Create,Update:OPT] Connection environment settings as EnvironmentSettings object. :param new_name: str (optional) New name for the connection. :param owner: str (optional) diff --git a/docs/workspace/catalog/external_lineage.rst b/docs/workspace/catalog/external_lineage.rst index e6369c41f..d8a852f5f 100644 --- a/docs/workspace/catalog/external_lineage.rst +++ b/docs/workspace/catalog/external_lineage.rst @@ -37,11 +37,16 @@ direction. :param object_info: :class:`ExternalLineageObject` - The object to query external lineage relationship on. + The object to query external lineage relationships for. Since this field is a query parameter, + please flatten the nested fields. For example, if the object is a table, the query parameter should + look like: `object_info.table.name=main.sales.customers` :param lineage_direction: :class:`LineageDirection` The lineage direction to filter on. :param page_size: int (optional) + Specifies the maximum number of external lineage relationships to return in a single response. The + value must be less than or equal to 1000. :param page_token: str (optional) + Opaque pagination token to go to next page based on previous query. 
:returns: Iterator over :class:`ExternalLineageInfo` diff --git a/docs/workspace/catalog/external_locations.rst b/docs/workspace/catalog/external_locations.rst index 624fe1958..e7c1fd75e 100644 --- a/docs/workspace/catalog/external_locations.rst +++ b/docs/workspace/catalog/external_locations.rst @@ -30,22 +30,20 @@ w = WorkspaceClient() - storage_credential = w.storage_credentials.create( + credential = w.storage_credentials.create( name=f"sdk-{time.time_ns()}", aws_iam_role=catalog.AwsIamRoleRequest(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]), - comment="created via SDK", ) - external_location = w.external_locations.create( + created = w.external_locations.create( name=f"sdk-{time.time_ns()}", - credential_name=storage_credential.name, - comment="created via SDK", - url="s3://" + os.environ["TEST_BUCKET"] + "/" + f"sdk-{time.time_ns()}", + credential_name=credential.name, + url="s3://%s/%s" % (os.environ["TEST_BUCKET"], f"sdk-{time.time_ns()}"), ) # cleanup - w.storage_credentials.delete(name=storage_credential.name) - w.external_locations.delete(name=external_location.name) + w.storage_credentials.delete(name=credential.name) + w.external_locations.delete(name=created.name) Creates a new external location entry in the metastore. 
The caller must be a metastore admin or have the **CREATE_EXTERNAL_LOCATION** privilege on both the metastore and the associated storage @@ -106,20 +104,20 @@ credential = w.storage_credentials.create( name=f"sdk-{time.time_ns()}", - aws_iam_role=catalog.AwsIamRole(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]), + aws_iam_role=catalog.AwsIamRoleRequest(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]), ) created = w.external_locations.create( name=f"sdk-{time.time_ns()}", credential_name=credential.name, - url=f's3://{os.environ["TEST_BUCKET"]}/sdk-{time.time_ns()}', + url="s3://%s/%s" % (os.environ["TEST_BUCKET"], f"sdk-{time.time_ns()}"), ) - _ = w.external_locations.get(get=created.name) + _ = w.external_locations.get(name=created.name) # cleanup - w.storage_credentials.delete(delete=credential.name) - w.external_locations.delete(delete=created.name) + w.storage_credentials.delete(name=credential.name) + w.external_locations.delete(name=created.name) Gets an external location from the metastore. The caller must be either a metastore admin, the owner of the external location, or a user that has some privilege on the external location. @@ -141,11 +139,10 @@ .. code-block:: from databricks.sdk import WorkspaceClient - from databricks.sdk.service import catalog w = WorkspaceClient() - all = w.external_locations.list(catalog.ListExternalLocationsRequest()) + all = w.external_locations.list() Gets an array of external locations (__ExternalLocationInfo__ objects) from the metastore. The caller must be a metastore admin, the owner of the external location, or a user that has some privilege on diff --git a/docs/workspace/catalog/external_metadata.rst b/docs/workspace/catalog/external_metadata.rst index 79d3520aa..6c3f3912c 100644 --- a/docs/workspace/catalog/external_metadata.rst +++ b/docs/workspace/catalog/external_metadata.rst @@ -50,7 +50,10 @@ the array. 
:param page_size: int (optional) + Specifies the maximum number of external metadata objects to return in a single response. The value + must be less than or equal to 1000. :param page_token: str (optional) + Opaque pagination token to go to next page based on previous query. :returns: Iterator over :class:`ExternalMetadata` diff --git a/docs/workspace/catalog/storage_credentials.rst b/docs/workspace/catalog/storage_credentials.rst index 5fe0bb70f..2d4dc160c 100644 --- a/docs/workspace/catalog/storage_credentials.rst +++ b/docs/workspace/catalog/storage_credentials.rst @@ -30,13 +30,13 @@ w = WorkspaceClient() - created = w.storage_credentials.create( + credential = w.storage_credentials.create( name=f"sdk-{time.time_ns()}", - aws_iam_role=catalog.AwsIamRole(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]), + aws_iam_role=catalog.AwsIamRoleRequest(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]), ) # cleanup - w.storage_credentials.delete(delete=created.name) + w.storage_credentials.delete(name=credential.name) Creates a new storage credential. @@ -98,13 +98,13 @@ created = w.storage_credentials.create( name=f"sdk-{time.time_ns()}", - aws_iam_role=catalog.AwsIamRoleRequest(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]), + aws_iam_role=catalog.AwsIamRole(role_arn=os.environ["TEST_METASTORE_DATA_ACCESS_ARN"]), ) - by_name = w.storage_credentials.get(name=created.name) + by_name = w.storage_credentials.get(get=created.name) # cleanup - w.storage_credentials.delete(name=created.name) + w.storage_credentials.delete(delete=created.name) Gets a storage credential from the metastore. The caller must be a metastore admin, the owner of the storage credential, or have some permission on the storage credential. @@ -123,10 +123,11 @@ .. 
code-block:: from databricks.sdk import WorkspaceClient + from databricks.sdk.service import catalog w = WorkspaceClient() - all = w.storage_credentials.list() + all = w.storage_credentials.list(catalog.ListStorageCredentialsRequest()) Gets an array of storage credentials (as __StorageCredentialInfo__ objects). The array is limited to only those storage credentials the caller has permission to access. If the caller is a metastore diff --git a/docs/workspace/cleanrooms/clean_room_assets.rst b/docs/workspace/cleanrooms/clean_room_assets.rst index c9e35af33..fa5772a76 100644 --- a/docs/workspace/cleanrooms/clean_room_assets.rst +++ b/docs/workspace/cleanrooms/clean_room_assets.rst @@ -15,7 +15,8 @@ access the asset. Typically, you should use a group as the clean room owner. :param clean_room_name: str - Name of the clean room. + The name of the clean room this asset belongs to. This is an output-only field to ensure proper + resource identification. :param asset: :class:`CleanRoomAsset` :returns: :class:`CleanRoomAsset` diff --git a/docs/workspace/compute/clusters.rst b/docs/workspace/compute/clusters.rst index d46b8ecd0..db78626ff 100644 --- a/docs/workspace/compute/clusters.rst +++ b/docs/workspace/compute/clusters.rst @@ -647,11 +647,10 @@ .. code-block:: from databricks.sdk import WorkspaceClient - from databricks.sdk.service import compute w = WorkspaceClient() - all = w.clusters.list(compute.ListClustersRequest()) + nodes = w.clusters.list_node_types() Return information about all pinned and active clusters, and all clusters terminated within the last 30 days. Clusters terminated prior to this period are not included. diff --git a/docs/workspace/dashboards/lakeview_embedded.rst b/docs/workspace/dashboards/lakeview_embedded.rst index d1631bddb..80eb5bc48 100644 --- a/docs/workspace/dashboards/lakeview_embedded.rst +++ b/docs/workspace/dashboards/lakeview_embedded.rst @@ -8,12 +8,7 @@ .. 
py:method:: get_published_dashboard_token_info(dashboard_id: str [, external_value: Optional[str], external_viewer_id: Optional[str]]) -> GetPublishedDashboardTokenInfoResponse - Get a required authorization details and scopes of a published dashboard to mint an OAuth token. The - `authorization_details` can be enriched to apply additional restriction. - - Example: Adding the following `authorization_details` object to downscope the viewer permission to - specific table ``` { type: "unity_catalog_privileges", privileges: ["SELECT"], object_type: "TABLE", - object_full_path: "main.default.testdata" } ``` + Get a required authorization details and scopes of a published dashboard to mint an OAuth token. :param dashboard_id: str UUID identifying the published dashboard. diff --git a/docs/workspace/iam/current_user.rst b/docs/workspace/iam/current_user.rst index 2f95213e2..b2390ce63 100644 --- a/docs/workspace/iam/current_user.rst +++ b/docs/workspace/iam/current_user.rst @@ -17,7 +17,7 @@ w = WorkspaceClient() - me2 = w.current_user.me() + me = w.current_user.me() Get details about the current method caller's identity. diff --git a/docs/workspace/iam/groups.rst b/docs/workspace/iam/groups.rst index 737939095..764a81ab9 100644 --- a/docs/workspace/iam/groups.rst +++ b/docs/workspace/iam/groups.rst @@ -69,6 +69,9 @@ group = w.groups.create(display_name=f"sdk-{time.time_ns()}") w.groups.delete(id=group.id) + + # cleanup + w.groups.delete(id=group.id) Deletes a group from the Databricks workspace. 
diff --git a/docs/workspace/iam/permissions.rst b/docs/workspace/iam/permissions.rst index f166ecc2d..3933a1577 100644 --- a/docs/workspace/iam/permissions.rst +++ b/docs/workspace/iam/permissions.rst @@ -44,15 +44,16 @@ obj = w.workspace.get_status(path=notebook_path) - levels = w.permissions.get_permission_levels(request_object_type="notebooks", request_object_id="%d" % (obj.object_id)) + _ = w.permissions.get(request_object_type="notebooks", request_object_id="%d" % (obj.object_id)) Gets the permissions of an object. Objects can inherit permissions from their parent objects or root object. :param request_object_type: str - The type of the request object. Can be one of the following: alerts, authorization, clusters, - cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files, instance-pools, - jobs, notebooks, pipelines, queries, registered-models, repos, serving-endpoints, or warehouses. + The type of the request object. Can be one of the following: alerts, alertsv2, authorization, + clusters, cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files, + instance-pools, jobs, notebooks, pipelines, queries, registered-models, repos, serving-endpoints, or + warehouses. :param request_object_id: str The id of the request object. @@ -81,9 +82,10 @@ Gets the permission levels that a user can have on an object. :param request_object_type: str - The type of the request object. Can be one of the following: alerts, authorization, clusters, - cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files, instance-pools, - jobs, notebooks, pipelines, queries, registered-models, repos, serving-endpoints, or warehouses. + The type of the request object. 
Can be one of the following: alerts, alertsv2, authorization, + clusters, cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files, + instance-pools, jobs, notebooks, pipelines, queries, registered-models, repos, serving-endpoints, or + warehouses. :param request_object_id: str :returns: :class:`GetPermissionLevelsResponse` @@ -128,9 +130,10 @@ object. :param request_object_type: str - The type of the request object. Can be one of the following: alerts, authorization, clusters, - cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files, instance-pools, - jobs, notebooks, pipelines, queries, registered-models, repos, serving-endpoints, or warehouses. + The type of the request object. Can be one of the following: alerts, alertsv2, authorization, + clusters, cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files, + instance-pools, jobs, notebooks, pipelines, queries, registered-models, repos, serving-endpoints, or + warehouses. :param request_object_id: str The id of the request object. :param access_control_list: List[:class:`AccessControlRequest`] (optional) @@ -144,9 +147,10 @@ root object. :param request_object_type: str - The type of the request object. Can be one of the following: alerts, authorization, clusters, - cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files, instance-pools, - jobs, notebooks, pipelines, queries, registered-models, repos, serving-endpoints, or warehouses. + The type of the request object. Can be one of the following: alerts, alertsv2, authorization, + clusters, cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files, + instance-pools, jobs, notebooks, pipelines, queries, registered-models, repos, serving-endpoints, or + warehouses. :param request_object_id: str The id of the request object. 
:param access_control_list: List[:class:`AccessControlRequest`] (optional) diff --git a/docs/workspace/index.rst b/docs/workspace/index.rst index d7ecc203e..16ff69dc8 100644 --- a/docs/workspace/index.rst +++ b/docs/workspace/index.rst @@ -19,6 +19,7 @@ These APIs are available from WorkspaceClient jobs/index marketplace/index ml/index + oauth2/index pipelines/index qualitymonitorv2/index serving/index diff --git a/docs/workspace/jobs/jobs.rst b/docs/workspace/jobs/jobs.rst index 903cf3611..d4fdba07f 100644 --- a/docs/workspace/jobs/jobs.rst +++ b/docs/workspace/jobs/jobs.rst @@ -233,6 +233,9 @@ :param queue: :class:`QueueSettings` (optional) The queue settings of the job. :param run_as: :class:`JobRunAs` (optional) + The user or service principal that the job runs as, if specified in the request. This field + indicates the explicit configuration of `run_as` for the job. To find the value in all cases, + explicit or implicit, use `run_as_user_name`. :param schedule: :class:`CronSchedule` (optional) An optional periodic schedule for this job. The default behavior is that the job only runs when triggered by clicking “Run Now” in the Jobs UI or sending an API request to `runNow`. @@ -350,21 +353,23 @@ w.clusters.ensure_cluster_is_running(os.environ["DATABRICKS_CLUSTER_ID"]) and os.environ["DATABRICKS_CLUSTER_ID"] ) - run = w.jobs.submit( - run_name=f"sdk-{time.time_ns()}", + created_job = w.jobs.create( + name=f"sdk-{time.time_ns()}", tasks=[ - jobs.SubmitTask( + jobs.Task( + description="test", existing_cluster_id=cluster_id, notebook_task=jobs.NotebookTask(notebook_path=notebook_path), - task_key=f"sdk-{time.time_ns()}", + task_key="test", + timeout_seconds=0, ) ], - ).result() + ) - output = w.jobs.get_run_output(run_id=run.tasks[0].run_id) + by_id = w.jobs.get(job_id=created_job.job_id) # cleanup - w.jobs.delete_run(run_id=run.run_id) + w.jobs.delete(job_id=created_job.job_id) Get a single job. 
diff --git a/docs/workspace/ml/model_registry.rst b/docs/workspace/ml/model_registry.rst index 2d34256e4..98d803a63 100644 --- a/docs/workspace/ml/model_registry.rst +++ b/docs/workspace/ml/model_registry.rst @@ -90,7 +90,7 @@ w = WorkspaceClient() - model = w.model_registry.create_model(name=f"sdk-{time.time_ns()}") + created = w.model_registry.create_model(name=f"sdk-{time.time_ns()}") Creates a new registered model with the name specified in the request body. Throws `RESOURCE_ALREADY_EXISTS` if a registered model with the given name exists. @@ -120,7 +120,7 @@ model = w.model_registry.create_model(name=f"sdk-{time.time_ns()}") - mv = w.model_registry.create_model_version(name=model.registered_model.name, source="dbfs:/tmp") + created = w.model_registry.create_model_version(name=model.registered_model.name, source="dbfs:/tmp") Creates a model version. @@ -734,14 +734,13 @@ w = WorkspaceClient() - model = w.model_registry.create_model(name=f"sdk-{time.time_ns()}") + created = w.model_registry.create_model(name=f"sdk-{time.time_ns()}") - created = w.model_registry.create_model_version(name=model.registered_model.name, source="dbfs:/tmp") + model = w.model_registry.get_model(name=created.registered_model.name) - w.model_registry.update_model_version( + w.model_registry.update_model( + name=model.registered_model_databricks.name, description=f"sdk-{time.time_ns()}", - name=created.model_version.name, - version=created.model_version.version, ) Updates a registered model. diff --git a/docs/workspace/oauth2/index.rst b/docs/workspace/oauth2/index.rst new file mode 100644 index 000000000..9595aa2e3 --- /dev/null +++ b/docs/workspace/oauth2/index.rst @@ -0,0 +1,10 @@ + +OAuth +===== + +Configure OAuth 2.0 application registrations for Databricks + +.. 
toctree:: + :maxdepth: 1 + + service_principal_secrets_proxy \ No newline at end of file diff --git a/docs/workspace/oauth2/service_principal_secrets_proxy.rst b/docs/workspace/oauth2/service_principal_secrets_proxy.rst new file mode 100644 index 000000000..929c8fa72 --- /dev/null +++ b/docs/workspace/oauth2/service_principal_secrets_proxy.rst @@ -0,0 +1,63 @@ +``w.service_principal_secrets_proxy``: Service Principal Secrets Proxy +====================================================================== +.. currentmodule:: databricks.sdk.service.oauth2 + +.. py:class:: ServicePrincipalSecretsProxyAPI + + These APIs enable administrators to manage service principal secrets at the workspace level. To use these + APIs, the service principal must be first added to the current workspace. + + You can use the generated secrets to obtain OAuth access tokens for a service principal, which can then be + used to access Databricks Accounts and Workspace APIs. For more information, see [Authentication using + OAuth tokens for service principals]. + + In addition, the generated secrets can be used to configure the Databricks Terraform Provider to + authenticate with the service principal. For more information, see [Databricks Terraform Provider]. + + [Authentication using OAuth tokens for service principals]: https://docs.databricks.com/dev-tools/authentication-oauth.html + [Databricks Terraform Provider]: https://github.com/databricks/terraform-provider-databricks/blob/master/docs/index.md#authenticating-with-service-principal + + + .. py:method:: create(service_principal_id: str [, lifetime: Optional[str]]) -> CreateServicePrincipalSecretResponse + + Create a secret for the given service principal. + + :param service_principal_id: str + The service principal ID. + :param lifetime: str (optional) + The lifetime of the secret in seconds. If this parameter is not provided, the secret will have a + default lifetime of 730 days (63072000s). 
+ + :returns: :class:`CreateServicePrincipalSecretResponse` + + + .. py:method:: delete(service_principal_id: str, secret_id: str) + + Delete a secret from the given service principal. + + :param service_principal_id: str + The service principal ID. + :param secret_id: str + The secret ID. + + + + + .. py:method:: list(service_principal_id: str [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[SecretInfo] + + List all secrets associated with the given service principal. This operation only returns information + about the secrets themselves and does not include the secret values. + + :param service_principal_id: str + The service principal ID. + :param page_size: int (optional) + :param page_token: str (optional) + An opaque page token which was the `next_page_token` in the response of the previous request to list + the secrets for this service principal. Provide this token to retrieve the next page of secret + entries. When providing a `page_token`, all other parameters provided to the request must match the + previous request. To list all of the secrets for a service principal, it is necessary to continue + requesting pages of entries until the response contains no `next_page_token`. Note that the number + of entries returned must not be used to determine when the listing is complete. + + :returns: Iterator over :class:`SecretInfo` + \ No newline at end of file diff --git a/docs/workspace/settings/default_warehouse_id.rst b/docs/workspace/settings/default_warehouse_id.rst new file mode 100644 index 000000000..4da9ff8c0 --- /dev/null +++ b/docs/workspace/settings/default_warehouse_id.rst @@ -0,0 +1,57 @@ +``w.settings.default_warehouse_id``: Default Warehouse Id +========================================================= +.. currentmodule:: databricks.sdk.service.settings + +.. py:class:: DefaultWarehouseIdAPI + + Warehouse to be selected by default for users in this workspace. Covers SQL workloads only and can be + overridden by users. + + .. 
py:method:: delete( [, etag: Optional[str]]) -> DeleteDefaultWarehouseIdResponse + + Reverts the Default Warehouse Id setting to its default value. + + :param etag: str (optional) + etag used for versioning. The response is at least as fresh as the eTag provided. This is used for + optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting + each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern + to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET + request, and pass it with the DELETE request to identify the rule set version you are deleting. + + :returns: :class:`DeleteDefaultWarehouseIdResponse` + + + .. py:method:: get( [, etag: Optional[str]]) -> DefaultWarehouseId + + Gets the Default Warehouse Id setting. + + :param etag: str (optional) + etag used for versioning. The response is at least as fresh as the eTag provided. This is used for + optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting + each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern + to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET + request, and pass it with the DELETE request to identify the rule set version you are deleting. + + :returns: :class:`DefaultWarehouseId` + + + .. py:method:: update(allow_missing: bool, setting: DefaultWarehouseId, field_mask: str) -> DefaultWarehouseId + + Updates the Default Warehouse Id setting. + + :param allow_missing: bool + This should always be set to true for Settings API. Added for AIP compliance. + :param setting: :class:`DefaultWarehouseId` + :param field_mask: str + The field mask must be a single string, with multiple fields separated by commas (no spaces). The + field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g., + `author.given_name`). 
Specification of elements in sequence or map fields is not allowed, as only + the entire collection field can be specified. Field names must exactly match the resource field + names. + + A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the + fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API + changes in the future. + + :returns: :class:`DefaultWarehouseId` + \ No newline at end of file diff --git a/docs/workspace/settings/index.rst b/docs/workspace/settings/index.rst index 6c0858b7a..f75201654 100644 --- a/docs/workspace/settings/index.rst +++ b/docs/workspace/settings/index.rst @@ -17,6 +17,7 @@ Manage security settings for Accounts and Workspaces compliance_security_profile dashboard_email_subscriptions default_namespace + default_warehouse_id disable_legacy_access disable_legacy_dbfs enable_export_notebook diff --git a/docs/workspace/settings/settings.rst b/docs/workspace/settings/settings.rst index 783f55b6f..c52c2e601 100644 --- a/docs/workspace/settings/settings.rst +++ b/docs/workspace/settings/settings.rst @@ -53,6 +53,12 @@ This setting requires a restart of clusters and SQL warehouses to take effect. Additionally, the default namespace only applies when using Unity Catalog-enabled compute. + .. py:property:: default_warehouse_id + :type: DefaultWarehouseIdAPI + + Warehouse to be selected by default for users in this workspace. Covers SQL workloads only and can be + overridden by users. + .. py:property:: disable_legacy_access :type: DisableLegacyAccessAPI diff --git a/docs/workspace/sharing/providers.rst b/docs/workspace/sharing/providers.rst index fd81e1b24..1a7c88de9 100644 --- a/docs/workspace/sharing/providers.rst +++ b/docs/workspace/sharing/providers.rst @@ -101,12 +101,25 @@ .. 
code-block:: + import time + from databricks.sdk import WorkspaceClient - from databricks.sdk.service import sharing w = WorkspaceClient() - all = w.providers.list(sharing.ListProvidersRequest()) + public_share_recipient = """{ + "shareCredentialsVersion":1, + "bearerToken":"dapiabcdefghijklmonpqrstuvwxyz", + "endpoint":"https://sharing.delta.io/delta-sharing/" + } + """ + + created = w.providers.create(name=f"sdk-{time.time_ns()}", recipient_profile_str=public_share_recipient) + + shares = w.providers.list_shares(name=created.name) + + # cleanup + w.providers.delete(name=created.name) Gets an array of available authentication providers. The caller must either be a metastore admin or the owner of the providers. Providers not owned by the caller are not included in the response. There diff --git a/docs/workspace/sql/dashboards.rst b/docs/workspace/sql/dashboards.rst index e2849976f..fdb12a174 100644 --- a/docs/workspace/sql/dashboards.rst +++ b/docs/workspace/sql/dashboards.rst @@ -10,44 +10,6 @@ to create a new one. Dashboards can be scheduled using the `sql_task` type of the Jobs API, e.g. :method:jobs/create. - .. py:method:: create(name: str [, dashboard_filters_enabled: Optional[bool], is_favorite: Optional[bool], parent: Optional[str], run_as_role: Optional[RunAsRole], tags: Optional[List[str]]]) -> Dashboard - - - Usage: - - .. code-block:: - - import time - - from databricks.sdk import WorkspaceClient - - w = WorkspaceClient() - - created = w.dashboards.create(name=f"sdk-{time.time_ns()}") - - # cleanup - w.dashboards.delete(dashboard_id=created.id) - - Creates a new dashboard object. Only the name parameter is required in the POST request JSON body. - Other fields can be included when duplicating dashboards with this API. Databricks does not recommend - designing dashboards exclusively using this API.', - - :param name: str - The title of this dashboard that appears in list views and at the top of the dashboard page. 
- :param dashboard_filters_enabled: bool (optional) - Indicates whether the dashboard filters are enabled - :param is_favorite: bool (optional) - Indicates whether this dashboard object should appear in the current user's favorites list. - :param parent: str (optional) - The identifier of the workspace folder containing the object. - :param run_as_role: :class:`RunAsRole` (optional) - Sets the **Run as** role for the object. Must be set to one of `"viewer"` (signifying "run as - viewer" behavior) or `"owner"` (signifying "run as owner" behavior) - :param tags: List[str] (optional) - - :returns: :class:`Dashboard` - - .. py:method:: delete(dashboard_id: str) diff --git a/docs/workspace/sql/queries.rst b/docs/workspace/sql/queries.rst index f0081b3f2..0dfb63fbf 100644 --- a/docs/workspace/sql/queries.rst +++ b/docs/workspace/sql/queries.rst @@ -29,7 +29,7 @@ display_name=f"sdk-{time.time_ns()}", warehouse_id=srcs[0].warehouse_id, description="test query from Go SDK", - query_text="SHOW TABLES", + query_text="SELECT 1", ) ) diff --git a/docs/workspace/workspace/git_credentials.rst b/docs/workspace/workspace/git_credentials.rst index 3569abd56..2dd9451c2 100644 --- a/docs/workspace/workspace/git_credentials.rst +++ b/docs/workspace/workspace/git_credentials.rst @@ -10,7 +10,7 @@ [more info]: https://docs.databricks.com/repos/get-access-tokens-from-git-provider.html - .. py:method:: create(git_provider: str [, git_username: Optional[str], personal_access_token: Optional[str]]) -> CreateCredentialsResponse + .. py:method:: create(git_provider: str [, git_username: Optional[str], is_default_for_provider: Optional[bool], name: Optional[str], personal_access_token: Optional[str]]) -> CreateCredentialsResponse Usage: @@ -40,6 +40,10 @@ be used. For GitLab, GitLab Enterprise Edition, email must be used. For AWS CodeCommit, BitBucket or BitBucket Server, username must be used. 
For all other providers please see your provider's Personal Access Token authentication documentation to see what is supported. + :param is_default_for_provider: bool (optional) + if the credential is the default for the given provider + :param name: str (optional) + the name of the git credential, used for identification and ease of lookup :param personal_access_token: str (optional) The personal access token used to authenticate to the corresponding Git provider. For certain providers, support may exist for other types of scoped access tokens. [Learn more]. @@ -104,7 +108,7 @@ :returns: Iterator over :class:`CredentialInfo` - .. py:method:: update(credential_id: int, git_provider: str [, git_username: Optional[str], personal_access_token: Optional[str]]) + .. py:method:: update(credential_id: int, git_provider: str [, git_username: Optional[str], is_default_for_provider: Optional[bool], name: Optional[str], personal_access_token: Optional[str]]) Usage: @@ -143,6 +147,10 @@ be used. For GitLab, GitLab Enterprise Edition, email must be used. For AWS CodeCommit, BitBucket or BitBucket Server, username must be used. For all other providers please see your provider's Personal Access Token authentication documentation to see what is supported. + :param is_default_for_provider: bool (optional) + if the credential is the default for the given provider + :param name: str (optional) + the name of the git credential, used for identification and ease of lookup :param personal_access_token: str (optional) The personal access token used to authenticate to the corresponding Git provider. For certain providers, support may exist for other types of scoped access tokens. [Learn more]. 
diff --git a/docs/workspace/workspace/secrets.rst b/docs/workspace/workspace/secrets.rst index 239b3f003..245554d32 100644 --- a/docs/workspace/workspace/secrets.rst +++ b/docs/workspace/workspace/secrets.rst @@ -37,17 +37,47 @@ w.secrets.delete_secret(scope=scope_name, key=key_name) w.secrets.delete_scope(scope=scope_name) + Creates a new secret scope. + The scope name must consist of alphanumeric characters, dashes, underscores, and periods, and may not exceed 128 characters. + Example request: + + .. code:: + + { "scope": "my-simple-databricks-scope", "initial_manage_principal": "users" "scope_backend_type": + "databricks|azure_keyvault", # below is only required if scope type is azure_keyvault + "backend_azure_keyvault": { "resource_id": + "/subscriptions/xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx/resourceGroups/xxxx/providers/Microsoft.KeyVault/vaults/xxxx", + "tenant_id": "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx", "dns_name": "https://xxxx.vault.azure.net/", } } + + If ``initial_manage_principal`` is specified, the initial ACL applied to the scope is applied to the + supplied principal (user or group) with ``MANAGE`` permissions. The only supported principal for this + option is the group ``users``, which contains all users in the workspace. If + ``initial_manage_principal`` is not specified, the initial ACL with ``MANAGE`` permission applied to + the scope is assigned to the API request issuer's user identity. + + If ``scope_backend_type`` is ``azure_keyvault``, a secret scope is created with secrets from a given + Azure KeyVault. The caller must provide the keyvault_resource_id and the tenant_id for the key vault. + If ``scope_backend_type`` is ``databricks`` or is unspecified, an empty secret scope is created and + stored in Databricks's own storage. + + Throws ``RESOURCE_ALREADY_EXISTS`` if a scope with the given name already exists. Throws + ``RESOURCE_LIMIT_EXCEEDED`` if maximum number of scopes in the workspace is exceeded. 
Throws + ``INVALID_PARAMETER_VALUE`` if the scope name is invalid. Throws ``BAD_REQUEST`` if request violated + constraints. Throws ``CUSTOMER_UNAUTHORIZED`` if normal user attempts to create a scope with name + reserved for databricks internal usage. Throws ``UNAUTHENTICATED`` if unable to verify user access + permission on Azure KeyVault + :param scope: str Scope name requested by the user. Scope names are unique. :param backend_azure_keyvault: :class:`AzureKeyVaultSecretScopeMetadata` (optional) - The metadata for the secret scope if the type is `AZURE_KEYVAULT` + The metadata for the secret scope if the type is ``AZURE_KEYVAULT`` :param initial_manage_principal: str (optional) - The principal that is initially granted `MANAGE` permission to the created scope. + The principal that is initially granted ``MANAGE`` permission to the created scope. :param scope_backend_type: :class:`ScopeBackendType` (optional) - The backend type the scope will be created with. If not specified, will default to `DATABRICKS` + The backend type the scope will be created with. If not specified, will default to ``DATABRICKS`` @@ -56,9 +86,17 @@ Deletes the given ACL on the given scope. - Users must have the `MANAGE` permission to invoke this API. Throws `RESOURCE_DOES_NOT_EXIST` if no - such secret scope, principal, or ACL exists. Throws `PERMISSION_DENIED` if the user does not have - permission to make this API call. + Users must have the ``MANAGE`` permission to invoke this API. + + Example request: + + .. code:: + + { "scope": "my-secret-scope", "principal": "data-scientists" } + + Throws ``RESOURCE_DOES_NOT_EXIST`` if no such secret scope, principal, or ACL exists. Throws + ``PERMISSION_DENIED`` if the user does not have permission to make this API call. Throws + ``INVALID_PARAMETER_VALUE`` if the permission or principal is invalid. :param scope: str The name of the scope to remove permissions from. @@ -72,8 +110,15 @@ Deletes a secret scope. 
- Throws `RESOURCE_DOES_NOT_EXIST` if the scope does not exist. Throws `PERMISSION_DENIED` if the user - does not have permission to make this API call. + Example request: + + .. code:: + + { "scope": "my-secret-scope" } + + Throws ``RESOURCE_DOES_NOT_EXIST`` if the scope does not exist. Throws ``PERMISSION_DENIED`` if the + user does not have permission to make this API call. Throws ``BAD_REQUEST`` if system user attempts to + delete internal secret scope. :param scope: str Name of the scope to delete. @@ -83,11 +128,19 @@ .. py:method:: delete_secret(scope: str, key: str) - Deletes the secret stored in this secret scope. You must have `WRITE` or `MANAGE` permission on the - secret scope. + Deletes the secret stored in this secret scope. You must have ``WRITE`` or ``MANAGE`` permission on + the Secret Scope. + + Example request: - Throws `RESOURCE_DOES_NOT_EXIST` if no such secret scope or secret exists. Throws `PERMISSION_DENIED` - if the user does not have permission to make this API call. + .. code:: + + { "scope": "my-secret-scope", "key": "my-secret-key" } + + Throws ``RESOURCE_DOES_NOT_EXIST`` if no such secret scope or secret exists. Throws + ``PERMISSION_DENIED`` if the user does not have permission to make this API call. Throws + ``BAD_REQUEST`` if system user attempts to delete an internal secret, or request is made against Azure + KeyVault backed scope. :param scope: str The name of the scope that contains the secret to delete. @@ -99,11 +152,19 @@ .. py:method:: get_acl(scope: str, principal: str) -> AclItem - Gets the details about the given ACL, such as the group and permission. Users must have the `MANAGE` - permission to invoke this API. + Describes the details about the given ACL, such as the group and permission. - Throws `RESOURCE_DOES_NOT_EXIST` if no such secret scope exists. Throws `PERMISSION_DENIED` if the - user does not have permission to make this API call. + Users must have the ``MANAGE`` permission to invoke this API. 
+ + Example response: + + .. code:: + + { "principal": "data-scientists", "permission": "READ" } + + Throws ``RESOURCE_DOES_NOT_EXIST`` if no such secret scope exists. Throws ``PERMISSION_DENIED`` if the + user does not have permission to make this API call. Throws ``INVALID_PARAMETER_VALUE`` if the + permission or principal is invalid. :param scope: str The name of the scope to fetch ACL information from. @@ -115,20 +176,35 @@ .. py:method:: get_secret(scope: str, key: str) -> GetSecretResponse - Gets the bytes representation of a secret value for the specified scope and key. + Gets a secret for a given key and scope. This API can only be called from the DBUtils interface. Users + need the READ permission to make this call. + + Example response: + + .. code:: - Users need the READ permission to make this call. + { "key": "my-string-key", "value": } Note that the secret value returned is in bytes. The interpretation of the bytes is determined by the caller in DBUtils and the type the data is decoded into. - Throws ``PERMISSION_DENIED`` if the user does not have permission to make this API call. Throws - ``RESOURCE_DOES_NOT_EXIST`` if no such secret or secret scope exists. + Throws ``RESOURCE_DOES_NOT_EXIST`` if no such secret or secret scope exists. Throws + ``PERMISSION_DENIED`` if the user does not have permission to make this API call. + + Note: This is explicitly an undocumented API. It also doesn't need to be supported for the /preview + prefix, because it's not a customer-facing API (i.e. only used for DBUtils SecretUtils to fetch + secrets). + + Throws ``RESOURCE_DOES_NOT_EXIST`` if no such secret scope or secret exists. Throws ``BAD_REQUEST`` if + normal user calls get secret outside of a notebook. AKV specific errors: Throws + ``INVALID_PARAMETER_VALUE`` if secret name is not alphanumeric or too long. 
Throws + ``PERMISSION_DENIED`` if secret manager cannot access AKV with 403 error Throws ``MALFORMED_REQUEST`` + if secret manager cannot access AKV with any other 4xx error :param scope: str - The name of the scope to fetch secret information from. + The name of the scope that contains the secret. :param key: str - The key to fetch secret for. + Name of the secret to fetch value information. :returns: :class:`GetSecretResponse` @@ -158,9 +234,18 @@ w.secrets.delete_secret(scope=scope_name, key=key_name) w.secrets.delete_scope(scope=scope_name) - List the ACLs for a given secret scope. Users must have the `MANAGE` permission to invoke this API. + Lists the ACLs set on the given scope. + + Users must have the ``MANAGE`` permission to invoke this API. + + Example response: + + .. code:: + + { "acls": [{ "principal": "admins", "permission": "MANAGE" },{ "principal": "data-scientists", + "permission": "READ" }] } - Throws `RESOURCE_DOES_NOT_EXIST` if no such secret scope exists. Throws `PERMISSION_DENIED` if the + Throws ``RESOURCE_DOES_NOT_EXIST`` if no such secret scope exists. Throws ``PERMISSION_DENIED`` if the user does not have permission to make this API call. :param scope: str @@ -184,7 +269,14 @@ Lists all secret scopes available in the workspace. - Throws `PERMISSION_DENIED` if the user does not have permission to make this API call. + Example response: + + .. code:: + + { "scopes": [{ "name": "my-databricks-scope", "backend_type": "DATABRICKS" },{ "name": "mount-points", + "backend_type": "DATABRICKS" }] } + + Throws ``PERMISSION_DENIED`` if the user does not have permission to make this API call. :returns: Iterator over :class:`SecretScope` @@ -218,9 +310,17 @@ Lists the secret keys that are stored at this scope. This is a metadata-only operation; secret data cannot be retrieved using this API. Users need the READ permission to make this call. - The lastUpdatedTimestamp returned is in milliseconds since epoch. 
Throws `RESOURCE_DOES_NOT_EXIST` if - no such secret scope exists. Throws `PERMISSION_DENIED` if the user does not have permission to make - this API call. + Example response: + + .. code:: + + { "secrets": [ { "key": "my-string-key"", "last_updated_timestamp": "1520467595000" }, { "key": + "my-byte-key", "last_updated_timestamp": "1520467595000" }, ] } + + The lastUpdatedTimestamp returned is in milliseconds since epoch. + + Throws ``RESOURCE_DOES_NOT_EXIST`` if no such secret scope exists. Throws ``PERMISSION_DENIED`` if the + user does not have permission to make this API call. :param scope: str The name of the scope to list secrets within. @@ -261,14 +361,12 @@ w.secrets.delete_secret(scope=scope_name, key=key_name) w.secrets.delete_scope(scope=scope_name) - Creates or overwrites the Access Control List (ACL) associated with the given principal (user or - group) on the specified scope point. + Creates or overwrites the ACL associated with the given principal (user or group) on the specified + scope point. In general, a user or group will use the most powerful permission available to them, and + permissions are ordered as follows: - In general, a user or group will use the most powerful permission available to them, and permissions - are ordered as follows: - - * `MANAGE` - Allowed to change ACLs, and read and write to this secret scope. * `WRITE` - Allowed to - read and write to this secret scope. * `READ` - Allowed to read this secret scope and list what + * ``MANAGE`` - Allowed to change ACLs, and read and write to this secret scope. * ``WRITE`` - Allowed + to read and write to this secret scope. * ``READ`` - Allowed to read this secret scope and list what secrets are available. Note that in general, secret values can only be read from within a command on a cluster (for example, @@ -276,15 +374,21 @@ However, the user's permission will be applied based on who is executing the command, and they must have at least READ permission. 
- Users must have the `MANAGE` permission to invoke this API. + Users must have the ``MANAGE`` permission to invoke this API. + + Example request: + + .. code:: + + { "scope": "my-secret-scope", "principal": "data-scientists", "permission": "READ" } The principal is a user or group name corresponding to an existing Databricks principal to be granted or revoked access. - Throws `RESOURCE_DOES_NOT_EXIST` if no such secret scope exists. Throws `RESOURCE_ALREADY_EXISTS` if a - permission for the principal already exists. Throws `INVALID_PARAMETER_VALUE` if the permission or - principal is invalid. Throws `PERMISSION_DENIED` if the user does not have permission to make this API - call. + Throws ``RESOURCE_DOES_NOT_EXIST`` if no such secret scope exists. Throws ``RESOURCE_ALREADY_EXISTS`` + if a permission for the principal already exists. Throws ``INVALID_PARAMETER_VALUE`` if the permission + or principal is invalid. Throws ``PERMISSION_DENIED`` if the user does not have permission to make + this API call. :param scope: str The name of the scope to apply permissions to. @@ -323,19 +427,27 @@ Inserts a secret under the provided scope with the given name. If a secret already exists with the same name, this command overwrites the existing secret's value. The server encrypts the secret using - the secret scope's encryption settings before storing it. + the secret scope's encryption settings before storing it. You must have ``WRITE`` or ``MANAGE`` + permission on the secret scope. + + The secret key must consist of alphanumeric characters, dashes, underscores, and periods, and cannot + exceed 128 characters. The maximum allowed secret value size is 128 KB. The maximum number of secrets + in a given scope is 1000. + + Example request: + + .. code:: - You must have `WRITE` or `MANAGE` permission on the secret scope. The secret key must consist of - alphanumeric characters, dashes, underscores, and periods, and cannot exceed 128 characters. 
The - maximum allowed secret value size is 128 KB. The maximum number of secrets in a given scope is 1000. + { "scope": "my-databricks-scope", "key": "my-string-key", "string_value": "foobar" } The input fields "string_value" or "bytes_value" specify the type of the secret, which will determine the value returned when the secret value is requested. Exactly one must be specified. - Throws `RESOURCE_DOES_NOT_EXIST` if no such secret scope exists. Throws `RESOURCE_LIMIT_EXCEEDED` if - maximum number of secrets in scope is exceeded. Throws `INVALID_PARAMETER_VALUE` if the key name or - value length is invalid. Throws `PERMISSION_DENIED` if the user does not have permission to make this - API call. + Throws ``RESOURCE_DOES_NOT_EXIST`` if no such secret scope exists. Throws ``RESOURCE_LIMIT_EXCEEDED`` + if maximum number of secrets in scope is exceeded. Throws ``INVALID_PARAMETER_VALUE`` if the request + parameters are invalid. Throws ``PERMISSION_DENIED`` if the user does not have permission to make this + API call. Throws ``MALFORMED_REQUEST`` if request is incorrectly formatted or conflicting. Throws + ``BAD_REQUEST`` if request is made against Azure KeyVault backed scope. :param scope: str The name of the scope to which the secret will be associated with. diff --git a/docs/workspace/workspace/workspace.rst b/docs/workspace/workspace/workspace.rst index 4fba581e8..03dae035c 100644 --- a/docs/workspace/workspace/workspace.rst +++ b/docs/workspace/workspace/workspace.rst @@ -178,7 +178,7 @@ content=base64.b64encode(("CREATE LIVE TABLE dlt_sample AS SELECT 1").encode()).decode(), format=workspace.ImportFormat.SOURCE, language=workspace.Language.SQL, - overwrite=True, + overwrite=true_, path=notebook_path, )