From 78679a7fc7be90867149bca26a2ff85a3737ee32 Mon Sep 17 00:00:00 2001
From: Parth Bansal
Date: Thu, 25 Sep 2025 12:28:09 +0000
Subject: [PATCH] Update SDK to latest API definition

---
 .codegen/_openapi_sha                |   2 +-
 NEXT_CHANGELOG.md                    |   9 +++
 databricks/sdk/__init__.py           |   5 +-
 databricks/sdk/service/catalog.py    |  21 +++--
 databricks/sdk/service/dashboards.py |  44 +++++++++++
 databricks/sdk/service/database.py   | 102 ++++++++++++++++++++++--
 databricks/sdk/service/iam.py        |   8 +-
 databricks/sdk/service/serving.py    |  56 ++++++++++++++
 tagging.py                           | 112 ++++++++++++---------------
 9 files changed, 276 insertions(+), 83 deletions(-)

diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha
index f09edb728..27c63e442 100644
--- a/.codegen/_openapi_sha
+++ b/.codegen/_openapi_sha
@@ -1 +1 @@
-c3a3e3055fe11cb9683f398a665c225a03563ff1
\ No newline at end of file
+608df7153d64c19e2d255144c9935fd4ed45900a
\ No newline at end of file
diff --git a/NEXT_CHANGELOG.md b/NEXT_CHANGELOG.md
index 0fd0b1445..2607f545e 100644
--- a/NEXT_CHANGELOG.md
+++ b/NEXT_CHANGELOG.md
@@ -11,3 +11,12 @@
 ### Internal Changes

 ### API Changes
+* Add `update_notifications()` method for [w.serving_endpoints](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/serving/serving_endpoints.html) workspace-level service.
+* Add `parameters` field for `databricks.sdk.service.dashboards.GenieQueryAttachment`.
+* Add `database_instance_name` field for `databricks.sdk.service.database.CreateDatabaseInstanceRoleRequest`.
+* Add `custom_tags`, `effective_custom_tags`, `effective_usage_policy_id` and `usage_policy_id` fields for `databricks.sdk.service.database.DatabaseInstance`.
+* Add `effective_attributes` and `instance_name` fields for `databricks.sdk.service.database.DatabaseInstanceRole`.
+* Add `external_use_schema` enum value for `databricks.sdk.service.catalog.Privilege`.
+* Add `stream_native` enum value for `databricks.sdk.service.catalog.SystemType`.
+* Add `exceeded_max_token_length_exception` enum value for `databricks.sdk.service.dashboards.MessageErrorType`.
+* [Breaking] Change `name` field for `databricks.sdk.service.database.DatabaseInstanceRole` to be required.
\ No newline at end of file
diff --git a/databricks/sdk/__init__.py b/databricks/sdk/__init__.py
index 56782a611..ef42d1c34 100755
--- a/databricks/sdk/__init__.py
+++ b/databricks/sdk/__init__.py
@@ -12,7 +12,6 @@
 from databricks.sdk.mixins.files import DbfsExt, FilesExt
 from databricks.sdk.mixins.jobs import JobsExt
 from databricks.sdk.mixins.open_ai_client import ServingEndpointsExt
-from databricks.sdk.mixins.sharing import SharesExt
 from databricks.sdk.mixins.workspace import WorkspaceExt
 from databricks.sdk.service import agentbricks as pkg_agentbricks
 from databricks.sdk.service import apps as pkg_apps
@@ -358,7 +357,7 @@ def __init__(
             self._api_client, serving_endpoints, serving_endpoints_data_plane_token_source
         )
         self._settings = pkg_settings.SettingsAPI(self._api_client)
-        self._shares = SharesExt(self._api_client)
+        self._shares = pkg_sharing.SharesAPI(self._api_client)
         self._statement_execution = pkg_sql.StatementExecutionAPI(self._api_client)
         self._storage_credentials = pkg_catalog.StorageCredentialsAPI(self._api_client)
         self._system_schemas = pkg_catalog.SystemSchemasAPI(self._api_client)
@@ -882,7 +881,7 @@ def settings(self) -> pkg_settings.SettingsAPI:
         return self._settings

     @property
-    def shares(self) -> SharesExt:
+    def shares(self) -> pkg_sharing.SharesAPI:
         """A share is a container instantiated with :method:shares/create."""
         return self._shares

diff --git a/databricks/sdk/service/catalog.py b/databricks/sdk/service/catalog.py
index fed76f073..3ac709a89 100755
--- a/databricks/sdk/service/catalog.py
+++ b/databricks/sdk/service/catalog.py
@@ -7856,6 +7856,7 @@ class Privilege(Enum):
     CREATE_VOLUME = "CREATE_VOLUME"
     EXECUTE = "EXECUTE"
     EXECUTE_CLEAN_ROOM_TASK = "EXECUTE_CLEAN_ROOM_TASK"
+    EXTERNAL_USE_SCHEMA = "EXTERNAL_USE_SCHEMA"
     MANAGE = "MANAGE"
     MANAGE_ALLOWLIST = "MANAGE_ALLOWLIST"
     MODIFY = "MODIFY"
@@ -8539,7 +8540,7 @@ def from_dict(cls, d: Dict[str, Any]) -> Securable:

 class SecurableKind(Enum):
-    """Latest kind: CONNECTION_PALANTIR_OAUTH_M2M = 263; Next id:264"""
+    """Latest kind: CONNECTION_SHAREPOINT_OAUTH_M2M = 264; Next id:265"""

     TABLE_DB_STORAGE = "TABLE_DB_STORAGE"
     TABLE_DELTA = "TABLE_DELTA"
@@ -9003,6 +9004,7 @@ class SystemType(Enum):
     SAP = "SAP"
     SERVICENOW = "SERVICENOW"
     SNOWFLAKE = "SNOWFLAKE"
+    STREAM_NATIVE = "STREAM_NATIVE"
     TABLEAU = "TABLEAU"
     TERADATA = "TERADATA"
     WORKDAY = "WORKDAY"
@@ -10249,6 +10251,11 @@ class VolumeInfo:
     """The unique identifier of the volume"""

     volume_type: Optional[VolumeType] = None
+    """The type of the volume. An external volume is located in the specified external location. A
+    managed volume is located in the default location which is specified by the parent schema, or
+    the parent catalog, or the Metastore. [Learn more]
+
+    [Learn more]: https://docs.databricks.com/aws/en/volumes/managed-vs-external"""

     def as_dict(self) -> dict:
         """Serializes the VolumeInfo into a dictionary suitable for use as a JSON request body."""
@@ -10353,11 +10360,6 @@ def from_dict(cls, d: Dict[str, Any]) -> VolumeInfo:

 class VolumeType(Enum):
-    """The type of the volume. An external volume is located in the specified external location. A
-    managed volume is located in the default location which is specified by the parent schema, or
-    the parent catalog, or the Metastore. [Learn more]
-
-    [Learn more]: https://docs.databricks.com/aws/en/volumes/managed-vs-external"""

     EXTERNAL = "EXTERNAL"
     MANAGED = "MANAGED"
@@ -15424,6 +15426,11 @@ def create(
         :param name: str
           The name of the volume
         :param volume_type: :class:`VolumeType`
+          The type of the volume. An external volume is located in the specified external location. A managed
+          volume is located in the default location which is specified by the parent schema, or the parent
+          catalog, or the Metastore. [Learn more]
+
+          [Learn more]: https://docs.databricks.com/aws/en/volumes/managed-vs-external
         :param comment: str (optional)
           The comment attached to the volume
         :param storage_location: str (optional)
@@ -15482,7 +15489,7 @@ def list(

         The returned volumes are filtered based on the privileges of the calling user. For example, the
         metastore admin is able to list all the volumes. A regular user needs to be the owner or have the
-        **READ VOLUME** privilege on the volume to recieve the volumes in the response. For the latter case,
+        **READ VOLUME** privilege on the volume to receive the volumes in the response. For the latter case,
         the caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the
         **USE_SCHEMA** privilege on the parent schema.
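For reviewers, a minimal usage sketch of the new `EXTERNAL_USE_SCHEMA` privilege through the existing grants API (not part of the patch). The schema and principal names are placeholders, and in older SDK versions `securable_type` takes a `SecurableType` enum rather than a plain string:

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service import catalog

w = WorkspaceClient()

# Grant the new privilege to a group on a schema; `main.sales` and
# `analysts` are placeholder names.
w.grants.update(
    securable_type="schema",
    full_name="main.sales",
    changes=[
        catalog.PermissionsChange(
            principal="analysts",
            add=[catalog.Privilege.EXTERNAL_USE_SCHEMA],
        )
    ],
)
```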
diff --git a/databricks/sdk/service/dashboards.py b/databricks/sdk/service/dashboards.py
index 33f8b7382..9ea51bfb7 100755
--- a/databricks/sdk/service/dashboards.py
+++ b/databricks/sdk/service/dashboards.py
@@ -714,6 +714,8 @@ class GenieQueryAttachment:
     last_updated_timestamp: Optional[int] = None
     """Time when the user updated the query last"""

+    parameters: Optional[List[QueryAttachmentParameter]] = None
+
     query: Optional[str] = None
     """AI generated SQL query"""

@@ -736,6 +738,8 @@ def as_dict(self) -> dict:
             body["id"] = self.id
         if self.last_updated_timestamp is not None:
             body["last_updated_timestamp"] = self.last_updated_timestamp
+        if self.parameters:
+            body["parameters"] = [v.as_dict() for v in self.parameters]
         if self.query is not None:
             body["query"] = self.query
         if self.query_result_metadata:
@@ -755,6 +759,8 @@ def as_shallow_dict(self) -> dict:
             body["id"] = self.id
         if self.last_updated_timestamp is not None:
             body["last_updated_timestamp"] = self.last_updated_timestamp
+        if self.parameters:
+            body["parameters"] = self.parameters
         if self.query is not None:
             body["query"] = self.query
         if self.query_result_metadata:
@@ -772,6 +778,7 @@ def from_dict(cls, d: Dict[str, Any]) -> GenieQueryAttachment:
             description=d.get("description", None),
             id=d.get("id", None),
             last_updated_timestamp=d.get("last_updated_timestamp", None),
+            parameters=_repeated_dict(d, "parameters", QueryAttachmentParameter),
             query=d.get("query", None),
             query_result_metadata=_from_dict(d, "query_result_metadata", GenieResultMetadata),
             statement_id=d.get("statement_id", None),
@@ -1135,6 +1142,7 @@ class MessageErrorType(Enum):
     DESCRIBE_QUERY_INVALID_SQL_ERROR = "DESCRIBE_QUERY_INVALID_SQL_ERROR"
     DESCRIBE_QUERY_TIMEOUT = "DESCRIBE_QUERY_TIMEOUT"
     DESCRIBE_QUERY_UNEXPECTED_FAILURE = "DESCRIBE_QUERY_UNEXPECTED_FAILURE"
+    EXCEEDED_MAX_TOKEN_LENGTH_EXCEPTION = "EXCEEDED_MAX_TOKEN_LENGTH_EXCEPTION"
     FUNCTIONS_NOT_AVAILABLE_EXCEPTION = "FUNCTIONS_NOT_AVAILABLE_EXCEPTION"
     FUNCTION_ARGUMENTS_INVALID_EXCEPTION = "FUNCTION_ARGUMENTS_INVALID_EXCEPTION"
     FUNCTION_ARGUMENTS_INVALID_JSON_EXCEPTION = "FUNCTION_ARGUMENTS_INVALID_JSON_EXCEPTION"
@@ -1260,6 +1268,42 @@ def from_dict(cls, d: Dict[str, Any]) -> PublishedDashboard:
         )


+@dataclass
+class QueryAttachmentParameter:
+    keyword: Optional[str] = None
+
+    sql_type: Optional[str] = None
+
+    value: Optional[str] = None
+
+    def as_dict(self) -> dict:
+        """Serializes the QueryAttachmentParameter into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.keyword is not None:
+            body["keyword"] = self.keyword
+        if self.sql_type is not None:
+            body["sql_type"] = self.sql_type
+        if self.value is not None:
+            body["value"] = self.value
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the QueryAttachmentParameter into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.keyword is not None:
+            body["keyword"] = self.keyword
+        if self.sql_type is not None:
+            body["sql_type"] = self.sql_type
+        if self.value is not None:
+            body["value"] = self.value
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, Any]) -> QueryAttachmentParameter:
+        """Deserializes the QueryAttachmentParameter from a dictionary."""
+        return cls(keyword=d.get("keyword", None), sql_type=d.get("sql_type", None), value=d.get("value", None))
+
+
 @dataclass
 class Result:
     is_truncated: Optional[bool] = None
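The new `parameters` field surfaces the bind parameters of the AI-generated SQL alongside the query text. A minimal sketch of reading them from a completed Genie message (not part of the patch; the IDs are placeholders):

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# Fetch a completed Genie message; the three IDs are placeholders.
message = w.genie.get_message(
    space_id="01ef...",
    conversation_id="01ef...",
    message_id="01ef...",
)

# Each query attachment now carries its parameters next to the SQL text.
for attachment in message.attachments or []:
    if attachment.query:
        print(attachment.query.query)
        for p in attachment.query.parameters or []:
            print(f"  {p.keyword} ({p.sql_type}) = {p.value}")
```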
diff --git a/databricks/sdk/service/database.py b/databricks/sdk/service/database.py
index aac36ec0e..73d084ce2 100755
--- a/databricks/sdk/service/database.py
+++ b/databricks/sdk/service/database.py
@@ -18,6 +18,38 @@
 # all definitions in this file are in alphabetical order


+@dataclass
+class CustomTag:
+    key: Optional[str] = None
+    """The key of the custom tag."""
+
+    value: Optional[str] = None
+    """The value of the custom tag."""
+
+    def as_dict(self) -> dict:
+        """Serializes the CustomTag into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.key is not None:
+            body["key"] = self.key
+        if self.value is not None:
+            body["value"] = self.value
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CustomTag into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.key is not None:
+            body["key"] = self.key
+        if self.value is not None:
+            body["value"] = self.value
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, Any]) -> CustomTag:
+        """Deserializes the CustomTag from a dictionary."""
+        return cls(key=d.get("key", None), value=d.get("value", None))
+
+
 @dataclass
 class DatabaseCatalog:
     name: str
@@ -125,9 +157,16 @@ class DatabaseInstance:
     creator: Optional[str] = None
     """The email of the creator of the instance."""

+    custom_tags: Optional[List[CustomTag]] = None
+    """Custom tags associated with the instance. This field is only included on create and update
+    responses."""
+
     effective_capacity: Optional[str] = None
     """Deprecated. The sku of the instance; this field will always match the value of capacity."""

+    effective_custom_tags: Optional[List[CustomTag]] = None
+    """The recorded custom tags associated with the instance."""
+
     effective_enable_pg_native_login: Optional[bool] = None
     """Whether the instance has PG native password login enabled."""

@@ -145,6 +184,9 @@ class DatabaseInstance:
     effective_stopped: Optional[bool] = None
     """Whether the instance is stopped."""

+    effective_usage_policy_id: Optional[str] = None
+    """The policy that is applied to the instance."""
+
     enable_pg_native_login: Optional[bool] = None
     """Whether to enable PG native password login on the instance. Defaults to false."""

@@ -184,6 +226,9 @@ class DatabaseInstance:
     uid: Optional[str] = None
     """An immutable UUID identifier for the instance."""

+    usage_policy_id: Optional[str] = None
+    """The desired usage policy to associate with the instance."""
+
     def as_dict(self) -> dict:
         """Serializes the DatabaseInstance into a dictionary suitable for use as a JSON request body."""
         body = {}
@@ -195,8 +240,12 @@ def as_dict(self) -> dict:
             body["creation_time"] = self.creation_time
         if self.creator is not None:
             body["creator"] = self.creator
+        if self.custom_tags:
+            body["custom_tags"] = [v.as_dict() for v in self.custom_tags]
         if self.effective_capacity is not None:
             body["effective_capacity"] = self.effective_capacity
+        if self.effective_custom_tags:
+            body["effective_custom_tags"] = [v.as_dict() for v in self.effective_custom_tags]
         if self.effective_enable_pg_native_login is not None:
             body["effective_enable_pg_native_login"] = self.effective_enable_pg_native_login
         if self.effective_enable_readable_secondaries is not None:
@@ -207,6 +256,8 @@ def as_dict(self) -> dict:
             body["effective_retention_window_in_days"] = self.effective_retention_window_in_days
         if self.effective_stopped is not None:
             body["effective_stopped"] = self.effective_stopped
+        if self.effective_usage_policy_id is not None:
+            body["effective_usage_policy_id"] = self.effective_usage_policy_id
         if self.enable_pg_native_login is not None:
             body["enable_pg_native_login"] = self.enable_pg_native_login
         if self.enable_readable_secondaries is not None:
@@ -231,6 +282,8 @@ def as_dict(self) -> dict:
             body["stopped"] = self.stopped
         if self.uid is not None:
             body["uid"] = self.uid
+        if self.usage_policy_id is not None:
+            body["usage_policy_id"] = self.usage_policy_id
         return body

     def as_shallow_dict(self) -> dict:
@@ -244,8 +297,12 @@ def as_shallow_dict(self) -> dict:
             body["creation_time"] = self.creation_time
         if self.creator is not None:
             body["creator"] = self.creator
+        if self.custom_tags:
+            body["custom_tags"] = self.custom_tags
         if self.effective_capacity is not None:
             body["effective_capacity"] = self.effective_capacity
+        if self.effective_custom_tags:
+            body["effective_custom_tags"] = self.effective_custom_tags
         if self.effective_enable_pg_native_login is not None:
             body["effective_enable_pg_native_login"] = self.effective_enable_pg_native_login
         if self.effective_enable_readable_secondaries is not None:
@@ -256,6 +313,8 @@ def as_shallow_dict(self) -> dict:
             body["effective_retention_window_in_days"] = self.effective_retention_window_in_days
         if self.effective_stopped is not None:
             body["effective_stopped"] = self.effective_stopped
+        if self.effective_usage_policy_id is not None:
+            body["effective_usage_policy_id"] = self.effective_usage_policy_id
         if self.enable_pg_native_login is not None:
             body["enable_pg_native_login"] = self.enable_pg_native_login
         if self.enable_readable_secondaries is not None:
@@ -280,6 +339,8 @@ def as_shallow_dict(self) -> dict:
             body["stopped"] = self.stopped
         if self.uid is not None:
             body["uid"] = self.uid
+        if self.usage_policy_id is not None:
+            body["usage_policy_id"] = self.usage_policy_id
         return body

     @classmethod
@@ -290,12 +351,15 @@ def from_dict(cls, d: Dict[str, Any]) -> DatabaseInstance:
             child_instance_refs=_repeated_dict(d, "child_instance_refs", DatabaseInstanceRef),
             creation_time=d.get("creation_time", None),
             creator=d.get("creator", None),
+            custom_tags=_repeated_dict(d, "custom_tags", CustomTag),
             effective_capacity=d.get("effective_capacity", None),
+            effective_custom_tags=_repeated_dict(d, "effective_custom_tags", CustomTag),
"effective_custom_tags", CustomTag), effective_enable_pg_native_login=d.get("effective_enable_pg_native_login", None), effective_enable_readable_secondaries=d.get("effective_enable_readable_secondaries", None), effective_node_count=d.get("effective_node_count", None), effective_retention_window_in_days=d.get("effective_retention_window_in_days", None), effective_stopped=d.get("effective_stopped", None), + effective_usage_policy_id=d.get("effective_usage_policy_id", None), enable_pg_native_login=d.get("enable_pg_native_login", None), enable_readable_secondaries=d.get("enable_readable_secondaries", None), name=d.get("name", None), @@ -308,6 +372,7 @@ def from_dict(cls, d: Dict[str, Any]) -> DatabaseInstance: state=_enum(d, "state", DatabaseInstanceState), stopped=d.get("stopped", None), uid=d.get("uid", None), + usage_policy_id=d.get("usage_policy_id", None), ) @@ -390,25 +455,34 @@ def from_dict(cls, d: Dict[str, Any]) -> DatabaseInstanceRef: class DatabaseInstanceRole: """A DatabaseInstanceRole represents a Postgres role in a database instance.""" + name: str + """The name of the role. This is the unique identifier for the role in an instance.""" + attributes: Optional[DatabaseInstanceRoleAttributes] = None - """API-exposed Postgres role attributes""" + """The desired API-exposed Postgres role attribute to associate with the role. Optional.""" + + effective_attributes: Optional[DatabaseInstanceRoleAttributes] = None + """The attributes that are applied to the role.""" identity_type: Optional[DatabaseInstanceRoleIdentityType] = None """The type of the role.""" + instance_name: Optional[str] = None + membership_role: Optional[DatabaseInstanceRoleMembershipRole] = None """An enum value for a standard role that this role is a member of.""" - name: Optional[str] = None - """The name of the role. 
@@ -390,25 +455,34 @@ def from_dict(cls, d: Dict[str, Any]) -> DatabaseInstanceRef:
 class DatabaseInstanceRole:
     """A DatabaseInstanceRole represents a Postgres role in a database instance."""

+    name: str
+    """The name of the role. This is the unique identifier for the role in an instance."""
+
     attributes: Optional[DatabaseInstanceRoleAttributes] = None
-    """API-exposed Postgres role attributes"""
+    """The desired API-exposed Postgres role attribute to associate with the role. Optional."""
+
+    effective_attributes: Optional[DatabaseInstanceRoleAttributes] = None
+    """The attributes that are applied to the role."""

     identity_type: Optional[DatabaseInstanceRoleIdentityType] = None
     """The type of the role."""

+    instance_name: Optional[str] = None
+
     membership_role: Optional[DatabaseInstanceRoleMembershipRole] = None
     """An enum value for a standard role that this role is a member of."""

-    name: Optional[str] = None
-    """The name of the role. This is the unique identifier for the role in an instance."""
-
     def as_dict(self) -> dict:
         """Serializes the DatabaseInstanceRole into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.attributes:
             body["attributes"] = self.attributes.as_dict()
+        if self.effective_attributes:
+            body["effective_attributes"] = self.effective_attributes.as_dict()
         if self.identity_type is not None:
             body["identity_type"] = self.identity_type.value
+        if self.instance_name is not None:
+            body["instance_name"] = self.instance_name
         if self.membership_role is not None:
             body["membership_role"] = self.membership_role.value
         if self.name is not None:
@@ -420,8 +494,12 @@ def as_shallow_dict(self) -> dict:
         body = {}
         if self.attributes:
             body["attributes"] = self.attributes
+        if self.effective_attributes:
+            body["effective_attributes"] = self.effective_attributes
         if self.identity_type is not None:
             body["identity_type"] = self.identity_type
+        if self.instance_name is not None:
+            body["instance_name"] = self.instance_name
         if self.membership_role is not None:
             body["membership_role"] = self.membership_role
         if self.name is not None:
@@ -433,7 +511,9 @@ def from_dict(cls, d: Dict[str, Any]) -> DatabaseInstanceRole:
         """Deserializes the DatabaseInstanceRole from a dictionary."""
         return cls(
             attributes=_from_dict(d, "attributes", DatabaseInstanceRoleAttributes),
+            effective_attributes=_from_dict(d, "effective_attributes", DatabaseInstanceRoleAttributes),
             identity_type=_enum(d, "identity_type", DatabaseInstanceRoleIdentityType),
+            instance_name=d.get("instance_name", None),
             membership_role=_enum(d, "membership_role", DatabaseInstanceRoleMembershipRole),
             name=d.get("name", None),
         )
@@ -1527,22 +1607,32 @@ def create_database_instance_and_wait(
         return self.create_database_instance(database_instance=database_instance).result(timeout=timeout)

     def create_database_instance_role(
-        self, instance_name: str, database_instance_role: DatabaseInstanceRole
+        self,
+        instance_name: str,
+        database_instance_role: DatabaseInstanceRole,
+        *,
+        database_instance_name: Optional[str] = None,
     ) -> DatabaseInstanceRole:
         """Create a role for a Database Instance.

         :param instance_name: str
         :param database_instance_role: :class:`DatabaseInstanceRole`
+        :param database_instance_name: str (optional)

         :returns: :class:`DatabaseInstanceRole`
         """
         body = database_instance_role.as_dict()
+        query = {}
+        if database_instance_name is not None:
+            query["database_instance_name"] = database_instance_name
         headers = {
             "Accept": "application/json",
             "Content-Type": "application/json",
         }

-        res = self._api.do("POST", f"/api/2.0/database/instances/{instance_name}/roles", body=body, headers=headers)
+        res = self._api.do(
+            "POST", f"/api/2.0/database/instances/{instance_name}/roles", query=query, body=body, headers=headers
+        )
         return DatabaseInstanceRole.from_dict(res)

     def create_database_table(self, table: DatabaseTable) -> DatabaseTable:
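Since `name` is now required, `DatabaseInstanceRole` must be constructed with a role name, and the new optional `database_instance_name` query parameter is passed separately. A minimal sketch (not part of the patch; the identifiers are placeholders):

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service import database

w = WorkspaceClient()

# `name` is now a required field on DatabaseInstanceRole.
role = w.database.create_database_instance_role(
    instance_name="my-instance",
    database_instance_role=database.DatabaseInstanceRole(name="reporting_ro"),
)
print(role.effective_attributes)
```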
diff --git a/databricks/sdk/service/iam.py b/databricks/sdk/service/iam.py
index e42fbd0f7..a470d7544 100755
--- a/databricks/sdk/service/iam.py
+++ b/databricks/sdk/service/iam.py
@@ -3583,7 +3583,7 @@ def get(self, request_object_type: str, request_object_id: str) -> ObjectPermiss

         :param request_object_type: str
           The type of the request object. Can be one of the following: alerts, alertsv2, authorization,
-          clusters, cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files,
+          clusters, cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files, genie,
           instance-pools, jobs, notebooks, pipelines, queries, registered-models, repos, serving-endpoints,
           or warehouses.
         :param request_object_id: str
@@ -3604,7 +3604,7 @@ def get_permission_levels(self, request_object_type: str, request_object_id: str

         :param request_object_type: str
           The type of the request object. Can be one of the following: alerts, alertsv2, authorization,
-          clusters, cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files,
+          clusters, cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files, genie,
           instance-pools, jobs, notebooks, pipelines, queries, registered-models, repos, serving-endpoints,
           or warehouses.
         :param request_object_id: str
@@ -3634,7 +3634,7 @@ def set(

         :param request_object_type: str
           The type of the request object. Can be one of the following: alerts, alertsv2, authorization,
-          clusters, cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files,
+          clusters, cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files, genie,
           instance-pools, jobs, notebooks, pipelines, queries, registered-models, repos, serving-endpoints,
           or warehouses.
         :param request_object_id: str
@@ -3668,7 +3668,7 @@ def update(

         :param request_object_type: str
           The type of the request object. Can be one of the following: alerts, alertsv2, authorization,
-          clusters, cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files,
+          clusters, cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files, genie,
           instance-pools, jobs, notebooks, pipelines, queries, registered-models, repos, serving-endpoints,
           or warehouses.
         :param request_object_id: str
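The docstring change reflects that `genie` is now an accepted `request_object_type` for the generic permissions API. A minimal sketch (not part of the patch; the object ID is a placeholder):

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# List the permission levels available on a Genie space.
levels = w.permissions.get_permission_levels(
    request_object_type="genie",
    request_object_id="01ef1234abcd5678",
)
print([pl.permission_level for pl in levels.permission_levels or []])
```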
diff --git a/databricks/sdk/service/serving.py b/databricks/sdk/service/serving.py
index ff7ce801a..f707aadf7 100755
--- a/databricks/sdk/service/serving.py
+++ b/databricks/sdk/service/serving.py
@@ -3905,6 +3905,38 @@ def from_dict(cls, d: Dict[str, Any]) -> TrafficConfig:
         return cls(routes=_repeated_dict(d, "routes", Route))


+@dataclass
+class UpdateInferenceEndpointNotificationsResponse:
+    email_notifications: Optional[EmailNotifications] = None
+
+    name: Optional[str] = None
+
+    def as_dict(self) -> dict:
+        """Serializes the UpdateInferenceEndpointNotificationsResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.email_notifications:
+            body["email_notifications"] = self.email_notifications.as_dict()
+        if self.name is not None:
+            body["name"] = self.name
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateInferenceEndpointNotificationsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.email_notifications:
+            body["email_notifications"] = self.email_notifications
+        if self.name is not None:
+            body["name"] = self.name
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, Any]) -> UpdateInferenceEndpointNotificationsResponse:
+        """Deserializes the UpdateInferenceEndpointNotificationsResponse from a dictionary."""
+        return cls(
+            email_notifications=_from_dict(d, "email_notifications", EmailNotifications), name=d.get("name", None)
+        )
+
+
 @dataclass
 class V1ResponseChoiceElement:
     finish_reason: Optional[str] = None
@@ -4706,6 +4738,30 @@ def update_config_and_wait(
             traffic_config=traffic_config,
         ).result(timeout=timeout)

+    def update_notifications(
+        self, name: str, *, email_notifications: Optional[EmailNotifications] = None
+    ) -> UpdateInferenceEndpointNotificationsResponse:
+        """Updates the email and webhook notification settings for an endpoint.
+
+        :param name: str
+          The name of the serving endpoint whose notifications are being updated. This field is required.
+        :param email_notifications: :class:`EmailNotifications` (optional)
+          The email notification settings to update. Specify email addresses to notify when endpoint state
+          changes occur.
+
+        :returns: :class:`UpdateInferenceEndpointNotificationsResponse`
+        """
+        body = {}
+        if email_notifications is not None:
+            body["email_notifications"] = email_notifications.as_dict()
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do("PATCH", f"/api/2.0/serving-endpoints/{name}/notifications", body=body, headers=headers)
+        return UpdateInferenceEndpointNotificationsResponse.from_dict(res)
+
     def update_permissions(
         self,
         serving_endpoint_id: str,
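A minimal sketch of the new `update_notifications()` call (not part of the patch; the endpoint name and address are placeholders, and the `EmailNotifications` field name used here is an assumption to verify against this SDK version):

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service import serving

w = WorkspaceClient()

# Update notification settings on an existing endpoint. The
# `on_update_failure` field name is an assumption; check the
# EmailNotifications dataclass in this SDK version.
resp = w.serving_endpoints.update_notifications(
    name="my-endpoint",
    email_notifications=serving.EmailNotifications(
        on_update_failure=["ml-oncall@example.com"],
    ),
)
print(resp.name)
```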
diff --git a/tagging.py b/tagging.py
index 5504bdd0e..f2ac65457 100644
--- a/tagging.py
+++ b/tagging.py
@@ -51,8 +51,7 @@ def commit_and_push(self, message: str):
         new_tree = self.repo.create_git_tree(self.changed_files, base_tree)
         parent_commit = self.repo.get_git_commit(head_ref.object.sha)

-        new_commit = self.repo.create_git_commit(
-            message=message, tree=new_tree, parents=[parent_commit])
+        new_commit = self.repo.create_git_commit(message=message, tree=new_tree, parents=[parent_commit])
         # Update branch reference
         head_ref.edit(new_commit.sha)
         self.sha = new_commit.sha
@@ -70,11 +69,10 @@ def tag(self, tag_name: str, tag_message: str):
         # The email MUST be the GitHub Apps email.
         # Otherwise, the tag will not be verified.
         tagger = InputGitAuthor(
-            name="Databricks SDK Release Bot",
-            email="DECO-SDK-Tagging[bot]@users.noreply.github.com")
+            name="Databricks SDK Release Bot", email="DECO-SDK-Tagging[bot]@users.noreply.github.com"
+        )

-        tag = self.repo.create_git_tag(
-            tag=tag_name, message=tag_message, object=self.sha, type="commit", tagger=tagger)
+        tag = self.repo.create_git_tag(tag=tag_name, message=tag_message, object=self.sha, type="commit", tagger=tagger)

         # Create a Git ref (the actual reference for the tag in the repo)
         self.repo.create_git_ref(ref=f"refs/tags/{tag_name}", sha=tag.sha)
@@ -89,6 +87,7 @@ class Package:
     :name: The package name.
     :path: The path to the package relative to the repository root.
     """
+
     name: str
     path: str
@@ -140,7 +139,7 @@ def get_package_name(package_path: str) -> str:
     }
     """
     filepath = os.path.join(os.getcwd(), package_path, PACKAGE_FILE_NAME)
-    with open(filepath, 'r') as file:
+    with open(filepath, "r") as file:
         content = json.load(file)
         if "package" in content:
             return content["package"]
@@ -156,21 +155,21 @@ def update_version_references(tag_info: TagInfo) -> None:

     # Load version patterns from '.codegen.json' file at the top level of the repository
     package_file_path = os.path.join(os.getcwd(), CODEGEN_FILE_NAME)
-    with open(package_file_path, 'r') as file:
+    with open(package_file_path, "r") as file:
         package_file = json.load(file)

-    version = package_file.get('version')
+    version = package_file.get("version")
     if not version:
-        print(f"`version` not found in .codegen.json. Nothing to update.")
+        print("`version` not found in .codegen.json. Nothing to update.")
         return

     # Update the versions
     for filename, pattern in version.items():
         loc = os.path.join(os.getcwd(), tag_info.package.path, filename)
-        previous_version = re.sub(r'\$VERSION', r"\\d+\\.\\d+\\.\\d+", pattern)
-        new_version = re.sub(r'\$VERSION', tag_info.version, pattern)
+        previous_version = re.sub(r"\$VERSION", r"\\d+\\.\\d+\\.\\d+", pattern)
+        new_version = re.sub(r"\$VERSION", tag_info.version, pattern)

-        with open(loc, 'r') as file:
+        with open(loc, "r") as file:
             content = file.read()

         # Replace the version in the file content
@@ -188,15 +187,15 @@ def clean_next_changelog(package_path: str) -> None:
     """
     file_path = os.path.join(os.getcwd(), package_path, NEXT_CHANGELOG_FILE_NAME)

-    with open(file_path, 'r') as file:
+    with open(file_path, "r") as file:
         content = file.read()
     # Remove content between ### sections
-    cleaned_content = re.sub(r'(### [^\n]+\n)(?:.*?\n?)*?(?=###|$)', r'\1', content)
+    cleaned_content = re.sub(r"(### [^\n]+\n)(?:.*?\n?)*?(?=###|$)", r"\1", content)
     # Ensure there is exactly one empty line before each section
-    cleaned_content = re.sub(r'(\n*)(###[^\n]+)', r'\n\n\2', cleaned_content)
+    cleaned_content = re.sub(r"(\n*)(###[^\n]+)", r"\n\n\2", cleaned_content)
     # Find the version number
-    version_match = re.search(r'Release v(\d+)\.(\d+)\.(\d+)', cleaned_content)
+    version_match = re.search(r"Release v(\d+)\.(\d+)\.(\d+)", cleaned_content)
     if not version_match:
         raise Exception("Version not found in the changelog")
     major, minor, patch = map(int, version_match.groups())
@@ -206,7 +205,7 @@ def clean_next_changelog(package_path: str) -> None:
     # are more common than patch or major version releases.
     minor += 1
     patch = 0
-    new_version = f'Release v{major}.{minor}.{patch}'
+    new_version = f"Release v{major}.{minor}.{patch}"
     cleaned_content = cleaned_content.replace(version_match.group(0), new_version)

     # Update file with cleaned content
@@ -220,19 +219,18 @@ def get_previous_tag_info(package: Package) -> Optional[TagInfo]:
     """
     changelog_path = os.path.join(os.getcwd(), package.path, CHANGELOG_FILE_NAME)

-    with open(changelog_path, 'r') as f:
+    with open(changelog_path, "r") as f:
         changelog = f.read()

     # Extract the latest release section using regex
-    match = re.search(r"## (\[Release\] )?Release v[\d\.]+.*?(?=\n## (\[Release\] )?Release v|\Z)",
-                      changelog, re.S)
+    match = re.search(r"## (\[Release\] )?Release v[\d\.]+.*?(?=\n## (\[Release\] )?Release v|\Z)", changelog, re.S)

     # E.g., for new packages.
     if not match:
         return None

     latest_release = match.group(0)
-    version_match = re.search(r'## (\[Release\] )?Release v(\d+\.\d+\.\d+)', latest_release)
+    version_match = re.search(r"## (\[Release\] )?Release v(\d+\.\d+\.\d+)", latest_release)
     if not version_match:
         raise Exception("Version not found in the changelog")

@@ -247,22 +245,22 @@ def get_next_tag_info(package: Package) -> Optional[TagInfo]:
     """
     next_changelog_path = os.path.join(os.getcwd(), package.path, NEXT_CHANGELOG_FILE_NAME)
     # Read NEXT_CHANGELOG.md
-    with open(next_changelog_path, 'r') as f:
+    with open(next_changelog_path, "r") as f:
         next_changelog = f.read()

     # Remove "# NEXT CHANGELOG" line
-    next_changelog = re.sub(r'^# NEXT CHANGELOG(\n+)', '', next_changelog, flags=re.MULTILINE)
+    next_changelog = re.sub(r"^# NEXT CHANGELOG(\n+)", "", next_changelog, flags=re.MULTILINE)
     # Remove empty sections
-    next_changelog = re.sub(r'###[^\n]+\n+(?=##|\Z)', '', next_changelog)
+    next_changelog = re.sub(r"###[^\n]+\n+(?=##|\Z)", "", next_changelog)
     # Ensure there is exactly one empty line before each section
-    next_changelog = re.sub(r'(\n*)(###[^\n]+)', r'\n\n\2', next_changelog)
+    next_changelog = re.sub(r"(\n*)(###[^\n]+)", r"\n\n\2", next_changelog)

-    if not re.search(r'###', next_changelog):
+    if not re.search(r"###", next_changelog):
         print("All sections are empty. No changes will be made to the changelog.")
         return None

-    version_match = re.search(r'## Release v(\d+\.\d+\.\d+)', next_changelog)
+    version_match = re.search(r"## Release v(\d+\.\d+\.\d+)", next_changelog)
     if not version_match:
         raise Exception("Version not found in the changelog")

@@ -275,10 +273,9 @@ def write_changelog(tag_info: TagInfo) -> None:
     """
     Updates the changelog with a new tag info.
""" changelog_path = os.path.join(os.getcwd(), tag_info.package.path, CHANGELOG_FILE_NAME) - with open(changelog_path, 'r') as f: + with open(changelog_path, "r") as f: changelog = f.read() - updated_changelog = re.sub(r'(# Version changelog\n\n)', f'\\1{tag_info.content.strip()}\n\n\n', - changelog) + updated_changelog = re.sub(r"(# Version changelog\n\n)", f"\\1{tag_info.content.strip()}\n\n\n", changelog) gh.add_file(changelog_path, updated_changelog) @@ -333,8 +330,7 @@ def is_tag_applied(tag: TagInfo) -> bool: """ try: # Check if the specific tag exists - result = subprocess.check_output( - ['git', 'tag', '--list', tag.tag_name()], stderr=subprocess.PIPE, text=True) + result = subprocess.check_output(["git", "tag", "--list", tag.tag_name()], stderr=subprocess.PIPE, text=True) return result.strip() == tag.tag_name() except subprocess.CalledProcessError as e: # Raise a exception for git command errors @@ -349,10 +345,7 @@ def find_last_tags() -> List[TagInfo]: """ packages = find_packages() - return [ - info for info in (get_previous_tag_info(package) for package in packages) - if info is not None - ] + return [info for info in (get_previous_tag_info(package) for package in packages) if info is not None] def find_pending_tags() -> List[TagInfo]: @@ -379,8 +372,9 @@ def generate_commit_message(tag_infos: List[TagInfo]) -> str: # Sort tag_infos by package name for consistency tag_infos.sort(key=lambda info: info.package.name) - return 'Release\n\n' + '\n\n'.join(f"## {info.package.name}/v{info.version}\n\n{info.content}" - for info in tag_infos) + return "Release\n\n" + "\n\n".join( + f"## {info.package.name}/v{info.version}\n\n{info.content}" for info in tag_infos + ) def push_changes(tag_infos: List[TagInfo]) -> None: @@ -404,25 +398,24 @@ def reset_repository(hash: Optional[str] = None) -> None: :param hash: The commit hash to reset to. If None, it resets to HEAD. """ # Fetch the latest changes from the remote repository - subprocess.run(['git', 'fetch']) + subprocess.run(["git", "fetch"]) # Determine the commit hash (default to origin/main if none is provided) - commit_hash = hash or 'origin/main' + commit_hash = hash or "origin/main" # Reset in memory changed files and the commit hash gh.reset(hash) # Construct the Git reset command - command = ['git', 'reset', '--hard', commit_hash] + command = ["git", "reset", "--hard", commit_hash] # Execute the git reset command subprocess.run(command, check=True) -def retry_function(func: Callable[[], List[TagInfo]], - cleanup: Callable[[], None], - max_attempts: int = 5, - delay: int = 5) -> List[TagInfo]: +def retry_function( + func: Callable[[], List[TagInfo]], cleanup: Callable[[], None], max_attempts: int = 5, delay: int = 5 +) -> List[TagInfo]: """ Calls a function call up to `max_attempts` times if an exception occurs. @@ -451,9 +444,7 @@ def update_changelogs(packages: List[Package]) -> List[TagInfo]: """ Updates changelogs and pushes the commits. """ - tag_infos = [ - info for info in (process_package(package) for package in packages) if info is not None - ] + tag_infos = [info for info in (process_package(package) for package in packages) if info is not None] # If any package was changed, push the changes. if tag_infos: push_changes(tag_infos) @@ -479,12 +470,12 @@ def run_command(command: List[str]) -> str: def pull_last_release_commit() -> None: """ - Reset the repository to the last release. + Reset the repository to the last release. Uses commit for last change to .release_metadata.json, since it's only updated on releases. 
""" commit_hash = subprocess.check_output( - ['git', 'log', '-n', '1', '--format=%H', '--', '.release_metadata.json'], - text=True).strip() + ["git", "log", "-n", "1", "--format=%H", "--", ".release_metadata.json"], text=True + ).strip() # If no commit is found, raise an exception if not commit_hash: @@ -499,15 +490,15 @@ def get_package_from_args() -> Optional[str]: Retrieves an optional package python3 ./tagging.py --package """ - parser = argparse.ArgumentParser(description='Update changelogs and tag the release.') - parser.add_argument('--package', '-p', type=str, help='Tag a single package') + parser = argparse.ArgumentParser(description="Update changelogs and tag the release.") + parser.add_argument("--package", "-p", type=str, help="Tag a single package") args = parser.parse_args() return args.package def init_github(): - token = os.environ['GITHUB_TOKEN'] - repo_name = os.environ['GITHUB_REPOSITORY'] + token = os.environ["GITHUB_TOKEN"] + repo_name = os.environ["GITHUB_REPOSITORY"] g = Github(token) repo = g.get_repo(repo_name) global gh @@ -536,8 +527,7 @@ def process(): # Therefore, we don't support specifying the package until the previously started process has been successfully completed. if pending_tags and package_name: pending_packages = [tag.package.name for tag in pending_tags] - raise Exception( - f"Cannot release package {package_name}. Pending release for {pending_packages}") + raise Exception(f"Cannot release package {package_name}. Pending release for {pending_packages}") if pending_tags: print("Found pending tags from previous executions, entering recovery mode.") @@ -550,8 +540,7 @@ def process(): if package_name: packages = [package for package in packages if package.name == package_name] - pending_tags = retry_function( - func=lambda: update_changelogs(packages), cleanup=reset_repository) + pending_tags = retry_function(func=lambda: update_changelogs(packages), cleanup=reset_repository) push_tags(pending_tags) @@ -559,8 +548,7 @@ def validate_git_root(): """ Validate that the script is run from the root of the repository. """ - repo_root = subprocess.check_output(["git", "rev-parse", - "--show-toplevel"]).strip().decode("utf-8") + repo_root = subprocess.check_output(["git", "rev-parse", "--show-toplevel"]).strip().decode("utf-8") current_dir = subprocess.check_output(["pwd"]).strip().decode("utf-8") if repo_root != current_dir: raise Exception("Please run this script from the root of the repository.")