From 675c45b76a2068dff09ce4351d8062a95af107be Mon Sep 17 00:00:00 2001 From: aravind-segu Date: Thu, 3 Oct 2024 17:02:56 -0700 Subject: [PATCH 01/16] [ML-45784]Add open ai client mixin --- .codegen/__init__.py.tmpl | 3 ++- databricks/sdk/__init__.py | 3 ++- databricks/sdk/mixins/open_ai_client.py | 16 ++++++++++++++++ tests/test_open_ai_mixin.py | 13 +++++++++++++ 4 files changed, 33 insertions(+), 2 deletions(-) create mode 100644 databricks/sdk/mixins/open_ai_client.py create mode 100644 tests/test_open_ai_mixin.py diff --git a/.codegen/__init__.py.tmpl b/.codegen/__init__.py.tmpl index 5ca160685..bc68f5654 100644 --- a/.codegen/__init__.py.tmpl +++ b/.codegen/__init__.py.tmpl @@ -5,6 +5,7 @@ from databricks.sdk.credentials_provider import CredentialsStrategy from databricks.sdk.mixins.files import DbfsExt from databricks.sdk.mixins.compute import ClustersExt from databricks.sdk.mixins.workspace import WorkspaceExt +from databricks.sdk.mixins.open_ai_client import ServingEndpointsExt {{- range .Services}} from databricks.sdk.service.{{.Package.Name}} import {{.PascalName}}API{{end}} from databricks.sdk.service.provisioning import Workspace @@ -17,7 +18,7 @@ from typing import Optional "google_credentials" "google_service_account" }} {{- define "api" -}} - {{- $mixins := dict "ClustersAPI" "ClustersExt" "DbfsAPI" "DbfsExt" "WorkspaceAPI" "WorkspaceExt" -}} + {{- $mixins := dict "ClustersAPI" "ClustersExt" "DbfsAPI" "DbfsExt" "WorkspaceAPI" "WorkspaceExt" "ServingEndpointsExt" "ServingEndpointsApi" -}} {{- $genApi := concat .PascalName "API" -}} {{- getOrDefault $mixins $genApi $genApi -}} {{- end -}} diff --git a/databricks/sdk/__init__.py b/databricks/sdk/__init__.py index 848272198..a4058ec51 100755 --- a/databricks/sdk/__init__.py +++ b/databricks/sdk/__init__.py @@ -6,6 +6,7 @@ from databricks.sdk.credentials_provider import CredentialsStrategy from databricks.sdk.mixins.compute import ClustersExt from databricks.sdk.mixins.files import DbfsExt +from 
databricks.sdk.mixins.open_ai_client import ServingEndpointsExt from databricks.sdk.mixins.workspace import WorkspaceExt from databricks.sdk.service.apps import AppsAPI from databricks.sdk.service.billing import (BillableUsageAPI, BudgetsAPI, @@ -175,7 +176,7 @@ def __init__(self, self._config = config.copy() self._dbutils = _make_dbutils(self._config) self._api_client = client.ApiClient(self._config) - serving_endpoints = ServingEndpointsAPI(self._api_client) + serving_endpoints = ServingEndpointsExt(self._api_client) self._account_access_control_proxy = AccountAccessControlProxyAPI(self._api_client) self._alerts = AlertsAPI(self._api_client) self._alerts_legacy = AlertsLegacyAPI(self._api_client) diff --git a/databricks/sdk/mixins/open_ai_client.py b/databricks/sdk/mixins/open_ai_client.py new file mode 100644 index 000000000..6eb5f28ae --- /dev/null +++ b/databricks/sdk/mixins/open_ai_client.py @@ -0,0 +1,16 @@ +from databricks.sdk.service.serving import ServingEndpointsAPI + +class ServingEndpointsExt(ServingEndpointsAPI): + def get_open_api_client(self): + auth_headers = self._api._cfg.authenticate() + + try: + token = auth_headers["Authorization"][len("Bearer "):] + except Exception: + raise ValueError("Unable to extract authorization token for OpenAI Client") + + from openai import OpenAI + return OpenAI( + base_url=self._api._cfg.host + "/serving-endpoints", + api_key=token + ) \ No newline at end of file diff --git a/tests/test_open_ai_mixin.py b/tests/test_open_ai_mixin.py new file mode 100644 index 000000000..930fa01d2 --- /dev/null +++ b/tests/test_open_ai_mixin.py @@ -0,0 +1,13 @@ +import pytest +from databricks.sdk.core import Config + +def test_open_ai_client(monkeypatch): + from databricks.sdk import WorkspaceClient + + monkeypatch.setenv('DATABRICKS_HOST', 'test_host') + monkeypatch.setenv('DATABRICKS_TOKEN', 'test_token') + w = WorkspaceClient(config=Config()) + client = w.serving_endpoints.get_open_api_client() + + assert client.base_url == 
"https://test_host/serving-endpoints/" + assert client.api_key == "test_token" \ No newline at end of file From 2822f299b72658e00058ae504e965e1d09cc4d1e Mon Sep 17 00:00:00 2001 From: aravind-segu Date: Thu, 3 Oct 2024 17:14:32 -0700 Subject: [PATCH 02/16] [ML-45784]Add setup file Signed-off-by: aravind-segu --- setup.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 9cfe38d09..a51039f75 100644 --- a/setup.py +++ b/setup.py @@ -18,7 +18,8 @@ "yapf", "pycodestyle", "autoflake", "isort", "wheel", "ipython", "ipywidgets", "requests-mock", "pyfakefs", "databricks-connect", "pytest-rerunfailures"], - "notebook": ["ipython>=8,<9", "ipywidgets>=8,<9"]}, + "notebook": ["ipython>=8,<9", "ipywidgets>=8,<9"], + "openai": ["openai"]}, author="Serge Smertin", author_email="serge.smertin@databricks.com", description="Databricks SDK for Python (Beta)", From 2e7d6d4cde8701c6eca39b927fa7e7134200ab90 Mon Sep 17 00:00:00 2001 From: aravind-segu Date: Thu, 3 Oct 2024 17:50:16 -0700 Subject: [PATCH 03/16] [ML-45784]Format fixes --- databricks/sdk/mixins/open_ai_client.py | 7 +++---- tests/test_open_ai_mixin.py | 4 ++-- 2 files changed, 5 insertions(+), 6 deletions(-) diff --git a/databricks/sdk/mixins/open_ai_client.py b/databricks/sdk/mixins/open_ai_client.py index 6eb5f28ae..572702e1e 100644 --- a/databricks/sdk/mixins/open_ai_client.py +++ b/databricks/sdk/mixins/open_ai_client.py @@ -1,6 +1,8 @@ from databricks.sdk.service.serving import ServingEndpointsAPI + class ServingEndpointsExt(ServingEndpointsAPI): + def get_open_api_client(self): auth_headers = self._api._cfg.authenticate() @@ -10,7 +12,4 @@ def get_open_api_client(self): raise ValueError("Unable to extract authorization token for OpenAI Client") from openai import OpenAI - return OpenAI( - base_url=self._api._cfg.host + "/serving-endpoints", - api_key=token - ) \ No newline at end of file + return OpenAI(base_url=self._api._cfg.host + "/serving-endpoints", api_key=token) diff 
--git a/tests/test_open_ai_mixin.py b/tests/test_open_ai_mixin.py index 930fa01d2..6c0642d7b 100644 --- a/tests/test_open_ai_mixin.py +++ b/tests/test_open_ai_mixin.py @@ -1,6 +1,6 @@ -import pytest from databricks.sdk.core import Config + def test_open_ai_client(monkeypatch): from databricks.sdk import WorkspaceClient @@ -10,4 +10,4 @@ def test_open_ai_client(monkeypatch): client = w.serving_endpoints.get_open_api_client() assert client.base_url == "https://test_host/serving-endpoints/" - assert client.api_key == "test_token" \ No newline at end of file + assert client.api_key == "test_token" From 8029bba17436954d0921118b8b98412e2bd15492 Mon Sep 17 00:00:00 2001 From: aravind-segu Date: Thu, 3 Oct 2024 18:01:46 -0700 Subject: [PATCH 04/16] Add open ai to dev --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index a51039f75..5c9df231e 100644 --- a/setup.py +++ b/setup.py @@ -17,7 +17,7 @@ extras_require={"dev": ["pytest", "pytest-cov", "pytest-xdist", "pytest-mock", "yapf", "pycodestyle", "autoflake", "isort", "wheel", "ipython", "ipywidgets", "requests-mock", "pyfakefs", - "databricks-connect", "pytest-rerunfailures"], + "databricks-connect", "pytest-rerunfailures", "openai"], "notebook": ["ipython>=8,<9", "ipywidgets>=8,<9"], "openai": ["openai"]}, author="Serge Smertin", From deda35dafca2b8c6b8b70ba858eb55cd4bb82829 Mon Sep 17 00:00:00 2001 From: aravind-segu Date: Thu, 3 Oct 2024 21:26:09 -0700 Subject: [PATCH 05/16] Add Langchain Open AI Client --- databricks/sdk/mixins/open_ai_client.py | 15 ++++++++++++++- setup.py | 5 +++-- tests/test_open_ai_mixin.py | 14 +++++++++++++- 3 files changed, 30 insertions(+), 4 deletions(-) diff --git a/databricks/sdk/mixins/open_ai_client.py b/databricks/sdk/mixins/open_ai_client.py index 572702e1e..5bd61268c 100644 --- a/databricks/sdk/mixins/open_ai_client.py +++ b/databricks/sdk/mixins/open_ai_client.py @@ -3,7 +3,7 @@ class ServingEndpointsExt(ServingEndpointsAPI): - def 
get_open_api_client(self): + def get_open_ai_client(self): auth_headers = self._api._cfg.authenticate() try: @@ -13,3 +13,16 @@ def get_open_api_client(self): from openai import OpenAI return OpenAI(base_url=self._api._cfg.host + "/serving-endpoints", api_key=token) + + def get_langchain_chat_open_ai_client(self, model): + auth_headers = self._api._cfg.authenticate() + + try: + token = auth_headers["Authorization"][len("Bearer "):] + except Exception: + raise ValueError("Unable to extract authorization token for Langchain OpenAI Client") + + from langchain_openai import ChatOpenAI + return ChatOpenAI(model=model, + openai_api_base=self._api._cfg.host + "/serving-endpoints", + openai_api_key=token) diff --git a/setup.py b/setup.py index 5c9df231e..03e29a3bb 100644 --- a/setup.py +++ b/setup.py @@ -17,9 +17,10 @@ extras_require={"dev": ["pytest", "pytest-cov", "pytest-xdist", "pytest-mock", "yapf", "pycodestyle", "autoflake", "isort", "wheel", "ipython", "ipywidgets", "requests-mock", "pyfakefs", - "databricks-connect", "pytest-rerunfailures", "openai"], + "databricks-connect", "pytest-rerunfailures", "openai", + "langchain-openai"], "notebook": ["ipython>=8,<9", "ipywidgets>=8,<9"], - "openai": ["openai"]}, + "openai": ["openai", "langchain-openai"]}, author="Serge Smertin", author_email="serge.smertin@databricks.com", description="Databricks SDK for Python (Beta)", diff --git a/tests/test_open_ai_mixin.py b/tests/test_open_ai_mixin.py index 6c0642d7b..abe1c20fb 100644 --- a/tests/test_open_ai_mixin.py +++ b/tests/test_open_ai_mixin.py @@ -7,7 +7,19 @@ def test_open_ai_client(monkeypatch): monkeypatch.setenv('DATABRICKS_HOST', 'test_host') monkeypatch.setenv('DATABRICKS_TOKEN', 'test_token') w = WorkspaceClient(config=Config()) - client = w.serving_endpoints.get_open_api_client() + client = w.serving_endpoints.get_open_ai_client() assert client.base_url == "https://test_host/serving-endpoints/" assert client.api_key == "test_token" + + +def 
test_langchain_open_ai_client(monkeypatch): + from databricks.sdk import WorkspaceClient + + monkeypatch.setenv('DATABRICKS_HOST', 'test_host') + monkeypatch.setenv('DATABRICKS_TOKEN', 'test_token') + w = WorkspaceClient(config=Config()) + client = w.serving_endpoints.get_langchain_chat_open_ai_client("databricks-meta-llama-3-1-70b-instruct") + + assert client.openai_api_base == "https://test_host/serving-endpoints" + assert client.model_name == "databricks-meta-llama-3-1-70b-instruct" \ No newline at end of file From 2d3ec7d5e0cc587324fa4218d1e0d2775c44ab3e Mon Sep 17 00:00:00 2001 From: aravind-segu Date: Thu, 3 Oct 2024 21:41:53 -0700 Subject: [PATCH 06/16] Skip langchain test for less than 3.7 --- databricks/sdk/mixins/open_ai_client.py | 16 ++++++++++++++-- setup.py | 2 +- tests/test_open_ai_mixin.py | 7 ++++++- 3 files changed, 21 insertions(+), 4 deletions(-) diff --git a/databricks/sdk/mixins/open_ai_client.py b/databricks/sdk/mixins/open_ai_client.py index 5bd61268c..084983cad 100644 --- a/databricks/sdk/mixins/open_ai_client.py +++ b/databricks/sdk/mixins/open_ai_client.py @@ -11,18 +11,30 @@ def get_open_ai_client(self): except Exception: raise ValueError("Unable to extract authorization token for OpenAI Client") - from openai import OpenAI + try: + from openai import OpenAI + except Exception: + raise ImportError( + "Open AI is not installed. Please install the Databricks SDK with the following command `pip isntall databricks-sdk[openai]`" + ) + return OpenAI(base_url=self._api._cfg.host + "/serving-endpoints", api_key=token) def get_langchain_chat_open_ai_client(self, model): auth_headers = self._api._cfg.authenticate() + try: + from langchain_openai import ChatOpenAI + except Exception: + raise ImportError( + "Langchain Open AI is not installed. 
Please install the Databricks SDK with the following command `pip isntall databricks-sdk[openai]` and ensure you are using python>3.7" + ) + try: token = auth_headers["Authorization"][len("Bearer "):] except Exception: raise ValueError("Unable to extract authorization token for Langchain OpenAI Client") - from langchain_openai import ChatOpenAI return ChatOpenAI(model=model, openai_api_base=self._api._cfg.host + "/serving-endpoints", openai_api_key=token) diff --git a/setup.py b/setup.py index 03e29a3bb..51dcd844d 100644 --- a/setup.py +++ b/setup.py @@ -20,7 +20,7 @@ "databricks-connect", "pytest-rerunfailures", "openai", "langchain-openai"], "notebook": ["ipython>=8,<9", "ipywidgets>=8,<9"], - "openai": ["openai", "langchain-openai"]}, + "openai": ["openai", 'langchain-openai; python_version > "3.7"']}, author="Serge Smertin", author_email="serge.smertin@databricks.com", description="Databricks SDK for Python (Beta)", diff --git a/tests/test_open_ai_mixin.py b/tests/test_open_ai_mixin.py index abe1c20fb..2d184267e 100644 --- a/tests/test_open_ai_mixin.py +++ b/tests/test_open_ai_mixin.py @@ -1,3 +1,7 @@ +import sys + +import pytest + from databricks.sdk.core import Config @@ -13,6 +17,7 @@ def test_open_ai_client(monkeypatch): assert client.api_key == "test_token" +@pytest.mark.skipif(sys.version_info <= (3, 7), reason="Requires Python > 3.7") def test_langchain_open_ai_client(monkeypatch): from databricks.sdk import WorkspaceClient @@ -22,4 +27,4 @@ def test_langchain_open_ai_client(monkeypatch): client = w.serving_endpoints.get_langchain_chat_open_ai_client("databricks-meta-llama-3-1-70b-instruct") assert client.openai_api_base == "https://test_host/serving-endpoints" - assert client.model_name == "databricks-meta-llama-3-1-70b-instruct" \ No newline at end of file + assert client.model_name == "databricks-meta-llama-3-1-70b-instruct" From 295a5c3e3bc94e934115fb0c0b18cb900711a564 Mon Sep 17 00:00:00 2001 From: aravind-segu Date: Thu, 3 Oct 2024 21:45:38 -0700 
Subject: [PATCH 07/16] Update setup.py for dev --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 51dcd844d..fe5652d4d 100644 --- a/setup.py +++ b/setup.py @@ -18,7 +18,7 @@ "yapf", "pycodestyle", "autoflake", "isort", "wheel", "ipython", "ipywidgets", "requests-mock", "pyfakefs", "databricks-connect", "pytest-rerunfailures", "openai", - "langchain-openai"], + 'langchain-openai; python_version > "3.7"'], "notebook": ["ipython>=8,<9", "ipywidgets>=8,<9"], "openai": ["openai", 'langchain-openai; python_version > "3.7"']}, author="Serge Smertin", From 59269c1f31ee84836618612a8e0152f167c31bc6 Mon Sep 17 00:00:00 2001 From: aravind-segu Date: Thu, 3 Oct 2024 21:46:57 -0700 Subject: [PATCH 08/16] remove unneccessary files --- .../settings/disable_legacy_features.rst | 60 ------------------ .../catalog/temporary_table_credentials.rst | 36 ----------- .../settings/disable_legacy_access.rst | 61 ------------------- 3 files changed, 157 deletions(-) delete mode 100644 docs/account/settings/disable_legacy_features.rst delete mode 100644 docs/workspace/catalog/temporary_table_credentials.rst delete mode 100644 docs/workspace/settings/disable_legacy_access.rst diff --git a/docs/account/settings/disable_legacy_features.rst b/docs/account/settings/disable_legacy_features.rst deleted file mode 100644 index d7f1db9d3..000000000 --- a/docs/account/settings/disable_legacy_features.rst +++ /dev/null @@ -1,60 +0,0 @@ -``a.settings.disable_legacy_features``: Disable Legacy Features -=============================================================== -.. currentmodule:: databricks.sdk.service.settings - -.. py:class:: DisableLegacyFeaturesAPI - - Disable legacy features for new Databricks workspaces. - - For newly created workspaces: 1. Disables the use of DBFS root and mounts. 2. Hive Metastore will not be - provisioned. 3. Disables the use of ‘No-isolation clusters’. 4. Disables Databricks Runtime versions - prior to 13.3LTS. - - .. 
py:method:: delete( [, etag: Optional[str]]) -> DeleteDisableLegacyFeaturesResponse - - Delete the disable legacy features setting. - - Deletes the disable legacy features setting. - - :param etag: str (optional) - etag used for versioning. The response is at least as fresh as the eTag provided. This is used for - optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting - each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern - to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET - request, and pass it with the DELETE request to identify the rule set version you are deleting. - - :returns: :class:`DeleteDisableLegacyFeaturesResponse` - - - .. py:method:: get( [, etag: Optional[str]]) -> DisableLegacyFeatures - - Get the disable legacy features setting. - - Gets the value of the disable legacy features setting. - - :param etag: str (optional) - etag used for versioning. The response is at least as fresh as the eTag provided. This is used for - optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting - each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern - to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET - request, and pass it with the DELETE request to identify the rule set version you are deleting. - - :returns: :class:`DisableLegacyFeatures` - - - .. py:method:: update(allow_missing: bool, setting: DisableLegacyFeatures, field_mask: str) -> DisableLegacyFeatures - - Update the disable legacy features setting. - - Updates the value of the disable legacy features setting. - - :param allow_missing: bool - This should always be set to true for Settings API. Added for AIP compliance. 
- :param setting: :class:`DisableLegacyFeatures` - :param field_mask: str - Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the - setting payload will be updated. The field mask needs to be supplied as single string. To specify - multiple fields in the field mask, use comma as the separator (no space). - - :returns: :class:`DisableLegacyFeatures` - \ No newline at end of file diff --git a/docs/workspace/catalog/temporary_table_credentials.rst b/docs/workspace/catalog/temporary_table_credentials.rst deleted file mode 100644 index 1acd462b7..000000000 --- a/docs/workspace/catalog/temporary_table_credentials.rst +++ /dev/null @@ -1,36 +0,0 @@ -``w.temporary_table_credentials``: Temporary Table Credentials -============================================================== -.. currentmodule:: databricks.sdk.service.catalog - -.. py:class:: TemporaryTableCredentialsAPI - - Temporary Table Credentials refer to short-lived, downscoped credentials used to access cloud storage - locationswhere table data is stored in Databricks. These credentials are employed to provide secure and - time-limitedaccess to data in cloud environments such as AWS, Azure, and Google Cloud. Each cloud provider - has its own typeof credentials: AWS uses temporary session tokens via AWS Security Token Service (STS), - Azure utilizesShared Access Signatures (SAS) for its data storage services, and Google Cloud supports - temporary credentialsthrough OAuth 2.0.Temporary table credentials ensure that data access is limited in - scope and duration, reducing the risk ofunauthorized access or misuse. To use the temporary table - credentials API, a metastore admin needs to enable the external_access_enabled flag (off by default) at - the metastore level, and user needs to be granted the EXTERNAL USE SCHEMA permission at the schema level - by catalog admin. 
Note that EXTERNAL USE SCHEMA is a schema level permission that can only be granted by - catalog admin explicitly and is not included in schema ownership or ALL PRIVILEGES on the schema for - security reason. - - .. py:method:: generate_temporary_table_credentials( [, operation: Optional[TableOperation], table_id: Optional[str]]) -> GenerateTemporaryTableCredentialResponse - - Generate a temporary table credential. - - Get a short-lived credential for directly accessing the table data on cloud storage. The metastore - must have external_access_enabled flag set to true (default false). The caller must have - EXTERNAL_USE_SCHEMA privilege on the parent schema and this privilege can only be granted by catalog - owners. - - :param operation: :class:`TableOperation` (optional) - The operation performed against the table data, either READ or READ_WRITE. If READ_WRITE is - specified, the credentials returned will have write permissions, otherwise, it will be read only. - :param table_id: str (optional) - UUID of the table to read or write. - - :returns: :class:`GenerateTemporaryTableCredentialResponse` - \ No newline at end of file diff --git a/docs/workspace/settings/disable_legacy_access.rst b/docs/workspace/settings/disable_legacy_access.rst deleted file mode 100644 index c8baba3a7..000000000 --- a/docs/workspace/settings/disable_legacy_access.rst +++ /dev/null @@ -1,61 +0,0 @@ -``w.settings.disable_legacy_access``: Disable Legacy Access -=========================================================== -.. currentmodule:: databricks.sdk.service.settings - -.. py:class:: DisableLegacyAccessAPI - - 'Disabling legacy access' has the following impacts: - - 1. Disables direct access to the Hive Metastore. However, you can still access Hive Metastore through HMS - Federation. 2. Disables Fallback Mode (docs link) on any External Location access from the workspace. 3. - Alters DBFS path access to use External Location permissions in place of legacy credentials. 4. 
Enforces - Unity Catalog access on all path based access. - - .. py:method:: delete( [, etag: Optional[str]]) -> DeleteDisableLegacyAccessResponse - - Delete Legacy Access Disablement Status. - - Deletes legacy access disablement status. - - :param etag: str (optional) - etag used for versioning. The response is at least as fresh as the eTag provided. This is used for - optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting - each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern - to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET - request, and pass it with the DELETE request to identify the rule set version you are deleting. - - :returns: :class:`DeleteDisableLegacyAccessResponse` - - - .. py:method:: get( [, etag: Optional[str]]) -> DisableLegacyAccess - - Retrieve Legacy Access Disablement Status. - - Retrieves legacy access disablement Status. - - :param etag: str (optional) - etag used for versioning. The response is at least as fresh as the eTag provided. This is used for - optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting - each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern - to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET - request, and pass it with the DELETE request to identify the rule set version you are deleting. - - :returns: :class:`DisableLegacyAccess` - - - .. py:method:: update(allow_missing: bool, setting: DisableLegacyAccess, field_mask: str) -> DisableLegacyAccess - - Update Legacy Access Disablement Status. - - Updates legacy access disablement status. - - :param allow_missing: bool - This should always be set to true for Settings API. Added for AIP compliance. 
- :param setting: :class:`DisableLegacyAccess` - :param field_mask: str - Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the - setting payload will be updated. The field mask needs to be supplied as single string. To specify - multiple fields in the field mask, use comma as the separator (no space). - - :returns: :class:`DisableLegacyAccess` - \ No newline at end of file From c92a0e96ec15bf26f9d8d996414f6623158a4f8e Mon Sep 17 00:00:00 2001 From: aravind-segu Date: Thu, 3 Oct 2024 21:57:53 -0700 Subject: [PATCH 09/16] handle python versions in tests --- tests/test_open_ai_mixin.py | 26 ++++++++++++++------------ 1 file changed, 14 insertions(+), 12 deletions(-) diff --git a/tests/test_open_ai_mixin.py b/tests/test_open_ai_mixin.py index 2d184267e..287947b03 100644 --- a/tests/test_open_ai_mixin.py +++ b/tests/test_open_ai_mixin.py @@ -1,7 +1,4 @@ import sys - -import pytest - from databricks.sdk.core import Config @@ -17,14 +14,19 @@ def test_open_ai_client(monkeypatch): assert client.api_key == "test_token" -@pytest.mark.skipif(sys.version_info <= (3, 7), reason="Requires Python > 3.7") def test_langchain_open_ai_client(monkeypatch): from databricks.sdk import WorkspaceClient - - monkeypatch.setenv('DATABRICKS_HOST', 'test_host') - monkeypatch.setenv('DATABRICKS_TOKEN', 'test_token') - w = WorkspaceClient(config=Config()) - client = w.serving_endpoints.get_langchain_chat_open_ai_client("databricks-meta-llama-3-1-70b-instruct") - - assert client.openai_api_base == "https://test_host/serving-endpoints" - assert client.model_name == "databricks-meta-llama-3-1-70b-instruct" + print(sys.version_info) + print(sys.version_info <= (3,7)) + if sys.version_info <= (3, 7): + with pytest.raises(ImportError): + w = WorkspaceClient(config=Config()) + client = w.serving_endpoints.get_langchain_chat_open_ai_client("databricks-meta-llama-3-1-70b-instruct") + else: + monkeypatch.setenv('DATABRICKS_HOST', 'test_host') + 
monkeypatch.setenv('DATABRICKS_TOKEN', 'test_token') + w = WorkspaceClient(config=Config()) + client = w.serving_endpoints.get_langchain_chat_open_ai_client("databricks-meta-llama-3-1-70b-instruct") + + assert client.openai_api_base == "https://test_host/serving-endpoints" + assert client.model_name == "databricks-meta-llama-3-1-70b-instruct" From b11d127bf004bd50a55e8d5858862204a0ddbd0c Mon Sep 17 00:00:00 2001 From: aravind-segu Date: Thu, 3 Oct 2024 22:02:36 -0700 Subject: [PATCH 10/16] Skip if less than 3.8 --- tests/test_open_ai_mixin.py | 26 ++++++++++++-------------- 1 file changed, 12 insertions(+), 14 deletions(-) diff --git a/tests/test_open_ai_mixin.py b/tests/test_open_ai_mixin.py index 287947b03..6c9620ad3 100644 --- a/tests/test_open_ai_mixin.py +++ b/tests/test_open_ai_mixin.py @@ -1,4 +1,7 @@ import sys + +import pytest + from databricks.sdk.core import Config @@ -14,19 +17,14 @@ def test_open_ai_client(monkeypatch): assert client.api_key == "test_token" +@pytest.mark.skipif(sys.version_info < (3, 8), reason="Requires Python > 3.7") def test_langchain_open_ai_client(monkeypatch): from databricks.sdk import WorkspaceClient - print(sys.version_info) - print(sys.version_info <= (3,7)) - if sys.version_info <= (3, 7): - with pytest.raises(ImportError): - w = WorkspaceClient(config=Config()) - client = w.serving_endpoints.get_langchain_chat_open_ai_client("databricks-meta-llama-3-1-70b-instruct") - else: - monkeypatch.setenv('DATABRICKS_HOST', 'test_host') - monkeypatch.setenv('DATABRICKS_TOKEN', 'test_token') - w = WorkspaceClient(config=Config()) - client = w.serving_endpoints.get_langchain_chat_open_ai_client("databricks-meta-llama-3-1-70b-instruct") - - assert client.openai_api_base == "https://test_host/serving-endpoints" - assert client.model_name == "databricks-meta-llama-3-1-70b-instruct" + + monkeypatch.setenv('DATABRICKS_HOST', 'test_host') + monkeypatch.setenv('DATABRICKS_TOKEN', 'test_token') + w = WorkspaceClient(config=Config()) + client = 
w.serving_endpoints.get_langchain_chat_open_ai_client("databricks-meta-llama-3-1-70b-instruct") + + assert client.openai_api_base == "https://test_host/serving-endpoints" + assert client.model_name == "databricks-meta-llama-3-1-70b-instruct" From 64631ae0e999b1f77809605aa3a4449574efecdf Mon Sep 17 00:00:00 2001 From: aravind-segu Date: Fri, 4 Oct 2024 16:38:12 -0700 Subject: [PATCH 11/16] Use https client for authorization on request --- databricks/sdk/mixins/open_ai_client.py | 47 ++++++++++++++++--------- tests/test_open_ai_mixin.py | 12 +++---- 2 files changed, 36 insertions(+), 23 deletions(-) diff --git a/databricks/sdk/mixins/open_ai_client.py b/databricks/sdk/mixins/open_ai_client.py index 084983cad..a8045029c 100644 --- a/databricks/sdk/mixins/open_ai_client.py +++ b/databricks/sdk/mixins/open_ai_client.py @@ -1,16 +1,31 @@ +import httpx + from databricks.sdk.service.serving import ServingEndpointsAPI class ServingEndpointsExt(ServingEndpointsAPI): - def get_open_ai_client(self): - auth_headers = self._api._cfg.authenticate() + # Using the HTTP Client to pass in the databricks authorization + # This method will be called on every invocation, so when using with model serving will always get the refreshed token + def _get_authorized_http_client(self): - try: - token = auth_headers["Authorization"][len("Bearer "):] - except Exception: - raise ValueError("Unable to extract authorization token for OpenAI Client") + class BearerAuth(httpx.Auth): + + def __init__(self, get_headers_func): + self.get_headers_func = get_headers_func + + def auth_flow(self, request: httpx.Request) -> httpx.Request: + auth_headers = self.get_headers_func() + request.headers["Authorization"] = auth_headers["Authorization"] + yield request + + databricks_token_auth = BearerAuth(self._api._cfg.authenticate) + # Create an HTTP client with Bearer Token authentication + http_client = httpx.Client(auth=databricks_token_auth) + return http_client + + def get_open_ai_client(self): try: from 
openai import OpenAI except Exception: @@ -18,11 +33,12 @@ def get_open_ai_client(self): "Open AI is not installed. Please install the Databricks SDK with the following command `pip isntall databricks-sdk[openai]`" ) - return OpenAI(base_url=self._api._cfg.host + "/serving-endpoints", api_key=token) + return OpenAI( + base_url=self._api._cfg.host + "/serving-endpoints", + api_key="no-token", # Passing in a placeholder to pass validations, this will not be used + http_client=self._get_authorized_http_client()) def get_langchain_chat_open_ai_client(self, model): - auth_headers = self._api._cfg.authenticate() - try: from langchain_openai import ChatOpenAI except Exception: @@ -30,11 +46,8 @@ def get_langchain_chat_open_ai_client(self, model): "Langchain Open AI is not installed. Please install the Databricks SDK with the following command `pip isntall databricks-sdk[openai]` and ensure you are using python>3.7" ) - try: - token = auth_headers["Authorization"][len("Bearer "):] - except Exception: - raise ValueError("Unable to extract authorization token for Langchain OpenAI Client") - - return ChatOpenAI(model=model, - openai_api_base=self._api._cfg.host + "/serving-endpoints", - openai_api_key=token) + return ChatOpenAI( + model=model, + openai_api_base=self._api._cfg.host + "/serving-endpoints", + api_key="no-token", # Passing in a placeholder to pass validations, this will not be used + http_client=self._get_authorized_http_client()) diff --git a/tests/test_open_ai_mixin.py b/tests/test_open_ai_mixin.py index 6c9620ad3..62c16eb1e 100644 --- a/tests/test_open_ai_mixin.py +++ b/tests/test_open_ai_mixin.py @@ -11,10 +11,10 @@ def test_open_ai_client(monkeypatch): monkeypatch.setenv('DATABRICKS_HOST', 'test_host') monkeypatch.setenv('DATABRICKS_TOKEN', 'test_token') w = WorkspaceClient(config=Config()) - client = w.serving_endpoints.get_open_ai_client() + w.serving_endpoints.get_open_ai_client() - assert client.base_url == "https://test_host/serving-endpoints/" - assert 
client.api_key == "test_token" + # assert client.base_url == "https://test_host/serving-endpoints/" + # assert client.api_key == "test_token" @pytest.mark.skipif(sys.version_info < (3, 8), reason="Requires Python > 3.7") @@ -24,7 +24,7 @@ def test_langchain_open_ai_client(monkeypatch): monkeypatch.setenv('DATABRICKS_HOST', 'test_host') monkeypatch.setenv('DATABRICKS_TOKEN', 'test_token') w = WorkspaceClient(config=Config()) - client = w.serving_endpoints.get_langchain_chat_open_ai_client("databricks-meta-llama-3-1-70b-instruct") + w.serving_endpoints.get_langchain_chat_open_ai_client("databricks-meta-llama-3-1-70b-instruct") - assert client.openai_api_base == "https://test_host/serving-endpoints" - assert client.model_name == "databricks-meta-llama-3-1-70b-instruct" + # assert client.openai_api_base == "https://test_host/serving-endpoints" + # assert client.model_name == "databricks-meta-llama-3-1-70b-instruct" From dec6de5a412ea7194320c5ecfdf8b872fab4d575 Mon Sep 17 00:00:00 2001 From: aravind-segu Date: Fri, 4 Oct 2024 16:39:46 -0700 Subject: [PATCH 12/16] add print statement to verify --- databricks/sdk/mixins/open_ai_client.py | 1 + 1 file changed, 1 insertion(+) diff --git a/databricks/sdk/mixins/open_ai_client.py b/databricks/sdk/mixins/open_ai_client.py index a8045029c..45dce6eb9 100644 --- a/databricks/sdk/mixins/open_ai_client.py +++ b/databricks/sdk/mixins/open_ai_client.py @@ -15,6 +15,7 @@ def __init__(self, get_headers_func): self.get_headers_func = get_headers_func def auth_flow(self, request: httpx.Request) -> httpx.Request: + print("Calling Authenticate") auth_headers = self.get_headers_func() request.headers["Authorization"] = auth_headers["Authorization"] yield request From 83618ec20f7344bfd3b8e3fb64b4f354627139b7 Mon Sep 17 00:00:00 2001 From: aravind-segu Date: Fri, 4 Oct 2024 16:44:30 -0700 Subject: [PATCH 13/16] Update tests --- databricks/sdk/mixins/open_ai_client.py | 1 - tests/test_open_ai_mixin.py | 12 ++++++------ 2 files changed, 6 
insertions(+), 7 deletions(-) diff --git a/databricks/sdk/mixins/open_ai_client.py b/databricks/sdk/mixins/open_ai_client.py index 45dce6eb9..a8045029c 100644 --- a/databricks/sdk/mixins/open_ai_client.py +++ b/databricks/sdk/mixins/open_ai_client.py @@ -15,7 +15,6 @@ def __init__(self, get_headers_func): self.get_headers_func = get_headers_func def auth_flow(self, request: httpx.Request) -> httpx.Request: - print("Calling Authenticate") auth_headers = self.get_headers_func() request.headers["Authorization"] = auth_headers["Authorization"] yield request diff --git a/tests/test_open_ai_mixin.py b/tests/test_open_ai_mixin.py index 62c16eb1e..1858c66cb 100644 --- a/tests/test_open_ai_mixin.py +++ b/tests/test_open_ai_mixin.py @@ -11,10 +11,10 @@ def test_open_ai_client(monkeypatch): monkeypatch.setenv('DATABRICKS_HOST', 'test_host') monkeypatch.setenv('DATABRICKS_TOKEN', 'test_token') w = WorkspaceClient(config=Config()) - w.serving_endpoints.get_open_ai_client() + client = w.serving_endpoints.get_open_ai_client() - # assert client.base_url == "https://test_host/serving-endpoints/" - # assert client.api_key == "test_token" + assert client.base_url == "https://test_host/serving-endpoints/" + assert client.api_key == "no-token" @pytest.mark.skipif(sys.version_info < (3, 8), reason="Requires Python > 3.7") @@ -24,7 +24,7 @@ def test_langchain_open_ai_client(monkeypatch): monkeypatch.setenv('DATABRICKS_HOST', 'test_host') monkeypatch.setenv('DATABRICKS_TOKEN', 'test_token') w = WorkspaceClient(config=Config()) - w.serving_endpoints.get_langchain_chat_open_ai_client("databricks-meta-llama-3-1-70b-instruct") + client = w.serving_endpoints.get_langchain_chat_open_ai_client("databricks-meta-llama-3-1-70b-instruct") - # assert client.openai_api_base == "https://test_host/serving-endpoints" - # assert client.model_name == "databricks-meta-llama-3-1-70b-instruct" + assert client.openai_api_base == "https://test_host/serving-endpoints" + assert client.model_name == 
"databricks-meta-llama-3-1-70b-instruct" From f7b4a18b3969eb6f8b61510f4f987e0ba3cf507a Mon Sep 17 00:00:00 2001 From: aravind-segu Date: Fri, 4 Oct 2024 16:48:35 -0700 Subject: [PATCH 14/16] Add httpx to setup --- databricks/sdk/mixins/open_ai_client.py | 3 +-- setup.py | 4 ++-- 2 files changed, 3 insertions(+), 4 deletions(-) diff --git a/databricks/sdk/mixins/open_ai_client.py b/databricks/sdk/mixins/open_ai_client.py index a8045029c..f7a8af02d 100644 --- a/databricks/sdk/mixins/open_ai_client.py +++ b/databricks/sdk/mixins/open_ai_client.py @@ -1,5 +1,3 @@ -import httpx - from databricks.sdk.service.serving import ServingEndpointsAPI @@ -8,6 +6,7 @@ class ServingEndpointsExt(ServingEndpointsAPI): # Using the HTTP Client to pass in the databricks authorization # This method will be called on every invocation, so when using with model serving will always get the refreshed token def _get_authorized_http_client(self): + import httpx class BearerAuth(httpx.Auth): diff --git a/setup.py b/setup.py index fe5652d4d..b756e6d0d 100644 --- a/setup.py +++ b/setup.py @@ -18,9 +18,9 @@ "yapf", "pycodestyle", "autoflake", "isort", "wheel", "ipython", "ipywidgets", "requests-mock", "pyfakefs", "databricks-connect", "pytest-rerunfailures", "openai", - 'langchain-openai; python_version > "3.7"'], + 'langchain-openai; python_version > "3.7"', "httpx"], "notebook": ["ipython>=8,<9", "ipywidgets>=8,<9"], - "openai": ["openai", 'langchain-openai; python_version > "3.7"']}, + "openai": ["openai", 'langchain-openai; python_version > "3.7"', "httpx"]}, author="Serge Smertin", author_email="serge.smertin@databricks.com", description="Databricks SDK for Python (Beta)", From 41f197b1669ce044bf8f11365e6380d1cec4f6bb Mon Sep 17 00:00:00 2001 From: aravind-segu Date: Tue, 8 Oct 2024 11:56:16 -0700 Subject: [PATCH 15/16] Update Notice to include the new packages --- NOTICE | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/NOTICE b/NOTICE index 2a353a6c8..c05cdd318 100644 --- 
a/NOTICE +++ b/NOTICE @@ -12,8 +12,22 @@ googleapis/google-auth-library-python - https://github.com/googleapis/google-aut Copyright google-auth-library-python authors License - https://github.com/googleapis/google-auth-library-python/blob/main/LICENSE +openai/openai-python - https://github.com/openai/openai-python +Copyright 2024 OpenAI +License - https://github.com/openai/openai-python/blob/main/LICENSE + This software contains code from the following open source projects, licensed under the BSD (3-clause) license. x/oauth2 - https://cs.opensource.google/go/x/oauth2/+/master:oauth2.go Copyright 2014 The Go Authors. All rights reserved. License - https://cs.opensource.google/go/x/oauth2/+/master:LICENSE + +encode/httpx - https://github.com/encode/httpx +Copyright 2019, Encode OSS Ltd +License - https://github.com/encode/httpx/blob/master/LICENSE.md + +This software contains code from the following open source projects, licensed under the MIT license: + +langchain-ai/langchain - https://github.com/langchain-ai/langchain/blob/master/libs/partners/openai +Copyright 2023 LangChain, Inc. 
+License - https://github.com/langchain-ai/langchain/blob/master/libs/partners/openai/LICENSE From 5582d589b03d3f6f1551aa16c5206e1ea80a0239 Mon Sep 17 00:00:00 2001 From: aravind-segu Date: Tue, 8 Oct 2024 12:08:04 -0700 Subject: [PATCH 16/16] Undo file deletions --- .../settings/disable_legacy_features.rst | 60 ++++++++++++++++++ .../catalog/temporary_table_credentials.rst | 36 +++++++++++ .../settings/disable_legacy_access.rst | 61 +++++++++++++++++++ 3 files changed, 157 insertions(+) create mode 100644 docs/account/settings/disable_legacy_features.rst create mode 100644 docs/workspace/catalog/temporary_table_credentials.rst create mode 100644 docs/workspace/settings/disable_legacy_access.rst diff --git a/docs/account/settings/disable_legacy_features.rst b/docs/account/settings/disable_legacy_features.rst new file mode 100644 index 000000000..d7f1db9d3 --- /dev/null +++ b/docs/account/settings/disable_legacy_features.rst @@ -0,0 +1,60 @@ +``a.settings.disable_legacy_features``: Disable Legacy Features +=============================================================== +.. currentmodule:: databricks.sdk.service.settings + +.. py:class:: DisableLegacyFeaturesAPI + + Disable legacy features for new Databricks workspaces. + + For newly created workspaces: 1. Disables the use of DBFS root and mounts. 2. Hive Metastore will not be + provisioned. 3. Disables the use of ‘No-isolation clusters’. 4. Disables Databricks Runtime versions + prior to 13.3LTS. + + .. py:method:: delete( [, etag: Optional[str]]) -> DeleteDisableLegacyFeaturesResponse + + Delete the disable legacy features setting. + + Deletes the disable legacy features setting. + + :param etag: str (optional) + etag used for versioning. The response is at least as fresh as the eTag provided. This is used for + optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting + each other. 
It is strongly suggested that systems make use of the etag in the read -> delete pattern + to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET + request, and pass it with the DELETE request to identify the rule set version you are deleting. + + :returns: :class:`DeleteDisableLegacyFeaturesResponse` + + + .. py:method:: get( [, etag: Optional[str]]) -> DisableLegacyFeatures + + Get the disable legacy features setting. + + Gets the value of the disable legacy features setting. + + :param etag: str (optional) + etag used for versioning. The response is at least as fresh as the eTag provided. This is used for + optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting + each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern + to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET + request, and pass it with the DELETE request to identify the rule set version you are deleting. + + :returns: :class:`DisableLegacyFeatures` + + + .. py:method:: update(allow_missing: bool, setting: DisableLegacyFeatures, field_mask: str) -> DisableLegacyFeatures + + Update the disable legacy features setting. + + Updates the value of the disable legacy features setting. + + :param allow_missing: bool + This should always be set to true for Settings API. Added for AIP compliance. + :param setting: :class:`DisableLegacyFeatures` + :param field_mask: str + Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the + setting payload will be updated. The field mask needs to be supplied as single string. To specify + multiple fields in the field mask, use comma as the separator (no space). 
+ + :returns: :class:`DisableLegacyFeatures` + \ No newline at end of file diff --git a/docs/workspace/catalog/temporary_table_credentials.rst b/docs/workspace/catalog/temporary_table_credentials.rst new file mode 100644 index 000000000..1acd462b7 --- /dev/null +++ b/docs/workspace/catalog/temporary_table_credentials.rst @@ -0,0 +1,36 @@ +``w.temporary_table_credentials``: Temporary Table Credentials +============================================================== +.. currentmodule:: databricks.sdk.service.catalog + +.. py:class:: TemporaryTableCredentialsAPI + + Temporary Table Credentials refer to short-lived, downscoped credentials used to access cloud storage + locations where table data is stored in Databricks. These credentials are employed to provide secure and + time-limited access to data in cloud environments such as AWS, Azure, and Google Cloud. Each cloud provider + has its own type of credentials: AWS uses temporary session tokens via AWS Security Token Service (STS), + Azure utilizes Shared Access Signatures (SAS) for its data storage services, and Google Cloud supports + temporary credentials through OAuth 2.0. Temporary table credentials ensure that data access is limited in + scope and duration, reducing the risk of unauthorized access or misuse. To use the temporary table + credentials API, a metastore admin needs to enable the external_access_enabled flag (off by default) at + the metastore level, and user needs to be granted the EXTERNAL USE SCHEMA permission at the schema level + by catalog admin. Note that EXTERNAL USE SCHEMA is a schema level permission that can only be granted by + catalog admin explicitly and is not included in schema ownership or ALL PRIVILEGES on the schema for + security reason. + + .. py:method:: generate_temporary_table_credentials( [, operation: Optional[TableOperation], table_id: Optional[str]]) -> GenerateTemporaryTableCredentialResponse + + Generate a temporary table credential. 
+ + Get a short-lived credential for directly accessing the table data on cloud storage. The metastore + must have external_access_enabled flag set to true (default false). The caller must have + EXTERNAL_USE_SCHEMA privilege on the parent schema and this privilege can only be granted by catalog + owners. + + :param operation: :class:`TableOperation` (optional) + The operation performed against the table data, either READ or READ_WRITE. If READ_WRITE is + specified, the credentials returned will have write permissions, otherwise, it will be read only. + :param table_id: str (optional) + UUID of the table to read or write. + + :returns: :class:`GenerateTemporaryTableCredentialResponse` + \ No newline at end of file diff --git a/docs/workspace/settings/disable_legacy_access.rst b/docs/workspace/settings/disable_legacy_access.rst new file mode 100644 index 000000000..c8baba3a7 --- /dev/null +++ b/docs/workspace/settings/disable_legacy_access.rst @@ -0,0 +1,61 @@ +``w.settings.disable_legacy_access``: Disable Legacy Access +=========================================================== +.. currentmodule:: databricks.sdk.service.settings + +.. py:class:: DisableLegacyAccessAPI + + 'Disabling legacy access' has the following impacts: + + 1. Disables direct access to the Hive Metastore. However, you can still access Hive Metastore through HMS + Federation. 2. Disables Fallback Mode (docs link) on any External Location access from the workspace. 3. + Alters DBFS path access to use External Location permissions in place of legacy credentials. 4. Enforces + Unity Catalog access on all path based access. + + .. py:method:: delete( [, etag: Optional[str]]) -> DeleteDisableLegacyAccessResponse + + Delete Legacy Access Disablement Status. + + Deletes legacy access disablement status. + + :param etag: str (optional) + etag used for versioning. The response is at least as fresh as the eTag provided. 
This is used for + optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting + each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern + to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET + request, and pass it with the DELETE request to identify the rule set version you are deleting. + + :returns: :class:`DeleteDisableLegacyAccessResponse` + + + .. py:method:: get( [, etag: Optional[str]]) -> DisableLegacyAccess + + Retrieve Legacy Access Disablement Status. + + Retrieves legacy access disablement Status. + + :param etag: str (optional) + etag used for versioning. The response is at least as fresh as the eTag provided. This is used for + optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting + each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern + to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET + request, and pass it with the DELETE request to identify the rule set version you are deleting. + + :returns: :class:`DisableLegacyAccess` + + + .. py:method:: update(allow_missing: bool, setting: DisableLegacyAccess, field_mask: str) -> DisableLegacyAccess + + Update Legacy Access Disablement Status. + + Updates legacy access disablement status. + + :param allow_missing: bool + This should always be set to true for Settings API. Added for AIP compliance. + :param setting: :class:`DisableLegacyAccess` + :param field_mask: str + Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the + setting payload will be updated. The field mask needs to be supplied as single string. To specify + multiple fields in the field mask, use comma as the separator (no space). + + :returns: :class:`DisableLegacyAccess` + \ No newline at end of file