Skip to content

Commit 0a84349

Browse files
committed
remove and_wait functions
1 parent 71dd495 commit 0a84349

File tree

11 files changed

+0
-582
lines changed

11 files changed

+0
-582
lines changed

databricks/sdk/apps/v2/impl.py

Lines changed: 0 additions & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,6 @@
44

55
import logging
66
from dataclasses import dataclass
7-
from datetime import timedelta
87
from enum import Enum
98
from typing import Any, Dict, Iterator, List, Optional
109

@@ -1094,11 +1093,6 @@ def create(self, *, app: Optional[App] = None, no_compute: Optional[bool] = None
10941093
op_response = self._api.do("POST", "/api/2.0/apps", query=query, body=body, headers=headers)
10951094
return Wait(self.WaitGetAppActive, response=App.from_dict(op_response), name=op_response["name"])
10961095

1097-
def create_and_wait(
1098-
self, *, app: Optional[App] = None, no_compute: Optional[bool] = None, timeout=timedelta(minutes=20)
1099-
) -> App:
1100-
return self.create(app=app, no_compute=no_compute).result(timeout=timeout)
1101-
11021096
def delete(self, name: str) -> App:
11031097
"""Delete an app.
11041098
@@ -1144,11 +1138,6 @@ def deploy(self, app_name: str, *, app_deployment: Optional[AppDeployment] = Non
11441138
deployment_id=op_response["deployment_id"],
11451139
)
11461140

1147-
def deploy_and_wait(
1148-
self, app_name: str, *, app_deployment: Optional[AppDeployment] = None, timeout=timedelta(minutes=20)
1149-
) -> AppDeployment:
1150-
return self.deploy(app_deployment=app_deployment, app_name=app_name).result(timeout=timeout)
1151-
11521141
def get(self, name: str) -> App:
11531142
"""Get an app.
11541143
@@ -1335,9 +1324,6 @@ def start(self, name: str) -> Wait[App]:
13351324
op_response = self._api.do("POST", f"/api/2.0/apps/{name}/start", headers=headers)
13361325
return Wait(self.WaitGetAppActive, response=App.from_dict(op_response), name=op_response["name"])
13371326

1338-
def start_and_wait(self, name: str, timeout=timedelta(minutes=20)) -> App:
1339-
return self.start(name=name).result(timeout=timeout)
1340-
13411327
def stop(self, name: str) -> Wait[App]:
13421328
"""Stop an app.
13431329
@@ -1359,9 +1345,6 @@ def stop(self, name: str) -> Wait[App]:
13591345
op_response = self._api.do("POST", f"/api/2.0/apps/{name}/stop", headers=headers)
13601346
return Wait(self.WaitGetAppStopped, response=App.from_dict(op_response), name=op_response["name"])
13611347

1362-
def stop_and_wait(self, name: str, timeout=timedelta(minutes=20)) -> App:
1363-
return self.stop(name=name).result(timeout=timeout)
1364-
13651348
def update(self, name: str, *, app: Optional[App] = None) -> App:
13661349
"""Update an app.
13671350

databricks/sdk/catalog/v2/impl.py

Lines changed: 0 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,6 @@
44

55
import logging
66
from dataclasses import dataclass
7-
from datetime import timedelta
87
from enum import Enum
98
from typing import Any, Dict, Iterator, List, Optional
109

@@ -12311,9 +12310,6 @@ def create(self, *, table: Optional[OnlineTable] = None) -> Wait[OnlineTable]:
1231112310
self.WaitGetOnlineTableActive, response=OnlineTable.from_dict(op_response), name=op_response["name"]
1231212311
)
1231312312

12314-
def create_and_wait(self, *, table: Optional[OnlineTable] = None, timeout=timedelta(minutes=20)) -> OnlineTable:
12315-
return self.create(table=table).result(timeout=timeout)
12316-
1231712313
def delete(self, name: str):
1231812314
"""Delete an Online Table.
1231912315

databricks/sdk/compute/v2/impl.py

Lines changed: 0 additions & 200 deletions
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,6 @@
44

55
import logging
66
from dataclasses import dataclass
7-
from datetime import timedelta
87
from enum import Enum
98
from typing import Any, Dict, Iterator, List, Optional
109

@@ -10307,76 +10306,6 @@ def create(
1030710306
cluster_id=op_response["cluster_id"],
1030810307
)
1030910308

10310-
def create_and_wait(
10311-
self,
10312-
spark_version: str,
10313-
*,
10314-
apply_policy_default_values: Optional[bool] = None,
10315-
autoscale: Optional[AutoScale] = None,
10316-
autotermination_minutes: Optional[int] = None,
10317-
aws_attributes: Optional[AwsAttributes] = None,
10318-
azure_attributes: Optional[AzureAttributes] = None,
10319-
clone_from: Optional[CloneCluster] = None,
10320-
cluster_log_conf: Optional[ClusterLogConf] = None,
10321-
cluster_name: Optional[str] = None,
10322-
custom_tags: Optional[Dict[str, str]] = None,
10323-
data_security_mode: Optional[DataSecurityMode] = None,
10324-
docker_image: Optional[DockerImage] = None,
10325-
driver_instance_pool_id: Optional[str] = None,
10326-
driver_node_type_id: Optional[str] = None,
10327-
enable_elastic_disk: Optional[bool] = None,
10328-
enable_local_disk_encryption: Optional[bool] = None,
10329-
gcp_attributes: Optional[GcpAttributes] = None,
10330-
init_scripts: Optional[List[InitScriptInfo]] = None,
10331-
instance_pool_id: Optional[str] = None,
10332-
is_single_node: Optional[bool] = None,
10333-
kind: Optional[Kind] = None,
10334-
node_type_id: Optional[str] = None,
10335-
num_workers: Optional[int] = None,
10336-
policy_id: Optional[str] = None,
10337-
runtime_engine: Optional[RuntimeEngine] = None,
10338-
single_user_name: Optional[str] = None,
10339-
spark_conf: Optional[Dict[str, str]] = None,
10340-
spark_env_vars: Optional[Dict[str, str]] = None,
10341-
ssh_public_keys: Optional[List[str]] = None,
10342-
use_ml_runtime: Optional[bool] = None,
10343-
workload_type: Optional[WorkloadType] = None,
10344-
timeout=timedelta(minutes=20),
10345-
) -> ClusterDetails:
10346-
return self.create(
10347-
apply_policy_default_values=apply_policy_default_values,
10348-
autoscale=autoscale,
10349-
autotermination_minutes=autotermination_minutes,
10350-
aws_attributes=aws_attributes,
10351-
azure_attributes=azure_attributes,
10352-
clone_from=clone_from,
10353-
cluster_log_conf=cluster_log_conf,
10354-
cluster_name=cluster_name,
10355-
custom_tags=custom_tags,
10356-
data_security_mode=data_security_mode,
10357-
docker_image=docker_image,
10358-
driver_instance_pool_id=driver_instance_pool_id,
10359-
driver_node_type_id=driver_node_type_id,
10360-
enable_elastic_disk=enable_elastic_disk,
10361-
enable_local_disk_encryption=enable_local_disk_encryption,
10362-
gcp_attributes=gcp_attributes,
10363-
init_scripts=init_scripts,
10364-
instance_pool_id=instance_pool_id,
10365-
is_single_node=is_single_node,
10366-
kind=kind,
10367-
node_type_id=node_type_id,
10368-
num_workers=num_workers,
10369-
policy_id=policy_id,
10370-
runtime_engine=runtime_engine,
10371-
single_user_name=single_user_name,
10372-
spark_conf=spark_conf,
10373-
spark_env_vars=spark_env_vars,
10374-
spark_version=spark_version,
10375-
ssh_public_keys=ssh_public_keys,
10376-
use_ml_runtime=use_ml_runtime,
10377-
workload_type=workload_type,
10378-
).result(timeout=timeout)
10379-
1038010309
def delete(self, cluster_id: str) -> Wait[ClusterDetails]:
1038110310
"""Terminate cluster.
1038210311

@@ -10404,9 +10333,6 @@ def delete(self, cluster_id: str) -> Wait[ClusterDetails]:
1040410333
self.WaitGetClusterTerminated, response=DeleteClusterResponse.from_dict(op_response), cluster_id=cluster_id
1040510334
)
1040610335

10407-
def delete_and_wait(self, cluster_id: str, timeout=timedelta(minutes=20)) -> ClusterDetails:
10408-
return self.delete(cluster_id=cluster_id).result(timeout=timeout)
10409-
1041010336
def edit(
1041110337
self,
1041210338
cluster_id: str,
@@ -10695,76 +10621,6 @@ def edit(
1069510621
self.WaitGetClusterRunning, response=EditClusterResponse.from_dict(op_response), cluster_id=cluster_id
1069610622
)
1069710623

10698-
def edit_and_wait(
10699-
self,
10700-
cluster_id: str,
10701-
spark_version: str,
10702-
*,
10703-
apply_policy_default_values: Optional[bool] = None,
10704-
autoscale: Optional[AutoScale] = None,
10705-
autotermination_minutes: Optional[int] = None,
10706-
aws_attributes: Optional[AwsAttributes] = None,
10707-
azure_attributes: Optional[AzureAttributes] = None,
10708-
cluster_log_conf: Optional[ClusterLogConf] = None,
10709-
cluster_name: Optional[str] = None,
10710-
custom_tags: Optional[Dict[str, str]] = None,
10711-
data_security_mode: Optional[DataSecurityMode] = None,
10712-
docker_image: Optional[DockerImage] = None,
10713-
driver_instance_pool_id: Optional[str] = None,
10714-
driver_node_type_id: Optional[str] = None,
10715-
enable_elastic_disk: Optional[bool] = None,
10716-
enable_local_disk_encryption: Optional[bool] = None,
10717-
gcp_attributes: Optional[GcpAttributes] = None,
10718-
init_scripts: Optional[List[InitScriptInfo]] = None,
10719-
instance_pool_id: Optional[str] = None,
10720-
is_single_node: Optional[bool] = None,
10721-
kind: Optional[Kind] = None,
10722-
node_type_id: Optional[str] = None,
10723-
num_workers: Optional[int] = None,
10724-
policy_id: Optional[str] = None,
10725-
runtime_engine: Optional[RuntimeEngine] = None,
10726-
single_user_name: Optional[str] = None,
10727-
spark_conf: Optional[Dict[str, str]] = None,
10728-
spark_env_vars: Optional[Dict[str, str]] = None,
10729-
ssh_public_keys: Optional[List[str]] = None,
10730-
use_ml_runtime: Optional[bool] = None,
10731-
workload_type: Optional[WorkloadType] = None,
10732-
timeout=timedelta(minutes=20),
10733-
) -> ClusterDetails:
10734-
return self.edit(
10735-
apply_policy_default_values=apply_policy_default_values,
10736-
autoscale=autoscale,
10737-
autotermination_minutes=autotermination_minutes,
10738-
aws_attributes=aws_attributes,
10739-
azure_attributes=azure_attributes,
10740-
cluster_id=cluster_id,
10741-
cluster_log_conf=cluster_log_conf,
10742-
cluster_name=cluster_name,
10743-
custom_tags=custom_tags,
10744-
data_security_mode=data_security_mode,
10745-
docker_image=docker_image,
10746-
driver_instance_pool_id=driver_instance_pool_id,
10747-
driver_node_type_id=driver_node_type_id,
10748-
enable_elastic_disk=enable_elastic_disk,
10749-
enable_local_disk_encryption=enable_local_disk_encryption,
10750-
gcp_attributes=gcp_attributes,
10751-
init_scripts=init_scripts,
10752-
instance_pool_id=instance_pool_id,
10753-
is_single_node=is_single_node,
10754-
kind=kind,
10755-
node_type_id=node_type_id,
10756-
num_workers=num_workers,
10757-
policy_id=policy_id,
10758-
runtime_engine=runtime_engine,
10759-
single_user_name=single_user_name,
10760-
spark_conf=spark_conf,
10761-
spark_env_vars=spark_env_vars,
10762-
spark_version=spark_version,
10763-
ssh_public_keys=ssh_public_keys,
10764-
use_ml_runtime=use_ml_runtime,
10765-
workload_type=workload_type,
10766-
).result(timeout=timeout)
10767-
1076810624
def events(
1076910625
self,
1077010626
cluster_id: str,
@@ -11055,16 +10911,6 @@ def resize(
1105510911
self.WaitGetClusterRunning, response=ResizeClusterResponse.from_dict(op_response), cluster_id=cluster_id
1105610912
)
1105710913

11058-
def resize_and_wait(
11059-
self,
11060-
cluster_id: str,
11061-
*,
11062-
autoscale: Optional[AutoScale] = None,
11063-
num_workers: Optional[int] = None,
11064-
timeout=timedelta(minutes=20),
11065-
) -> ClusterDetails:
11066-
return self.resize(autoscale=autoscale, cluster_id=cluster_id, num_workers=num_workers).result(timeout=timeout)
11067-
1106810914
def restart(self, cluster_id: str, *, restart_user: Optional[str] = None) -> Wait[ClusterDetails]:
1106910915
"""Restart cluster.
1107010916

@@ -11094,11 +10940,6 @@ def restart(self, cluster_id: str, *, restart_user: Optional[str] = None) -> Wai
1109410940
self.WaitGetClusterRunning, response=RestartClusterResponse.from_dict(op_response), cluster_id=cluster_id
1109510941
)
1109610942

11097-
def restart_and_wait(
11098-
self, cluster_id: str, *, restart_user: Optional[str] = None, timeout=timedelta(minutes=20)
11099-
) -> ClusterDetails:
11100-
return self.restart(cluster_id=cluster_id, restart_user=restart_user).result(timeout=timeout)
11101-
1110210943
def set_permissions(
1110310944
self, cluster_id: str, *, access_control_list: Optional[List[ClusterAccessControlRequest]] = None
1110410945
) -> ClusterPermissions:
@@ -11168,9 +11009,6 @@ def start(self, cluster_id: str) -> Wait[ClusterDetails]:
1116811009
self.WaitGetClusterRunning, response=StartClusterResponse.from_dict(op_response), cluster_id=cluster_id
1116911010
)
1117011011

11171-
def start_and_wait(self, cluster_id: str, timeout=timedelta(minutes=20)) -> ClusterDetails:
11172-
return self.start(cluster_id=cluster_id).result(timeout=timeout)
11173-
1117411012
def unpin(self, cluster_id: str):
1117511013
"""Unpin cluster.
1117611014

@@ -11245,16 +11083,6 @@ def update(
1124511083
self.WaitGetClusterRunning, response=UpdateClusterResponse.from_dict(op_response), cluster_id=cluster_id
1124611084
)
1124711085

11248-
def update_and_wait(
11249-
self,
11250-
cluster_id: str,
11251-
update_mask: str,
11252-
*,
11253-
cluster: Optional[UpdateClusterResource] = None,
11254-
timeout=timedelta(minutes=20),
11255-
) -> ClusterDetails:
11256-
return self.update(cluster=cluster, cluster_id=cluster_id, update_mask=update_mask).result(timeout=timeout)
11257-
1125811086
def update_permissions(
1125911087
self, cluster_id: str, *, access_control_list: Optional[List[ClusterAccessControlRequest]] = None
1126011088
) -> ClusterPermissions:
@@ -11325,16 +11153,6 @@ def cancel(
1132511153
context_id=context_id,
1132611154
)
1132711155

11328-
def cancel_and_wait(
11329-
self,
11330-
*,
11331-
cluster_id: Optional[str] = None,
11332-
command_id: Optional[str] = None,
11333-
context_id: Optional[str] = None,
11334-
timeout=timedelta(minutes=20),
11335-
) -> CommandStatusResponse:
11336-
return self.cancel(cluster_id=cluster_id, command_id=command_id, context_id=context_id).result(timeout=timeout)
11337-
1133811156
def command_status(self, cluster_id: str, context_id: str, command_id: str) -> CommandStatusResponse:
1133911157
"""Get command info.
1134011158

@@ -11421,11 +11239,6 @@ def create(
1142111239
context_id=op_response["id"],
1142211240
)
1142311241

11424-
def create_and_wait(
11425-
self, *, cluster_id: Optional[str] = None, language: Optional[Language] = None, timeout=timedelta(minutes=20)
11426-
) -> ContextStatusResponse:
11427-
return self.create(cluster_id=cluster_id, language=language).result(timeout=timeout)
11428-
1142911242
def destroy(self, cluster_id: str, context_id: str):
1143011243
"""Delete an execution context.
1143111244

@@ -11497,19 +11310,6 @@ def execute(
1149711310
context_id=context_id,
1149811311
)
1149911312

11500-
def execute_and_wait(
11501-
self,
11502-
*,
11503-
cluster_id: Optional[str] = None,
11504-
command: Optional[str] = None,
11505-
context_id: Optional[str] = None,
11506-
language: Optional[Language] = None,
11507-
timeout=timedelta(minutes=20),
11508-
) -> CommandStatusResponse:
11509-
return self.execute(cluster_id=cluster_id, command=command, context_id=context_id, language=language).result(
11510-
timeout=timeout
11511-
)
11512-
1151311313

1151411314
class GlobalInitScriptsAPI:
1151511315
"""The Global Init Scripts API enables Workspace administrators to configure global initialization scripts

databricks/sdk/dashboards/v2/impl.py

Lines changed: 0 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,6 @@
44

55
import logging
66
from dataclasses import dataclass
7-
from datetime import timedelta
87
from enum import Enum
98
from typing import Any, Dict, Iterator, List, Optional
109

@@ -2574,13 +2573,6 @@ def create_message(self, space_id: str, conversation_id: str, content: str) -> W
25742573
space_id=space_id,
25752574
)
25762575

2577-
def create_message_and_wait(
2578-
self, space_id: str, conversation_id: str, content: str, timeout=timedelta(minutes=20)
2579-
) -> GenieMessage:
2580-
return self.create_message(content=content, conversation_id=conversation_id, space_id=space_id).result(
2581-
timeout=timeout
2582-
)
2583-
25842576
def execute_message_attachment_query(
25852577
self, space_id: str, conversation_id: str, message_id: str, attachment_id: str
25862578
) -> GenieGetMessageQueryResultResponse:
@@ -2887,9 +2879,6 @@ def start_conversation(self, space_id: str, content: str) -> Wait[GenieMessage]:
28872879
space_id=space_id,
28882880
)
28892881

2890-
def start_conversation_and_wait(self, space_id: str, content: str, timeout=timedelta(minutes=20)) -> GenieMessage:
2891-
return self.start_conversation(content=content, space_id=space_id).result(timeout=timeout)
2892-
28932882

28942883
class LakeviewAPI:
28952884
"""These APIs provide specific management operations for Lakeview dashboards. Generic resource management can

0 commit comments

Comments
 (0)