Skip to content

Commit 09649c4

Browse files
committed
Remove all fields deprecated during preview, and special-case all long-deprecated fields
1 parent 3948db9 commit 09649c4

File tree

14 files changed: +129 additions, −15 deletions

bundle/internal/annotation/descriptor.go

Lines changed: 0 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -9,9 +9,6 @@ type Descriptor struct {
99
MarkdownExamples string `json:"markdown_examples,omitempty"`
1010
DeprecationMessage string `json:"deprecation_message,omitempty"`
1111
Preview string `json:"x-databricks-preview,omitempty"`
12-
13-
// If true, takes priority over 'DeprecationMessage'
14-
ForceNotDeprecated bool `json:"force_not_deprecated,omitempty"`
1512
}
1613

1714
const Placeholder = "PLACEHOLDER"

bundle/internal/schema/annotations.go

Lines changed: 0 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -138,11 +138,6 @@ func assignAnnotation(s *jsonschema.Schema, a annotation.Descriptor) {
138138
s.Preview = a.Preview
139139
}
140140

141-
if a.ForceNotDeprecated {
142-
s.Deprecated = false
143-
s.DeprecationMessage = ""
144-
}
145-
146141
s.MarkdownDescription = convertLinksToAbsoluteUrl(a.MarkdownDescription)
147142
s.Title = a.Title
148143
s.Enum = a.Enum

bundle/internal/schema/annotations_openapi_overrides.yml

Lines changed: 0 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -353,9 +353,6 @@ github.com/databricks/cli/bundle/config/resources.Pipeline:
353353
"run_as":
354354
"description": |-
355355
PLACEHOLDER
356-
"target":
357-
"force_not_deprecated": |-
358-
true
359356
"trigger":
360357
"deprecation_message": |-
361358
Use continuous instead

bundle/schema/jsonschema.json

Lines changed: 3 additions & 1 deletion
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

experimental/python/codegen/codegen/jsonschema_patch.py

Lines changed: 15 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -7,6 +7,21 @@
77
# doesn't work, openapi schema needs to be updated to be enum
88
"kind",
99
},
10+
# fields that were deprecated a long time ago
11+
"jobs.SparkJarTask": [
12+
# 'jar_uri' is deprecated, install jars through 'libraries' or 'environments' field instead
13+
"jar_uri",
14+
# 'run_as_repl' is deprecated, jars always run as REPL
15+
"run_as_repl",
16+
],
17+
"resources.Pipeline": {
18+
# 'trigger' is deprecated, use 'continuous' or schedule pipeline refresh using job instead
19+
"trigger",
20+
},
21+
"pipelines.PipelineLibrary": [
22+
# 'whl' is deprecated, install libraries through notebooks and %pip command
23+
"whl",
24+
],
1025
}
1126

1227
EXTRA_REQUIRED_FIELDS: dict[str, list[str]] = {

experimental/python/codegen/codegen/main.py

Lines changed: 5 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -77,13 +77,17 @@ def _transitively_mark_deprecated_and_private(
7777
def _remove_deprecated_fields(
7878
schemas: dict[str, openapi.Schema],
7979
) -> dict[str, openapi.Schema]:
80+
"""
81+
Remove fields that were deprecated during Private Preview.
82+
"""
83+
8084
new_schemas = {}
8185

8286
for name, schema in schemas.items():
8387
if schema.type == openapi.SchemaType.OBJECT:
8488
new_properties = {}
8589
for field_name, field in schema.properties.items():
86-
if field.deprecated and not field.keep_deprecated:
90+
if field.deprecated and field.stage == openapi.Stage.PRIVATE:
8791
continue
8892

8993
new_properties[field_name] = field

experimental/python/databricks/bundles/jobs/_models/gcp_attributes.py

Lines changed: 14 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -53,6 +53,13 @@ class GcpAttributes:
5353
for the supported number of local SSDs for each instance type.
5454
"""
5555

56+
use_preemptible_executors: VariableOrOptional[bool] = None
57+
"""
58+
[DEPRECATED] This field determines whether the spark executors will be scheduled to run on preemptible
59+
VMs (when set to true) versus standard compute engine VMs (when set to false; default).
60+
Note: Soon to be deprecated, use the 'availability' field instead.
61+
"""
62+
5663
zone_id: VariableOrOptional[str] = None
5764
"""
5865
Identifier for the availability zone in which the cluster resides.
@@ -108,6 +115,13 @@ class GcpAttributesDict(TypedDict, total=False):
108115
for the supported number of local SSDs for each instance type.
109116
"""
110117

118+
use_preemptible_executors: VariableOrOptional[bool]
119+
"""
120+
[DEPRECATED] This field determines whether the spark executors will be scheduled to run on preemptible
121+
VMs (when set to true) versus standard compute engine VMs (when set to false; default).
122+
Note: Soon to be deprecated, use the 'availability' field instead.
123+
"""
124+
111125
zone_id: VariableOrOptional[str]
112126
"""
113127
Identifier for the availability zone in which the cluster resides.

experimental/python/databricks/bundles/jobs/_models/init_script_info.py

Lines changed: 16 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -8,6 +8,10 @@
88
Adlsgen2Info,
99
Adlsgen2InfoParam,
1010
)
11+
from databricks.bundles.jobs._models.dbfs_storage_info import (
12+
DbfsStorageInfo,
13+
DbfsStorageInfoParam,
14+
)
1115
from databricks.bundles.jobs._models.gcs_storage_info import (
1216
GcsStorageInfo,
1317
GcsStorageInfoParam,
@@ -45,6 +49,12 @@ class InitScriptInfo:
4549
Contains the Azure Data Lake Storage destination path
4650
"""
4751

52+
dbfs: VariableOrOptional[DbfsStorageInfo] = None
53+
"""
54+
[DEPRECATED] destination needs to be provided. e.g.
55+
`{ "dbfs": { "destination" : "dbfs:/home/cluster_log" } }`
56+
"""
57+
4858
file: VariableOrOptional[LocalFileInfo] = None
4959
"""
5060
destination needs to be provided, e.g.
@@ -93,6 +103,12 @@ class InitScriptInfoDict(TypedDict, total=False):
93103
Contains the Azure Data Lake Storage destination path
94104
"""
95105

106+
dbfs: VariableOrOptional[DbfsStorageInfoParam]
107+
"""
108+
[DEPRECATED] destination needs to be provided. e.g.
109+
`{ "dbfs": { "destination" : "dbfs:/home/cluster_log" } }`
110+
"""
111+
96112
file: VariableOrOptional[LocalFileInfoParam]
97113
"""
98114
destination needs to be provided, e.g.

experimental/python/databricks/bundles/jobs/_models/job_email_notifications.py

Lines changed: 13 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,7 @@
33

44
from databricks.bundles.core._transform import _transform
55
from databricks.bundles.core._transform_to_json import _transform_to_json_value
6-
from databricks.bundles.core._variable import VariableOrList
6+
from databricks.bundles.core._variable import VariableOrList, VariableOrOptional
77

88
if TYPE_CHECKING:
99
from typing_extensions import Self
@@ -13,6 +13,12 @@
1313
class JobEmailNotifications:
1414
""""""
1515

16+
no_alert_for_skipped_runs: VariableOrOptional[bool] = None
17+
"""
18+
[DEPRECATED] If true, do not send email to recipients specified in `on_failure` if the run is skipped.
19+
This field is `deprecated`. Please use the `notification_settings.no_alert_for_skipped_runs` field.
20+
"""
21+
1622
on_duration_warning_threshold_exceeded: VariableOrList[str] = field(
1723
default_factory=list
1824
)
@@ -53,6 +59,12 @@ def as_dict(self) -> "JobEmailNotificationsDict":
5359
class JobEmailNotificationsDict(TypedDict, total=False):
5460
""""""
5561

62+
no_alert_for_skipped_runs: VariableOrOptional[bool]
63+
"""
64+
[DEPRECATED] If true, do not send email to recipients specified in `on_failure` if the run is skipped.
65+
This field is `deprecated`. Please use the `notification_settings.no_alert_for_skipped_runs` field.
66+
"""
67+
5668
on_duration_warning_threshold_exceeded: VariableOrList[str]
5769
"""
5870
A list of email addresses to be notified when the duration of a run exceeds the threshold specified for the `RUN_DURATION_SECONDS` metric in the `health` field. If no rule for the `RUN_DURATION_SECONDS` metric is specified in the `health` field for the job, notifications are not sent.

experimental/python/databricks/bundles/jobs/_models/library.py

Lines changed: 10 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -30,6 +30,11 @@ class Library:
3030
Specification of a CRAN library to be installed as part of the library
3131
"""
3232

33+
egg: VariableOrOptional[str] = None
34+
"""
35+
[DEPRECATED] Deprecated. URI of the egg library to install. Installing Python egg files is deprecated and is not supported in Databricks Runtime 14.0 and above.
36+
"""
37+
3338
jar: VariableOrOptional[str] = None
3439
"""
3540
URI of the JAR library to install. Supported URIs include Workspace paths, Unity Catalog Volumes paths, and S3 URIs.
@@ -82,6 +87,11 @@ class LibraryDict(TypedDict, total=False):
8287
Specification of a CRAN library to be installed as part of the library
8388
"""
8489

90+
egg: VariableOrOptional[str]
91+
"""
92+
[DEPRECATED] Deprecated. URI of the egg library to install. Installing Python egg files is deprecated and is not supported in Databricks Runtime 14.0 and above.
93+
"""
94+
8595
jar: VariableOrOptional[str]
8696
"""
8797
URI of the JAR library to install. Supported URIs include Workspace paths, Unity Catalog Volumes paths, and S3 URIs.

0 commit comments

Comments (0)