
Commit b5c4fc3

added a new selector for spark versions and node types (#1127)
1 parent 641a3f2 commit b5c4fc3

File tree

7 files changed: +25 -11 lines

- CHANGELOG.md
- clusters/data_node_type.go
- clusters/data_spark_version.go
- docs/data-sources/node_type.md
- docs/data-sources/spark_version.md
- exporter/exporter_test.go
- workspace/resource_workspace_conf.go

CHANGELOG.md

Lines changed: 2 additions & 0 deletions
@@ -6,6 +6,8 @@
 * Fixed issue at `databricks_mount` where new cluster was created for S3 mount even when `cluster_id` was specified ([#1064](https://github.com/databrickslabs/terraform-provider-databricks/issues/1064)).
 * Allow to disable auto-termination for Databricks SQL endpoints ([#900](https://github.com/databrickslabs/terraform-provider-databricks/pull/900)).
 * Added new `gcp_attributes` to `databricks_cluster` and `databricks_instance_pool` ([#1126](https://github.com/databrickslabs/terraform-provider-databricks/pull/1126)).
+* Added exporter functionality for `databricks_ip_access_list` and `databricks_workspace_conf` ([#1125](https://github.com/databrickslabs/terraform-provider-databricks/pull/1125)).
+* Added `graviton` selector for `databricks_node_type` and `databricks_spark_version` data sources ([#1127](https://github.com/databrickslabs/terraform-provider-databricks/pull/1127)).

 ## 0.4.9

clusters/data_node_type.go

Lines changed: 5 additions & 0 deletions
@@ -20,6 +20,7 @@ type NodeTypeRequest struct {
 	Category              string `json:"category,omitempty"`
 	PhotonWorkerCapable   bool   `json:"photon_worker_capable,omitempty"`
 	PhotonDriverCapable   bool   `json:"photon_driver_capable,omitempty"`
+	Graviton              bool   `json:"graviton,omitempty"`
 	IsIOCacheEnabled      bool   `json:"is_io_cache_enabled,omitempty"`
 	SupportPortForwarding bool   `json:"support_port_forwarding,omitempty"`
 }
@@ -97,6 +98,7 @@ type NodeType struct {
 	NodeInstanceType    *NodeInstanceType `json:"node_instance_type,omitempty"`
 	PhotonWorkerCapable bool              `json:"photon_worker_capable,omitempty"`
 	PhotonDriverCapable bool              `json:"photon_driver_capable,omitempty"`
+	Graviton            bool              `json:"is_graviton,omitempty"`
 }

 func (a ClustersAPI) defaultSmallestNodeType() string {
@@ -157,6 +159,9 @@ func (a ClustersAPI) GetSmallestNodeType(r NodeTypeRequest) string {
 		if r.PhotonWorkerCapable && nt.PhotonWorkerCapable != r.PhotonWorkerCapable {
 			continue
 		}
+		if r.Graviton && nt.Graviton != r.Graviton {
+			continue
+		}
 		return nt.NodeTypeID
 	}
 	return a.defaultSmallestNodeType()
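In Terraform this surfaces as the `graviton` attribute of the `databricks_node_type` data source (see the docs change below). A minimal, illustrative sketch of picking the smallest Graviton node type:

```hcl
data "databricks_node_type" "smallest_arm" {
  # restrict the search to AWS Graviton (ARM) instance types;
  # local_disk is just an illustrative extra filter
  graviton   = true
  local_disk = true
}
```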

clusters/data_spark_version.go

Lines changed: 2 additions & 0 deletions
@@ -36,6 +36,7 @@ type SparkVersionRequest struct {
 	Scala        string `json:"scala,omitempty" tf:"optional,default:2.12"`
 	SparkVersion string `json:"spark_version,omitempty" tf:"optional,default:"`
 	Photon       bool   `json:"photon,omitempty" tf:"optional,default:false"`
+	Graviton     bool   `json:"graviton,omitempty"`
 }

 // ListSparkVersions returns smallest (or default) node type id given the criteria
@@ -79,6 +80,7 @@ func (sparkVersions SparkVersionsList) LatestSparkVersion(req SparkVersionReques
 			(strings.Contains(version.Version, "-hls-") == req.Genomics) &&
 			(strings.Contains(version.Version, "-gpu-") == req.GPU) &&
 			(strings.Contains(version.Version, "-photon-") == req.Photon) &&
+			(strings.Contains(version.Version, "-aarch64-") == req.Graviton) &&
 			(strings.Contains(version.Description, "Beta") == req.Beta))
 		if matches && req.LongTermSupport {
 			matches = (matches && (strings.Contains(version.Description, "LTS") || strings.Contains(version.Version, "-esr-")))
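Graviton runtimes are matched by the `-aarch64-` substring in the runtime version name. Combined with the node type selector above, the two new flags are enough to stand up an ARM cluster; a sketch in which the `databricks_cluster` arguments other than the two data source references are illustrative:

```hcl
data "databricks_spark_version" "arm" {
  # latest runtime built for AWS Graviton (aarch64)
  graviton = true
}

data "databricks_node_type" "arm" {
  graviton = true
}

resource "databricks_cluster" "arm" {
  cluster_name            = "graviton-example"
  spark_version           = data.databricks_spark_version.arm.id
  node_type_id            = data.databricks_node_type.arm.id
  autotermination_minutes = 20
  num_workers             = 1
}
```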

docs/data-sources/node_type.md

Lines changed: 1 addition & 0 deletions
@@ -55,6 +55,7 @@ Data source allows you to pick groups by the following attributes
   * `GPU Accelerated` (AWS, Azure)
 * `photon_worker_capable` - (Optional) Pick only nodes that can run Photon workers. Defaults to *false*.
 * `photon_driver_capable` - (Optional) Pick only nodes that can run Photon driver. Defaults to *false*.
+* `graviton` - (Optional) Pick only nodes with AWS Graviton CPUs. Defaults to *false*.
 * `is_io_cache_enabled` - (Optional) Pick only nodes that have IO Cache. Defaults to *false*.
 * `support_port_forwarding` - (Optional) Pick only nodes that support port forwarding. Defaults to *false*.

docs/data-sources/spark_version.md

Lines changed: 10 additions & 9 deletions
@@ -40,15 +40,16 @@ resource "databricks_cluster" "research" {

 Data source allows you to pick groups by the following attributes:

-* `latest` - (boolean, optional) if we should return only the latest version if there is more than one result. Default to `true`. If set to `false` and multiple versions are matching, throws an error
-* `long_term_support` - (boolean, optional) if we should limit the search only to LTS (long term support) & ESR (extended support) versions. Default to `false`
-* `ml` - (boolean, optional) if we should limit the search only to ML runtimes. Default to `false`
-* `genomics` - (boolean, optional) if we should limit the search only to Genomics (HLS) runtimes. Default to `false`
-* `gpu` - (boolean, optional) if we should limit the search only to runtimes that support GPUs. Default to `false`
-* `photon` - (boolean, optional) if we should limit the search only to Photon runtimes. Default to `false`
-* `beta` - (boolean, optional) if we should limit the search only to runtimes that are in Beta stage. Default to `false`
-* `scala` - (string, optional) if we should limit the search only to runtimes that are based on specific Scala version. Default to `2.12`
-* `spark_version` - (string, optional) if we should limit the search only to runtimes that are based on specific Spark version. Default to empty string. It could be specified as `3`, or `3.0`, or full version, like, `3.0.1`
+* `latest` - (boolean, optional) if we should return only the latest version if there is more than one result. Default to `true`. If set to `false` and multiple versions are matching, throws an error.
+* `long_term_support` - (boolean, optional) if we should limit the search only to LTS (long term support) & ESR (extended support) versions. Default to `false`.
+* `ml` - (boolean, optional) if we should limit the search only to ML runtimes. Default to `false`.
+* `genomics` - (boolean, optional) if we should limit the search only to Genomics (HLS) runtimes. Default to `false`.
+* `gpu` - (boolean, optional) if we should limit the search only to runtimes that support GPUs. Default to `false`.
+* `photon` - (boolean, optional) if we should limit the search only to Photon runtimes. Default to `false`.
+* `graviton` - (boolean, optional) if we should limit the search only to runtimes supporting AWS Graviton CPUs. Default to `false`.
+* `beta` - (boolean, optional) if we should limit the search only to runtimes that are in Beta stage. Default to `false`.
+* `scala` - (string, optional) if we should limit the search only to runtimes that are based on a specific Scala version. Default to `2.12`.
+* `spark_version` - (string, optional) if we should limit the search only to runtimes that are based on a specific Spark version. Default to empty string. It could be specified as `3`, or `3.0`, or a full version like `3.0.1`.

 ## Attribute Reference

exporter/exporter_test.go

Lines changed: 3 additions & 2 deletions
@@ -1123,12 +1123,13 @@ func TestImportingIPAccessLists(t *testing.T) {
 		},
 		{
 			Method:   "GET",
-			Resource: "/api/2.0/workspace-conf?keys=enableIpAccessLists%2CmaxTokenLifetimeDays%2CenableTokensConfig",
+			Resource: "/api/2.0/workspace-conf?keys=enableIpAccessLists%2CenableTokensConfig%2CmaxTokenLifetimeDays",
 			Response: map[string]interface{}{
 				"enableIpAccessLists":  "true",
-				"maxTokenLifetimeDays": nil,
+				"maxTokenLifetimeDays": "90",
 				"enableTokensConfig":   "true",
 			},
+			ReuseRequest: true,
 		},
 	},
 	func(ctx context.Context, client *common.DatabricksClient) {

workspace/resource_workspace_conf.go

Lines changed: 2 additions & 0 deletions
@@ -7,6 +7,7 @@ import (
 	"context"
 	"fmt"
 	"log"
+	"sort"
 	"strconv"
 	"strings"

@@ -38,6 +39,7 @@ func (a WorkspaceConfAPI) Read(conf *map[string]interface{}) error {
 	for k := range *conf {
 		keys = append(keys, k)
 	}
+	sort.Strings(keys)
 	return a.client.Get(a.context, "/workspace-conf", map[string]string{
 		"keys": strings.Join(keys, ","),
 	}, &conf)
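Sorting the keys makes the `GET /api/2.0/workspace-conf?keys=...` request deterministic, which is why the fixture in the exporter test above now expects its keys in alphabetical order and can safely set `ReuseRequest: true`. Resource usage is unchanged; an illustrative example:

```hcl
resource "databricks_workspace_conf" "this" {
  custom_config = {
    # on read, these keys are sorted and sent in a single
    # deterministic GET to /api/2.0/workspace-conf
    "enableIpAccessLists"  = "true"
    "enableTokensConfig"   = "true"
    "maxTokenLifetimeDays" = "90"
  }
}
```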
