Commit 34bac74

[Internal] Refactored databricks_zones and databricks_spark_versions data sources to Go SDK (#3687)
## Changes

- `LatestSparkVersionOrDefault` now returns 11.3 LTS, as 7.3 LTS is deprecated.
- Refactored `databricks_zones` to the Go SDK.
- Refactored `databricks_spark_versions` to the Go SDK. This refactoring requires one additional change to `resource.go`: a new method, `WorkspaceDataWithCustomizeFunc`, that allows customization of the data source schema.
- Removed the Spark version-related methods, as these have now moved to the Go SDK. This requires migrating `LatestSparkVersionOrDefault` to a Go SDK-backed function, which in turn requires changing existing Terraform provider structs to their Go SDK equivalents (`clusters.SparkVersionsList` to `compute.GetSparkVersionsResponse`, etc.).

## Tests

- [x] `make test` run locally
- [x] covered with integration tests in `internal/acceptance`
- [x] relevant acceptance tests are passing
- [x] using Go SDK
1 parent b138c0b commit 34bac74

18 files changed (+264, -454 lines)
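
The central API change is easiest to see side by side: `LatestSparkVersionOrDefault` moves from a method on the legacy `clusters.ClustersAPI` to a package-level function backed by the Go SDK, which takes an explicit context and workspace client. A minimal before/after sketch of a call site, abridged from the diffs below:

```go
// Before: method on the legacy ClustersAPI, with the provider's own
// clusters.SparkVersionRequest struct.
sparkVersion := clustersAPI.LatestSparkVersionOrDefault(clusters.SparkVersionRequest{
	Latest: true,
})

// After: package-level function backed by the Go SDK, taking an explicit
// context and *databricks.WorkspaceClient, with the SDK's request struct.
sparkVersion := clusters.LatestSparkVersionOrDefault(
	clustersAPI.Context(),
	clustersAPI.WorkspaceClient(),
	compute.SparkVersionRequest{Latest: true},
)
```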

access/resource_sql_permissions.go

Lines changed: 1 addition & 1 deletion
```diff
@@ -272,7 +272,7 @@ func (ta *SqlPermissions) initCluster(ctx context.Context, d *schema.ResourceDat
 }
 
 func (ta *SqlPermissions) getOrCreateCluster(clustersAPI clusters.ClustersAPI) (string, error) {
-	sparkVersion := clustersAPI.LatestSparkVersionOrDefault(clusters.SparkVersionRequest{
+	sparkVersion := clusters.LatestSparkVersionOrDefault(clustersAPI.Context(), clustersAPI.WorkspaceClient(), compute.SparkVersionRequest{
 		Latest: true,
 	})
 	nodeType := clustersAPI.GetSmallestNodeType(compute.NodeTypeRequest{LocalDisk: true})
```

access/resource_sql_permissions_test.go

Lines changed: 10 additions & 10 deletions
```diff
@@ -185,11 +185,11 @@ var createHighConcurrencyCluster = []qa.HTTPFixture{
 		Method:       "GET",
 		ReuseRequest: true,
 		Resource:     "/api/2.0/clusters/spark-versions",
-		Response: clusters.SparkVersionsList{
-			SparkVersions: []clusters.SparkVersion{
+		Response: compute.GetSparkVersionsResponse{
+			Versions: []compute.SparkVersion{
 				{
-					Version:     "7.1.x-cpu-ml-scala2.12",
-					Description: "7.1 ML (includes Apache Spark 3.0.0, Scala 2.12)",
+					Key:  "7.1.x-cpu-ml-scala2.12",
+					Name: "7.1 ML (includes Apache Spark 3.0.0, Scala 2.12)",
 				},
 			},
 		},
@@ -222,7 +222,7 @@ var createHighConcurrencyCluster = []qa.HTTPFixture{
 		AutoterminationMinutes: 10,
 		ClusterName:            "terraform-table-acl",
 		NodeTypeID:             "Standard_F4s",
-		SparkVersion:           "7.3.x-scala2.12",
+		SparkVersion:           "11.3.x-scala2.12",
 		CustomTags: map[string]string{
 			"ResourceClass": "SingleNode",
 		},
@@ -262,11 +262,11 @@ var createSharedCluster = []qa.HTTPFixture{
 		Method:       "GET",
 		ReuseRequest: true,
 		Resource:     "/api/2.0/clusters/spark-versions",
-		Response: clusters.SparkVersionsList{
-			SparkVersions: []clusters.SparkVersion{
+		Response: compute.GetSparkVersionsResponse{
+			Versions: []compute.SparkVersion{
 				{
-					Version:     "7.1.x-cpu-ml-scala2.12",
-					Description: "7.1 ML (includes Apache Spark 3.0.0, Scala 2.12)",
+					Key:  "7.1.x-cpu-ml-scala2.12",
+					Name: "7.1 ML (includes Apache Spark 3.0.0, Scala 2.12)",
 				},
 			},
 		},
@@ -299,7 +299,7 @@ var createSharedCluster = []qa.HTTPFixture{
 		AutoterminationMinutes: 10,
 		ClusterName:            "terraform-table-acl",
 		NodeTypeID:             "Standard_F4s",
-		SparkVersion:           "7.3.x-scala2.12",
+		SparkVersion:           "11.3.x-scala2.12",
 		CustomTags: map[string]string{
 			"ResourceClass": "SingleNode",
 		},
```
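
The fixture changes here (and the matching ones in `catalog/resource_sql_table_test.go` and `clusters/clusters_api_test.go` below) are a mechanical type swap: `clusters.SparkVersionsList` becomes `compute.GetSparkVersionsResponse`, `SparkVersions` becomes `Versions`, and the per-version fields rename from `Version`/`Description` to `Key`/`Name`. A sketch of the new fixture shape (the variable name is illustrative):

```go
import "github.com/databricks/databricks-sdk-go/service/compute"

// Go SDK response type replacing the provider-local clusters.SparkVersionsList.
// Field renames: SparkVersions -> Versions, Version -> Key, Description -> Name.
var sparkVersionsResponse = compute.GetSparkVersionsResponse{
	Versions: []compute.SparkVersion{
		{
			Key:  "7.1.x-cpu-ml-scala2.12",
			Name: "7.1 ML (includes Apache Spark 3.0.0, Scala 2.12)",
		},
	},
}
```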

catalog/resource_sql_table.go

Lines changed: 1 addition & 1 deletion
```diff
@@ -162,7 +162,7 @@ func (ti *SqlTableInfo) initCluster(ctx context.Context, d *schema.ResourceData,
 }
 
 func (ti *SqlTableInfo) getOrCreateCluster(clusterName string, clustersAPI clusters.ClustersAPI) (string, error) {
-	sparkVersion := clustersAPI.LatestSparkVersionOrDefault(clusters.SparkVersionRequest{
+	sparkVersion := clusters.LatestSparkVersionOrDefault(clustersAPI.Context(), clustersAPI.WorkspaceClient(), compute.SparkVersionRequest{
 		Latest: true,
 	})
 	nodeType := clustersAPI.GetSmallestNodeType(compute.NodeTypeRequest{LocalDisk: true})
```

catalog/resource_sql_table_test.go

Lines changed: 6 additions & 6 deletions
```diff
@@ -1248,15 +1248,15 @@ var baseClusterFixture = []qa.HTTPFixture{
 		Method:       "GET",
 		ReuseRequest: true,
 		Resource:     "/api/2.0/clusters/spark-versions",
-		Response: clusters.SparkVersionsList{
-			SparkVersions: []clusters.SparkVersion{
+		Response: compute.GetSparkVersionsResponse{
+			Versions: []compute.SparkVersion{
 				{
-					Version:     "7.1.x-cpu-ml-scala2.12",
-					Description: "7.1 ML (includes Apache Spark 3.0.0, Scala 2.12)",
+					Key:  "7.1.x-cpu-ml-scala2.12",
+					Name: "7.1 ML (includes Apache Spark 3.0.0, Scala 2.12)",
 				},
 				{
-					Version:     "7.3.x-scala2.12",
-					Description: "7.3 LTS (includes Apache Spark 3.0.1, Scala 2.12)",
+					Key:  "7.3.x-scala2.12",
+					Name: "7.3 LTS (includes Apache Spark 3.0.1, Scala 2.12)",
 				},
 			},
 		},
```

clusters/clusters_api.go

Lines changed: 15 additions & 1 deletion
```diff
@@ -9,6 +9,7 @@ import (
 	"sync"
 	"time"
 
+	"github.com/databricks/databricks-sdk-go"
 	"github.com/databricks/databricks-sdk-go/apierr"
 	"github.com/databricks/databricks-sdk-go/service/compute"
 
@@ -574,6 +575,19 @@ type ClustersAPI struct {
 	context context.Context
 }
 
+// Temporary function to be used until all resources are migrated to Go SDK
+// Create a workspace client
+func (a ClustersAPI) WorkspaceClient() *databricks.WorkspaceClient {
+	client, _ := a.client.WorkspaceClient()
+	return client
+}
+
+// Temporary function to be used until all resources are migrated to Go SDK
+// Return a context
+func (a ClustersAPI) Context() context.Context {
+	return a.context
+}
+
 // Create creates a new Spark cluster and waits till it's running
 func (a ClustersAPI) Create(cluster Cluster) (info ClusterInfo, err error) {
 	var ci ClusterID
@@ -903,7 +917,7 @@ func (a ClustersAPI) GetOrCreateRunningCluster(name string, custom ...Cluster) (
 	r := Cluster{
 		NumWorkers:  1,
 		ClusterName: name,
-		SparkVersion: a.LatestSparkVersionOrDefault(SparkVersionRequest{
+		SparkVersion: LatestSparkVersionOrDefault(a.Context(), a.WorkspaceClient(), compute.SparkVersionRequest{
 			Latest:          true,
 			LongTermSupport: true,
 		}),
```
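
The two accessors marked "Temporary function" form the migration bridge: code that still holds only a legacy `ClustersAPI` can reach the Go SDK through them, which is exactly what the updated call sites in `access/` and `catalog/` do. A hedged sketch of the pattern (the helper below is hypothetical, not part of this commit):

```go
import (
	"github.com/databricks/databricks-sdk-go/service/compute"

	"github.com/databricks/terraform-provider-databricks/clusters"
)

// pickLatestLTS is a hypothetical helper showing the bridge pattern: legacy
// code holding a clusters.ClustersAPI reaches the Go SDK-backed selector
// through the temporary Context() and WorkspaceClient() accessors.
func pickLatestLTS(clustersAPI clusters.ClustersAPI) string {
	return clusters.LatestSparkVersionOrDefault(
		clustersAPI.Context(),         // context captured when the ClustersAPI was built
		clustersAPI.WorkspaceClient(), // Go SDK client derived from the legacy client
		compute.SparkVersionRequest{Latest: true, LongTermSupport: true},
	)
}
```

One consequence worth noting: `WorkspaceClient()` discards the error from `a.client.WorkspaceClient()`, so a client that fails to construct surfaces only at the first SDK call.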

clusters/clusters_api_sdk.go

Lines changed: 9 additions & 0 deletions
```diff
@@ -35,3 +35,12 @@ func StartClusterAndGetInfo(ctx context.Context, w *databricks.WorkspaceClient,
 	}
 	return w.Clusters.StartByClusterIdAndWait(ctx, clusterID)
 }
+
+// LatestSparkVersionOrDefault returns Spark version matching the definition, or default in case of error
+func LatestSparkVersionOrDefault(ctx context.Context, w *databricks.WorkspaceClient, svr compute.SparkVersionRequest) string {
+	version, err := w.Clusters.SelectSparkVersion(ctx, svr)
+	if err != nil {
+		return "11.3.x-scala2.12"
+	}
+	return version
+}
```
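
The new helper delegates version selection to the SDK's `w.Clusters.SelectSparkVersion` and pins `11.3.x-scala2.12` as the fallback on any error; this is the behavior change called out in the commit message (the old default was 7.3 LTS). A minimal standalone usage sketch, assuming workspace credentials are configured in the environment:

```go
package main

import (
	"context"
	"fmt"
	"log"

	"github.com/databricks/databricks-sdk-go"
	"github.com/databricks/databricks-sdk-go/service/compute"
	"github.com/databricks/terraform-provider-databricks/clusters"
)

func main() {
	w, err := databricks.NewWorkspaceClient()
	if err != nil {
		log.Fatal(err)
	}
	// Resolves the latest LTS runtime for Scala 2.12; falls back to
	// "11.3.x-scala2.12" if the spark-versions API call fails.
	version := clusters.LatestSparkVersionOrDefault(context.Background(), w, compute.SparkVersionRequest{
		Latest:          true,
		LongTermSupport: true,
		Scala:           "2.12",
	})
	fmt.Println(version)
}
```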

clusters/clusters_api_test.go

Lines changed: 11 additions & 124 deletions
```diff
@@ -6,7 +6,7 @@ import (
 	"fmt"
 
 	// "reflect"
-	"strings"
+
 	"testing"
 
 	"github.com/databricks/databricks-sdk-go/apierr"
@@ -28,23 +28,23 @@ func TestGetOrCreateRunningCluster_AzureAuth(t *testing.T) {
 		Method:       "GET",
 		ReuseRequest: true,
 		Resource:     "/api/2.0/clusters/spark-versions",
-		Response: SparkVersionsList{
-			SparkVersions: []SparkVersion{
+		Response: compute.GetSparkVersionsResponse{
+			Versions: []compute.SparkVersion{
 				{
-					Version:     "7.1.x-cpu-ml-scala2.12",
-					Description: "7.1 ML (includes Apache Spark 3.0.0, Scala 2.12)",
+					Key:  "7.1.x-cpu-ml-scala2.12",
+					Name: "7.1 ML (includes Apache Spark 3.0.0, Scala 2.12)",
 				},
 				{
-					Version:     "apache-spark-2.4.x-scala2.11",
-					Description: "Light 2.4 (includes Apache Spark 2.4, Scala 2.11)",
+					Key:  "apache-spark-2.4.x-scala2.11",
+					Name: "Light 2.4 (includes Apache Spark 2.4, Scala 2.11)",
 				},
 				{
-					Version:     "7.3.x-scala2.12",
-					Description: "7.3 LTS (includes Apache Spark 3.0.1, Scala 2.12)",
+					Key:  "7.3.x-scala2.12",
+					Name: "7.3 LTS (includes Apache Spark 3.0.1, Scala 2.12)",
 				},
 				{
-					Version:     "6.4.x-scala2.11",
-					Description: "6.4 (includes Apache Spark 2.4.5, Scala 2.11)",
+					Key:  "6.4.x-scala2.11",
+					Name: "6.4 (includes Apache Spark 2.4.5, Scala 2.11)",
 				},
 			},
 		},
@@ -1016,119 +1016,6 @@ func TestEventsEmptyResult(t *testing.T) {
 	assert.Equal(t, len(clusterEvents), 0)
 }
 
-func TestListSparkVersions(t *testing.T) {
-	client, server, err := qa.HttpFixtureClient(t, []qa.HTTPFixture{
-		{
-			Method:   "GET",
-			Resource: "/api/2.0/clusters/spark-versions",
-			Response: SparkVersionsList{
-				SparkVersions: []SparkVersion{
-					{
-						Version:     "7.1.x-cpu-ml-scala2.12",
-						Description: "7.1 ML (includes Apache Spark 3.0.0, Scala 2.12)",
-					},
-					{
-						Version:     "apache-spark-2.4.x-scala2.11",
-						Description: "Light 2.4 (includes Apache Spark 2.4, Scala 2.11)",
-					},
-					{
-						Version:     "7.3.x-hls-scala2.12",
-						Description: "7.3 LTS Genomics (includes Apache Spark 3.0.1, Scala 2.12)",
-					},
-					{
-						Version:     "6.4.x-scala2.11",
-						Description: "6.4 (includes Apache Spark 2.4.5, Scala 2.11)",
-					},
-				},
-			},
-		},
-	})
-	defer server.Close()
-	require.NoError(t, err)
-
-	ctx := context.Background()
-	sparkVersions, err := NewClustersAPI(ctx, client).ListSparkVersions()
-	require.NoError(t, err)
-	require.Equal(t, 4, len(sparkVersions.SparkVersions))
-	require.Equal(t, "6.4.x-scala2.11", sparkVersions.SparkVersions[3].Version)
-}
-
-func TestListSparkVersionsWithError(t *testing.T) {
-	client, server, err := qa.HttpFixtureClient(t, []qa.HTTPFixture{
-		{
-			Method:   "GET",
-			Resource: "/api/2.0/clusters/spark-versions",
-			Response: "{garbage....",
-		},
-	})
-	defer server.Close()
-	require.NoError(t, err)
-
-	ctx := context.Background()
-	_, err = NewClustersAPI(ctx, client).ListSparkVersions()
-	require.Error(t, err)
-	require.Equal(t, true, strings.Contains(err.Error(), "invalid character 'g' looking"))
-}
-
-func TestGetLatestSparkVersion(t *testing.T) {
-	versions := SparkVersionsList{
-		SparkVersions: []SparkVersion{
-			{
-				Version:     "7.1.x-cpu-ml-scala2.12",
-				Description: "7.1 ML (includes Apache Spark 3.0.0, Scala 2.12)",
-			},
-			{
-				Version:     "apache-spark-2.4.x-scala2.11",
-				Description: "Light 2.4 (includes Apache Spark 2.4, Scala 2.11)",
-			},
-			{
-				Version:     "7.3.x-hls-scala2.12",
-				Description: "7.3 LTS Genomics (includes Apache Spark 3.0.1, Scala 2.12)",
-			},
-			{
-				Version:     "6.4.x-scala2.11",
-				Description: "6.4 (includes Apache Spark 2.4.5, Scala 2.11)",
-			},
-			{
-				Version:     "7.3.x-scala2.12",
-				Description: "7.3 LTS (includes Apache Spark 3.0.1, Scala 2.12)",
-			},
-			{
-				Version:     "7.4.x-scala2.12",
-				Description: "7.4 (includes Apache Spark 3.0.1, Scala 2.12)",
-			},
-			{
-				Version:     "7.1.x-scala2.12",
-				Description: "7.1 (includes Apache Spark 3.0.0, Scala 2.12)",
-			},
-		},
-	}
-
-	version, err := versions.LatestSparkVersion(SparkVersionRequest{Scala: "2.12", Latest: true})
-	require.NoError(t, err)
-	require.Equal(t, "7.4.x-scala2.12", version)
-
-	version, err = versions.LatestSparkVersion(SparkVersionRequest{Scala: "2.12", LongTermSupport: true, Latest: true})
-	require.NoError(t, err)
-	require.Equal(t, "7.3.x-scala2.12", version)
-
-	version, err = versions.LatestSparkVersion(SparkVersionRequest{Scala: "2.12", Latest: true, SparkVersion: "3.0.0"})
-	require.NoError(t, err)
-	require.Equal(t, "7.1.x-scala2.12", version)
-
-	_, err = versions.LatestSparkVersion(SparkVersionRequest{Scala: "2.12"})
-	require.Error(t, err)
-	require.Equal(t, true, strings.Contains(err.Error(), "query returned multiple results"))
-
-	_, err = versions.LatestSparkVersion(SparkVersionRequest{Scala: "2.12", ML: true, Genomics: true})
-	require.Error(t, err)
-	require.Equal(t, true, strings.Contains(err.Error(), "query returned no results"))
-
-	_, err = versions.LatestSparkVersion(SparkVersionRequest{Scala: "2.12", SparkVersion: "3.10"})
-	require.Error(t, err)
-	require.Equal(t, true, strings.Contains(err.Error(), "query returned no results"))
-}
-
 func TestClusterState_CanReach(t *testing.T) {
 	tests := []struct {
 		from ClusterState
```
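
The deleted tests exercised selection logic that now lives in the Go SDK; the one provider-side behavior left worth pinning is the hard-coded fallback. A hedged sketch of such a test, reusing the `qa.HttpFixtureClient` and garbage-response pattern from the deleted `TestListSparkVersionsWithError` (the test name is new, and obtaining the SDK client via `client.WorkspaceClient()` mirrors the temporary bridge above):

```go
func TestLatestSparkVersionOrDefaultFallback(t *testing.T) {
	client, server, err := qa.HttpFixtureClient(t, []qa.HTTPFixture{
		{
			Method:   "GET",
			Resource: "/api/2.0/clusters/spark-versions",
			Response: "{garbage....", // malformed body forces a decode error
		},
	})
	defer server.Close()
	require.NoError(t, err)

	w, err := client.WorkspaceClient()
	require.NoError(t, err)

	// On any error from the SDK call, the helper returns the pinned default.
	version := LatestSparkVersionOrDefault(context.Background(), w, compute.SparkVersionRequest{Latest: true})
	require.Equal(t, "11.3.x-scala2.12", version)
}
```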
