Skip to content

Commit d5015a6

Browse files
authored
Add support for release channels configuration for SQL Endpoints (#1078)
1 parent 8640e87 commit d5015a6

File tree

4 files changed

+52
-22
lines changed

4 files changed

+52
-22
lines changed

CHANGELOG.md

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -5,6 +5,7 @@
55
* Added support for `tf:suppress_diff` on primitive types ([#984](https://github.com/databrickslabs/terraform-provider-databricks/issues/984)).
66
* Fixed issue arising when destroying `databricks_sql_global_config` with instance profile set ([#1076](https://github.com/databrickslabs/terraform-provider-databricks/issues/1076)).
77
* Added setting of SQL configuration parameters in `databricks_sql_global_config` ([#1080](https://github.com/databrickslabs/terraform-provider-databricks/pull/1080)).
8+
* Added support for release channels in `databricks_sql_endpoint` configuration ([#1078](https://github.com/databrickslabs/terraform-provider-databricks/pull/1078)).
89

910
Updated dependency versions:
1011

docs/resources/sql_endpoint.md

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -37,7 +37,9 @@ The following arguments are supported:
3737
* `tags` - Databricks tags all endpoint resources with these tags.
3838
* `spot_instance_policy` - The spot policy to use for allocating instances to clusters: `COST_OPTIMIZED` or `RELIABILITY_OPTIMIZED`. This field is optional. Default is `COST_OPTIMIZED`.
3939
* `enable_photon` - Whether to enable [Photon](https://databricks.com/product/delta-engine). This field is optional and is enabled by default.
40-
40+
* `channel` block, consisting of the following fields:
41+
* `name` - Name of the Databricks SQL release channel. Possible values are: `CHANNEL_NAME_PREVIEW` and `CHANNEL_NAME_CURRENT`. Default is `CHANNEL_NAME_CURRENT`.
42+
4143
## Attribute Reference
4244

4345
In addition to all arguments above, the following attributes are exported:

sqlanalytics/resource_sql_endpoint.go

Lines changed: 21 additions & 21 deletions
Original file line numberDiff line numberDiff line change
@@ -21,28 +21,34 @@ var (
2121

2222
// SQLEndpoint ...
2323
type SQLEndpoint struct {
24-
ID string `json:"id,omitempty" tf:"computed"`
25-
Name string `json:"name"`
26-
ClusterSize string `json:"cluster_size"`
27-
AutoStopMinutes int `json:"auto_stop_mins,omitempty"`
28-
MinNumClusters int `json:"min_num_clusters,omitempty"`
29-
MaxNumClusters int `json:"max_num_clusters,omitempty"`
30-
NumClusters int `json:"num_clusters,omitempty"`
31-
EnablePhoton bool `json:"enable_photon,omitempty"`
32-
EnableServerlessCompute bool `json:"enable_serverless_compute,omitempty"`
33-
InstanceProfileARN string `json:"instance_profile_arn,omitempty"`
34-
State string `json:"state,omitempty" tf:"computed"`
35-
JdbcURL string `json:"jdbc_url,omitempty" tf:"computed"`
36-
OdbcParams *OdbcParams `json:"odbc_params,omitempty" tf:"computed"`
37-
Tags *Tags `json:"tags,omitempty" tf:"suppress_diff"`
38-
SpotInstancePolicy string `json:"spot_instance_policy,omitempty"`
24+
ID string `json:"id,omitempty" tf:"computed"`
25+
Name string `json:"name"`
26+
ClusterSize string `json:"cluster_size"`
27+
AutoStopMinutes int `json:"auto_stop_mins,omitempty" tf:"default:120"`
28+
MinNumClusters int `json:"min_num_clusters,omitempty" tf:"default:1"`
29+
MaxNumClusters int `json:"max_num_clusters,omitempty" tf:"default:1"`
30+
NumClusters int `json:"num_clusters,omitempty" tf:"default:1"`
31+
EnablePhoton bool `json:"enable_photon,omitempty" tf:"default:true"`
32+
EnableServerlessCompute bool `json:"enable_serverless_compute,omitempty"`
33+
InstanceProfileARN string `json:"instance_profile_arn,omitempty"`
34+
State string `json:"state,omitempty" tf:"computed"`
35+
JdbcURL string `json:"jdbc_url,omitempty" tf:"computed"`
36+
OdbcParams *OdbcParams `json:"odbc_params,omitempty" tf:"computed"`
37+
Tags *Tags `json:"tags,omitempty" tf:"suppress_diff"`
38+
SpotInstancePolicy string `json:"spot_instance_policy,omitempty" tf:"default:COST_OPTIMIZED"`
39+
Channel *ReleaseChannel `json:"channel,omitempty" tf:"suppress_diff"`
3940

4041
// The data source ID is not part of the endpoint API response.
4142
// We manually resolve it by retrieving the list of data sources
4243
// and matching this entity's endpoint ID.
4344
DataSourceID string `json:"data_source_id,omitempty" tf:"computed"`
4445
}
4546

47+
// ReleaseChannel holds information about DBSQL Release Channel
48+
type ReleaseChannel struct {
49+
Name string `json:"name,omitempty" tf:"default:CHANNEL_NAME_CURRENT"`
50+
}
51+
4652
// OdbcParams hold information required to submit SQL commands to the SQL endpoint using ODBC.
4753
type OdbcParams struct {
4854
Hostname string `json:"hostname,omitempty"`
@@ -185,16 +191,10 @@ func (a SQLEndpointsAPI) Delete(endpointID string) error {
185191
func ResourceSQLEndpoint() *schema.Resource {
186192
s := common.StructToSchema(SQLEndpoint{}, func(
187193
m map[string]*schema.Schema) map[string]*schema.Schema {
188-
m["auto_stop_mins"].Default = 120
189194
m["cluster_size"].ValidateDiagFunc = validation.ToDiagFunc(
190195
validation.StringInSlice(ClusterSizes, false))
191-
m["max_num_clusters"].Default = 1
192196
m["max_num_clusters"].ValidateDiagFunc = validation.ToDiagFunc(
193197
validation.IntBetween(1, MaxNumClusters))
194-
m["min_num_clusters"].Default = 1
195-
m["num_clusters"].Default = 1
196-
m["spot_instance_policy"].Default = "COST_OPTIMIZED"
197-
m["enable_photon"].Default = true
198198
return m
199199
})
200200
return common.Resource{

sqlanalytics/resource_sql_endpoint_test.go

Lines changed: 27 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -295,3 +295,30 @@ func TestSQLEnpointAPI(t *testing.T) {
295295
require.NoError(t, err)
296296
})
297297
}
298+
299+
func TestResolveDataSourceIDError(t *testing.T) {
300+
qa.HTTPFixturesApply(t, []qa.HTTPFixture{
301+
{
302+
Method: "GET",
303+
Resource: "/api/2.0/preview/sql/data_sources",
304+
Response: map[string]interface{}{},
305+
Status: 404,
306+
},
307+
}, func(ctx context.Context, client *common.DatabricksClient) {
308+
_, err := NewSQLEndpointsAPI(ctx, client).ResolveDataSourceID("any")
309+
require.Error(t, err)
310+
})
311+
}
312+
313+
func TestResolveDataSourceIDNotFound(t *testing.T) {
314+
qa.HTTPFixturesApply(t, []qa.HTTPFixture{
315+
{
316+
Method: "GET",
317+
Resource: "/api/2.0/preview/sql/data_sources",
318+
Response: []interface{}{},
319+
},
320+
}, func(ctx context.Context, client *common.DatabricksClient) {
321+
_, err := NewSQLEndpointsAPI(ctx, client).ResolveDataSourceID("any")
322+
require.Error(t, err)
323+
})
324+
}

0 commit comments

Comments
 (0)