Commit f6cbfa8

nkvuong and alexott authored
[Fix] Populate partitions when reading databricks_sql_table (#4486)
## Changes

- UC returns `partition_index` in the column struct, so we can calculate the `partitions` value based on that
- Resolves #3980

## Tests

- [x] `make test` run locally
- [x] relevant change in `docs/` folder
- [x] using Go SDK

Co-authored-by: Alex Ott <[email protected]>
1 parent a0c6f07 commit f6cbfa8
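For context on the mechanism the commit message describes: the fix relies on the Unity Catalog tables API, which reports `partition_index` per column. The snippet below is a minimal, hypothetical sketch (not part of the commit) showing how that field surfaces through the Databricks Go SDK; it assumes workspace authentication is already configured in the environment, and `main.foo.bar` is a placeholder table name.

```go
package main

import (
	"context"
	"fmt"

	"github.com/databricks/databricks-sdk-go"
)

func main() {
	ctx := context.Background()
	// Assumes auth (e.g. DATABRICKS_HOST/DATABRICKS_TOKEN) is set in the environment.
	w := databricks.Must(databricks.NewWorkspaceClient())

	// The same SDK call the fixed Read path uses to fetch UC table metadata.
	info, err := w.Tables.GetByFullName(ctx, "main.foo.bar") // placeholder name
	if err != nil {
		panic(err)
	}
	for _, c := range info.Columns {
		// partition_index is only set for partition columns; ForceSendFields
		// records that the field was present even when its value is 0.
		fmt.Printf("%s: partition_index=%d set=%v\n", c.Name, c.PartitionIndex, c.ForceSendFields)
	}
}
```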

File tree

- NEXT_CHANGELOG.md
- catalog/resource_sql_table.go
- catalog/resource_sql_table_test.go
- docs/resources/sql_table.md

4 files changed, +200 -12 lines changed

NEXT_CHANGELOG.md

Lines changed: 1 addition & 0 deletions
```diff
@@ -10,6 +10,7 @@
 
 ### Bug Fixes
 
+* Populate `partitions` when reading `databricks_sql_table` ([#4486](https://github.com/databricks/terraform-provider-databricks/pull/4486)).
 * Fix configuration drift when configuring `databricks_connection` to builtin Hive Metastore ([#4505](https://github.com/databricks/terraform-provider-databricks/pull/4505)).
 * Only allow `authorized_paths` to be updated in the `options` field of `databricks_catalog` ([#4517](https://github.com/databricks/terraform-provider-databricks/pull/4517)).
 * Mark `default_catalog_name` attribute in `databricks_metastore_assignment` as deprecated ([#4522](https://github.com/databricks/terraform-provider-databricks/pull/4522))
```

catalog/resource_sql_table.go

Lines changed: 30 additions & 0 deletions
```diff
@@ -7,6 +7,7 @@ import (
 	"log"
 	"reflect"
 	"slices"
+	"sort"
 	"strings"
 	"time"
 
@@ -668,13 +669,42 @@ func ResourceSqlTable() common.Resource {
 			if err != nil {
 				return err
 			}
+			w, err := c.WorkspaceClient()
+			if err != nil {
+				return err
+			}
+			partitionInfo, err := w.Tables.GetByFullName(ctx, d.Id())
+			if err != nil {
+				return err
+			}
+			partitionIndexes := map[int]string{}
 			for i := range ti.ColumnInfos {
 				c := &ti.ColumnInfos[i]
 				c.Identity, err = reconstructIdentity(c)
 				if err != nil {
 					return err
 				}
 			}
+
+			for i := range partitionInfo.Columns {
+				c := &partitionInfo.Columns[i]
+				if slices.Contains(c.ForceSendFields, "PartitionIndex") {
+					partitionIndexes[c.PartitionIndex] = c.Name
+				}
+			}
+			indexes := []int{}
+			partitions := []string{}
+
+			for index := range partitionIndexes {
+				indexes = append(indexes, index)
+			}
+			sort.Ints(indexes)
+
+			for _, p := range indexes {
+				partitions = append(partitions, partitionIndexes[p])
+			}
+
+			d.Set("partitions", partitions)
 			return common.StructToData(ti, tableSchema, d)
 		},
 		Update: func(ctx context.Context, d *schema.ResourceData, c *common.DatabricksClient) error {
```
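The `slices.Contains(c.ForceSendFields, "PartitionIndex")` guard is the subtle part of this change: `PartitionIndex` is a plain `int`, so after ordinary JSON decoding a first partition column (index `0`) looks identical to a non-partition column (field absent). Below is a self-contained sketch of that ambiguity, using a hypothetical stand-in struct rather than the real SDK type.

```go
package main

import (
	"encoding/json"
	"fmt"
)

// Hypothetical stand-in for catalog.ColumnInfo, reduced to the relevant fields.
type column struct {
	Name           string `json:"name"`
	PartitionIndex int    `json:"partition_index,omitempty"`
}

func main() {
	var first, absent column
	// A real first partition column...
	json.Unmarshal([]byte(`{"name":"name","partition_index":0}`), &first)
	// ...and a column that is not partitioned at all.
	json.Unmarshal([]byte(`{"name":"number"}`), &absent)

	// Both print 0: the decoded struct alone cannot distinguish the two cases,
	// which is why the SDK records explicitly present zero values in
	// ForceSendFields and the Read path checks it before trusting the index.
	fmt.Println(first.PartitionIndex, absent.PartitionIndex)
}
```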

catalog/resource_sql_table_test.go

Lines changed: 154 additions & 0 deletions
Because the Read path now always fetches partition metadata, every existing fixture set gains a stub `GET` for `/api/2.1/unity-catalog/tables/<name>?`, and a new test covers partition ordering.

```diff
@@ -229,6 +229,11 @@ func TestResourceSqlTableCreateTable(t *testing.T) {
 					},
 				},
 			},
+			{
+				Method:   "GET",
+				Resource: "/api/2.1/unity-catalog/tables/main.foo.bar?",
+				Response: catalog.TableInfo{},
+			},
 		}, useExistingClusterForSql...),
 		Create:   true,
 		Resource: ResourceSqlTable(),
@@ -304,6 +309,11 @@ func TestResourceSqlTableCreateTableWithOwner(t *testing.T) {
 					},
 				},
 			},
+			{
+				Method:   "GET",
+				Resource: "/api/2.1/unity-catalog/tables/main.foo.bar?",
+				Response: catalog.TableInfo{},
+			},
 		}, useExistingClusterForSql...),
 		Create:   true,
 		Resource: ResourceSqlTable(),
@@ -426,6 +436,11 @@ func TestResourceSqlTableUpdateTable(t *testing.T) {
 					},
 				},
 			},
+			{
+				Method:   "GET",
+				Resource: "/api/2.1/unity-catalog/tables/main.foo.bar?",
+				Response: catalog.TableInfo{},
+			},
 			{
 				Method:   "POST",
 				Resource: "/api/2.0/clusters/start",
@@ -527,6 +542,11 @@ func TestResourceSqlTableUpdateTableAndOwner(t *testing.T) {
 					Owner: "old group",
 				},
 			},
+			{
+				Method:   "GET",
+				Resource: "/api/2.1/unity-catalog/tables/main.foo.bar?",
+				Response: catalog.TableInfo{},
+			},
 			{
 				Method:   "POST",
 				Resource: "/api/2.0/clusters/start",
@@ -623,6 +643,11 @@ func TestResourceSqlTableUpdateTableClusterKeys(t *testing.T) {
 					},
 				},
 			},
+			{
+				Method:   "GET",
+				Resource: "/api/2.1/unity-catalog/tables/main.foo.bar?",
+				Response: catalog.TableInfo{},
+			},
 			{
 				Method:   "POST",
 				Resource: "/api/2.0/clusters/start",
@@ -713,6 +738,11 @@ func TestResourceSqlTableUpdateView(t *testing.T) {
 					},
 				},
 			},
+			{
+				Method:   "GET",
+				Resource: "/api/2.1/unity-catalog/tables/main.foo.bar?",
+				Response: catalog.TableInfo{},
+			},
 			{
 				Method:   "POST",
 				Resource: "/api/2.0/clusters/start",
@@ -822,6 +852,11 @@ func TestResourceSqlTableUpdateView_Definition(t *testing.T) {
 					ViewDefinition: "SELECT * FROM main.foo.bar2",
 				},
 			},
+			{
+				Method:   "GET",
+				Resource: "/api/2.1/unity-catalog/tables/main.foo.barview?",
+				Response: catalog.TableInfo{},
+			},
 			{
 				Method:   "POST",
 				Resource: "/api/2.0/clusters/start",
@@ -877,6 +912,11 @@ func TestResourceSqlTableUpdateView_Comments(t *testing.T) {
 					Comment: "to be changed (requires new)",
 				},
 			},
+			{
+				Method:   "GET",
+				Resource: "/api/2.1/unity-catalog/tables/main.foo.barview?",
+				Response: catalog.TableInfo{},
+			},
 			{
 				Method:   "POST",
 				Resource: "/api/2.0/clusters/start",
@@ -969,6 +1009,11 @@ func resourceSqlTableUpdateColumnHelper(t *testing.T, testMetaData resourceSqlTa
 					ColumnInfos: testMetaData.oldColumns,
 				},
 			},
+			{
+				Method:   "GET",
+				Resource: "/api/2.1/unity-catalog/tables/main.foo.bar?",
+				Response: catalog.TableInfo{},
+			},
 			{
 				Method:   "POST",
 				Resource: "/api/2.0/clusters/start",
@@ -1357,6 +1402,11 @@ func TestResourceSqlTableCreateTable_ExistingSQLWarehouse(t *testing.T) {
 					},
 				},
 			},
+			{
+				Method:   "GET",
+				Resource: "/api/2.1/unity-catalog/tables/main.foo.bar?",
+				Response: catalog.TableInfo{},
+			},
 		},
 		Create:   true,
 		Resource: ResourceSqlTable(),
@@ -1450,6 +1500,11 @@ func TestResourceSqlTableCreateTableWithIdentityColumn_ExistingSQLWarehouse(t *t
 					},
 				},
 			},
+			{
+				Method:   "GET",
+				Resource: "/api/2.1/unity-catalog/tables/main.foo.bar?",
+				Response: catalog.TableInfo{},
+			},
 		},
 		Create:   true,
 		Resource: ResourceSqlTable(),
@@ -1516,6 +1571,99 @@ func TestResourceSqlTableReadTableWithIdentityColumn_ExistingSQLWarehouse(t *tes
 					},
 				},
 			},
+			{
+				Method:   "GET",
+				Resource: "/api/2.1/unity-catalog/tables/main.foo.bar?",
+				Response: catalog.TableInfo{},
+			},
+		},
+		ID:       "main.foo.bar",
+		Read:     true,
+		Resource: ResourceSqlTable(),
+	}.ApplyAndExpectData(t, map[string]any{
+		"column.0.identity": "always",
+		"column.1.identity": "",
+		"column.2.identity": "default",
+	})
+}
+
+func TestResourceSqlTableReadTableWithPartitionColumn_ExistingSQLWarehouse(t *testing.T) {
+	qa.ResourceFixture{
+		CommandMock: func(commandStr string) common.CommandResults {
+			return common.CommandResults{
+				ResultType: "",
+				Data:       nil,
+			}
+		},
+		HCL: `
+		name               = "bar"
+		catalog_name       = "main"
+		schema_name        = "foo"
+		table_type         = "MANAGED"
+		data_source_format = "DELTA"
+		storage_location   = "abfss://container@account/somepath"
+		warehouse_id       = "existingwarehouse"
+
+		comment = "this table is managed by terraform"
+		`,
+		Fixtures: []qa.HTTPFixture{
+			{
+				Method:   "GET",
+				Resource: "/api/2.1/unity-catalog/tables/main.foo.bar",
+				Response: SqlTableInfo{
+					Name:                  "bar",
+					CatalogName:           "main",
+					SchemaName:            "foo",
+					TableType:             "EXTERNAL",
+					DataSourceFormat:      "DELTA",
+					StorageLocation:       "s3://ext-main/foo/bar1",
+					StorageCredentialName: "somecred",
+					Comment:               "terraform managed",
+					Properties: map[string]string{
+						"one":   "two",
+						"three": "four",
+					},
+					ColumnInfos: []SqlColumnInfo{
+						{
+							Name:     "id",
+							Type:     "bigint",
+							TypeJson: "{\"type\":\"bigint\",\"nullable\":true, \"metadata\":{\"delta.identity.start\":1,\"delta.identity.allowExplicitInsert\":false}}",
+						},
+						{
+							Name:    "name",
+							Type:    "string",
+							Comment: "name of thing",
+						},
+						{
+							Name:     "number",
+							Type:     "bigint",
+							TypeJson: "{\"type\":\"bigint\",\"nullable\":true, \"metadata\":{\"delta.identity.start\":1,\"delta.identity.allowExplicitInsert\":true}}",
+						},
+					},
+				},
+			},
+			{
+				Method:   "GET",
+				Resource: "/api/2.1/unity-catalog/tables/main.foo.bar?",
+				Response: catalog.TableInfo{
+					Columns: []catalog.ColumnInfo{
+						{
+							Name:            "id",
+							PartitionIndex:  1,
+							ForceSendFields: []string{"PartitionIndex"},
+						},
+						{
+							Name:            "name",
+							PartitionIndex:  0,
+							ForceSendFields: []string{"PartitionIndex"},
+						},
+						{
+							Name: "number",
+						},
+					},
+				},
+			},
 		},
 		ID:       "main.foo.bar",
 		Read:     true,
@@ -1524,6 +1672,7 @@ func TestResourceSqlTableReadTableWithIdentityColumn_ExistingSQLWarehouse(t *tes
 		"column.0.identity": "always",
 		"column.1.identity": "",
 		"column.2.identity": "default",
+		"partitions":        []any{"name", "id"},
 	})
 }
 
@@ -1587,6 +1736,11 @@ func TestResourceSqlTableCreateTable_OnlyManagedProperties(t *testing.T) {
 					},
 				},
 			},
+			{
+				Method:   "GET",
+				Resource: "/api/2.1/unity-catalog/tables/main.foo.bar?",
+				Response: catalog.TableInfo{},
+			},
 		},
 		Create:   true,
 		Resource: ResourceSqlTable(),
```
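The new fixture deliberately lists `name` with `PartitionIndex: 0` and `id` with `PartitionIndex: 1`, so the expected value `[]any{"name", "id"}` exercises both the `ForceSendFields` filter and the sort by index. Here is a standalone sketch of that ordering step, with a simplified struct standing in for `catalog.ColumnInfo`:

```go
package main

import (
	"fmt"
	"slices"
	"sort"
)

// Simplified stand-in for catalog.ColumnInfo.
type columnInfo struct {
	Name            string
	PartitionIndex  int
	ForceSendFields []string
}

// partitionsFromColumns mirrors the logic added to the Read path: keep only
// columns whose PartitionIndex was explicitly set, then order them by index.
func partitionsFromColumns(cols []columnInfo) []string {
	byIndex := map[int]string{}
	for _, c := range cols {
		if slices.Contains(c.ForceSendFields, "PartitionIndex") {
			byIndex[c.PartitionIndex] = c.Name
		}
	}
	indexes := make([]int, 0, len(byIndex))
	for i := range byIndex {
		indexes = append(indexes, i)
	}
	sort.Ints(indexes)
	partitions := make([]string, 0, len(indexes))
	for _, i := range indexes {
		partitions = append(partitions, byIndex[i])
	}
	return partitions
}

func main() {
	cols := []columnInfo{
		{Name: "id", PartitionIndex: 1, ForceSendFields: []string{"PartitionIndex"}},
		{Name: "name", PartitionIndex: 0, ForceSendFields: []string{"PartitionIndex"}},
		{Name: "number"}, // no partition_index: filtered out
	}
	fmt.Println(partitionsFromColumns(cols)) // [name id], matching the test's expectation
}
```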

docs/resources/sql_table.md

Lines changed: 15 additions & 12 deletions
````diff
@@ -9,7 +9,7 @@ A `databricks_sql_table` is contained within [databricks_schema](schema.md), and
 
 This resource creates and updates the Unity Catalog table/view by executing the necessary SQL queries on a special auto-terminating cluster it would create for this operation. You could also specify a SQL warehouse or cluster for the queries to be executed on.
 
-~> This resource doesn't handle complex cases of schema evolution due to the limitations of Terraform itself. If you need to implement schema evolution it's recommended to use specialized tools, such as, [Luquibase](https://medium.com/dbsql-sme-engineering/advanced-schema-management-on-databricks-with-liquibase-1900e9f7b9c0) and [Flyway](https://medium.com/dbsql-sme-engineering/databricks-schema-management-with-flyway-527c4a9f5d67).
+~> This resource doesn't handle complex cases of schema evolution due to the limitations of Terraform itself. If you need to implement schema evolution it's recommended to use specialized tools, such as, [Liquibase](https://medium.com/dbsql-sme-engineering/advanced-schema-management-on-databricks-with-liquibase-1900e9f7b9c0) and [Flyway](https://medium.com/dbsql-sme-engineering/databricks-schema-management-with-flyway-527c4a9f5d67).
 
 ## Example Usage
 
@@ -162,15 +162,15 @@ The following arguments are supported:
 * `storage_location` - (Optional) URL of storage location for Table data (required for EXTERNAL Tables). Not supported for `VIEW` or `MANAGED` table_type.
 * `data_source_format` - (Optional) External tables are supported in multiple data source formats. The string constants identifying these formats are `DELTA`, `CSV`, `JSON`, `AVRO`, `PARQUET`, `ORC`, and `TEXT`. Change forces the creation of a new resource. Not supported for `MANAGED` tables or `VIEW`.
 * `view_definition` - (Optional) SQL text defining the view (for `table_type == "VIEW"`). Not supported for `MANAGED` or `EXTERNAL` table_type.
-* `cluster_id` - (Optional) All table CRUD operations must be executed on a running cluster or SQL warehouse. If a cluster_id is specified, it will be used to execute SQL commands to manage this table. If empty, a cluster will be created automatically with the name `terraform-sql-table`.
+* `cluster_id` - (Optional) All table CRUD operations must be executed on a running cluster or SQL warehouse. If a cluster_id is specified, it will be used to execute SQL commands to manage this table. If empty, a cluster will be created automatically with the name `terraform-sql-table`. Conflicts with `warehouse_id`.
 * `warehouse_id` - (Optional) All table CRUD operations must be executed on a running cluster or SQL warehouse. If a `warehouse_id` is specified, that SQL warehouse will be used to execute SQL commands to manage this table. Conflicts with `cluster_id`.
 * `cluster_keys` - (Optional) a subset of columns to liquid cluster the table by. Conflicts with `partitions`.
+* `partitions` - (Optional) a subset of columns to partition the table by. Change forces the creation of a new resource. Conflicts with `cluster_keys`.
 * `storage_credential_name` - (Optional) For EXTERNAL Tables only: the name of storage credential to use. Change forces the creation of a new resource.
-* `owner` - (Optional) User name/group name/sp application_id of the schema owner.
+* `owner` - (Optional) User name/group name/sp application_id of the table owner.
 * `comment` - (Optional) User-supplied free-form text. Changing the comment is not currently supported on the `VIEW` table type.
 * `options` - (Optional) Map of user defined table options. Change forces creation of a new resource.
 * `properties` - (Optional) A map of table properties.
-* `partitions` - (Optional) a subset of columns to partition the table by. Change forces the creation of a new resource. Conflicts with `cluster_keys`. Change forces creation of a new resource.
 
 ### `column` configuration block
 
@@ -191,7 +191,7 @@ In addition to all the arguments above, the following attributes are exported:
 
 ## Import
 
-This resource can be imported by its full name:
+This resource can be imported by its full name.
 
 ```bash
 terraform import databricks_sql_table.this <catalog_name>.<schema_name>.<name>
@@ -200,24 +200,26 @@ terraform import databricks_sql_table.this <catalog_name>.<schema_name>.<name>
 ## Migration from `databricks_table`
 
 The `databricks_table` resource has been deprecated in favor of `databricks_sql_table`. To migrate from `databricks_table` to `databricks_sql_table`:
+
 1. Define a `databricks_sql_table` resource with arguments corresponding to `databricks_table`.
 2. Add a `removed` block to remove the `databricks_table` resource without deleting the existing table by using the `lifecycle` block. If you're using Terraform version below v1.7.0, you will need to use the `terraform state rm` command instead.
 3. Add an `import` block to add the `databricks_sql_table` resource, corresponding to the existing table. If you're using Terraform version below v1.5.0, you will need to use `terraform import` command instead.
 
 For example, suppose we have the following `databricks_table` resource:
+
 ```hcl
 resource "databricks_table" "this" {
-  catalog_name = "catalog"
-  schema_name  = "schema"
-  name         = "table"
-  table_type   = "MANAGED"
+  catalog_name       = "catalog"
+  schema_name        = "schema"
+  name               = "table"
+  table_type         = "MANAGED"
   data_source_format = "DELTA"
   column {
-    name = "col1"
+    name      = "col1"
     type_name = "STRING"
     type_json = "{\"type\":\"STRING\"}"
-    comment = "comment"
-    nullable = true
+    comment   = "comment"
+    nullable  = true
   }
   comment = "comment"
   properties = {
@@ -227,6 +229,7 @@ resource "databricks_table" "this" {
 ```
 
 The migration would look like this:
+
 ```hcl
 # Leave this resource definition as-is.
 resource "databricks_table" "this" { ... }
````
