diff --git a/NEXT_CHANGELOG.md b/NEXT_CHANGELOG.md
index 4f19731d9c..9394dd223f 100644
--- a/NEXT_CHANGELOG.md
+++ b/NEXT_CHANGELOG.md
@@ -7,6 +7,7 @@
 ### New Features and Improvements
 
 * Added `expected_workspace_status` to `databricks_mws_workspaces` to support creating workspaces in provisioning status ([#5019](https://github.com/databricks/terraform-provider-databricks/pull/5019))
+* Added `databricks_volume_directory` resource ([#5141](https://github.com/databricks/terraform-provider-databricks/pull/5141))
 
 ### Bug Fixes
 
diff --git a/docs/resources/volume_directory.md b/docs/resources/volume_directory.md
new file mode 100644
index 0000000000..a2aaa5fea5
--- /dev/null
+++ b/docs/resources/volume_directory.md
@@ -0,0 +1,119 @@
+---
+subcategory: "Unity Catalog"
+---
+# databricks_volume_directory Resource
+
+This resource allows creating and managing directories in Unity Catalog [volumes](volume.md) using the Files API.
+
+-> This resource can only be used with a workspace-level provider!
+
+Directories in Unity Catalog volumes provide a way to organize files and data within volumes. The Files API automatically creates parent directories as needed (similar to `mkdir -p`), making it easy to create nested directory structures.
+
+The directory path uses the following format:
+
+```
+/Volumes/<catalog_name>/<schema_name>/<volume_name>/<path>
+```
+
+## Example Usage
+
+### Basic Directory Creation
+
+```hcl
+resource "databricks_catalog" "sandbox" {
+  name    = "sandbox"
+  comment = "this catalog is managed by terraform"
+  properties = {
+    purpose = "testing"
+  }
+}
+
+resource "databricks_schema" "things" {
+  catalog_name = databricks_catalog.sandbox.name
+  name         = "things"
+  comment      = "this schema is managed by terraform"
+  properties = {
+    kind = "various"
+  }
+}
+
+resource "databricks_volume" "this" {
+  name         = "quickstart_volume"
+  catalog_name = databricks_catalog.sandbox.name
+  schema_name  = databricks_schema.things.name
+  volume_type  = "MANAGED"
+  comment      = "this volume is managed by terraform"
+}
+
+resource "databricks_volume_directory" "data" {
+  directory_path = "${databricks_volume.this.volume_path}/data"
+}
+```
+
+### Nested Directory Structure
+
+```hcl
+resource "databricks_volume_directory" "logs" {
+  directory_path = "${databricks_volume.this.volume_path}/logs/2024/01"
+}
+
+resource "databricks_volume_directory" "raw_data" {
+  directory_path = "${databricks_volume.this.volume_path}/raw/input"
+}
+
+resource "databricks_volume_directory" "processed_data" {
+  directory_path = "${databricks_volume.this.volume_path}/processed/output"
+}
+```
+
+### Directory with Files
+
+```hcl
+resource "databricks_volume_directory" "scripts" {
+  directory_path = "${databricks_volume.this.volume_path}/scripts"
+}
+
+resource "databricks_file" "init_script" {
+  source = "/local/path/to/init.sh"
+  path   = "${databricks_volume_directory.scripts.id}/init.sh"
+}
+```
+
+## Argument Reference
+
+The following arguments are required:
+
+* `directory_path` - (Required) The absolute path of the directory in a Unity Catalog volume. Must be in the format `/Volumes/<catalog_name>/<schema_name>/<volume_name>/<path>`. Changing this value forces recreation of the resource.
+
+## Attribute Reference
+
+In addition to all arguments above, the following attributes are exported:
+
+* `id` - The ID of the directory resource, same as `directory_path`.
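+
+As a minimal illustration (the `output` name below is arbitrary and not part of the resource), the exported `id` can be consumed like any other attribute, for example to surface the created path:
+
+```hcl
+output "data_directory_path" {
+  value = databricks_volume_directory.data.id
+}
+```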
+
+## Import
+
+The resource `databricks_volume_directory` can be imported using the directory path:
+
+```hcl
+import {
+  to = databricks_volume_directory.this
+  id = "/Volumes/main/default/my_volume/my_directory"
+}
+```
+
+Alternatively, when using `terraform` version 1.4 or earlier, import using the `terraform import` command:
+
+```bash
+terraform import databricks_volume_directory.this /Volumes/main/default/my_volume/my_directory
+```
+
+## Related Resources
+
+The following resources are often used in the same context:
+
+* [databricks_file](file.md) to manage files in Unity Catalog volumes.
+* [databricks_volume](volume.md) to manage [volumes within Unity Catalog](https://docs.databricks.com/en/connect/unity-catalog/volumes.html).
+* [databricks_schema](schema.md) to manage schemas within Unity Catalog.
+* [databricks_catalog](catalog.md) to manage catalogs within Unity Catalog.
diff --git a/internal/providers/pluginfw/pluginfw_rollout_utils.go b/internal/providers/pluginfw/pluginfw_rollout_utils.go
index 575f2507c4..f18b18cbe4 100644
--- a/internal/providers/pluginfw/pluginfw_rollout_utils.go
+++ b/internal/providers/pluginfw/pluginfw_rollout_utils.go
@@ -49,6 +49,7 @@ var migratedDataSources = []func() datasource.DataSource{
 var pluginFwOnlyResources = append(
 	[]func() resource.Resource{
 		app.ResourceApp,
+		volume.ResourceVolumeDirectory,
 	},
 	autoGeneratedResources...,
 )
diff --git a/internal/providers/pluginfw/products/volume/resource_volume_directory.go b/internal/providers/pluginfw/products/volume/resource_volume_directory.go
new file mode 100644
index 0000000000..0787405af5
--- /dev/null
+++ b/internal/providers/pluginfw/products/volume/resource_volume_directory.go
@@ -0,0 +1,194 @@
+package volume
+
+import (
+	"context"
+
+	"github.com/databricks/databricks-sdk-go/apierr"
+	"github.com/databricks/databricks-sdk-go/service/files"
+	"github.com/databricks/terraform-provider-databricks/common"
+	pluginfwcommon "github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/common"
+	pluginfwcontext "github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/context"
+	"github.com/hashicorp/terraform-plugin-framework/resource"
+	"github.com/hashicorp/terraform-plugin-framework/resource/schema"
+	"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+	"github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
+	"github.com/hashicorp/terraform-plugin-framework/types"
+)
+
+const resourceName = "volume_directory"
+
+var _ resource.ResourceWithConfigure = &VolumeDirectoryResource{}
+
+func ResourceVolumeDirectory() resource.Resource {
+	return &VolumeDirectoryResource{}
+}
+
+type VolumeDirectoryResource struct {
+	Client *common.DatabricksClient
+}
+
+type VolumeDirectoryModel struct {
+	DirectoryPath types.String `tfsdk:"directory_path"`
+	ID            types.String `tfsdk:"id"`
+}
+
+func (r *VolumeDirectoryResource) Metadata(ctx context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) {
+	resp.TypeName = pluginfwcommon.GetDatabricksProductionName(resourceName)
+}
+
+func (r *VolumeDirectoryResource) Schema(ctx context.Context, req resource.SchemaRequest, resp *resource.SchemaResponse) {
+	resp.Schema = schema.Schema{
+		Description: "Manages directories in Unity Catalog volumes using the Files API.",
+		Attributes: map[string]schema.Attribute{
+			"directory_path": schema.StringAttribute{
+				Description: "The absolute path of the directory in a Unity Catalog volume (e.g., `/Volumes/catalog/schema/volume/path/to/dir`).",
+				Required:    true,
+				PlanModifiers: []planmodifier.String{
+					stringplanmodifier.RequiresReplace(),
+				},
+			},
+			"id": schema.StringAttribute{
+				Description: "The ID of the directory resource, same as directory_path.",
+				Computed:    true,
+				PlanModifiers: []planmodifier.String{
+					stringplanmodifier.UseStateForUnknown(),
+				},
+			},
+		},
+	}
+}
+
+func (r *VolumeDirectoryResource) Configure(ctx context.Context, req resource.ConfigureRequest, resp *resource.ConfigureResponse) {
+	if r.Client == nil {
+		r.Client = pluginfwcommon.ConfigureResource(req, resp)
+	}
+}
+
+func (r *VolumeDirectoryResource) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) {
+	ctx = pluginfwcontext.SetUserAgentInResourceContext(ctx, resourceName)
+
+	w, diags := r.Client.GetWorkspaceClient()
+	resp.Diagnostics.Append(diags...)
+	if resp.Diagnostics.HasError() {
+		return
+	}
+
+	var data VolumeDirectoryModel
+	resp.Diagnostics.Append(req.Plan.Get(ctx, &data)...)
+	if resp.Diagnostics.HasError() {
+		return
+	}
+
+	directoryPath := data.DirectoryPath.ValueString()
+
+	// Create the directory using the Files API.
+	err := w.Files.CreateDirectory(ctx, files.CreateDirectoryRequest{
+		DirectoryPath: directoryPath,
+	})
+	if err != nil {
+		resp.Diagnostics.AddError("failed to create directory", err.Error())
+		return
+	}
+
+	// Set the ID to the directory path.
+	data.ID = types.StringValue(directoryPath)
+
+	// Save the state.
+	resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
+}
+
+func (r *VolumeDirectoryResource) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) {
+	ctx = pluginfwcontext.SetUserAgentInResourceContext(ctx, resourceName)
+
+	w, diags := r.Client.GetWorkspaceClient()
+	resp.Diagnostics.Append(diags...)
+	if resp.Diagnostics.HasError() {
+		return
+	}
+
+	var data VolumeDirectoryModel
+	resp.Diagnostics.Append(req.State.Get(ctx, &data)...)
+	if resp.Diagnostics.HasError() {
+		return
+	}
+
+	directoryPath := data.ID.ValueString()
+
+	// Check whether the directory still exists using GetDirectoryMetadata.
+	err := w.Files.GetDirectoryMetadata(ctx, files.GetDirectoryMetadataRequest{
+		DirectoryPath: directoryPath,
+	})
+	if err != nil {
+		if apierr.IsMissing(err) {
+			// Directory no longer exists, remove it from state.
+			resp.State.RemoveResource(ctx)
+			return
+		}
+		resp.Diagnostics.AddError("failed to get directory metadata", err.Error())
+		return
+	}
+
+	// Directory exists, keep the state as is.
+	resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
+}
+
+func (r *VolumeDirectoryResource) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) {
+	// Update is a no-op since directories are immutable once created.
+	// directory_path has the RequiresReplace plan modifier, so any change triggers recreation.
+	ctx = pluginfwcontext.SetUserAgentInResourceContext(ctx, resourceName)
+
+	var data VolumeDirectoryModel
+	resp.Diagnostics.Append(req.Plan.Get(ctx, &data)...)
+	if resp.Diagnostics.HasError() {
+		return
+	}
+
+	// Just update the state with the current plan data.
+	resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
+}
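+
+// One possible hardening, mirroring the ResourceWithConfigure assertion near
+// the top of this file: a compile-time check that the type also satisfies
+// resource.ResourceWithImportState, which the ImportState method below is
+// intended to implement.
+var _ resource.ResourceWithImportState = &VolumeDirectoryResource{}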
+
+func (r *VolumeDirectoryResource) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) {
+	ctx = pluginfwcontext.SetUserAgentInResourceContext(ctx, resourceName)
+
+	w, diags := r.Client.GetWorkspaceClient()
+	resp.Diagnostics.Append(diags...)
+	if resp.Diagnostics.HasError() {
+		return
+	}
+
+	var data VolumeDirectoryModel
+	resp.Diagnostics.Append(req.State.Get(ctx, &data)...)
+	if resp.Diagnostics.HasError() {
+		return
+	}
+
+	directoryPath := data.ID.ValueString()
+
+	// Delete the directory using the Files API.
+	err := w.Files.DeleteDirectory(ctx, files.DeleteDirectoryRequest{
+		DirectoryPath: directoryPath,
+	})
+	if err != nil {
+		// If the directory is already gone, that's okay.
+		if !apierr.IsMissing(err) {
+			resp.Diagnostics.AddError("failed to delete directory", err.Error())
+			return
+		}
+	}
+}
+
+// ImportState implements resource.ResourceWithImportState.
+func (r *VolumeDirectoryResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
+	ctx = pluginfwcontext.SetUserAgentInResourceContext(ctx, resourceName)
+
+	// The import ID is the directory path.
+	directoryPath := req.ID
+
+	// Set the state with the imported directory path.
+	data := VolumeDirectoryModel{
+		DirectoryPath: types.StringValue(directoryPath),
+		ID:            types.StringValue(directoryPath),
+	}
+
+	resp.Diagnostics.Append(resp.State.Set(ctx, &data)...)
+}
diff --git a/internal/providers/pluginfw/products/volume/resource_volume_directory_acc_test.go b/internal/providers/pluginfw/products/volume/resource_volume_directory_acc_test.go
new file mode 100644
index 0000000000..df0f192d19
--- /dev/null
+++ b/internal/providers/pluginfw/products/volume/resource_volume_directory_acc_test.go
@@ -0,0 +1,107 @@
+package volume_test
+
+import (
+	"testing"
+
+	"github.com/databricks/terraform-provider-databricks/internal/acceptance"
+)
+
+const volumeDirectorySetup = `
+resource "databricks_catalog" "sandbox" {
+	name          = "sandbox{var.STICKY_RANDOM}"
+	comment       = "this catalog is managed by terraform"
+	properties = {
+		purpose = "testing"
+	}
+	force_destroy = true
+}
+
+resource "databricks_schema" "things" {
+	catalog_name = databricks_catalog.sandbox.id
+	name         = "things{var.STICKY_RANDOM}"
+	comment      = "this schema is managed by terraform"
+	properties = {
+		kind = "various"
+	}
+}
+
+resource "databricks_volume" "this" {
+	name         = "volume{var.STICKY_RANDOM}"
+	catalog_name = databricks_catalog.sandbox.name
+	schema_name  = databricks_schema.things.name
+	volume_type  = "MANAGED"
+	comment      = "this volume is managed by terraform"
+}
+`
+
+func TestUcAccVolumeDirectoryCreate(t *testing.T) {
+	acceptance.UnityWorkspaceLevel(t, acceptance.Step{
+		Template: volumeDirectorySetup + `
+resource "databricks_volume_directory" "test" {
+	directory_path = "${databricks_volume.this.volume_path}/test_directory"
+}
+`,
+	})
+}
+
+func TestUcAccVolumeDirectoryCreateNested(t *testing.T) {
+	acceptance.UnityWorkspaceLevel(t, acceptance.Step{
+		Template: volumeDirectorySetup + `
+resource "databricks_volume_directory" "parent" {
+	directory_path = "${databricks_volume.this.volume_path}/parent"
+}
+
+resource "databricks_volume_directory" "child" {
+	directory_path = "${databricks_volume_directory.parent.id}/child"
+	depends_on     = [databricks_volume_directory.parent]
+}
+`,
+	})
+}
+
+func TestUcAccVolumeDirectoryMultiple(t *testing.T) {
+	acceptance.UnityWorkspaceLevel(t, acceptance.Step{
+		Template: volumeDirectorySetup + `
+resource "databricks_volume_directory" "dir1" {
+	directory_path = "${databricks_volume.this.volume_path}/dir1"
+}
+
+resource "databricks_volume_directory" "dir2" {
+	directory_path = "${databricks_volume.this.volume_path}/dir2"
+}
+
+resource "databricks_volume_directory" "dir3" {
+	directory_path = "${databricks_volume.this.volume_path}/dir3"
+}
+`,
+	})
+}
"${databricks_volume.this.volume_path}/dir3" +} +`, + }) +} + +func TestUcAccVolumeDirectoryUpdate(t *testing.T) { + acceptance.UnityWorkspaceLevel(t, + acceptance.Step{ + Template: volumeDirectorySetup + ` +resource "databricks_volume_directory" "test" { + directory_path = "${databricks_volume.this.volume_path}/original_dir" +} +`, + }, + acceptance.Step{ + Template: volumeDirectorySetup + ` +resource "databricks_volume_directory" "test" { + directory_path = "${databricks_volume.this.volume_path}/new_dir" +} +`, + }, + ) +} + +func TestUcAccVolumeDirectoryDeepNesting(t *testing.T) { + acceptance.UnityWorkspaceLevel(t, acceptance.Step{ + Template: volumeDirectorySetup + ` +resource "databricks_volume_directory" "deep" { + directory_path = "${databricks_volume.this.volume_path}/level1/level2/level3/level4" +} +`, + }) +} diff --git a/internal/providers/pluginfw/products/volume/resource_volume_directory_test.go b/internal/providers/pluginfw/products/volume/resource_volume_directory_test.go new file mode 100644 index 0000000000..9d647ed36a --- /dev/null +++ b/internal/providers/pluginfw/products/volume/resource_volume_directory_test.go @@ -0,0 +1,70 @@ +package volume + +import ( + "context" + "testing" + + "github.com/databricks/terraform-provider-databricks/common" + "github.com/hashicorp/terraform-plugin-framework/resource" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestVolumeDirectoryResourceMetadata(t *testing.T) { + r := ResourceVolumeDirectory() + require.NotNil(t, r) + + var resp resource.MetadataResponse + r.Metadata(context.Background(), resource.MetadataRequest{ + ProviderTypeName: "databricks", + }, &resp) + + assert.Equal(t, "databricks_volume_directory", resp.TypeName) +} + +func TestVolumeDirectoryResourceSchema(t *testing.T) { + r := ResourceVolumeDirectory() + require.NotNil(t, r) + + var resp resource.SchemaResponse + r.Schema(context.Background(), resource.SchemaRequest{}, &resp) + + assert.NotNil(t, resp.Schema) + assert.Contains(t, resp.Schema.Attributes, "directory_path") + assert.Contains(t, resp.Schema.Attributes, "id") + + // Verify directory_path is required + directoryPathAttr := resp.Schema.Attributes["directory_path"] + assert.True(t, directoryPathAttr.IsRequired()) + assert.False(t, directoryPathAttr.IsOptional()) + assert.False(t, directoryPathAttr.IsComputed()) + + // Verify id is computed + idAttr := resp.Schema.Attributes["id"] + assert.False(t, idAttr.IsRequired()) + assert.False(t, idAttr.IsOptional()) + assert.True(t, idAttr.IsComputed()) +} + +func TestVolumeDirectoryResourceConfigure(t *testing.T) { + r := &VolumeDirectoryResource{} + assert.Nil(t, r.Client) + + // Configure with nil provider data should not panic + var resp resource.ConfigureResponse + r.Configure(context.Background(), resource.ConfigureRequest{}, &resp) + + // Configure with mock client + mockClient := &common.DatabricksClient{} + r.Configure(context.Background(), resource.ConfigureRequest{ + ProviderData: mockClient, + }, &resp) + + assert.NotNil(t, r.Client) + assert.Equal(t, mockClient, r.Client) +} + +func TestVolumeDirectoryResourceImplementsInterface(t *testing.T) { + var _ resource.Resource = &VolumeDirectoryResource{} + var _ resource.ResourceWithConfigure = &VolumeDirectoryResource{} +}