diff --git a/CHANGELOG.md b/CHANGELOG.md index 32b53502b..1531004d3 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,6 @@ ## [Unreleased] +- Add `elasticstack_kibana_export_saved_objects` data source ([#1293](https://github.com/elastic/terraform-provider-elasticstack/pull/1293)) - Create `elasticstack_kibana_maintenance_window` resource. ([#1224](https://github.com/elastic/terraform-provider-elasticstack/pull/1224)) - Add support for `solution` field in `elasticstack_kibana_space` resource and data source ([#1102](https://github.com/elastic/terraform-provider-elasticstack/issues/1102)) - Add `slo_id` validation to `elasticstack_kibana_slo` ([#1221](https://github.com/elastic/terraform-provider-elasticstack/pull/1221)) diff --git a/docs/data-sources/kibana_export_saved_objects.md b/docs/data-sources/kibana_export_saved_objects.md new file mode 100644 index 000000000..227c5d4eb --- /dev/null +++ b/docs/data-sources/kibana_export_saved_objects.md @@ -0,0 +1,62 @@ + +--- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_kibana_export_saved_objects Data Source - terraform-provider-elasticstack" +subcategory: "Kibana" +description: |- + Export Kibana saved objects. This data source allows you to export saved objects from Kibana and store the result in the Terraform state. +--- + +# elasticstack_kibana_export_saved_objects (Data Source) + +Export Kibana saved objects. This data source allows you to export saved objects from Kibana and store the result in the Terraform state. 
+ +## Example Usage + +```terraform +provider "elasticstack" { + elasticsearch {} + kibana {} +} + +data "elasticstack_kibana_export_saved_objects" "example" { + exclude_export_details = true + include_references_deep = true + objects = [ + { + type = "dashboard", + id = "7c5f07ee-7e41-4d50-ae1f-dfe54cc87209" + } + ] +} + +output "saved_objects" { + value = data.elasticstack_kibana_export_saved_objects.example.exported_objects +} +``` + + +## Schema + +### Required + +- `objects` (Attributes List) List of objects to export. (see [below for nested schema](#nestedatt--objects)) + +### Optional + +- `exclude_export_details` (Boolean) Do not add export details. Defaults to true. +- `include_references_deep` (Boolean) Include references to other saved objects recursively. Defaults to true. +- `space_id` (String) An identifier for the space. If space_id is not provided, the default space is used. + +### Read-Only + +- `exported_objects` (String) The exported objects in NDJSON format. +- `id` (String) Generated ID for the export. + + +### Nested Schema for `objects` + +Required: + +- `id` (String) The ID of the saved object. +- `type` (String) The type of the saved object. 
diff --git a/examples/data-sources/elasticstack_kibana_export_saved_objects/data-source.tf b/examples/data-sources/elasticstack_kibana_export_saved_objects/data-source.tf
new file mode 100644
index 000000000..bc043c4fe
--- /dev/null
+++ b/examples/data-sources/elasticstack_kibana_export_saved_objects/data-source.tf
@@ -0,0 +1,19 @@
+provider "elasticstack" {
+  elasticsearch {}
+  kibana {}
+}
+
+data "elasticstack_kibana_export_saved_objects" "example" {
+  exclude_export_details  = true
+  include_references_deep = true
+  objects = [
+    {
+      type = "dashboard",
+      id   = "7c5f07ee-7e41-4d50-ae1f-dfe54cc87209"
+    }
+  ]
+}
+
+output "saved_objects" {
+  value = data.elasticstack_kibana_export_saved_objects.example.exported_objects
+}
diff --git a/internal/kibana/export_saved_objects/acc_test.go b/internal/kibana/export_saved_objects/acc_test.go
new file mode 100644
index 000000000..779183c0e
--- /dev/null
+++ b/internal/kibana/export_saved_objects/acc_test.go
@@ -0,0 +1,54 @@
+package export_saved_objects_test
+
+import (
+	"testing"
+
+	"github.com/elastic/terraform-provider-elasticstack/internal/acctest"
+	"github.com/hashicorp/terraform-plugin-testing/helper/resource"
+)
+
+// TestAccDataSourceKibanaExportSavedObjects creates a connector and verifies the
+// data source exports it: id and exported_objects are populated, and the
+// explicitly-configured input attributes round-trip into state unchanged.
+func TestAccDataSourceKibanaExportSavedObjects(t *testing.T) {
+	resource.Test(t, resource.TestCase{
+		PreCheck:                 func() { acctest.PreCheck(t) },
+		ProtoV6ProviderFactories: acctest.Providers,
+		Steps: []resource.TestStep{
+			{
+				Config: testAccDataSourceKibanaExportSavedObjectsConfig,
+				Check: resource.ComposeAggregateTestCheckFunc(
+					resource.TestCheckResourceAttrSet("data.elasticstack_kibana_export_saved_objects.test", "id"),
+					resource.TestCheckResourceAttrSet("data.elasticstack_kibana_export_saved_objects.test", "exported_objects"),
+					resource.TestCheckResourceAttr("data.elasticstack_kibana_export_saved_objects.test", "space_id", "default"),
+					resource.TestCheckResourceAttr("data.elasticstack_kibana_export_saved_objects.test", "exclude_export_details", "true"),
+					resource.TestCheckResourceAttr("data.elasticstack_kibana_export_saved_objects.test", "include_references_deep", "true"),
+				),
+			},
+		},
+	})
+}
+
+const testAccDataSourceKibanaExportSavedObjectsConfig = `
+provider "elasticstack" {
+  elasticsearch {}
+  kibana {}
+}
+
+resource "elasticstack_kibana_action_connector" "test" {
+  name              = "test-export-connector"
+  connector_type_id = ".slack"
+  secrets = jsonencode({
+    webhookUrl = "https://example.com"
+  })
+}
+
+data "elasticstack_kibana_export_saved_objects" "test" {
+  space_id                = "default"
+  exclude_export_details  = true
+  include_references_deep = true
+  objects = [
+    {
+      type = "action",
+      id   = elasticstack_kibana_action_connector.test.connector_id
+    }
+  ]
+}
+`
diff --git a/internal/kibana/export_saved_objects/data_source.go b/internal/kibana/export_saved_objects/data_source.go
new file mode 100644
index 000000000..925e5de43
--- /dev/null
+++ b/internal/kibana/export_saved_objects/data_source.go
@@ -0,0 +1,46 @@
+package export_saved_objects
+
+import (
+	"context"
+
+	"github.com/elastic/terraform-provider-elasticstack/internal/clients"
+	"github.com/hashicorp/terraform-plugin-framework/datasource"
+)
+
+// Ensure the implementation satisfies the expected interfaces.
+var (
+	_ datasource.DataSource              = &dataSource{}
+	_ datasource.DataSourceWithConfigure = &dataSource{}
+)
+
+// NewDataSource is a helper function to simplify the provider implementation.
+func NewDataSource() datasource.DataSource {
+	return &dataSource{}
+}
+
+// dataSource is the data source implementation.
+type dataSource struct {
+	client *clients.ApiClient
+}
+
+// Metadata returns the data source type name.
+func (d *dataSource) Metadata(_ context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) {
+	resp.TypeName = req.ProviderTypeName + "_kibana_export_saved_objects"
+}
+
+// Configure adds the provider configured client to the data source.
+func (d *dataSource) Configure(_ context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) {
+	// Add a nil check when handling ProviderData because Terraform
+	// sets that data after it calls the ConfigureProvider RPC.
+	if req.ProviderData == nil {
+		return
+	}
+
+	client, diags := clients.ConvertProviderData(req.ProviderData)
+	resp.Diagnostics.Append(diags...)
+	if resp.Diagnostics.HasError() {
+		return
+	}
+
+	d.client = client
+}
diff --git a/internal/kibana/export_saved_objects/read.go b/internal/kibana/export_saved_objects/read.go
new file mode 100644
index 000000000..07f7c3278
--- /dev/null
+++ b/internal/kibana/export_saved_objects/read.go
@@ -0,0 +1,108 @@
+package export_saved_objects
+
+import (
+	"context"
+	"fmt"
+	"net/http"
+
+	"github.com/elastic/terraform-provider-elasticstack/generated/kbapi"
+	"github.com/elastic/terraform-provider-elasticstack/internal/clients"
+	"github.com/elastic/terraform-provider-elasticstack/internal/utils"
+	"github.com/hashicorp/terraform-plugin-framework/datasource"
+	"github.com/hashicorp/terraform-plugin-framework/path"
+	"github.com/hashicorp/terraform-plugin-framework/types"
+)
+
+// Read refreshes the Terraform state with the latest data.
+func (d *dataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) {
+	var config dataSourceModel
+
+	// Read configuration
+	diags := req.Config.Get(ctx, &config)
+	resp.Diagnostics.Append(diags...)
+	if resp.Diagnostics.HasError() {
+		return
+	}
+
+	// Get Kibana client
+	oapiClient, err := d.client.GetKibanaOapiClient()
+	if err != nil {
+		resp.Diagnostics.AddError("unable to get Kibana client", err.Error())
+		return
+	}
+
+	// Set default space_id if not provided
+	// NOTE(review): spaceId is only used for the composite ID and the state
+	// below; the export request itself is not space-scoped. Confirm the
+	// generated client targets the requested space when space_id != "default".
+	spaceId := "default"
+	if !config.SpaceID.IsNull() && !config.SpaceID.IsUnknown() {
+		spaceId = config.SpaceID.ValueString()
+	}
+
+	objectsList := utils.ListTypeToSlice(ctx, config.Objects, path.Root("objects"), &resp.Diagnostics, func(item objectModel, meta utils.ListMeta) struct {
+		Id   string `json:"id"`
+		Type string `json:"type"`
+	} {
+		return struct {
+			Id   string `json:"id"`
+			Type string `json:"type"`
+		}{
+			Id:   item.ID.ValueString(),
+			Type: item.Type.ValueString(),
+		}
+	})
+	// Abort before calling the API if any list element failed to convert.
+	if resp.Diagnostics.HasError() {
+		return
+	}
+
+	// Set default values for boolean options
+	excludeExportDetails := true
+	if !config.ExcludeExportDetails.IsNull() && !config.ExcludeExportDetails.IsUnknown() {
+		excludeExportDetails = config.ExcludeExportDetails.ValueBool()
+	}
+
+	includeReferencesDeep := true
+	if !config.IncludeReferencesDeep.IsNull() && !config.IncludeReferencesDeep.IsUnknown() {
+		includeReferencesDeep = config.IncludeReferencesDeep.ValueBool()
+	}
+
+	// Create request body
+	body := kbapi.PostSavedObjectsExportJSONRequestBody{
+		ExcludeExportDetails:  &excludeExportDetails,
+		IncludeReferencesDeep: &includeReferencesDeep,
+		Objects:               &objectsList,
+	}
+
+	// Make the API call
+	apiResp, err := oapiClient.API.PostSavedObjectsExportWithResponse(ctx, body)
+	if err != nil {
+		resp.Diagnostics.AddError("API call failed", fmt.Sprintf("Unable to export saved objects: %v", err))
+		return
+	}
+
+	if apiResp.StatusCode() != http.StatusOK {
+		resp.Diagnostics.AddError(
+			"Unexpected API response",
+			fmt.Sprintf("Unexpected status code from server: got HTTP %d, response: %s", apiResp.StatusCode(), string(apiResp.Body)),
+		)
+		return
+	}
+
+	// Create composite ID for state tracking
+	compositeID := &clients.CompositeId{ClusterId: spaceId, ResourceId: "export"}
+
+	// Set the state
+	var state dataSourceModel
+	state.ID = types.StringValue(compositeID.String())
+	state.SpaceID = types.StringValue(spaceId)
+	state.Objects = config.Objects
+	state.ExcludeExportDetails = types.BoolValue(excludeExportDetails)
+	state.IncludeReferencesDeep = types.BoolValue(includeReferencesDeep)
+	state.ExportedObjects = types.StringValue(string(apiResp.Body))
+
+	// Set state
+	diags = resp.State.Set(ctx, &state)
+	resp.Diagnostics.Append(diags...)
+}
diff --git a/internal/kibana/export_saved_objects/schema.go b/internal/kibana/export_saved_objects/schema.go
new file mode 100644
index 000000000..91f02bf13
--- /dev/null
+++ b/internal/kibana/export_saved_objects/schema.go
@@ -0,0 +1,74 @@
+package export_saved_objects
+
+import (
+	"context"
+
+	"github.com/hashicorp/terraform-plugin-framework-validators/listvalidator"
+	"github.com/hashicorp/terraform-plugin-framework/datasource"
+	"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
+	"github.com/hashicorp/terraform-plugin-framework/schema/validator"
+	"github.com/hashicorp/terraform-plugin-framework/types"
+)
+
+// Schema defines the schema for the data source.
+func (d *dataSource) Schema(_ context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) {
+	resp.Schema = schema.Schema{
+		Description: "Export Kibana saved objects. This data source allows you to export saved objects from Kibana and store the result in the Terraform state.",
+		Attributes: map[string]schema.Attribute{
+			"id": schema.StringAttribute{
+				Description: "Generated ID for the export.",
+				Computed:    true,
+			},
+			"space_id": schema.StringAttribute{
+				Description: "An identifier for the space. If space_id is not provided, the default space is used.",
+				Optional:    true,
+			},
+			"objects": schema.ListNestedAttribute{
+				Description: "List of objects to export.",
+				Required:    true,
+				Validators: []validator.List{
+					listvalidator.SizeAtLeast(1),
+				},
+				NestedObject: schema.NestedAttributeObject{
+					Attributes: map[string]schema.Attribute{
+						"type": schema.StringAttribute{
+							Description: "The type of the saved object.",
+							Required:    true,
+						},
+						"id": schema.StringAttribute{
+							Description: "The ID of the saved object.",
+							Required:    true,
+						},
+					},
+				},
+			},
+			"exclude_export_details": schema.BoolAttribute{
+				Description: "Do not add export details. Defaults to true.",
+				Optional:    true,
+			},
+			"include_references_deep": schema.BoolAttribute{
+				Description: "Include references to other saved objects recursively. Defaults to true.",
+				Optional:    true,
+			},
+			"exported_objects": schema.StringAttribute{
+				Description: "The exported objects in NDJSON format.",
+				Computed:    true,
+			},
+		},
+	}
+}
+
+type objectModel struct {
+	Type types.String `tfsdk:"type"`
+	ID   types.String `tfsdk:"id"`
+}
+
+// dataSourceModel maps the data source schema data.
+type dataSourceModel struct {
+	ID                    types.String `tfsdk:"id"`
+	SpaceID               types.String `tfsdk:"space_id"`
+	Objects               types.List   `tfsdk:"objects"`
+	ExcludeExportDetails  types.Bool   `tfsdk:"exclude_export_details"`
+	IncludeReferencesDeep types.Bool   `tfsdk:"include_references_deep"`
+	ExportedObjects       types.String `tfsdk:"exported_objects"`
+}
diff --git a/provider/plugin_framework.go b/provider/plugin_framework.go
index 8487b6bd8..c48c303a0 100644
--- a/provider/plugin_framework.go
+++ b/provider/plugin_framework.go
@@ -23,6 +23,7 @@ import (
 	"github.com/elastic/terraform-provider-elasticstack/internal/fleet/server_host"
 	"github.com/elastic/terraform-provider-elasticstack/internal/kibana/connectors"
 	"github.com/elastic/terraform-provider-elasticstack/internal/kibana/data_view"
+	"github.com/elastic/terraform-provider-elasticstack/internal/kibana/export_saved_objects"
 	"github.com/elastic/terraform-provider-elasticstack/internal/kibana/import_saved_objects"
 	"github.com/elastic/terraform-provider-elasticstack/internal/kibana/maintenance_window"
 	"github.com/elastic/terraform-provider-elasticstack/internal/kibana/spaces"
@@ -89,6 +90,7 @@ func (p *Provider) DataSources(ctx context.Context) []func() datasource.DataSour
 	return []func() datasource.DataSource{
 		indices.NewDataSource,
 		spaces.NewDataSource,
+		export_saved_objects.NewDataSource,
 		enrollment_tokens.NewDataSource,
 		integration_ds.NewDataSource,
 		enrich.NewEnrichPolicyDataSource,