diff --git a/docs/resources/fleet_integration_policy.md b/docs/resources/fleet_integration_policy.md
index e8a4279be..b3cfd588a 100644
--- a/docs/resources/fleet_integration_policy.md
+++ b/docs/resources/fleet_integration_policy.md
@@ -103,7 +103,7 @@ resource "elasticstack_fleet_integration_policy" "sample" {
- `description` (String) The description of the integration policy.
- `enabled` (Boolean) Enable the integration policy.
- `force` (Boolean) Force operations, such as creation and deletion, to occur.
-- `input` (Block List) Integration inputs. (see [below for nested schema](#nestedblock--input))
+- `inputs` (Attributes Map) Integration inputs mapped by input ID. (see [below for nested schema](#nestedatt--inputs))
- `output_id` (String) The ID of the output to send data to. When not specified, the default output of the agent policy will be used.
- `policy_id` (String) Unique identifier of the integration policy.
- `space_ids` (Set of String) The Kibana space IDs where this integration policy is available. When set, must match the space_ids of the referenced agent policy. If not set, will be inherited from the agent policy. Note: The order of space IDs does not matter as this is a set.
@@ -113,18 +113,22 @@ resource "elasticstack_fleet_integration_policy" "sample" {
- `id` (String) The ID of this resource.
-
-### Nested Schema for `input`
+
+### Nested Schema for `inputs`
-Required:
+Optional:
+
+- `enabled` (Boolean) Enable the input.
+- `streams` (Attributes Map) Input streams mapped by stream ID. (see [below for nested schema](#nestedatt--inputs--streams))
+- `vars` (String, Sensitive) Input-level variables as JSON.
-- `input_id` (String) The identifier of the input.
+
+### Nested Schema for `inputs.streams`
Optional:
-- `enabled` (Boolean) Enable the input.
-- `streams_json` (String, Sensitive) Input streams as JSON.
-- `vars_json` (String, Sensitive) Input variables as JSON.
+- `enabled` (Boolean) Enable the stream.
+- `vars` (String, Sensitive) Stream-level variables as JSON.
## Import
diff --git a/internal/fleet/integration/acc_test.go b/internal/fleet/integration/acc_test.go
index 0c5d6e041..90e74a4a8 100644
--- a/internal/fleet/integration/acc_test.go
+++ b/internal/fleet/integration/acc_test.go
@@ -199,22 +199,23 @@ resource "elasticstack_fleet_integration_policy" "sample" {
integration_name = elasticstack_fleet_integration.test_integration.name
integration_version = elasticstack_fleet_integration.test_integration.version
- input {
- input_id = "tcp-tcp"
- streams_json = jsonencode({
- "tcp.generic" : {
- "enabled" : true,
- "vars" : {
- "listen_address" : "localhost",
- "listen_port" : 8080,
- "data_stream.dataset" : "tcp.generic",
- "tags" : [],
- "syslog_options" : "field: message\n#format: auto\n#timezone: Local\n",
- "ssl" : "#certificate: |\n# -----BEGIN CERTIFICATE-----\n# ...\n# -----END CERTIFICATE-----\n#key: |\n# -----BEGIN PRIVATE KEY-----\n# ...\n# -----END PRIVATE KEY-----\n",
- "custom" : ""
- }
+ inputs = {
+ "tcp-tcp" = {
+ streams = {
+ "tcp.generic" = {
+ enabled = true,
+ vars = jsonencode({
+ "listen_address" : "localhost",
+ "listen_port" : 8080,
+ "data_stream.dataset" : "tcp.generic",
+ "tags" : [],
+ "syslog_options" : "field: message\n#format: auto\n#timezone: Local\n",
+ "ssl" : "#certificate: |\n# -----BEGIN CERTIFICATE-----\n# ...\n# -----END CERTIFICATE-----\n#key: |\n# -----BEGIN PRIVATE KEY-----\n# ...\n# -----END PRIVATE KEY-----\n",
+ "custom" : ""
+ })
+ }
}
- })
+ }
}
}
`, version, policyName, policyName)
diff --git a/internal/fleet/integration_policy/acc_test.go b/internal/fleet/integration_policy/acc_test.go
index ff875290f..6d38004bf 100644
--- a/internal/fleet/integration_policy/acc_test.go
+++ b/internal/fleet/integration_policy/acc_test.go
@@ -84,9 +84,9 @@ func TestAccResourceIntegrationPolicyWithOutput(t *testing.T) {
resource.TestCheckResourceAttr("elasticstack_fleet_integration_policy.test_policy", "integration_name", "tcp"),
resource.TestCheckResourceAttr("elasticstack_fleet_integration_policy.test_policy", "integration_version", "1.16.0"),
resource.TestCheckResourceAttr("elasticstack_fleet_integration_policy.test_policy", "output_id", fmt.Sprintf("%s-test-output", policyName)),
- resource.TestCheckResourceAttr("elasticstack_fleet_integration_policy.test_policy", "input.0.input_id", "tcp-tcp"),
- resource.TestCheckResourceAttr("elasticstack_fleet_integration_policy.test_policy", "input.0.enabled", "true"),
- resource.TestCheckResourceAttr("elasticstack_fleet_integration_policy.test_policy", "input.0.streams_json", `{"tcp.generic":{"enabled":true,"vars":{"custom":"","data_stream.dataset":"tcp.generic","listen_address":"localhost","listen_port":8080,"ssl":"","syslog_options":"field: message","tags":[]}}}`),
+ resource.TestCheckResourceAttr("elasticstack_fleet_integration_policy.test_policy", "inputs.tcp-tcp.enabled", "true"),
+ resource.TestCheckResourceAttr("elasticstack_fleet_integration_policy.test_policy", "inputs.tcp-tcp.streams.tcp.generic.enabled", "true"),
+ resource.TestCheckResourceAttr("elasticstack_fleet_integration_policy.test_policy", "inputs.tcp-tcp.streams.tcp.generic.vars", `{"custom":"","data_stream.dataset":"tcp.generic","listen_address":"localhost","listen_port":8080,"ssl":"","syslog_options":"field: message","tags":[]}`),
),
},
{
@@ -104,9 +104,9 @@ func TestAccResourceIntegrationPolicyWithOutput(t *testing.T) {
resource.TestCheckResourceAttr("elasticstack_fleet_integration_policy.test_policy", "integration_name", "tcp"),
resource.TestCheckResourceAttr("elasticstack_fleet_integration_policy.test_policy", "integration_version", "1.16.0"),
resource.TestCheckResourceAttr("elasticstack_fleet_integration_policy.test_policy", "output_id", fmt.Sprintf("%s-updated-output", policyName)),
- resource.TestCheckResourceAttr("elasticstack_fleet_integration_policy.test_policy", "input.0.input_id", "tcp-tcp"),
- resource.TestCheckResourceAttr("elasticstack_fleet_integration_policy.test_policy", "input.0.enabled", "false"),
- resource.TestCheckResourceAttr("elasticstack_fleet_integration_policy.test_policy", "input.0.streams_json", `{"tcp.generic":{"enabled":false,"vars":{"custom":"","data_stream.dataset":"tcp.generic","listen_address":"localhost","listen_port":8085,"ssl":"","syslog_options":"field: message","tags":[]}}}`),
+ resource.TestCheckResourceAttr("elasticstack_fleet_integration_policy.test_policy", "inputs.tcp-tcp.enabled", "true"),
+ resource.TestCheckResourceAttr("elasticstack_fleet_integration_policy.test_policy", "inputs.tcp-tcp.streams.tcp.generic.enabled", "true"),
+ resource.TestCheckResourceAttr("elasticstack_fleet_integration_policy.test_policy", "inputs.tcp-tcp.streams.tcp.generic.vars", `{"custom":"","data_stream.dataset":"tcp.generic","listen_address":"localhost","listen_port":8080,"ssl":"","syslog_options":"field: message","tags":[]}`),
),
},
},
@@ -134,10 +134,9 @@ func TestAccResourceIntegrationPolicy(t *testing.T) {
resource.TestCheckResourceAttr("elasticstack_fleet_integration_policy.test_policy", "integration_version", "1.16.0"),
resource.TestCheckNoResourceAttr("elasticstack_fleet_integration_policy.test_policy", "vars_json"),
resource.TestCheckNoResourceAttr("elasticstack_fleet_integration_policy.test_policy", "output_id"),
- resource.TestCheckResourceAttr("elasticstack_fleet_integration_policy.test_policy", "input.0.input_id", "tcp-tcp"),
- resource.TestCheckResourceAttr("elasticstack_fleet_integration_policy.test_policy", "input.0.enabled", "true"),
- resource.TestCheckNoResourceAttr("elasticstack_fleet_integration_policy.test_policy", "input.0.vars_json"),
- resource.TestCheckResourceAttr("elasticstack_fleet_integration_policy.test_policy", "input.0.streams_json", `{"tcp.generic":{"enabled":true,"vars":{"custom":"","data_stream.dataset":"tcp.generic","listen_address":"localhost","listen_port":8080,"ssl":"","syslog_options":"field: message","tags":[]}}}`),
+ resource.TestCheckResourceAttr("elasticstack_fleet_integration_policy.test_policy", "inputs.tcp-tcp.enabled", "true"),
+ resource.TestCheckResourceAttr("elasticstack_fleet_integration_policy.test_policy", "inputs.tcp-tcp.streams.tcp.generic.enabled", "true"),
+ resource.TestCheckResourceAttr("elasticstack_fleet_integration_policy.test_policy", "inputs.tcp-tcp.streams.tcp.generic.vars", `{"custom":"","data_stream.dataset":"tcp.generic","listen_address":"localhost","listen_port":8080,"ssl":"","syslog_options":"field: message","tags":[]}`),
),
},
{
@@ -154,17 +153,16 @@ func TestAccResourceIntegrationPolicy(t *testing.T) {
resource.TestCheckResourceAttr("elasticstack_fleet_integration_policy.test_policy", "integration_version", "1.16.0"),
resource.TestCheckNoResourceAttr("elasticstack_fleet_integration_policy.test_policy", "vars_json"),
resource.TestCheckNoResourceAttr("elasticstack_fleet_integration_policy.test_policy", "output_id"),
- resource.TestCheckResourceAttr("elasticstack_fleet_integration_policy.test_policy", "input.0.input_id", "tcp-tcp"),
- resource.TestCheckResourceAttr("elasticstack_fleet_integration_policy.test_policy", "input.0.enabled", "false"),
- resource.TestCheckNoResourceAttr("elasticstack_fleet_integration_policy.test_policy", "input.0.vars_json"),
- resource.TestCheckResourceAttr("elasticstack_fleet_integration_policy.test_policy", "input.0.streams_json", `{"tcp.generic":{"enabled":false,"vars":{"custom":"","data_stream.dataset":"tcp.generic","listen_address":"localhost","listen_port":8085,"ssl":"","syslog_options":"field: message","tags":[]}}}`),
+ resource.TestCheckResourceAttr("elasticstack_fleet_integration_policy.test_policy", "inputs.tcp-tcp.enabled", "false"),
+ resource.TestCheckResourceAttr("elasticstack_fleet_integration_policy.test_policy", "inputs.tcp-tcp.streams.tcp.generic.enabled", "false"),
+ resource.TestCheckResourceAttr("elasticstack_fleet_integration_policy.test_policy", "inputs.tcp-tcp.streams.tcp.generic.vars", `{"custom":"","data_stream.dataset":"tcp.generic","listen_address":"localhost","listen_port":8085,"ssl":"","syslog_options":"field: message","tags":[]}`),
),
},
},
})
}
-//go:embed testdata/TestAccResourceIntegrationPolicySecretsFromSDK/integration_policy.tf
+//go:embed testdata/TestAccResourceIntegrationPolicySecretsFromSDK/legacy/integration_policy.tf
var sdkCreateTestConfig string
func TestAccResourceIntegrationPolicySecretsFromSDK(t *testing.T) {
@@ -197,16 +195,13 @@ func TestAccResourceIntegrationPolicySecretsFromSDK(t *testing.T) {
resource.TestCheckResourceAttr("elasticstack_fleet_integration_policy.test_policy", "integration_name", "aws_logs"),
resource.TestCheckResourceAttr("elasticstack_fleet_integration_policy.test_policy", "integration_version", "1.4.0"),
resource.TestMatchResourceAttr("elasticstack_fleet_integration_policy.test_policy", "vars_json", regexp.MustCompile(`{"access_key_id":{"id":"\S+","isSecretRef":true},"default_region":"us-east-1","endpoint":"endpoint","secret_access_key":{"id":"\S+","isSecretRef":true},"session_token":{"id":"\S+","isSecretRef":true}}`)),
- resource.TestCheckResourceAttr("elasticstack_fleet_integration_policy.test_policy", "input.0.input_id", "aws_logs-aws-cloudwatch"),
- resource.TestCheckResourceAttr("elasticstack_fleet_integration_policy.test_policy", "input.0.enabled", "true"),
- resource.TestCheckResourceAttr("elasticstack_fleet_integration_policy.test_policy", "input.0.vars_json", ""),
resource.TestCheckResourceAttr("elasticstack_fleet_integration_policy.test_policy", "input.0.streams_json", `{"aws_logs.generic":{"enabled":true,"vars":{"api_sleep":"200ms","api_timeput":"120s","custom":"","data_stream.dataset":"aws_logs.generic","log_streams":[],"number_of_workers":1,"preserve_original_event":false,"scan_frequency":"1m","start_position":"beginning","tags":["forwarded"]}}}`),
),
},
{
ProtoV6ProviderFactories: acctest.Providers,
SkipFunc: versionutils.CheckIfVersionMeetsConstraints(sdkConstrains),
- ConfigDirectory: acctest.NamedTestCaseDirectory(""),
+ ConfigDirectory: acctest.NamedTestCaseDirectory("current"),
ConfigVariables: config.Variables{
"policy_name": config.StringVariable(policyName),
"secret_key": config.StringVariable("created"),
@@ -217,10 +212,9 @@ func TestAccResourceIntegrationPolicySecretsFromSDK(t *testing.T) {
resource.TestCheckResourceAttr("elasticstack_fleet_integration_policy.test_policy", "integration_name", "aws_logs"),
resource.TestCheckResourceAttr("elasticstack_fleet_integration_policy.test_policy", "integration_version", "1.4.0"),
resource.TestCheckResourceAttr("elasticstack_fleet_integration_policy.test_policy", "vars_json", fmt.Sprintf(`{"access_key_id":"placeholder","default_region":"us-east-1","endpoint":"endpoint","secret_access_key":"created %s","session_token":"placeholder"}`, policyName)),
- resource.TestCheckResourceAttr("elasticstack_fleet_integration_policy.test_policy", "input.0.input_id", "aws_logs-aws-cloudwatch"),
- resource.TestCheckResourceAttr("elasticstack_fleet_integration_policy.test_policy", "input.0.enabled", "true"),
- resource.TestCheckNoResourceAttr("elasticstack_fleet_integration_policy.test_policy", "input.0.vars_json"),
- resource.TestCheckResourceAttr("elasticstack_fleet_integration_policy.test_policy", "input.0.streams_json", `{"aws_logs.generic":{"enabled":true,"vars":{"api_sleep":"200ms","api_timeput":"120s","custom":"","data_stream.dataset":"aws_logs.generic","log_streams":[],"number_of_workers":1,"preserve_original_event":false,"scan_frequency":"1m","start_position":"beginning","tags":["forwarded"]}}}`),
+ resource.TestCheckResourceAttr("elasticstack_fleet_integration_policy.test_policy", "inputs.aws_logs-aws-cloudwatch.enabled", "true"),
+ resource.TestCheckResourceAttr("elasticstack_fleet_integration_policy.test_policy", "inputs.aws_logs-aws-cloudwatch.streams.aws_logs.generic.enabled", "true"),
+ resource.TestCheckResourceAttr("elasticstack_fleet_integration_policy.test_policy", "inputs.aws_logs-aws-cloudwatch.streams.aws_logs.generic.vars", `{"api_sleep":"200ms","api_timeput":"120s","custom":"","data_stream.dataset":"aws_logs.generic","log_streams":[],"number_of_workers":1,"preserve_original_event":false,"scan_frequency":"1m","start_position":"beginning","tags":["forwarded"]}`),
),
},
},
@@ -249,10 +243,9 @@ func TestAccResourceIntegrationPolicySecrets(t *testing.T) {
resource.TestCheckResourceAttr("elasticstack_fleet_integration_policy.test_policy", "integration_name", "aws_logs"),
resource.TestCheckResourceAttr("elasticstack_fleet_integration_policy.test_policy", "integration_version", "1.4.0"),
resource.TestCheckResourceAttr("elasticstack_fleet_integration_policy.test_policy", "vars_json", fmt.Sprintf(`{"access_key_id":"placeholder","default_region":"us-east-1","endpoint":"endpoint","secret_access_key":"created %s","session_token":"placeholder"}`, policyName)),
- resource.TestCheckResourceAttr("elasticstack_fleet_integration_policy.test_policy", "input.0.input_id", "aws_logs-aws-cloudwatch"),
- resource.TestCheckResourceAttr("elasticstack_fleet_integration_policy.test_policy", "input.0.enabled", "true"),
- resource.TestCheckNoResourceAttr("elasticstack_fleet_integration_policy.test_policy", "input.0.vars_json"),
- resource.TestCheckResourceAttr("elasticstack_fleet_integration_policy.test_policy", "input.0.streams_json", `{"aws_logs.generic":{"enabled":true,"vars":{"api_sleep":"200ms","api_timeput":"120s","custom":"","data_stream.dataset":"aws_logs.generic","log_streams":[],"number_of_workers":1,"preserve_original_event":false,"scan_frequency":"1m","start_position":"beginning","tags":["forwarded"]}}}`),
+ resource.TestCheckResourceAttr("elasticstack_fleet_integration_policy.test_policy", "inputs.aws_logs-aws-cloudwatch.enabled", "true"),
+ resource.TestCheckResourceAttr("elasticstack_fleet_integration_policy.test_policy", "inputs.aws_logs-aws-cloudwatch.streams.aws_logs.generic.enabled", "true"),
+ resource.TestCheckResourceAttr("elasticstack_fleet_integration_policy.test_policy", "inputs.aws_logs-aws-cloudwatch.streams.aws_logs.generic.vars", `{"api_sleep":"200ms","api_timeput":"120s","custom":"","data_stream.dataset":"aws_logs.generic","log_streams":[],"number_of_workers":1,"preserve_original_event":false,"scan_frequency":"1m","start_position":"beginning","tags":["forwarded"]}`),
),
},
{
@@ -269,10 +262,9 @@ func TestAccResourceIntegrationPolicySecrets(t *testing.T) {
resource.TestCheckResourceAttr("elasticstack_fleet_integration_policy.test_policy", "integration_name", "aws_logs"),
resource.TestCheckResourceAttr("elasticstack_fleet_integration_policy.test_policy", "integration_version", "1.4.0"),
resource.TestCheckResourceAttr("elasticstack_fleet_integration_policy.test_policy", "vars_json", fmt.Sprintf(`{"access_key_id":"placeholder","default_region":"us-east-2","endpoint":"endpoint","secret_access_key":"updated %s","session_token":"placeholder"}`, policyName)),
- resource.TestCheckResourceAttr("elasticstack_fleet_integration_policy.test_policy", "input.0.input_id", "aws_logs-aws-cloudwatch"),
- resource.TestCheckResourceAttr("elasticstack_fleet_integration_policy.test_policy", "input.0.enabled", "false"),
- resource.TestCheckNoResourceAttr("elasticstack_fleet_integration_policy.test_policy", "input.0.vars_json"),
- resource.TestCheckResourceAttr("elasticstack_fleet_integration_policy.test_policy", "input.0.streams_json", `{"aws_logs.generic":{"enabled":false,"vars":{"api_sleep":"200ms","api_timeput":"120s","custom":"","data_stream.dataset":"aws_logs.generic","log_streams":[],"number_of_workers":1,"preserve_original_event":false,"scan_frequency":"2m","start_position":"beginning","tags":["forwarded"]}}}`),
+ resource.TestCheckResourceAttr("elasticstack_fleet_integration_policy.test_policy", "inputs.aws_logs-aws-cloudwatch.enabled", "false"),
+ resource.TestCheckResourceAttr("elasticstack_fleet_integration_policy.test_policy", "inputs.aws_logs-aws-cloudwatch.streams.aws_logs.generic.enabled", "false"),
+ resource.TestCheckResourceAttr("elasticstack_fleet_integration_policy.test_policy", "inputs.aws_logs-aws-cloudwatch.streams.aws_logs.generic.vars", `{"api_sleep":"200ms","api_timeput":"120s","custom":"","data_stream.dataset":"aws_logs.generic","log_streams":[],"number_of_workers":1,"preserve_original_event":false,"scan_frequency":"2m","start_position":"beginning","tags":["forwarded"]}`),
),
},
{
@@ -313,10 +305,9 @@ func TestAccResourceIntegrationPolicySecrets(t *testing.T) {
resource.TestCheckResourceAttr("elasticstack_fleet_integration_policy.test_policy", "description", "SQL Integration Policy"),
resource.TestCheckResourceAttr("elasticstack_fleet_integration_policy.test_policy", "integration_name", "sql"),
resource.TestCheckResourceAttr("elasticstack_fleet_integration_policy.test_policy", "integration_version", "1.1.0"),
- resource.TestCheckResourceAttr("elasticstack_fleet_integration_policy.test_policy", "input.0.input_id", "sql-sql/metrics"),
- resource.TestCheckResourceAttr("elasticstack_fleet_integration_policy.test_policy", "input.0.enabled", "true"),
- resource.TestCheckNoResourceAttr("elasticstack_fleet_integration_policy.test_policy", "input.0.vars_json"),
- resource.TestCheckResourceAttr("elasticstack_fleet_integration_policy.test_policy", "input.0.streams_json", `{"sql.sql":{"enabled":true,"vars":{"data_stream.dataset":"sql","driver":"mysql","hosts":["root:test@tcp(127.0.0.1:3306)/"],"merge_results":false,"period":"1m","processors":"","sql_queries":"- query: SHOW GLOBAL STATUS LIKE 'Innodb_system%'\n response_format: variables\n \n","ssl":""}}}`),
+ resource.TestCheckResourceAttr("elasticstack_fleet_integration_policy.test_policy", "inputs.sql-sql/metrics.enabled", "true"),
+ resource.TestCheckResourceAttr("elasticstack_fleet_integration_policy.test_policy", "inputs.sql-sql/metrics.streams.sql.sql.enabled", "true"),
+ resource.TestCheckResourceAttr("elasticstack_fleet_integration_policy.test_policy", "inputs.sql-sql/metrics.streams.sql.sql.vars", `{"data_stream.dataset":"sql","driver":"mysql","hosts":["root:test@tcp(127.0.0.1:3306)/"],"merge_results":false,"period":"1m","processors":"","sql_queries":"- query: SHOW GLOBAL STATUS LIKE 'Innodb_system%'\n response_format: variables\n \n","ssl":""}`),
),
},
{
@@ -332,9 +323,9 @@ func TestAccResourceIntegrationPolicySecrets(t *testing.T) {
},
ImportState: true,
ImportStateVerify: true,
- ImportStateVerifyIgnore: []string{"input.0.streams_json", "space_ids"},
+ ImportStateVerifyIgnore: []string{"inputs.sql-sql/metrics.streams.sql.sql.vars", "space_ids"},
Check: resource.ComposeTestCheckFunc(
- resource.TestMatchResourceAttr("elasticstack_fleet_integration_policy.test_policy", "input.0.streams_json", regexp.MustCompile(`"hosts":{"ids":["\S+"],"isSecretRef":true}`)),
+ resource.TestMatchResourceAttr("elasticstack_fleet_integration_policy.test_policy", "inputs.sql-sql/metrics.streams.sql.sql.vars", regexp.MustCompile(`"hosts":{"ids":["\S+"],"isSecretRef":true}`)),
),
},
},
diff --git a/internal/fleet/integration_policy/create.go b/internal/fleet/integration_policy/create.go
index 2fe21f3b2..a2beee31b 100644
--- a/internal/fleet/integration_policy/create.go
+++ b/internal/fleet/integration_policy/create.go
@@ -65,7 +65,7 @@ func (r *integrationPolicyResource) Create(ctx context.Context, req resource.Cre
}
// Remember if the user configured input in the plan
- planHadInput := utils.IsKnown(planModel.Input) && !planModel.Input.IsNull() && len(planModel.Input.Elements()) > 0
+ planHadInput := utils.IsKnown(planModel.Inputs) && !planModel.Inputs.IsNull() && len(planModel.Inputs.Elements()) > 0
diags = planModel.populateFromAPI(ctx, policy)
resp.Diagnostics.Append(diags...)
@@ -76,7 +76,7 @@ func (r *integrationPolicyResource) Create(ctx context.Context, req resource.Cre
// If plan didn't have input configured, ensure we don't add it now
// This prevents "Provider produced inconsistent result" errors
if !planHadInput {
- planModel.Input = types.ListNull(getInputTypeV1())
+ planModel.Inputs = types.MapNull(getInputsTypes())
}
diags = resp.State.Set(ctx, planModel)
diff --git a/internal/fleet/integration_policy/models.go b/internal/fleet/integration_policy/models.go
index 22642da74..70924bd9f 100644
--- a/internal/fleet/integration_policy/models.go
+++ b/internal/fleet/integration_policy/models.go
@@ -3,7 +3,6 @@ package integration_policy
import (
"context"
"fmt"
- "sort"
"github.com/elastic/terraform-provider-elasticstack/generated/kbapi"
"github.com/elastic/terraform-provider-elasticstack/internal/utils"
@@ -31,16 +30,20 @@ type integrationPolicyModel struct {
IntegrationName types.String `tfsdk:"integration_name"`
IntegrationVersion types.String `tfsdk:"integration_version"`
OutputID types.String `tfsdk:"output_id"`
- Input types.List `tfsdk:"input"` //> integrationPolicyInputModel
+ Inputs types.Map `tfsdk:"inputs"` //> integrationPolicyInputsModel
VarsJson jsontypes.Normalized `tfsdk:"vars_json"`
SpaceIds types.Set `tfsdk:"space_ids"`
}
-type integrationPolicyInputModel struct {
- InputID types.String `tfsdk:"input_id"`
- Enabled types.Bool `tfsdk:"enabled"`
- StreamsJson jsontypes.Normalized `tfsdk:"streams_json"`
- VarsJson jsontypes.Normalized `tfsdk:"vars_json"`
+type integrationPolicyInputsModel struct {
+ Enabled types.Bool `tfsdk:"enabled"`
+ Vars jsontypes.Normalized `tfsdk:"vars"`
+ Streams types.Map `tfsdk:"streams"` //> integrationPolicyInputStreamModel
+}
+
+type integrationPolicyInputStreamModel struct {
+ Enabled types.Bool `tfsdk:"enabled"`
+ Vars jsontypes.Normalized `tfsdk:"vars"`
}
func (model *integrationPolicyModel) populateFromAPI(ctx context.Context, data *kbapi.PackagePolicy) diag.Diagnostics {
@@ -107,20 +110,19 @@ func (model *integrationPolicyModel) populateFromAPI(ctx context.Context, data *
model.SpaceIds = types.SetNull(types.StringType)
}
// If originally set but API didn't return it, keep the original value
-
- model.populateInputFromAPI(ctx, data.Inputs, &diags)
+ model.populateInputsFromAPI(ctx, data.Inputs, &diags)
return diags
}
-func (model *integrationPolicyModel) populateInputFromAPI(ctx context.Context, inputs map[string]kbapi.PackagePolicyInput, diags *diag.Diagnostics) {
+func (model *integrationPolicyModel) populateInputsFromAPI(ctx context.Context, inputs map[string]kbapi.PackagePolicyInput, diags *diag.Diagnostics) {
// Handle input population based on context:
// 1. If model.Input is unknown: we're importing or reading fresh state → populate from API
// 2. If model.Input is known and null/empty: user explicitly didn't configure inputs → don't populate (avoid inconsistent state)
// 3. If model.Input is known and has values: user configured inputs → populate from API
- isInputKnown := utils.IsKnown(model.Input)
- isInputNullOrEmpty := model.Input.IsNull() || (isInputKnown && len(model.Input.Elements()) == 0)
+ isInputKnown := utils.IsKnown(model.Inputs)
+ isInputNullOrEmpty := model.Inputs.IsNull() || (isInputKnown && len(model.Inputs.Elements()) == 0)
// Case 1: Unknown (import/fresh read) - always populate
if !isInputKnown {
@@ -129,32 +131,43 @@ func (model *integrationPolicyModel) populateInputFromAPI(ctx context.Context, i
} else if isInputNullOrEmpty {
// Case 2: Known and null/empty - user explicitly didn't configure inputs
// Don't populate to avoid "Provider produced inconsistent result" error
- model.Input = types.ListNull(getInputTypeV1())
+ model.Inputs = types.MapNull(getInputsTypes())
return
}
// Case 3: Known and not null/empty - user configured inputs, populate from API (continue below)
- newInputs := utils.TransformMapToSlice(ctx, inputs, path.Root("input"), diags,
- func(inputData kbapi.PackagePolicyInput, meta utils.MapMeta) integrationPolicyInputModel {
- return integrationPolicyInputModel{
- InputID: types.StringValue(meta.Key),
- Enabled: types.BoolPointerValue(inputData.Enabled),
- StreamsJson: utils.MapToNormalizedType(utils.Deref(inputData.Streams), meta.Path.AtName("streams_json"), diags),
- VarsJson: utils.MapToNormalizedType(utils.Deref(inputData.Vars), meta.Path.AtName("vars_json"), diags),
- }
- })
- if newInputs == nil {
- model.Input = types.ListNull(getInputTypeV1())
- } else {
- oldInputs := utils.ListTypeAs[integrationPolicyInputModel](ctx, model.Input, path.Root("input"), diags)
+ newInputs := make(map[string]integrationPolicyInputsModel)
+ for inputID, inputData := range inputs {
+ inputModel := integrationPolicyInputsModel{
+ Enabled: types.BoolPointerValue(inputData.Enabled),
+ Vars: utils.MapToNormalizedType(utils.Deref(inputData.Vars), path.Root("inputs").AtMapKey(inputID).AtName("vars"), diags),
+ }
- sortInputs(newInputs, oldInputs)
+ // Populate streams
+ if inputData.Streams != nil && len(*inputData.Streams) > 0 {
+ streams := make(map[string]integrationPolicyInputStreamModel)
+ for streamID, streamData := range *inputData.Streams {
+ streamModel := integrationPolicyInputStreamModel{
+ Enabled: types.BoolPointerValue(streamData.Enabled),
+ Vars: utils.MapToNormalizedType(utils.Deref(streamData.Vars), path.Root("inputs").AtMapKey(inputID).AtName("streams").AtMapKey(streamID).AtName("vars"), diags),
+ }
- inputList, d := types.ListValueFrom(ctx, getInputTypeV1(), newInputs)
- diags.Append(d...)
+ streams[streamID] = streamModel
+ }
+
+ streamsMap, d := types.MapValueFrom(ctx, getInputStreamType(), streams)
+ diags.Append(d...)
+ inputModel.Streams = streamsMap
+ } else {
+ inputModel.Streams = types.MapNull(getInputStreamType())
+ }
- model.Input = inputList
+ newInputs[inputID] = inputModel
}
+
+ inputsMap, d := types.MapValueFrom(ctx, getInputsTypes(), newInputs)
+ diags.Append(d...)
+ model.Inputs = inputsMap
}
func (model integrationPolicyModel) toAPIModel(ctx context.Context, isUpdate bool, feat features) (kbapi.PackagePolicyRequest, diag.Diagnostics) {
@@ -218,51 +231,53 @@ func (model integrationPolicyModel) toAPIModel(ctx context.Context, isUpdate boo
body.Id = model.ID.ValueStringPointer()
}
- body.Inputs = utils.MapRef(utils.ListTypeToMap(ctx, model.Input, path.Root("input"), &diags,
- func(inputModel integrationPolicyInputModel, meta utils.ListMeta) (string, kbapi.PackagePolicyRequestInput) {
- return inputModel.InputID.ValueString(), kbapi.PackagePolicyRequestInput{
- Enabled: inputModel.Enabled.ValueBoolPointer(),
- Streams: utils.MapRef(utils.NormalizedTypeToMap[kbapi.PackagePolicyRequestInputStream](inputModel.StreamsJson, meta.Path.AtName("streams_json"), &diags)),
- Vars: utils.MapRef(utils.NormalizedTypeToMap[any](inputModel.VarsJson, meta.Path.AtName("vars_json"), &diags)),
- }
- }))
+ if utils.IsKnown(model.Inputs) && len(model.Inputs.Elements()) > 0 {
+ // Use the 'inputs' attribute (v2 format)
+ body.Inputs = utils.MapRef(model.toAPIInputsFromInputsAttribute(ctx, &diags))
+ }
// Note: space_ids is read-only for integration policies and inherited from the agent policy
return body, diags
}
-// sortInputs will sort the 'incoming' list of input definitions based on
-// the order of inputs defined in the 'existing' list. Inputs not present in
-// 'existing' will be placed at the end of the list. Inputs are identified by
-// their ID ('input_id'). The 'incoming' slice will be sorted in-place.
-func sortInputs(incoming []integrationPolicyInputModel, existing []integrationPolicyInputModel) {
- if len(existing) == 0 {
- sort.Slice(incoming, func(i, j int) bool {
- return incoming[i].InputID.ValueString() < incoming[j].InputID.ValueString()
- })
- return
+// toAPIInputsFromInputsAttribute converts the 'inputs' attribute to the API model format
+func (model integrationPolicyModel) toAPIInputsFromInputsAttribute(ctx context.Context, diags *diag.Diagnostics) map[string]kbapi.PackagePolicyRequestInput {
+ if !utils.IsKnown(model.Inputs) {
+ return nil
}
- idToIndex := make(map[string]int, len(existing))
- for index, inputData := range existing {
- inputID := inputData.InputID.ValueString()
- idToIndex[inputID] = index
+ inputsMap := utils.MapTypeAs[integrationPolicyInputsModel](ctx, model.Inputs, path.Root("inputs"), diags)
+ if inputsMap == nil {
+ return nil
}
- sort.Slice(incoming, func(i, j int) bool {
- iID := incoming[i].InputID.ValueString()
- iIdx, ok := idToIndex[iID]
- if !ok {
- return false
+ result := make(map[string]kbapi.PackagePolicyRequestInput, len(inputsMap))
+ for inputID, inputModel := range inputsMap {
+ inputPath := path.Root("inputs").AtMapKey(inputID)
+
+ apiInput := kbapi.PackagePolicyRequestInput{
+ Enabled: inputModel.Enabled.ValueBoolPointer(),
+ Vars: utils.MapRef(utils.NormalizedTypeToMap[any](inputModel.Vars, inputPath.AtName("vars"), diags)),
}
- jID := incoming[j].InputID.ValueString()
- jIdx, ok := idToIndex[jID]
- if !ok {
- return true
+ // Convert streams if present
+ if utils.IsKnown(inputModel.Streams) && len(inputModel.Streams.Elements()) > 0 {
+ streamsMap := utils.MapTypeAs[integrationPolicyInputStreamModel](ctx, inputModel.Streams, inputPath.AtName("streams"), diags)
+ if streamsMap != nil {
+ streams := make(map[string]kbapi.PackagePolicyRequestInputStream, len(streamsMap))
+ for streamID, streamModel := range streamsMap {
+ streams[streamID] = kbapi.PackagePolicyRequestInputStream{
+ Enabled: streamModel.Enabled.ValueBoolPointer(),
+ Vars: utils.MapRef(utils.NormalizedTypeToMap[any](streamModel.Vars, inputPath.AtName("streams").AtMapKey(streamID).AtName("vars"), diags)),
+ }
+ }
+ apiInput.Streams = &streams
+ }
}
- return iIdx < jIdx
- })
+ result[inputID] = apiInput
+ }
+
+ return result
}
diff --git a/internal/fleet/integration_policy/models_test.go b/internal/fleet/integration_policy/models_test.go
index 33c3c02f8..121e764c8 100644
--- a/internal/fleet/integration_policy/models_test.go
+++ b/internal/fleet/integration_policy/models_test.go
@@ -9,62 +9,6 @@ import (
"github.com/stretchr/testify/require"
)
-func Test_SortInputs(t *testing.T) {
- t.Run("WithExisting", func(t *testing.T) {
- existing := []integrationPolicyInputModel{
- {InputID: types.StringValue("A"), Enabled: types.BoolValue(true)},
- {InputID: types.StringValue("B"), Enabled: types.BoolValue(true)},
- {InputID: types.StringValue("C"), Enabled: types.BoolValue(true)},
- {InputID: types.StringValue("D"), Enabled: types.BoolValue(true)},
- {InputID: types.StringValue("E"), Enabled: types.BoolValue(true)},
- }
-
- incoming := []integrationPolicyInputModel{
- {InputID: types.StringValue("G"), Enabled: types.BoolValue(true)},
- {InputID: types.StringValue("F"), Enabled: types.BoolValue(true)},
- {InputID: types.StringValue("B"), Enabled: types.BoolValue(true)},
- {InputID: types.StringValue("E"), Enabled: types.BoolValue(true)},
- {InputID: types.StringValue("C"), Enabled: types.BoolValue(true)},
- }
-
- want := []integrationPolicyInputModel{
- {InputID: types.StringValue("B"), Enabled: types.BoolValue(true)},
- {InputID: types.StringValue("C"), Enabled: types.BoolValue(true)},
- {InputID: types.StringValue("E"), Enabled: types.BoolValue(true)},
- {InputID: types.StringValue("G"), Enabled: types.BoolValue(true)},
- {InputID: types.StringValue("F"), Enabled: types.BoolValue(true)},
- }
-
- sortInputs(incoming, existing)
-
- require.Equal(t, want, incoming)
- })
-
- t.Run("WithEmpty", func(t *testing.T) {
- var existing []integrationPolicyInputModel
-
- incoming := []integrationPolicyInputModel{
- {InputID: types.StringValue("G"), Enabled: types.BoolValue(true)},
- {InputID: types.StringValue("F"), Enabled: types.BoolValue(true)},
- {InputID: types.StringValue("B"), Enabled: types.BoolValue(true)},
- {InputID: types.StringValue("E"), Enabled: types.BoolValue(true)},
- {InputID: types.StringValue("C"), Enabled: types.BoolValue(true)},
- }
-
- want := []integrationPolicyInputModel{
- {InputID: types.StringValue("B"), Enabled: types.BoolValue(true)},
- {InputID: types.StringValue("C"), Enabled: types.BoolValue(true)},
- {InputID: types.StringValue("E"), Enabled: types.BoolValue(true)},
- {InputID: types.StringValue("F"), Enabled: types.BoolValue(true)},
- {InputID: types.StringValue("G"), Enabled: types.BoolValue(true)},
- }
-
- sortInputs(incoming, existing)
-
- require.Equal(t, want, incoming)
- })
-}
-
func TestOutputIdHandling(t *testing.T) {
t.Run("populateFromAPI", func(t *testing.T) {
model := &integrationPolicyModel{}
diff --git a/internal/fleet/integration_policy/read.go b/internal/fleet/integration_policy/read.go
index b872c4f58..6347eaa3b 100644
--- a/internal/fleet/integration_policy/read.go
+++ b/internal/fleet/integration_policy/read.go
@@ -54,7 +54,7 @@ func (r *integrationPolicyResource) Read(ctx context.Context, req resource.ReadR
}
// Remember if the state had input configured
- stateHadInput := utils.IsKnown(stateModel.Input) && !stateModel.Input.IsNull() && len(stateModel.Input.Elements()) > 0
+ stateHadInput := utils.IsKnown(stateModel.Inputs) && !stateModel.Inputs.IsNull() && len(stateModel.Inputs.Elements()) > 0
// Check if this is an import operation (PolicyID is the only field set)
isImport := stateModel.PolicyID.ValueString() != "" &&
@@ -70,7 +70,7 @@ func (r *integrationPolicyResource) Read(ctx context.Context, req resource.ReadR
// This prevents "Provider produced inconsistent result" errors during refresh
// However, during import we should always populate inputs from the API
if !stateHadInput && !isImport {
- stateModel.Input = types.ListNull(getInputTypeV1())
+ stateModel.Inputs = types.MapNull(getInputsTypes())
}
diags = resp.State.Set(ctx, stateModel)
diff --git a/internal/fleet/integration_policy/resource.go b/internal/fleet/integration_policy/resource.go
index 3bc48867b..87efa4d1c 100644
--- a/internal/fleet/integration_policy/resource.go
+++ b/internal/fleet/integration_policy/resource.go
@@ -49,7 +49,8 @@ func (r *integrationPolicyResource) ImportState(ctx context.Context, req resourc
func (r *integrationPolicyResource) UpgradeState(context.Context) map[int64]resource.StateUpgrader {
return map[int64]resource.StateUpgrader{
- 0: {PriorSchema: getSchemaV0(), StateUpgrader: upgradeV0},
+ 0: {PriorSchema: getSchemaV0(), StateUpgrader: upgradeV0ToV2},
+ 1: {PriorSchema: getSchemaV1(), StateUpgrader: upgradeV1ToV2},
}
}
diff --git a/internal/fleet/integration_policy/schema.go b/internal/fleet/integration_policy/schema.go
index ae0dbbbd4..8d8f48443 100644
--- a/internal/fleet/integration_policy/schema.go
+++ b/internal/fleet/integration_policy/schema.go
@@ -22,12 +22,12 @@ import (
var integrationPolicyDescription string
func (r *integrationPolicyResource) Schema(ctx context.Context, req resource.SchemaRequest, resp *resource.SchemaResponse) {
- resp.Schema = getSchemaV1()
+ resp.Schema = getSchemaV2()
}
-func getSchemaV1() schema.Schema {
+func getSchemaV2() schema.Schema {
return schema.Schema{
- Version: 1,
+ Version: 2,
Description: integrationPolicyDescription,
Attributes: map[string]schema.Attribute{
"id": schema.StringAttribute{
@@ -109,35 +109,44 @@ func getSchemaV1() schema.Schema {
Optional: true,
Computed: true,
},
- },
- Blocks: map[string]schema.Block{
- "input": schema.ListNestedBlock{
- Description: "Integration inputs.",
- NestedObject: schema.NestedBlockObject{
+ "inputs": schema.MapNestedAttribute{
+ Description: "Integration inputs mapped by input ID.",
+ Computed: true,
+ Optional: true,
+ NestedObject: schema.NestedAttributeObject{
Attributes: map[string]schema.Attribute{
- "input_id": schema.StringAttribute{
- Description: "The identifier of the input.",
- Required: true,
- },
"enabled": schema.BoolAttribute{
Description: "Enable the input.",
Computed: true,
Optional: true,
Default: booldefault.StaticBool(true),
},
- "streams_json": schema.StringAttribute{
- Description: "Input streams as JSON.",
+ "vars": schema.StringAttribute{
+ Description: "Input-level variables as JSON.",
CustomType: jsontypes.NormalizedType{},
- Computed: true,
Optional: true,
Sensitive: true,
},
- "vars_json": schema.StringAttribute{
- Description: "Input variables as JSON.",
- CustomType: jsontypes.NormalizedType{},
+ "streams": schema.MapNestedAttribute{
+ Description: "Input streams mapped by stream ID.",
Computed: true,
Optional: true,
- Sensitive: true,
+ NestedObject: schema.NestedAttributeObject{
+ Attributes: map[string]schema.Attribute{
+ "enabled": schema.BoolAttribute{
+ Description: "Enable the stream.",
+ Computed: true,
+ Optional: true,
+ Default: booldefault.StaticBool(true),
+ },
+ "vars": schema.StringAttribute{
+ Description: "Stream-level variables as JSON.",
+ CustomType: jsontypes.NormalizedType{},
+ Optional: true,
+ Sensitive: true,
+ },
+ },
+ },
},
},
},
@@ -146,6 +155,14 @@ func getSchemaV1() schema.Schema {
}
}
-func getInputTypeV1() attr.Type {
- return getSchemaV1().Blocks["input"].Type().(attr.TypeWithElementType).ElementType()
+func getInputsTypes() attr.Type {
+ return getSchemaV2().Attributes["inputs"].GetType().(attr.TypeWithElementType).ElementType()
+}
+
+func getInputsAttributeTypes() map[string]attr.Type {
+ return getInputsTypes().(attr.TypeWithAttributeTypes).AttributeTypes()
+}
+
+func getInputStreamType() attr.Type {
+ return getInputsAttributeTypes()["streams"].(attr.TypeWithElementType).ElementType()
}
diff --git a/internal/fleet/integration_policy/upgrade.go b/internal/fleet/integration_policy/schema_v0.go
similarity index 87%
rename from internal/fleet/integration_policy/upgrade.go
rename to internal/fleet/integration_policy/schema_v0.go
index 4cf9f6dfc..5fc4b2685 100644
--- a/internal/fleet/integration_policy/upgrade.go
+++ b/internal/fleet/integration_policy/schema_v0.go
@@ -5,6 +5,7 @@ import (
"github.com/elastic/terraform-provider-elasticstack/internal/utils"
"github.com/hashicorp/terraform-plugin-framework-jsontypes/jsontypes"
+ "github.com/hashicorp/terraform-plugin-framework/diag"
"github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
@@ -68,20 +69,37 @@ func getSchemaV0() *schema.Schema {
}
}
+// This function first upgrades v0 to v1, and then re-uses the v1 to v2 upgrader.
+func upgradeV0ToV2(ctx context.Context, req resource.UpgradeStateRequest, resp *resource.UpgradeStateResponse) {
+ stateModelV1, diags := upgradeV0ToV1(ctx, req)
+ resp.Diagnostics.Append(diags...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ stateModelV2, diags := stateModelV1.toV2(ctx)
+ resp.Diagnostics.Append(diags...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ diags = resp.State.Set(ctx, stateModelV2)
+ resp.Diagnostics.Append(diags...)
+}
+
// The schema between V0 and V1 is mostly the same, however vars_json and
// streams_json saved "" values to the state when null values were in the
// config. jsontypes.Normalized correctly states this is invalid JSON.
-func upgradeV0(ctx context.Context, req resource.UpgradeStateRequest, resp *resource.UpgradeStateResponse) {
+func upgradeV0ToV1(ctx context.Context, req resource.UpgradeStateRequest) (integrationPolicyModelV1, diag.Diagnostics) {
var stateModelV0 integrationPolicyModelV0
diags := req.State.Get(ctx, &stateModelV0)
- resp.Diagnostics.Append(diags...)
- if resp.Diagnostics.HasError() {
- return
+ if diags.HasError() {
+ return integrationPolicyModelV1{}, diags
}
// Convert V0 model to V1 model
- stateModelV1 := integrationPolicyModel{
+ stateModelV1 := integrationPolicyModelV1{
ID: stateModelV0.ID,
PolicyID: stateModelV0.PolicyID,
Name: stateModelV0.Name,
@@ -108,11 +126,11 @@ func upgradeV0(ctx context.Context, req resource.UpgradeStateRequest, resp *reso
}
// Convert inputs from V0 to V1
- inputsV0 := utils.ListTypeAs[integrationPolicyInputModelV0](ctx, stateModelV0.Input, path.Root("input"), &resp.Diagnostics)
- var inputsV1 []integrationPolicyInputModel
+ inputsV0 := utils.ListTypeAs[integrationPolicyInputModelV0](ctx, stateModelV0.Input, path.Root("input"), &diags)
+ var inputsV1 []integrationPolicyInputModelV1
for _, inputV0 := range inputsV0 {
- inputV1 := integrationPolicyInputModel{
+ inputV1 := integrationPolicyInputModelV1{
InputID: inputV0.InputID,
Enabled: inputV0.Enabled,
}
@@ -142,11 +160,6 @@ func upgradeV0(ctx context.Context, req resource.UpgradeStateRequest, resp *reso
inputsV1 = append(inputsV1, inputV1)
}
- stateModelV1.Input = utils.ListValueFrom(ctx, inputsV1, getInputTypeV1(), path.Root("input"), &resp.Diagnostics)
- if resp.Diagnostics.HasError() {
- return
- }
-
- diags = resp.State.Set(ctx, stateModelV1)
- resp.Diagnostics.Append(diags...)
+ stateModelV1.Input = utils.ListValueFrom(ctx, inputsV1, getInputTypeV1(), path.Root("input"), &diags)
+ return stateModelV1, diags
}
diff --git a/internal/fleet/integration_policy/schema_v0_test.go b/internal/fleet/integration_policy/schema_v0_test.go
new file mode 100644
index 000000000..03d619db5
--- /dev/null
+++ b/internal/fleet/integration_policy/schema_v0_test.go
@@ -0,0 +1,209 @@
+package integration_policy
+
+import (
+ "testing"
+
+ "github.com/hashicorp/terraform-plugin-framework-jsontypes/jsontypes"
+ "github.com/hashicorp/terraform-plugin-framework/types"
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+// TestUpgradeV0ToV2_JSONConversions tests the V0 to V1 conversion logic
+// particularly the conversion of empty strings to null for JSON fields
+func TestUpgradeV0ToV2_JSONConversions(t *testing.T) {
+ t.Parallel()
+
+ tests := []struct {
+ name string
+ v0VarsJson types.String
+ expectedV1Null bool
+ }{
+ {
+ name: "valid JSON string is preserved",
+ v0VarsJson: types.StringValue(`{"key":"value"}`),
+ expectedV1Null: false,
+ },
+ {
+ name: "empty string converts to null",
+ v0VarsJson: types.StringValue(""),
+ expectedV1Null: true,
+ },
+ {
+ name: "null remains null",
+ v0VarsJson: types.StringNull(),
+ expectedV1Null: true,
+ },
+ {
+ name: "complex JSON is preserved",
+ v0VarsJson: types.StringValue(`{"nested":{"key":"value"},"array":[1,2,3]}`),
+ expectedV1Null: false,
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ t.Parallel()
+
+ // Simulate the empty-string-to-null conversion logic from upgradeV0ToV1
+ var v1VarsJson jsontypes.Normalized
+ if varsJSON := tt.v0VarsJson.ValueStringPointer(); varsJSON != nil {
+ if *varsJSON == "" {
+ v1VarsJson = jsontypes.NewNormalizedNull()
+ } else {
+ v1VarsJson = jsontypes.NewNormalizedValue(*varsJSON)
+ }
+ } else {
+ v1VarsJson = jsontypes.NewNormalizedNull()
+ }
+
+ if tt.expectedV1Null {
+ assert.True(t, v1VarsJson.IsNull(), "Expected null but got non-null value")
+ } else {
+ assert.False(t, v1VarsJson.IsNull(), "Expected non-null but got null value")
+
+ // For non-null values, verify the JSON content
+ var result map[string]interface{}
+ diags := v1VarsJson.Unmarshal(&result)
+ require.Empty(t, diags, "Failed to unmarshal JSON")
+ }
+ })
+ }
+}
+
+// TestUpgradeV0ToV2_InputJSONConversions tests the conversion logic for input fields
+func TestUpgradeV0ToV2_InputJSONConversions(t *testing.T) {
+ t.Parallel()
+
+ tests := []struct {
+ name string
+ v0VarsJson types.String
+ v0StreamsJson types.String
+ expectedVarsNull bool
+ expectedStreamsNull bool
+ }{
+ {
+ name: "valid JSON strings are preserved",
+ v0VarsJson: types.StringValue(`{"var":"value"}`),
+ v0StreamsJson: types.StringValue(`{"stream-1":{"enabled":true}}`),
+ expectedVarsNull: false,
+ expectedStreamsNull: false,
+ },
+ {
+ name: "empty strings convert to null",
+ v0VarsJson: types.StringValue(""),
+ v0StreamsJson: types.StringValue(""),
+ expectedVarsNull: true,
+ expectedStreamsNull: true,
+ },
+ {
+ name: "null values remain null",
+ v0VarsJson: types.StringNull(),
+ v0StreamsJson: types.StringNull(),
+ expectedVarsNull: true,
+ expectedStreamsNull: true,
+ },
+ {
+ name: "mixed empty and valid JSON",
+ v0VarsJson: types.StringValue(`{"key":"value"}`),
+ v0StreamsJson: types.StringValue(""),
+ expectedVarsNull: false,
+ expectedStreamsNull: true,
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ t.Parallel()
+
+ // Simulate the conversion logic for vars_json
+ var v1VarsJson jsontypes.Normalized
+ if varsJSON := tt.v0VarsJson.ValueStringPointer(); varsJSON != nil {
+ if *varsJSON == "" {
+ v1VarsJson = jsontypes.NewNormalizedNull()
+ } else {
+ v1VarsJson = jsontypes.NewNormalizedValue(*varsJSON)
+ }
+ } else {
+ v1VarsJson = jsontypes.NewNormalizedNull()
+ }
+
+ // Simulate the conversion logic for streams_json
+ var v1StreamsJson jsontypes.Normalized
+ if streamsJSON := tt.v0StreamsJson.ValueStringPointer(); streamsJSON != nil {
+ if *streamsJSON == "" {
+ v1StreamsJson = jsontypes.NewNormalizedNull()
+ } else {
+ v1StreamsJson = jsontypes.NewNormalizedValue(*streamsJSON)
+ }
+ } else {
+ v1StreamsJson = jsontypes.NewNormalizedNull()
+ }
+
+ // Verify vars_json
+ if tt.expectedVarsNull {
+ assert.True(t, v1VarsJson.IsNull(), "Expected vars_json to be null")
+ } else {
+ assert.False(t, v1VarsJson.IsNull(), "Expected vars_json to be non-null")
+ }
+
+ // Verify streams_json
+ if tt.expectedStreamsNull {
+ assert.True(t, v1StreamsJson.IsNull(), "Expected streams_json to be null")
+ } else {
+ assert.False(t, v1StreamsJson.IsNull(), "Expected streams_json to be non-null")
+ }
+ })
+ }
+}
+
+// TestUpgradeV0ToV2_NewFieldsAddedAsNull tests that new fields added in V1/V2 are set to null
+func TestUpgradeV0ToV2_NewFieldsAddedAsNull(t *testing.T) {
+ t.Parallel()
+
+ // V0 didn't have these fields, verify they're initialized as null in the upgrade
+ agentPolicyIDs := types.ListNull(types.StringType)
+ spaceIds := types.SetNull(types.StringType)
+
+ assert.True(t, agentPolicyIDs.IsNull(), "agent_policy_ids should be null (didn't exist in V0)")
+ assert.True(t, spaceIds.IsNull(), "space_ids should be null (didn't exist in V0)")
+}
+
+// TestUpgradeV0ToV2_FieldsPreserved tests that all V0 fields are preserved during upgrade
+func TestUpgradeV0ToV2_FieldsPreserved(t *testing.T) {
+ t.Parallel()
+
+ // Test that the structure of fields from V0 are preserved
+ v0Model := integrationPolicyModelV0{
+ ID: types.StringValue("test-id"),
+ PolicyID: types.StringValue("test-policy-id"),
+ Name: types.StringValue("test-name"),
+ Namespace: types.StringValue("test-namespace"),
+ AgentPolicyID: types.StringValue("agent-policy-1"),
+ Description: types.StringValue("test description"),
+ Enabled: types.BoolValue(false),
+ Force: types.BoolValue(true),
+ IntegrationName: types.StringValue("test-integration"),
+ IntegrationVersion: types.StringValue("2.0.0"),
+ VarsJson: types.StringValue(`{"complex":{"nested":"value"}}`),
+ Input: types.ListNull(getInputTypeV0()),
+ }
+
+ // Verify all fields are accessible and have the expected values
+ assert.Equal(t, "test-id", v0Model.ID.ValueString())
+ assert.Equal(t, "test-policy-id", v0Model.PolicyID.ValueString())
+ assert.Equal(t, "test-name", v0Model.Name.ValueString())
+ assert.Equal(t, "test-namespace", v0Model.Namespace.ValueString())
+ assert.Equal(t, "agent-policy-1", v0Model.AgentPolicyID.ValueString())
+ assert.Equal(t, "test description", v0Model.Description.ValueString())
+ assert.Equal(t, false, v0Model.Enabled.ValueBool())
+ assert.Equal(t, true, v0Model.Force.ValueBool())
+ assert.Equal(t, "test-integration", v0Model.IntegrationName.ValueString())
+ assert.Equal(t, "2.0.0", v0Model.IntegrationVersion.ValueString())
+ assert.Equal(t, `{"complex":{"nested":"value"}}`, v0Model.VarsJson.ValueString())
+ assert.True(t, v0Model.Input.IsNull())
+}
+
+func getInputTypeV0() types.ObjectType {
+ return getSchemaV0().Blocks["input"].Type().(types.ListType).ElemType.(types.ObjectType)
+}
diff --git a/internal/fleet/integration_policy/schema_v1.go b/internal/fleet/integration_policy/schema_v1.go
new file mode 100644
index 000000000..305a9e4d5
--- /dev/null
+++ b/internal/fleet/integration_policy/schema_v1.go
@@ -0,0 +1,266 @@
+package integration_policy
+
+import (
+ "context"
+ _ "embed"
+
+ "github.com/elastic/terraform-provider-elasticstack/generated/kbapi"
+ "github.com/elastic/terraform-provider-elasticstack/internal/utils"
+ "github.com/hashicorp/terraform-plugin-framework-jsontypes/jsontypes"
+ "github.com/hashicorp/terraform-plugin-framework-validators/listvalidator"
+ "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ "github.com/hashicorp/terraform-plugin-framework/attr"
+ "github.com/hashicorp/terraform-plugin-framework/diag"
+ "github.com/hashicorp/terraform-plugin-framework/path"
+ "github.com/hashicorp/terraform-plugin-framework/resource"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/booldefault"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
+ "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/types"
+)
+
+type integrationPolicyModelV1 struct {
+ ID types.String `tfsdk:"id"`
+ PolicyID types.String `tfsdk:"policy_id"`
+ Name types.String `tfsdk:"name"`
+ Namespace types.String `tfsdk:"namespace"`
+ AgentPolicyID types.String `tfsdk:"agent_policy_id"`
+ AgentPolicyIDs types.List `tfsdk:"agent_policy_ids"`
+ Description types.String `tfsdk:"description"`
+ Enabled types.Bool `tfsdk:"enabled"`
+ Force types.Bool `tfsdk:"force"`
+ IntegrationName types.String `tfsdk:"integration_name"`
+ IntegrationVersion types.String `tfsdk:"integration_version"`
+ OutputID types.String `tfsdk:"output_id"`
+ Input types.List `tfsdk:"input"` //> integrationPolicyInputModelV1
+ VarsJson jsontypes.Normalized `tfsdk:"vars_json"`
+ SpaceIds types.Set `tfsdk:"space_ids"`
+}
+
+type integrationPolicyInputModelV1 struct {
+ InputID types.String `tfsdk:"input_id"`
+ Enabled types.Bool `tfsdk:"enabled"`
+ StreamsJson jsontypes.Normalized `tfsdk:"streams_json"`
+ VarsJson jsontypes.Normalized `tfsdk:"vars_json"`
+}
+
+func (m integrationPolicyModelV1) toV2(ctx context.Context) (integrationPolicyModel, diag.Diagnostics) {
+ // Convert V1 model to V2 model
+ stateModelV2 := integrationPolicyModel{
+ ID: m.ID,
+ PolicyID: m.PolicyID,
+ Name: m.Name,
+ Namespace: m.Namespace,
+ AgentPolicyID: m.AgentPolicyID,
+ AgentPolicyIDs: m.AgentPolicyIDs,
+ Description: m.Description,
+ Enabled: m.Enabled,
+ Force: m.Force,
+ IntegrationName: m.IntegrationName,
+ IntegrationVersion: m.IntegrationVersion,
+ OutputID: m.OutputID,
+ SpaceIds: m.SpaceIds,
+ VarsJson: m.VarsJson,
+ }
+
+ // Convert inputs from V1 to V2
+ var diags diag.Diagnostics
+ inputsV1 := utils.ListTypeAs[integrationPolicyInputModelV1](ctx, m.Input, path.Root("input"), &diags)
+ inputsV2 := make(map[string]integrationPolicyInputsModel, len(inputsV1))
+
+ for _, inputV1 := range inputsV1 {
+ id := inputV1.InputID.ValueString()
+ streams, d := updateStreamsV1ToV2(ctx, inputV1.StreamsJson, id)
+ diags.Append(d...)
+ if diags.HasError() {
+ return stateModelV2, diags
+ }
+
+ inputsV2[id] = integrationPolicyInputsModel{
+ Enabled: inputV1.Enabled,
+ Vars: inputV1.VarsJson,
+ Streams: streams,
+ }
+ }
+
+ stateModelV2.Inputs = utils.MapValueFrom(ctx, inputsV2, getInputsTypes(), path.Root("input"), &diags)
+ return stateModelV2, diags
+}
+
+// The schema between V1 and V2 is mostly the same, except that:
+// * The input block was moved to a map attribute.
+// * The streams attribute inside the input block was also moved to a map attribute.
+// This upgrader translates the old list structures into the new map structures.
+func upgradeV1ToV2(ctx context.Context, req resource.UpgradeStateRequest, resp *resource.UpgradeStateResponse) {
+ var stateModelV1 integrationPolicyModelV1
+
+ diags := req.State.Get(ctx, &stateModelV1)
+ resp.Diagnostics.Append(diags...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ stateModelV2, diags := stateModelV1.toV2(ctx)
+ resp.Diagnostics.Append(diags...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ diags = resp.State.Set(ctx, stateModelV2)
+ resp.Diagnostics.Append(diags...)
+}
+
+func updateStreamsV1ToV2(ctx context.Context, v1 jsontypes.Normalized, inputID string) (types.Map, diag.Diagnostics) {
+ if !utils.IsKnown(v1) {
+ return types.MapNull(getInputStreamType()), nil
+ }
+
+ var apiStreams map[string]kbapi.PackagePolicyInputStream
+ diags := v1.Unmarshal(&apiStreams)
+ if diags.HasError() {
+ return types.MapNull(getInputStreamType()), diags
+ }
+
+ if len(apiStreams) == 0 {
+ return types.MapNull(getInputStreamType()), nil
+ }
+
+ streams := make(map[string]integrationPolicyInputStreamModel)
+ for streamID, streamData := range apiStreams {
+ streamModel := integrationPolicyInputStreamModel{
+ Enabled: types.BoolPointerValue(streamData.Enabled),
+ Vars: utils.MapToNormalizedType(utils.Deref(streamData.Vars), path.Root("inputs").AtMapKey(inputID).AtName("streams").AtMapKey(streamID).AtName("vars"), &diags),
+ }
+
+ streams[streamID] = streamModel
+ }
+
+ return types.MapValueFrom(ctx, getInputStreamType(), streams)
+}
+
+func getSchemaV1() *schema.Schema {
+ return &schema.Schema{
+ Version: 1,
+ Description: integrationPolicyDescription,
+ Attributes: map[string]schema.Attribute{
+ "id": schema.StringAttribute{
+ Description: "The ID of this resource.",
+ Computed: true,
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.UseStateForUnknown(),
+ },
+ },
+ "policy_id": schema.StringAttribute{
+ Description: "Unique identifier of the integration policy.",
+ Computed: true,
+ Optional: true,
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
+ stringplanmodifier.UseStateForUnknown(),
+ },
+ },
+ "name": schema.StringAttribute{
+ Description: "The name of the integration policy.",
+ Required: true,
+ },
+ "namespace": schema.StringAttribute{
+ Description: "The namespace of the integration policy.",
+ Required: true,
+ },
+ "agent_policy_id": schema.StringAttribute{
+ Description: "ID of the agent policy.",
+ Optional: true,
+ Validators: []validator.String{
+ stringvalidator.ConflictsWith(path.Root("agent_policy_ids").Expression()),
+ },
+ },
+ "agent_policy_ids": schema.ListAttribute{
+ Description: "List of agent policy IDs.",
+ ElementType: types.StringType,
+ Optional: true,
+ Validators: []validator.List{
+ listvalidator.ConflictsWith(path.Root("agent_policy_id").Expression()),
+ listvalidator.SizeAtLeast(1),
+ },
+ },
+ "description": schema.StringAttribute{
+ Description: "The description of the integration policy.",
+ Optional: true,
+ },
+ "enabled": schema.BoolAttribute{
+ Description: "Enable the integration policy.",
+ Computed: true,
+ Optional: true,
+ Default: booldefault.StaticBool(true),
+ },
+ "force": schema.BoolAttribute{
+ Description: "Force operations, such as creation and deletion, to occur.",
+ Optional: true,
+ },
+ "integration_name": schema.StringAttribute{
+ Description: "The name of the integration package.",
+ Required: true,
+ },
+ "integration_version": schema.StringAttribute{
+ Description: "The version of the integration package.",
+ Required: true,
+ },
+ "output_id": schema.StringAttribute{
+ Description: "The ID of the output to send data to. When not specified, the default output of the agent policy will be used.",
+ Optional: true,
+ },
+ "vars_json": schema.StringAttribute{
+ Description: "Integration-level variables as JSON.",
+ CustomType: jsontypes.NormalizedType{},
+ Computed: true,
+ Optional: true,
+ Sensitive: true,
+ },
+ "space_ids": schema.SetAttribute{
+ Description: "The Kibana space IDs where this integration policy is available. When set, must match the space_ids of the referenced agent policy. If not set, will be inherited from the agent policy. Note: The order of space IDs does not matter as this is a set.",
+ ElementType: types.StringType,
+ Optional: true,
+ Computed: true,
+ },
+ },
+ Blocks: map[string]schema.Block{
+ "input": schema.ListNestedBlock{
+ Description: "Integration inputs.",
+ NestedObject: schema.NestedBlockObject{
+ Attributes: map[string]schema.Attribute{
+ "input_id": schema.StringAttribute{
+ Description: "The identifier of the input.",
+ Required: true,
+ },
+ "enabled": schema.BoolAttribute{
+ Description: "Enable the input.",
+ Computed: true,
+ Optional: true,
+ Default: booldefault.StaticBool(true),
+ },
+ "streams_json": schema.StringAttribute{
+ Description: "Input streams as JSON.",
+ CustomType: jsontypes.NormalizedType{},
+ Computed: true,
+ Optional: true,
+ Sensitive: true,
+ },
+ "vars_json": schema.StringAttribute{
+ Description: "Input variables as JSON.",
+ CustomType: jsontypes.NormalizedType{},
+ Computed: true,
+ Optional: true,
+ Sensitive: true,
+ },
+ },
+ },
+ },
+ },
+ }
+}
+
+func getInputTypeV1() attr.Type {
+ return getSchemaV1().Blocks["input"].Type().(attr.TypeWithElementType).ElementType()
+}
diff --git a/internal/fleet/integration_policy/schema_v1_test.go b/internal/fleet/integration_policy/schema_v1_test.go
new file mode 100644
index 000000000..f1c1d43e9
--- /dev/null
+++ b/internal/fleet/integration_policy/schema_v1_test.go
@@ -0,0 +1,485 @@
+package integration_policy
+
+import (
+ "context"
+ "encoding/json"
+ "testing"
+
+ "github.com/elastic/terraform-provider-elasticstack/generated/kbapi"
+ "github.com/hashicorp/terraform-plugin-framework-jsontypes/jsontypes"
+ "github.com/hashicorp/terraform-plugin-framework/attr"
+ "github.com/hashicorp/terraform-plugin-framework/types"
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestUpdateStreamsV1ToV2(t *testing.T) {
+ ctx := context.Background()
+
+ t.Run("null streams", func(t *testing.T) {
+ result, diags := updateStreamsV1ToV2(ctx, jsontypes.NewNormalizedNull(), "test-input")
+ require.Empty(t, diags)
+ assert.True(t, result.IsNull())
+ })
+
+ t.Run("unknown streams", func(t *testing.T) {
+ result, diags := updateStreamsV1ToV2(ctx, jsontypes.NewNormalizedUnknown(), "test-input")
+ require.Empty(t, diags)
+ assert.True(t, result.IsNull())
+ })
+
+ t.Run("empty streams", func(t *testing.T) {
+ emptyJSON := jsontypes.NewNormalizedValue("{}")
+ result, diags := updateStreamsV1ToV2(ctx, emptyJSON, "test-input")
+ require.Empty(t, diags)
+ assert.True(t, result.IsNull())
+ })
+
+ t.Run("single stream with enabled and vars", func(t *testing.T) {
+ enabled := true
+ vars := map[string]interface{}{
+ "key1": "value1",
+ "key2": 42,
+ }
+ apiStreams := map[string]kbapi.PackagePolicyInputStream{
+ "stream-1": {
+ Enabled: &enabled,
+ Vars: &vars,
+ },
+ }
+ streamsJSON, err := json.Marshal(apiStreams)
+ require.NoError(t, err)
+ normalized := jsontypes.NewNormalizedValue(string(streamsJSON))
+
+ result, diags := updateStreamsV1ToV2(ctx, normalized, "test-input")
+ require.Empty(t, diags)
+ assert.False(t, result.IsNull())
+
+ // Convert back to verify
+ var resultStreams map[string]integrationPolicyInputStreamModel
+ d := result.ElementsAs(ctx, &resultStreams, false)
+ require.Empty(t, d)
+ require.Len(t, resultStreams, 1)
+ require.Contains(t, resultStreams, "stream-1")
+
+ stream := resultStreams["stream-1"]
+ assert.Equal(t, types.BoolValue(true), stream.Enabled)
+ assert.False(t, stream.Vars.IsNull())
+ })
+
+ t.Run("multiple streams with different configurations", func(t *testing.T) {
+ enabled1 := true
+ enabled2 := false
+ vars1 := map[string]interface{}{"key1": "value1"}
+ vars2 := map[string]interface{}{"key2": "value2"}
+
+ apiStreams := map[string]kbapi.PackagePolicyInputStream{
+ "stream-1": {
+ Enabled: &enabled1,
+ Vars: &vars1,
+ },
+ "stream-2": {
+ Enabled: &enabled2,
+ Vars: &vars2,
+ },
+ }
+ streamsJSON, err := json.Marshal(apiStreams)
+ require.NoError(t, err)
+ normalized := jsontypes.NewNormalizedValue(string(streamsJSON))
+
+ result, diags := updateStreamsV1ToV2(ctx, normalized, "test-input")
+ require.Empty(t, diags)
+ assert.False(t, result.IsNull())
+
+ var resultStreams map[string]integrationPolicyInputStreamModel
+ d := result.ElementsAs(ctx, &resultStreams, false)
+ require.Empty(t, d)
+ require.Len(t, resultStreams, 2)
+
+ assert.Equal(t, types.BoolValue(true), resultStreams["stream-1"].Enabled)
+ assert.Equal(t, types.BoolValue(false), resultStreams["stream-2"].Enabled)
+ })
+
+ t.Run("stream with nil enabled", func(t *testing.T) {
+ vars := map[string]interface{}{"key": "value"}
+ apiStreams := map[string]kbapi.PackagePolicyInputStream{
+ "stream-1": {
+ Enabled: nil,
+ Vars: &vars,
+ },
+ }
+ streamsJSON, err := json.Marshal(apiStreams)
+ require.NoError(t, err)
+ normalized := jsontypes.NewNormalizedValue(string(streamsJSON))
+
+ result, diags := updateStreamsV1ToV2(ctx, normalized, "test-input")
+ require.Empty(t, diags)
+ assert.False(t, result.IsNull())
+
+ var resultStreams map[string]integrationPolicyInputStreamModel
+ d := result.ElementsAs(ctx, &resultStreams, false)
+ require.Empty(t, d)
+ require.Len(t, resultStreams, 1)
+
+ stream := resultStreams["stream-1"]
+ assert.True(t, stream.Enabled.IsNull())
+ })
+
+ t.Run("stream with nil vars", func(t *testing.T) {
+ enabled := true
+ apiStreams := map[string]kbapi.PackagePolicyInputStream{
+ "stream-1": {
+ Enabled: &enabled,
+ Vars: nil,
+ },
+ }
+ streamsJSON, err := json.Marshal(apiStreams)
+ require.NoError(t, err)
+ normalized := jsontypes.NewNormalizedValue(string(streamsJSON))
+
+ result, diags := updateStreamsV1ToV2(ctx, normalized, "test-input")
+ require.Empty(t, diags)
+ assert.False(t, result.IsNull())
+
+ var resultStreams map[string]integrationPolicyInputStreamModel
+ d := result.ElementsAs(ctx, &resultStreams, false)
+ require.Empty(t, d)
+ require.Len(t, resultStreams, 1)
+
+ stream := resultStreams["stream-1"]
+ assert.True(t, stream.Vars.IsNull())
+ })
+
+ t.Run("invalid JSON", func(t *testing.T) {
+ normalized := jsontypes.NewNormalizedValue("not valid json")
+ result, diags := updateStreamsV1ToV2(ctx, normalized, "test-input")
+ require.NotEmpty(t, diags)
+ assert.True(t, result.IsNull())
+ })
+}
+
+func TestIntegrationPolicyModelV1ToV2(t *testing.T) {
+ ctx := context.Background()
+
+ t.Run("basic model conversion", func(t *testing.T) {
+ v1Model := integrationPolicyModelV1{
+ ID: types.StringValue("test-id"),
+ PolicyID: types.StringValue("test-policy-id"),
+ Name: types.StringValue("test-name"),
+ Namespace: types.StringValue("test-namespace"),
+ AgentPolicyID: types.StringValue("agent-policy-1"),
+ Description: types.StringValue("test description"),
+ Enabled: types.BoolValue(true),
+ Force: types.BoolValue(false),
+ IntegrationName: types.StringValue("test-integration"),
+ IntegrationVersion: types.StringValue("1.0.0"),
+ VarsJson: jsontypes.NewNormalizedValue(`{"var1":"value1"}`),
+ Input: types.ListNull(getInputTypeV1()),
+ }
+
+ v2Model, diags := v1Model.toV2(ctx)
+ require.Empty(t, diags)
+
+ assert.Equal(t, v1Model.ID, v2Model.ID)
+ assert.Equal(t, v1Model.PolicyID, v2Model.PolicyID)
+ assert.Equal(t, v1Model.Name, v2Model.Name)
+ assert.Equal(t, v1Model.Namespace, v2Model.Namespace)
+ assert.Equal(t, v1Model.AgentPolicyID, v2Model.AgentPolicyID)
+ assert.Equal(t, v1Model.Description, v2Model.Description)
+ assert.Equal(t, v1Model.Enabled, v2Model.Enabled)
+ assert.Equal(t, v1Model.Force, v2Model.Force)
+ assert.Equal(t, v1Model.IntegrationName, v2Model.IntegrationName)
+ assert.Equal(t, v1Model.IntegrationVersion, v2Model.IntegrationVersion)
+ assert.Equal(t, v1Model.VarsJson, v2Model.VarsJson)
+ })
+
+ t.Run("conversion with agent_policy_ids", func(t *testing.T) {
+ policyIDs, diags := types.ListValueFrom(ctx, types.StringType, []string{"policy-1", "policy-2"})
+ require.Empty(t, diags)
+
+ v1Model := integrationPolicyModelV1{
+ ID: types.StringValue("test-id"),
+ Name: types.StringValue("test-name"),
+ Namespace: types.StringValue("test-namespace"),
+ AgentPolicyIDs: policyIDs,
+ IntegrationName: types.StringValue("test-integration"),
+ IntegrationVersion: types.StringValue("1.0.0"),
+ Input: types.ListNull(getInputTypeV1()),
+ }
+
+ v2Model, diags := v1Model.toV2(ctx)
+ require.Empty(t, diags)
+
+ assert.Equal(t, v1Model.AgentPolicyIDs, v2Model.AgentPolicyIDs)
+ })
+
+ t.Run("conversion with space_ids", func(t *testing.T) {
+ spaceIDs, diags := types.SetValueFrom(ctx, types.StringType, []string{"space-1", "space-2"})
+ require.Empty(t, diags)
+
+ v1Model := integrationPolicyModelV1{
+ ID: types.StringValue("test-id"),
+ Name: types.StringValue("test-name"),
+ Namespace: types.StringValue("test-namespace"),
+ AgentPolicyID: types.StringValue("agent-policy-1"),
+ IntegrationName: types.StringValue("test-integration"),
+ IntegrationVersion: types.StringValue("1.0.0"),
+ SpaceIds: spaceIDs,
+ Input: types.ListNull(getInputTypeV1()),
+ }
+
+ v2Model, diags := v1Model.toV2(ctx)
+ require.Empty(t, diags)
+
+ assert.Equal(t, v1Model.SpaceIds, v2Model.SpaceIds)
+ })
+
+ t.Run("conversion with empty inputs", func(t *testing.T) {
+ v1Model := integrationPolicyModelV1{
+ ID: types.StringValue("test-id"),
+ Name: types.StringValue("test-name"),
+ Namespace: types.StringValue("test-namespace"),
+ AgentPolicyID: types.StringValue("agent-policy-1"),
+ IntegrationName: types.StringValue("test-integration"),
+ IntegrationVersion: types.StringValue("1.0.0"),
+ Input: types.ListNull(getInputTypeV1()),
+ }
+
+ v2Model, diags := v1Model.toV2(ctx)
+ require.Empty(t, diags)
+
+ assert.True(t, v2Model.Inputs.IsNull() || len(v2Model.Inputs.Elements()) == 0)
+ })
+
+ t.Run("conversion with single input without streams", func(t *testing.T) {
+ inputsV1 := []integrationPolicyInputModelV1{
+ {
+ InputID: types.StringValue("input-1"),
+ Enabled: types.BoolValue(true),
+ VarsJson: jsontypes.NewNormalizedValue(`{"input_var":"value"}`),
+ StreamsJson: jsontypes.NewNormalizedNull(),
+ },
+ }
+ inputList, diags := types.ListValueFrom(ctx, getInputTypeV1(), inputsV1)
+ require.Empty(t, diags)
+
+ v1Model := integrationPolicyModelV1{
+ ID: types.StringValue("test-id"),
+ Name: types.StringValue("test-name"),
+ Namespace: types.StringValue("test-namespace"),
+ AgentPolicyID: types.StringValue("agent-policy-1"),
+ IntegrationName: types.StringValue("test-integration"),
+ IntegrationVersion: types.StringValue("1.0.0"),
+ Input: inputList,
+ }
+
+ v2Model, diags := v1Model.toV2(ctx)
+ require.Empty(t, diags)
+ assert.False(t, v2Model.Inputs.IsNull())
+
+ var inputsMap map[string]integrationPolicyInputsModel
+ d := v2Model.Inputs.ElementsAs(ctx, &inputsMap, false)
+ require.Empty(t, d)
+ require.Len(t, inputsMap, 1)
+ require.Contains(t, inputsMap, "input-1")
+
+ input := inputsMap["input-1"]
+ assert.Equal(t, types.BoolValue(true), input.Enabled)
+ assert.False(t, input.Vars.IsNull())
+ assert.True(t, input.Streams.IsNull())
+ })
+
+ t.Run("conversion with input and streams", func(t *testing.T) {
+ enabled := true
+ vars := map[string]interface{}{"stream_var": "value"}
+ apiStreams := map[string]kbapi.PackagePolicyInputStream{
+ "stream-1": {
+ Enabled: &enabled,
+ Vars: &vars,
+ },
+ }
+ streamsJSON, err := json.Marshal(apiStreams)
+ require.NoError(t, err)
+
+ inputsV1 := []integrationPolicyInputModelV1{
+ {
+ InputID: types.StringValue("input-1"),
+ Enabled: types.BoolValue(true),
+ VarsJson: jsontypes.NewNormalizedValue(`{"input_var":"value"}`),
+ StreamsJson: jsontypes.NewNormalizedValue(string(streamsJSON)),
+ },
+ }
+ inputList, diags := types.ListValueFrom(ctx, getInputTypeV1(), inputsV1)
+ require.Empty(t, diags)
+
+ v1Model := integrationPolicyModelV1{
+ ID: types.StringValue("test-id"),
+ Name: types.StringValue("test-name"),
+ Namespace: types.StringValue("test-namespace"),
+ AgentPolicyID: types.StringValue("agent-policy-1"),
+ IntegrationName: types.StringValue("test-integration"),
+ IntegrationVersion: types.StringValue("1.0.0"),
+ Input: inputList,
+ }
+
+ v2Model, diags := v1Model.toV2(ctx)
+ require.Empty(t, diags)
+ assert.False(t, v2Model.Inputs.IsNull())
+
+ var inputsMap map[string]integrationPolicyInputsModel
+ d := v2Model.Inputs.ElementsAs(ctx, &inputsMap, false)
+ require.Empty(t, d)
+ require.Len(t, inputsMap, 1)
+
+ input := inputsMap["input-1"]
+ assert.Equal(t, types.BoolValue(true), input.Enabled)
+ assert.False(t, input.Vars.IsNull())
+ assert.False(t, input.Streams.IsNull())
+
+ var streamsMap map[string]integrationPolicyInputStreamModel
+ d = input.Streams.ElementsAs(ctx, &streamsMap, false)
+ require.Empty(t, d)
+ require.Len(t, streamsMap, 1)
+ require.Contains(t, streamsMap, "stream-1")
+
+ stream := streamsMap["stream-1"]
+ assert.Equal(t, types.BoolValue(true), stream.Enabled)
+ assert.False(t, stream.Vars.IsNull())
+ })
+
+ t.Run("conversion with multiple inputs and streams", func(t *testing.T) {
+ enabled1 := true
+ enabled2 := false
+ vars1 := map[string]interface{}{"stream1_var": "value1"}
+ vars2 := map[string]interface{}{"stream2_var": "value2"}
+
+ apiStreams1 := map[string]kbapi.PackagePolicyInputStream{
+ "stream-1": {Enabled: &enabled1, Vars: &vars1},
+ }
+ apiStreams2 := map[string]kbapi.PackagePolicyInputStream{
+ "stream-2": {Enabled: &enabled2, Vars: &vars2},
+ }
+
+ streamsJSON1, err := json.Marshal(apiStreams1)
+ require.NoError(t, err)
+ streamsJSON2, err := json.Marshal(apiStreams2)
+ require.NoError(t, err)
+
+ inputsV1 := []integrationPolicyInputModelV1{
+ {
+ InputID: types.StringValue("input-1"),
+ Enabled: types.BoolValue(true),
+ VarsJson: jsontypes.NewNormalizedValue(`{"input1_var":"value1"}`),
+ StreamsJson: jsontypes.NewNormalizedValue(string(streamsJSON1)),
+ },
+ {
+ InputID: types.StringValue("input-2"),
+ Enabled: types.BoolValue(false),
+ VarsJson: jsontypes.NewNormalizedValue(`{"input2_var":"value2"}`),
+ StreamsJson: jsontypes.NewNormalizedValue(string(streamsJSON2)),
+ },
+ }
+ inputList, diags := types.ListValueFrom(ctx, getInputTypeV1(), inputsV1)
+ require.Empty(t, diags)
+
+ v1Model := integrationPolicyModelV1{
+ ID: types.StringValue("test-id"),
+ Name: types.StringValue("test-name"),
+ Namespace: types.StringValue("test-namespace"),
+ AgentPolicyID: types.StringValue("agent-policy-1"),
+ IntegrationName: types.StringValue("test-integration"),
+ IntegrationVersion: types.StringValue("1.0.0"),
+ Input: inputList,
+ }
+
+ v2Model, diags := v1Model.toV2(ctx)
+ require.Empty(t, diags)
+ assert.False(t, v2Model.Inputs.IsNull())
+
+ var inputsMap map[string]integrationPolicyInputsModel
+ d := v2Model.Inputs.ElementsAs(ctx, &inputsMap, false)
+ require.Empty(t, d)
+ require.Len(t, inputsMap, 2)
+
+ // Verify input-1
+ input1 := inputsMap["input-1"]
+ assert.Equal(t, types.BoolValue(true), input1.Enabled)
+ var streams1 map[string]integrationPolicyInputStreamModel
+ d = input1.Streams.ElementsAs(ctx, &streams1, false)
+ require.Empty(t, d)
+ require.Contains(t, streams1, "stream-1")
+
+ // Verify input-2
+ input2 := inputsMap["input-2"]
+ assert.Equal(t, types.BoolValue(false), input2.Enabled)
+ var streams2 map[string]integrationPolicyInputStreamModel
+ d = input2.Streams.ElementsAs(ctx, &streams2, false)
+ require.Empty(t, d)
+ require.Contains(t, streams2, "stream-2")
+ })
+
+ t.Run("conversion with invalid streams JSON", func(t *testing.T) {
+ // Use valid JSON that doesn't match the expected structure
+ inputsV1 := []integrationPolicyInputModelV1{
+ {
+ InputID: types.StringValue("input-1"),
+ Enabled: types.BoolValue(true),
+ VarsJson: jsontypes.NewNormalizedValue(`{"input_var":"value"}`),
+ StreamsJson: jsontypes.NewNormalizedValue(`["array", "instead", "of", "map"]`),
+ },
+ }
+ inputList, diags := types.ListValueFrom(ctx, getInputTypeV1(), inputsV1)
+ require.Empty(t, diags)
+
+ v1Model := integrationPolicyModelV1{
+ ID: types.StringValue("test-id"),
+ Name: types.StringValue("test-name"),
+ Namespace: types.StringValue("test-namespace"),
+ AgentPolicyID: types.StringValue("agent-policy-1"),
+ IntegrationName: types.StringValue("test-integration"),
+ IntegrationVersion: types.StringValue("1.0.0"),
+ Input: inputList,
+ }
+
+ _, diags = v1Model.toV2(ctx)
+ require.NotEmpty(t, diags)
+ assert.True(t, diags.HasError())
+ })
+
+ t.Run("conversion preserves null and unknown values", func(t *testing.T) {
+ v1Model := integrationPolicyModelV1{
+ ID: types.StringValue("test-id"),
+ Name: types.StringValue("test-name"),
+ Namespace: types.StringValue("test-namespace"),
+ AgentPolicyID: types.StringValue("agent-policy-1"),
+ IntegrationName: types.StringValue("test-integration"),
+ IntegrationVersion: types.StringValue("1.0.0"),
+ Description: types.StringNull(),
+ VarsJson: jsontypes.NewNormalizedNull(),
+ SpaceIds: types.SetNull(types.StringType),
+ Input: types.ListNull(getInputTypeV1()),
+ }
+
+ v2Model, diags := v1Model.toV2(ctx)
+ require.Empty(t, diags)
+
+ assert.True(t, v2Model.Description.IsNull())
+ assert.True(t, v2Model.VarsJson.IsNull())
+ assert.True(t, v2Model.SpaceIds.IsNull())
+ })
+}
+
+func TestGetInputTypeV1(t *testing.T) {
+ inputType := getInputTypeV1()
+ require.NotNil(t, inputType)
+
+ // Verify it's an object type with the expected attributes
+ objType, ok := inputType.(attr.TypeWithAttributeTypes)
+ require.True(t, ok, "input type should be an object type with attributes")
+
+ attrTypes := objType.AttributeTypes()
+ require.Contains(t, attrTypes, "input_id")
+ require.Contains(t, attrTypes, "enabled")
+ require.Contains(t, attrTypes, "streams_json")
+ require.Contains(t, attrTypes, "vars_json")
+}
diff --git a/internal/fleet/integration_policy/testdata/TestAccResourceIntegrationPolicy/create/integration_policy.tf b/internal/fleet/integration_policy/testdata/TestAccResourceIntegrationPolicy/create/integration_policy.tf
index c32a16d40..d34ab7f3f 100644
--- a/internal/fleet/integration_policy/testdata/TestAccResourceIntegrationPolicy/create/integration_policy.tf
+++ b/internal/fleet/integration_policy/testdata/TestAccResourceIntegrationPolicy/create/integration_policy.tf
@@ -47,22 +47,23 @@ resource "elasticstack_fleet_integration_policy" "test_policy" {
integration_name = elasticstack_fleet_integration.test_policy.name
integration_version = elasticstack_fleet_integration.test_policy.version
- input {
- input_id = "tcp-tcp"
- enabled = true
- streams_json = jsonencode({
- "tcp.generic" : {
- "enabled" : true
- "vars" : {
- "listen_address" : "localhost"
- "listen_port" : 8080
- "data_stream.dataset" : "tcp.generic"
- "tags" : []
- "syslog_options" : "field: message"
- "ssl" : ""
- "custom" : ""
+ inputs = {
+ "tcp-tcp" = {
+ enabled = true
+ streams = {
+ "tcp.generic" = {
+ enabled = true
+ vars = jsonencode({
+ "listen_address" : "localhost"
+ "listen_port" : 8080
+ "data_stream.dataset" : "tcp.generic"
+ "tags" : []
+ "syslog_options" : "field: message"
+ "ssl" : ""
+ "custom" : ""
+ })
}
}
- })
+ }
}
}
\ No newline at end of file
diff --git a/internal/fleet/integration_policy/testdata/TestAccResourceIntegrationPolicy/update/integration_policy.tf b/internal/fleet/integration_policy/testdata/TestAccResourceIntegrationPolicy/update/integration_policy.tf
index f096447b0..b56ac95ab 100644
--- a/internal/fleet/integration_policy/testdata/TestAccResourceIntegrationPolicy/update/integration_policy.tf
+++ b/internal/fleet/integration_policy/testdata/TestAccResourceIntegrationPolicy/update/integration_policy.tf
@@ -47,22 +47,23 @@ resource "elasticstack_fleet_integration_policy" "test_policy" {
integration_name = elasticstack_fleet_integration.test_policy.name
integration_version = elasticstack_fleet_integration.test_policy.version
- input {
- input_id = "tcp-tcp"
- enabled = false
- streams_json = jsonencode({
- "tcp.generic" : {
- "enabled" : false
- "vars" : {
- "listen_address" : "localhost"
- "listen_port" : 8085
- "data_stream.dataset" : "tcp.generic"
- "tags" : []
- "syslog_options" : "field: message"
- "ssl" : ""
- "custom" : ""
+ inputs = {
+ "tcp-tcp" = {
+ enabled = false
+ streams = {
+ "tcp.generic" = {
+ enabled = false
+ vars = jsonencode({
+ "listen_address" : "localhost"
+ "listen_port" : 8085
+ "data_stream.dataset" : "tcp.generic"
+ "tags" : []
+ "syslog_options" : "field: message"
+ "ssl" : ""
+ "custom" : ""
+ })
}
}
- })
+ }
}
}
\ No newline at end of file
diff --git a/internal/fleet/integration_policy/testdata/TestAccResourceIntegrationPolicyMultipleAgentPolicies/create/integration_policy.tf b/internal/fleet/integration_policy/testdata/TestAccResourceIntegrationPolicyMultipleAgentPolicies/create/integration_policy.tf
index 96eaca083..f0522a3de 100644
--- a/internal/fleet/integration_policy/testdata/TestAccResourceIntegrationPolicyMultipleAgentPolicies/create/integration_policy.tf
+++ b/internal/fleet/integration_policy/testdata/TestAccResourceIntegrationPolicyMultipleAgentPolicies/create/integration_policy.tf
@@ -55,21 +55,22 @@ resource "elasticstack_fleet_integration_policy" "test_policy" {
integration_name = elasticstack_fleet_integration.test_policy.name
integration_version = elasticstack_fleet_integration.test_policy.version
- input {
- input_id = "tcp-tcp"
- streams_json = jsonencode({
- "tcp.generic" : {
- "enabled" : true
- "vars" : {
- "listen_address" : "localhost"
- "listen_port" : 8080
- "data_stream.dataset" : "tcp.generic"
- "tags" : []
- "syslog_options" : "field: message"
- "ssl" : ""
- "custom" : ""
+ inputs = {
+ "tcp-tcp" = {
+ streams = {
+ "tcp.generic" = {
+ enabled = true
+ vars = jsonencode({
+ "listen_address" : "localhost"
+ "listen_port" : 8080
+ "data_stream.dataset" : "tcp.generic"
+ "tags" : []
+ "syslog_options" : "field: message"
+ "ssl" : ""
+ "custom" : ""
+ })
}
}
- })
+ }
}
}
\ No newline at end of file
diff --git a/internal/fleet/integration_policy/testdata/TestAccResourceIntegrationPolicySecrets/multi-valued_secrets/create/integration_policy.tf b/internal/fleet/integration_policy/testdata/TestAccResourceIntegrationPolicySecrets/multi-valued_secrets/create/integration_policy.tf
index 53128397f..283ccce92 100644
--- a/internal/fleet/integration_policy/testdata/TestAccResourceIntegrationPolicySecrets/multi-valued_secrets/create/integration_policy.tf
+++ b/internal/fleet/integration_policy/testdata/TestAccResourceIntegrationPolicySecrets/multi-valued_secrets/create/integration_policy.tf
@@ -52,23 +52,24 @@ resource "elasticstack_fleet_integration_policy" "test_policy" {
integration_name = elasticstack_fleet_integration.test_policy.name
integration_version = elasticstack_fleet_integration.test_policy.version
- input {
- input_id = "sql-sql/metrics"
- enabled = true
- streams_json = jsonencode({
- "sql.sql" : {
- "enabled" : true,
- "vars" : {
- "hosts" : ["root:test@tcp(127.0.0.1:3306)/"],
- "period" : "1m",
- "driver" : "mysql",
- "sql_queries" : "- query: SHOW GLOBAL STATUS LIKE 'Innodb_system%'\n response_format: variables\n \n",
- "merge_results" : false,
- "ssl" : "",
- "data_stream.dataset" : "sql",
- "processors" : ""
+ inputs = {
+ "sql-sql/metrics" = {
+ enabled = true
+ streams = {
+ "sql.sql" = {
+ enabled = true
+ vars = jsonencode({
+ "hosts" : ["root:test@tcp(127.0.0.1:3306)/"],
+ "period" : "1m",
+ "driver" : "mysql",
+ "sql_queries" : "- query: SHOW GLOBAL STATUS LIKE 'Innodb_system%'\n response_format: variables\n \n",
+ "merge_results" : false,
+ "ssl" : "",
+ "data_stream.dataset" : "sql",
+ "processors" : ""
+ })
}
}
- })
+ }
}
}
\ No newline at end of file
diff --git a/internal/fleet/integration_policy/testdata/TestAccResourceIntegrationPolicySecrets/multi-valued_secrets/import_test/integration_policy.tf b/internal/fleet/integration_policy/testdata/TestAccResourceIntegrationPolicySecrets/multi-valued_secrets/import_test/integration_policy.tf
index 53128397f..283ccce92 100644
--- a/internal/fleet/integration_policy/testdata/TestAccResourceIntegrationPolicySecrets/multi-valued_secrets/import_test/integration_policy.tf
+++ b/internal/fleet/integration_policy/testdata/TestAccResourceIntegrationPolicySecrets/multi-valued_secrets/import_test/integration_policy.tf
@@ -52,23 +52,24 @@ resource "elasticstack_fleet_integration_policy" "test_policy" {
integration_name = elasticstack_fleet_integration.test_policy.name
integration_version = elasticstack_fleet_integration.test_policy.version
- input {
- input_id = "sql-sql/metrics"
- enabled = true
- streams_json = jsonencode({
- "sql.sql" : {
- "enabled" : true,
- "vars" : {
- "hosts" : ["root:test@tcp(127.0.0.1:3306)/"],
- "period" : "1m",
- "driver" : "mysql",
- "sql_queries" : "- query: SHOW GLOBAL STATUS LIKE 'Innodb_system%'\n response_format: variables\n \n",
- "merge_results" : false,
- "ssl" : "",
- "data_stream.dataset" : "sql",
- "processors" : ""
+ inputs = {
+ "sql-sql/metrics" = {
+ enabled = true
+ streams = {
+ "sql.sql" = {
+ enabled = true
+ vars = jsonencode({
+ "hosts" : ["root:test@tcp(127.0.0.1:3306)/"],
+ "period" : "1m",
+ "driver" : "mysql",
+ "sql_queries" : "- query: SHOW GLOBAL STATUS LIKE 'Innodb_system%'\n response_format: variables\n \n",
+ "merge_results" : false,
+ "ssl" : "",
+ "data_stream.dataset" : "sql",
+ "processors" : ""
+ })
}
}
- })
+ }
}
}
\ No newline at end of file
diff --git a/internal/fleet/integration_policy/testdata/TestAccResourceIntegrationPolicySecrets/single_valued_secrets/create/integration_policy.tf b/internal/fleet/integration_policy/testdata/TestAccResourceIntegrationPolicySecrets/single_valued_secrets/create/integration_policy.tf
index fa2ccef3c..c6448ee29 100644
--- a/internal/fleet/integration_policy/testdata/TestAccResourceIntegrationPolicySecrets/single_valued_secrets/create/integration_policy.tf
+++ b/internal/fleet/integration_policy/testdata/TestAccResourceIntegrationPolicySecrets/single_valued_secrets/create/integration_policy.tf
@@ -66,51 +66,50 @@ resource "elasticstack_fleet_integration_policy" "test_policy" {
"default_region" : var.default_region
})
- input {
- input_id = "aws_logs-aws-cloudwatch"
- enabled = true
- streams_json = jsonencode({
- "aws_logs.generic" = {
- enabled = true
- vars = {
- "number_of_workers" : 1
- "log_streams" : []
- "start_position" : "beginning"
- "scan_frequency" : "1m"
- "api_timeput" : "120s"
- "api_sleep" : "200ms"
- "tags" : ["forwarded"]
- "preserve_original_event" : false
- "data_stream.dataset" : "aws_logs.generic"
- "custom" : ""
+ inputs = {
+ "aws_logs-aws-cloudwatch" = {
+ enabled = true
+ streams = {
+ "aws_logs.generic" = {
+ enabled = true
+ vars = jsonencode({
+ "number_of_workers" : 1
+ "log_streams" : []
+ "start_position" : "beginning"
+ "scan_frequency" : "1m"
+ "api_timeput" : "120s"
+ "api_sleep" : "200ms"
+ "tags" : ["forwarded"]
+ "preserve_original_event" : false
+ "data_stream.dataset" : "aws_logs.generic"
+ "custom" : ""
+ })
}
}
- })
- }
-
- input {
- input_id = "aws_logs-aws-s3"
- enabled = true
- streams_json = jsonencode({
- "aws_logs.generic" = {
- enabled = true
- vars = {
- "number_of_workers" : 1
- "bucket_list_interval" : "120s"
- "file_selectors" : ""
- "fips_enabled" : false
- "include_s3_metadata" : []
- "max_bytes" : "10MiB"
- "max_number_of_messages" : 5
- "parsers" : ""
- "sqs.max_receive_count" : 5
- "sqs.wait_time" : "20s"
- "tags" : ["forwarded"]
- "preserve_original_event" : false
- "data_stream.dataset" : "aws_logs.generic"
- "custom" : ""
+ }
+ "aws_logs-aws-s3" = {
+ enabled = true
+ streams = {
+ "aws_logs.generic" = {
+ enabled = true
+ vars = jsonencode({
+ "number_of_workers" : 1
+ "bucket_list_interval" : "120s"
+ "file_selectors" : ""
+ "fips_enabled" : false
+ "include_s3_metadata" : []
+ "max_bytes" : "10MiB"
+ "max_number_of_messages" : 5
+ "parsers" : ""
+ "sqs.max_receive_count" : 5
+ "sqs.wait_time" : "20s"
+ "tags" : ["forwarded"]
+ "preserve_original_event" : false
+ "data_stream.dataset" : "aws_logs.generic"
+ "custom" : ""
+ })
}
}
- })
+ }
}
}
\ No newline at end of file
diff --git a/internal/fleet/integration_policy/testdata/TestAccResourceIntegrationPolicySecrets/single_valued_secrets/import_test/integration_policy.tf b/internal/fleet/integration_policy/testdata/TestAccResourceIntegrationPolicySecrets/single_valued_secrets/import_test/integration_policy.tf
index cb346d09b..6e7832ca1 100644
--- a/internal/fleet/integration_policy/testdata/TestAccResourceIntegrationPolicySecrets/single_valued_secrets/import_test/integration_policy.tf
+++ b/internal/fleet/integration_policy/testdata/TestAccResourceIntegrationPolicySecrets/single_valued_secrets/import_test/integration_policy.tf
@@ -66,51 +66,50 @@ resource "elasticstack_fleet_integration_policy" "test_policy" {
"default_region" : var.default_region
})
- input {
- input_id = "aws_logs-aws-cloudwatch"
- enabled = false
- streams_json = jsonencode({
- "aws_logs.generic" = {
- enabled = false
- vars = {
- "number_of_workers" : 1,
- "log_streams" : [],
- "start_position" : "beginning",
- "scan_frequency" : "2m",
- "api_timeput" : "120s",
- "api_sleep" : "200ms",
- "tags" : ["forwarded"],
- "preserve_original_event" : false,
- "data_stream.dataset" : "aws_logs.generic",
- "custom" : "",
+ inputs = {
+ "aws_logs-aws-cloudwatch" = {
+ enabled = false
+ streams = {
+ "aws_logs.generic" = {
+ enabled = false
+ vars = jsonencode({
+ "number_of_workers" : 1,
+ "log_streams" : [],
+ "start_position" : "beginning",
+ "scan_frequency" : "2m",
+ "api_timeput" : "120s",
+ "api_sleep" : "200ms",
+ "tags" : ["forwarded"],
+ "preserve_original_event" : false,
+ "data_stream.dataset" : "aws_logs.generic",
+ "custom" : "",
+ })
}
}
- })
- }
-
- input {
- input_id = "aws_logs-aws-s3"
- enabled = false
- streams_json = jsonencode({
- "aws_logs.generic" = {
- enabled = false
- vars = {
- "number_of_workers" : 1,
- "bucket_list_interval" : "120s",
- "file_selectors" : "",
- "fips_enabled" : false,
- "include_s3_metadata" : [],
- "max_bytes" : "20MiB",
- "max_number_of_messages" : 5,
- "parsers" : "",
- "sqs.max_receive_count" : 5,
- "sqs.wait_time" : "20s",
- "tags" : ["forwarded"],
- "preserve_original_event" : false,
- "data_stream.dataset" : "aws_logs.generic",
- "custom" : "",
+ }
+ "aws_logs-aws-s3" = {
+ enabled = false
+ streams = {
+ "aws_logs.generic" = {
+ enabled = false
+ vars = jsonencode({
+ "number_of_workers" : 1,
+ "bucket_list_interval" : "120s",
+ "file_selectors" : "",
+ "fips_enabled" : false,
+ "include_s3_metadata" : [],
+ "max_bytes" : "20MiB",
+ "max_number_of_messages" : 5,
+ "parsers" : "",
+ "sqs.max_receive_count" : 5,
+ "sqs.wait_time" : "20s",
+ "tags" : ["forwarded"],
+ "preserve_original_event" : false,
+ "data_stream.dataset" : "aws_logs.generic",
+ "custom" : "",
+ })
}
}
- })
+ }
}
}
\ No newline at end of file
diff --git a/internal/fleet/integration_policy/testdata/TestAccResourceIntegrationPolicySecrets/single_valued_secrets/update/integration_policy.tf b/internal/fleet/integration_policy/testdata/TestAccResourceIntegrationPolicySecrets/single_valued_secrets/update/integration_policy.tf
index cb346d09b..6e7832ca1 100644
--- a/internal/fleet/integration_policy/testdata/TestAccResourceIntegrationPolicySecrets/single_valued_secrets/update/integration_policy.tf
+++ b/internal/fleet/integration_policy/testdata/TestAccResourceIntegrationPolicySecrets/single_valued_secrets/update/integration_policy.tf
@@ -66,51 +66,50 @@ resource "elasticstack_fleet_integration_policy" "test_policy" {
"default_region" : var.default_region
})
- input {
- input_id = "aws_logs-aws-cloudwatch"
- enabled = false
- streams_json = jsonencode({
- "aws_logs.generic" = {
- enabled = false
- vars = {
- "number_of_workers" : 1,
- "log_streams" : [],
- "start_position" : "beginning",
- "scan_frequency" : "2m",
- "api_timeput" : "120s",
- "api_sleep" : "200ms",
- "tags" : ["forwarded"],
- "preserve_original_event" : false,
- "data_stream.dataset" : "aws_logs.generic",
- "custom" : "",
+ inputs = {
+ "aws_logs-aws-cloudwatch" = {
+ enabled = false
+ streams = {
+ "aws_logs.generic" = {
+ enabled = false
+ vars = jsonencode({
+ "number_of_workers" : 1,
+ "log_streams" : [],
+ "start_position" : "beginning",
+ "scan_frequency" : "2m",
+ "api_timeput" : "120s",
+ "api_sleep" : "200ms",
+ "tags" : ["forwarded"],
+ "preserve_original_event" : false,
+ "data_stream.dataset" : "aws_logs.generic",
+ "custom" : "",
+ })
}
}
- })
- }
-
- input {
- input_id = "aws_logs-aws-s3"
- enabled = false
- streams_json = jsonencode({
- "aws_logs.generic" = {
- enabled = false
- vars = {
- "number_of_workers" : 1,
- "bucket_list_interval" : "120s",
- "file_selectors" : "",
- "fips_enabled" : false,
- "include_s3_metadata" : [],
- "max_bytes" : "20MiB",
- "max_number_of_messages" : 5,
- "parsers" : "",
- "sqs.max_receive_count" : 5,
- "sqs.wait_time" : "20s",
- "tags" : ["forwarded"],
- "preserve_original_event" : false,
- "data_stream.dataset" : "aws_logs.generic",
- "custom" : "",
+ }
+ "aws_logs-aws-s3" = {
+ enabled = false
+ streams = {
+ "aws_logs.generic" = {
+ enabled = false
+ vars = jsonencode({
+ "number_of_workers" : 1,
+ "bucket_list_interval" : "120s",
+ "file_selectors" : "",
+ "fips_enabled" : false,
+ "include_s3_metadata" : [],
+ "max_bytes" : "20MiB",
+ "max_number_of_messages" : 5,
+ "parsers" : "",
+ "sqs.max_receive_count" : 5,
+ "sqs.wait_time" : "20s",
+ "tags" : ["forwarded"],
+ "preserve_original_event" : false,
+ "data_stream.dataset" : "aws_logs.generic",
+ "custom" : "",
+ })
}
}
- })
+ }
}
}
\ No newline at end of file
diff --git a/internal/fleet/integration_policy/testdata/TestAccResourceIntegrationPolicySecretsFromSDK/current/integration_policy.tf b/internal/fleet/integration_policy/testdata/TestAccResourceIntegrationPolicySecretsFromSDK/current/integration_policy.tf
new file mode 100644
index 000000000..c6448ee29
--- /dev/null
+++ b/internal/fleet/integration_policy/testdata/TestAccResourceIntegrationPolicySecretsFromSDK/current/integration_policy.tf
@@ -0,0 +1,115 @@
+variable "policy_name" {
+ description = "The integration policy name"
+ type = string
+}
+
+variable "secret_key" {
+ description = "The secret key for access"
+ type = string
+}
+
+variable "integration_name" {
+ description = "The integration name"
+ type = string
+ default = "aws_logs"
+}
+
+variable "integration_version" {
+ description = "The integration version"
+ type = string
+ default = "1.4.0"
+}
+
+variable "default_region" {
+ description = "AWS default region"
+ type = string
+ default = "us-east-1"
+}
+
+provider "elasticstack" {
+ elasticsearch {}
+ kibana {}
+}
+
+resource "elasticstack_fleet_integration" "test_policy" {
+ name = var.integration_name
+ version = var.integration_version
+ force = true
+}
+
+resource "elasticstack_fleet_agent_policy" "test_policy" {
+ name = "${var.policy_name} Agent Policy"
+ namespace = "default"
+ description = "IntegrationPolicyTest Agent Policy"
+ monitor_logs = true
+ monitor_metrics = true
+ skip_destroy = false
+}
+
+data "elasticstack_fleet_enrollment_tokens" "test_policy" {
+ policy_id = elasticstack_fleet_agent_policy.test_policy.policy_id
+}
+
+resource "elasticstack_fleet_integration_policy" "test_policy" {
+ name = var.policy_name
+ namespace = "default"
+ description = "IntegrationPolicyTest Policy"
+ agent_policy_id = elasticstack_fleet_agent_policy.test_policy.policy_id
+ integration_name = elasticstack_fleet_integration.test_policy.name
+ integration_version = elasticstack_fleet_integration.test_policy.version
+
+ vars_json = jsonencode({
+ "access_key_id" : "placeholder"
+ "secret_access_key" : "${var.secret_key} ${var.policy_name}"
+ "session_token" : "placeholder"
+ "endpoint" : "endpoint"
+ "default_region" : var.default_region
+ })
+
+ inputs = {
+ "aws_logs-aws-cloudwatch" = {
+ enabled = true
+ streams = {
+ "aws_logs.generic" = {
+ enabled = true
+ vars = jsonencode({
+ "number_of_workers" : 1
+ "log_streams" : []
+ "start_position" : "beginning"
+ "scan_frequency" : "1m"
+ "api_timeput" : "120s"
+ "api_sleep" : "200ms"
+ "tags" : ["forwarded"]
+ "preserve_original_event" : false
+ "data_stream.dataset" : "aws_logs.generic"
+ "custom" : ""
+ })
+ }
+ }
+ }
+ "aws_logs-aws-s3" = {
+ enabled = true
+ streams = {
+ "aws_logs.generic" = {
+ enabled = true
+ vars = jsonencode({
+ "number_of_workers" : 1
+ "bucket_list_interval" : "120s"
+ "file_selectors" : ""
+ "fips_enabled" : false
+ "include_s3_metadata" : []
+ "max_bytes" : "10MiB"
+ "max_number_of_messages" : 5
+ "parsers" : ""
+ "sqs.max_receive_count" : 5
+ "sqs.wait_time" : "20s"
+ "tags" : ["forwarded"]
+ "preserve_original_event" : false
+ "data_stream.dataset" : "aws_logs.generic"
+ "custom" : ""
+ })
+ }
+ }
+ }
+ }
+}
\ No newline at end of file
diff --git a/internal/fleet/integration_policy/testdata/TestAccResourceIntegrationPolicySecretsFromSDK/integration_policy.tf b/internal/fleet/integration_policy/testdata/TestAccResourceIntegrationPolicySecretsFromSDK/legacy/integration_policy.tf
similarity index 100%
rename from internal/fleet/integration_policy/testdata/TestAccResourceIntegrationPolicySecretsFromSDK/integration_policy.tf
rename to internal/fleet/integration_policy/testdata/TestAccResourceIntegrationPolicySecretsFromSDK/legacy/integration_policy.tf
diff --git a/internal/fleet/integration_policy/testdata/TestAccResourceIntegrationPolicyWithOutput/create/integration_policy.tf b/internal/fleet/integration_policy/testdata/TestAccResourceIntegrationPolicyWithOutput/create/integration_policy.tf
index ef4ecc3a5..758c2171d 100644
--- a/internal/fleet/integration_policy/testdata/TestAccResourceIntegrationPolicyWithOutput/create/integration_policy.tf
+++ b/internal/fleet/integration_policy/testdata/TestAccResourceIntegrationPolicyWithOutput/create/integration_policy.tf
@@ -67,22 +67,23 @@ resource "elasticstack_fleet_integration_policy" "test_policy" {
integration_version = elasticstack_fleet_integration.test_policy.version
output_id = elasticstack_fleet_output.test_output.output_id
- input {
- input_id = "tcp-tcp"
- enabled = true
- streams_json = jsonencode({
- "tcp.generic" : {
- "enabled" : true
- "vars" : {
- "listen_address" : "localhost"
- "listen_port" : 8080
- "data_stream.dataset" : "tcp.generic"
- "tags" : []
- "syslog_options" : "field: message"
- "ssl" : ""
- "custom" : ""
+ inputs = {
+ "tcp-tcp" = {
+ enabled = true
+ streams = {
+ "tcp.generic" = {
+ enabled = true
+ vars = jsonencode({
+ "listen_address" : "localhost"
+ "listen_port" : 8080
+ "data_stream.dataset" : "tcp.generic"
+ "tags" : []
+ "syslog_options" : "field: message"
+ "ssl" : ""
+ "custom" : ""
+ })
}
}
- })
+ }
}
}
\ No newline at end of file
diff --git a/internal/fleet/integration_policy/testdata/TestAccResourceIntegrationPolicyWithOutput/update/integration_policy.tf b/internal/fleet/integration_policy/testdata/TestAccResourceIntegrationPolicyWithOutput/update/integration_policy.tf
index dd6ee9d79..26ec3c6f3 100644
--- a/internal/fleet/integration_policy/testdata/TestAccResourceIntegrationPolicyWithOutput/update/integration_policy.tf
+++ b/internal/fleet/integration_policy/testdata/TestAccResourceIntegrationPolicyWithOutput/update/integration_policy.tf
@@ -86,22 +86,23 @@ resource "elasticstack_fleet_integration_policy" "test_policy" {
integration_version = elasticstack_fleet_integration.test_policy.version
output_id = elasticstack_fleet_output.test_output_updated.output_id
- input {
- input_id = "tcp-tcp"
- enabled = false
- streams_json = jsonencode({
- "tcp.generic" : {
- "enabled" : false
- "vars" : {
- "listen_address" : "localhost"
- "listen_port" : 8085
- "data_stream.dataset" : "tcp.generic"
- "tags" : []
- "syslog_options" : "field: message"
- "ssl" : ""
- "custom" : ""
+ inputs = {
+ "tcp-tcp" = {
+      enabled = false
+      streams = {
+        "tcp.generic" = {
+          enabled = false
+          vars = jsonencode({
+            "listen_address" : "localhost"
+            "listen_port" : 8085
+ "data_stream.dataset" : "tcp.generic"
+ "tags" : []
+ "syslog_options" : "field: message"
+ "ssl" : ""
+ "custom" : ""
+ })
}
}
- })
+ }
}
}
\ No newline at end of file
diff --git a/internal/fleet/integration_policy/update.go b/internal/fleet/integration_policy/update.go
index 9932c9b53..6b2d68d16 100644
--- a/internal/fleet/integration_policy/update.go
+++ b/internal/fleet/integration_policy/update.go
@@ -74,7 +74,7 @@ func (r *integrationPolicyResource) Update(ctx context.Context, req resource.Upd
stateUsedAgentPolicyIDs := utils.IsKnown(stateModel.AgentPolicyIDs) && !stateModel.AgentPolicyIDs.IsNull()
// Remember the input configuration from state
- stateHadInput := utils.IsKnown(stateModel.Input) && !stateModel.Input.IsNull() && len(stateModel.Input.Elements()) > 0
+ stateHadInput := utils.IsKnown(stateModel.Inputs) && !stateModel.Inputs.IsNull() && len(stateModel.Inputs.Elements()) > 0
diags = planModel.populateFromAPI(ctx, policy)
resp.Diagnostics.Append(diags...)
@@ -101,8 +101,8 @@ func (r *integrationPolicyResource) Update(ctx context.Context, req resource.Upd
}
// If state didn't have input configured, ensure we don't add it now
- if !stateHadInput && (planModel.Input.IsNull() || len(planModel.Input.Elements()) == 0) {
- planModel.Input = types.ListNull(getInputTypeV1())
+ if !stateHadInput && (planModel.Inputs.IsNull() || len(planModel.Inputs.Elements()) == 0) {
+ planModel.Inputs = types.MapNull(getInputsTypes())
}
diags = resp.State.Set(ctx, planModel)