diff --git a/datadog/fwprovider/resource_datadog_compliance_custom_framework.go b/datadog/fwprovider/resource_datadog_compliance_custom_framework.go
index 282128dc0b..cc0a191b5e 100644
--- a/datadog/fwprovider/resource_datadog_compliance_custom_framework.go
+++ b/datadog/fwprovider/resource_datadog_compliance_custom_framework.go
@@ -15,6 +15,7 @@ import (
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/hashicorp/terraform-plugin-framework-validators/listvalidator"
+
"github.com/terraform-providers/terraform-provider-datadog/datadog/internal/utils"
"github.com/terraform-providers/terraform-provider-datadog/datadog/internal/validators"
)
diff --git a/datadog/fwprovider/resource_datadog_integration_cloudflare_account.go b/datadog/fwprovider/resource_datadog_integration_cloudflare_account.go
index 9a6aff5505..38236f113b 100644
--- a/datadog/fwprovider/resource_datadog_integration_cloudflare_account.go
+++ b/datadog/fwprovider/resource_datadog_integration_cloudflare_account.go
@@ -4,12 +4,15 @@ import (
"context"
"github.com/DataDog/datadog-api-client-go/v2/api/datadogV2"
+
"github.com/hashicorp/terraform-plugin-framework/diag"
frameworkPath "github.com/hashicorp/terraform-plugin-framework/path"
"github.com/hashicorp/terraform-plugin-framework/resource"
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
"github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
+
+ "github.com/hashicorp/terraform-plugin-framework/tfsdk"
"github.com/hashicorp/terraform-plugin-framework/types"
"github.com/terraform-providers/terraform-provider-datadog/datadog/internal/utils"
@@ -26,11 +29,23 @@ type integrationCloudflareAccountResource struct {
}
type integrationCloudflareAccountModel struct {
- ID types.String `tfsdk:"id"`
- ApiKey types.String `tfsdk:"api_key"`
- Email types.String `tfsdk:"email"`
- Name types.String `tfsdk:"name"`
- Resources types.Set `tfsdk:"resources"`
+ ID types.String `tfsdk:"id"`
+ ApiKey types.String `tfsdk:"api_key"`
+ ApiKeyWo types.String `tfsdk:"api_key_wo"`
+ ApiKeyWoVersion types.String `tfsdk:"api_key_wo_version"`
+ Email types.String `tfsdk:"email"`
+ Name types.String `tfsdk:"name"`
+ Resources types.Set `tfsdk:"resources"`
+}
+
+// Write-only secret configuration for Cloudflare API key
+var cloudflareApiKeyConfig = utils.WriteOnlySecretConfig{
+ OriginalAttr: "api_key",
+ WriteOnlyAttr: "api_key_wo",
+ TriggerAttr: "api_key_wo_version",
+ OriginalDescription: "The API key (or token) for the Cloudflare account.",
+ WriteOnlyDescription: "Write-only API key (or token) for the Cloudflare account.",
+ TriggerDescription: "Version associated with api_key_wo. Changing this triggers an update. Can be any string (e.g., '1', 'v2.1', '2024-Q1').",
}
func NewIntegrationCloudflareAccountResource() resource.Resource {
@@ -48,33 +63,39 @@ func (r *integrationCloudflareAccountResource) Metadata(_ context.Context, reque
}
func (r *integrationCloudflareAccountResource) Schema(_ context.Context, _ resource.SchemaRequest, response *resource.SchemaResponse) {
- response.Schema = schema.Schema{
- Description: "Provides a Datadog IntegrationCloudflareAccount resource. This can be used to create and manage Datadog integration_cloudflare_account.",
- Attributes: map[string]schema.Attribute{
- "api_key": schema.StringAttribute{
- Required: true,
- Description: "The API key (or token) for the Cloudflare account.",
- Sensitive: true,
- },
- "email": schema.StringAttribute{
- Optional: true,
- Description: "The email associated with the Cloudflare account. If an API key is provided (and not a token), this field is also required.",
- },
- "name": schema.StringAttribute{
- Required: true,
- Description: "The name of the Cloudflare account.",
- PlanModifiers: []planmodifier.String{
- stringplanmodifier.RequiresReplace(),
- },
- },
- "id": utils.ResourceIDAttribute(),
- "resources": schema.SetAttribute{
- ElementType: types.StringType,
- Optional: true,
- Computed: true,
- Description: "An allowlist of resources to pull metrics for. Includes `web`, `dns`, `lb` (load balancer), and `worker`).",
+ // Generate write-only secret attributes using helper
+ writeOnlyAttrs := utils.CreateWriteOnlySecretAttributes(cloudflareApiKeyConfig)
+
+ // Combine with other resource-specific attributes
+ allAttributes := map[string]schema.Attribute{
+ "email": schema.StringAttribute{
+ Optional: true,
+ Description: "The email associated with the Cloudflare account. If an API key is provided (and not a token), this field is also required.",
+ },
+ "name": schema.StringAttribute{
+ Required: true,
+ Description: "The name of the Cloudflare account.",
+ PlanModifiers: []planmodifier.String{
+ stringplanmodifier.RequiresReplace(),
},
},
+ "id": utils.ResourceIDAttribute(),
+ "resources": schema.SetAttribute{
+ ElementType: types.StringType,
+ Optional: true,
+ Computed: true,
+ Description: "An allowlist of resources to pull metrics for. Includes `web`, `dns`, `lb` (load balancer), and `worker`).",
+ },
+ }
+
+ // Merge write-only attributes with resource-specific ones
+ for key, attr := range writeOnlyAttrs {
+ allAttributes[key] = attr
+ }
+
+ response.Schema = schema.Schema{
+ Description: "Provides a Datadog IntegrationCloudflareAccount resource. This can be used to create and manage Datadog integration_cloudflare_account.",
+ Attributes: allAttributes,
}
}
@@ -117,7 +138,7 @@ func (r *integrationCloudflareAccountResource) Create(ctx context.Context, reque
return
}
- body, diags := r.buildIntegrationCloudflareAccountRequestBody(ctx, &state)
+ body, diags := r.buildIntegrationCloudflareAccountRequestBody(ctx, &state, &request.Config)
response.Diagnostics.Append(diags...)
if response.Diagnostics.HasError() {
return
@@ -139,15 +160,21 @@ func (r *integrationCloudflareAccountResource) Create(ctx context.Context, reque
}
func (r *integrationCloudflareAccountResource) Update(ctx context.Context, request resource.UpdateRequest, response *resource.UpdateResponse) {
- var state integrationCloudflareAccountModel
- response.Diagnostics.Append(request.Plan.Get(ctx, &state)...)
+ var plan integrationCloudflareAccountModel
+ response.Diagnostics.Append(request.Plan.Get(ctx, &plan)...)
if response.Diagnostics.HasError() {
return
}
- id := state.ID.ValueString()
+ var prior integrationCloudflareAccountModel
+ response.Diagnostics.Append(request.State.Get(ctx, &prior)...)
+ if response.Diagnostics.HasError() {
+ return
+ }
- body, diags := r.buildIntegrationCloudflareAccountUpdateRequestBody(ctx, &state)
+ id := plan.ID.ValueString()
+
+ body, diags := r.buildIntegrationCloudflareAccountUpdateRequestBody(ctx, &plan, &prior, &request.Config, &request)
response.Diagnostics.Append(diags...)
if response.Diagnostics.HasError() {
return
@@ -162,10 +189,10 @@ func (r *integrationCloudflareAccountResource) Update(ctx context.Context, reque
response.Diagnostics.AddError("response contains unparsedObject", err.Error())
return
}
- r.updateState(ctx, &state, &resp)
+ r.updateState(ctx, &plan, &resp)
// Save data into Terraform state
- response.Diagnostics.Append(response.State.Set(ctx, &state)...)
+ response.Diagnostics.Append(response.State.Set(ctx, &plan)...)
}
func (r *integrationCloudflareAccountResource) Delete(ctx context.Context, request resource.DeleteRequest, response *resource.DeleteResponse) {
@@ -206,11 +233,20 @@ func (r *integrationCloudflareAccountResource) updateState(ctx context.Context,
}
}
-func (r *integrationCloudflareAccountResource) buildIntegrationCloudflareAccountRequestBody(ctx context.Context, state *integrationCloudflareAccountModel) (*datadogV2.CloudflareAccountCreateRequest, diag.Diagnostics) {
+func (r *integrationCloudflareAccountResource) buildIntegrationCloudflareAccountRequestBody(ctx context.Context, state *integrationCloudflareAccountModel, config *tfsdk.Config) (*datadogV2.CloudflareAccountCreateRequest, diag.Diagnostics) {
diags := diag.Diagnostics{}
attributes := datadogV2.NewCloudflareAccountCreateRequestAttributesWithDefaults()
- attributes.SetApiKey(state.ApiKey.ValueString())
+ // Use helper to get secret for creation
+ handler := utils.WriteOnlySecretHandler{Config: cloudflareApiKeyConfig}
+ secret, useWriteOnly, secretDiags := handler.GetSecretForCreate(ctx, state, config)
+ diags.Append(secretDiags...)
+
+ if useWriteOnly {
+ attributes.SetApiKey(secret)
+ } else if !state.ApiKey.IsNull() && !state.ApiKey.IsUnknown() {
+ attributes.SetApiKey(state.ApiKey.ValueString())
+ }
if !state.Email.IsNull() {
attributes.SetEmail(state.Email.ValueString())
}
@@ -229,18 +265,29 @@ func (r *integrationCloudflareAccountResource) buildIntegrationCloudflareAccount
return req, diags
}
-func (r *integrationCloudflareAccountResource) buildIntegrationCloudflareAccountUpdateRequestBody(ctx context.Context, state *integrationCloudflareAccountModel) (*datadogV2.CloudflareAccountUpdateRequest, diag.Diagnostics) {
+func (r *integrationCloudflareAccountResource) buildIntegrationCloudflareAccountUpdateRequestBody(ctx context.Context, plan *integrationCloudflareAccountModel, prior *integrationCloudflareAccountModel, config *tfsdk.Config, request *resource.UpdateRequest) (*datadogV2.CloudflareAccountUpdateRequest, diag.Diagnostics) {
diags := diag.Diagnostics{}
attributes := datadogV2.NewCloudflareAccountUpdateRequestAttributesWithDefaults()
- attributes.SetApiKey(state.ApiKey.ValueString())
- if !state.Email.IsNull() {
- attributes.SetEmail(state.Email.ValueString())
+ // Use helper to determine if secret should be updated
+ handler := utils.WriteOnlySecretHandler{Config: cloudflareApiKeyConfig}
+ secret, shouldUpdate, secretDiags := handler.GetSecretForUpdate(ctx, config, request)
+ diags.Append(secretDiags...)
+
+ if shouldUpdate {
+ attributes.SetApiKey(secret)
+ } else if !plan.ApiKey.IsNull() && !plan.ApiKey.IsUnknown() {
+ // Plaintext mode: always update
+ attributes.SetApiKey(plan.ApiKey.ValueString())
}
- if !state.Resources.IsNull() && !state.Resources.IsUnknown() {
+ if !plan.Email.IsNull() {
+ attributes.SetEmail(plan.Email.ValueString())
+ }
+
+ if !plan.Resources.IsNull() && !plan.Resources.IsUnknown() {
var resources []string
- diags.Append(state.Resources.ElementsAs(ctx, &resources, false)...)
+ diags.Append(plan.Resources.ElementsAs(ctx, &resources, false)...)
attributes.SetResources(resources)
}
diff --git a/datadog/fwprovider/resource_datadog_on_call_escalation_policy.go b/datadog/fwprovider/resource_datadog_on_call_escalation_policy.go
index 7a985be2f9..b888d5ed5a 100644
--- a/datadog/fwprovider/resource_datadog_on_call_escalation_policy.go
+++ b/datadog/fwprovider/resource_datadog_on_call_escalation_policy.go
@@ -18,6 +18,7 @@ import (
"github.com/hashicorp/terraform-plugin-framework/resource/schema/stringdefault"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
+
"github.com/terraform-providers/terraform-provider-datadog/datadog/internal/utils"
)
diff --git a/datadog/fwprovider/resource_datadog_on_call_team_routing_rules.go b/datadog/fwprovider/resource_datadog_on_call_team_routing_rules.go
index 6530559a72..e4510a67a9 100644
--- a/datadog/fwprovider/resource_datadog_on_call_team_routing_rules.go
+++ b/datadog/fwprovider/resource_datadog_on_call_team_routing_rules.go
@@ -12,6 +12,7 @@ import (
"github.com/hashicorp/terraform-plugin-framework/resource/schema/stringdefault"
"github.com/hashicorp/terraform-plugin-framework/schema/validator"
"github.com/hashicorp/terraform-plugin-framework/types"
+
"github.com/terraform-providers/terraform-provider-datadog/datadog/internal/utils"
"github.com/terraform-providers/terraform-provider-datadog/datadog/internal/validators"
)
diff --git a/datadog/internal/utils/writeonly_helpers.go b/datadog/internal/utils/writeonly_helpers.go
new file mode 100644
index 0000000000..2358b91ed2
--- /dev/null
+++ b/datadog/internal/utils/writeonly_helpers.go
@@ -0,0 +1,133 @@
+package utils
+
+import (
+ "context"
+
+ "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+ "github.com/hashicorp/terraform-plugin-framework/diag"
+ frameworkPath "github.com/hashicorp/terraform-plugin-framework/path"
+ "github.com/hashicorp/terraform-plugin-framework/resource"
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema"
+ "github.com/hashicorp/terraform-plugin-framework/schema/validator"
+ "github.com/hashicorp/terraform-plugin-framework/tfsdk"
+ "github.com/hashicorp/terraform-plugin-framework/types"
+)
+
+// WriteOnlySecretConfig represents configuration for a write-only secret attribute
+type WriteOnlySecretConfig struct {
+ // Name of the original attribute (e.g., "api_key")
+ OriginalAttr string
+ // Name of the write-only attribute (e.g., "api_key_wo")
+ WriteOnlyAttr string
+ // Name of the version trigger attribute (e.g., "api_key_wo_version")
+ TriggerAttr string
+ // Description for the original attribute
+ OriginalDescription string
+ // Description for the write-only attribute
+ WriteOnlyDescription string
+ // Description for the trigger attribute
+ TriggerDescription string
+}
+
+// CreateWriteOnlySecretAttributes creates schema attributes for a write-only secret pattern
+func CreateWriteOnlySecretAttributes(config WriteOnlySecretConfig) map[string]schema.Attribute {
+ attrs := map[string]schema.Attribute{
+ config.OriginalAttr: schema.StringAttribute{
+ Optional: true,
+ Description: config.OriginalDescription,
+ Sensitive: true,
+ Validators: []validator.String{
+ stringvalidator.ExactlyOneOf(
+ frameworkPath.MatchRoot(config.OriginalAttr),
+ frameworkPath.MatchRoot(config.WriteOnlyAttr),
+ ),
+ stringvalidator.PreferWriteOnlyAttribute(
+ frameworkPath.MatchRoot(config.WriteOnlyAttr),
+ ),
+ },
+ },
+ config.WriteOnlyAttr: schema.StringAttribute{
+ Optional: true,
+ Description: config.WriteOnlyDescription,
+ Sensitive: true,
+ WriteOnly: true,
+ Validators: []validator.String{
+ stringvalidator.ExactlyOneOf(
+ frameworkPath.MatchRoot(config.OriginalAttr),
+ frameworkPath.MatchRoot(config.WriteOnlyAttr),
+ ),
+ stringvalidator.AlsoRequires(
+ frameworkPath.MatchRoot(config.TriggerAttr),
+ ),
+ },
+ },
+ config.TriggerAttr: schema.StringAttribute{
+ Optional: true,
+ Description: config.TriggerDescription,
+ Validators: []validator.String{
+ stringvalidator.LengthAtLeast(1),
+ stringvalidator.AlsoRequires(frameworkPath.Expressions{
+ frameworkPath.MatchRoot(config.WriteOnlyAttr),
+ }...),
+ },
+ },
+ }
+
+ return attrs
+}
+
+// WriteOnlySecretHandler helps handle write-only secrets in CRUD operations
+type WriteOnlySecretHandler struct {
+ Config WriteOnlySecretConfig
+}
+
+// GetSecretForCreate retrieves the secret value for creation, preferring write-only from config
+func (h *WriteOnlySecretHandler) GetSecretForCreate(ctx context.Context, state interface{}, config *tfsdk.Config) (string, bool, diag.Diagnostics) {
+ diags := diag.Diagnostics{}
+
+ // Try to get write-only secret from config first
+ var writeOnlySecret types.String
+ diags.Append(config.GetAttribute(ctx, frameworkPath.Root(h.Config.WriteOnlyAttr), &writeOnlySecret)...)
+ if diags.HasError() {
+ return "", false, diags
+ }
+
+ // If write-only secret is provided, use it
+ if !writeOnlySecret.IsNull() && !writeOnlySecret.IsUnknown() {
+ return writeOnlySecret.ValueString(), true, diags
+ }
+
+ // Otherwise, we'll use the regular attribute (handled by caller)
+ return "", false, diags
+}
+
+// GetSecretForUpdate retrieves the secret value for updates, only if version changed
+func (h *WriteOnlySecretHandler) GetSecretForUpdate(ctx context.Context, config *tfsdk.Config, req *resource.UpdateRequest) (string, bool, diag.Diagnostics) {
+ diags := diag.Diagnostics{}
+
+ // Check if version changed by comparing plan vs state
+ var planVersion, priorVersion types.String
+ diags.Append(req.Plan.GetAttribute(ctx, frameworkPath.Root(h.Config.TriggerAttr), &planVersion)...)
+ diags.Append(req.State.GetAttribute(ctx, frameworkPath.Root(h.Config.TriggerAttr), &priorVersion)...)
+ if diags.HasError() {
+ return "", false, diags
+ }
+
+ // Only proceed if version actually changed
+ if planVersion.Equal(priorVersion) {
+ return "", false, diags
+ }
+
+ // Get write-only secret from config
+ var writeOnlySecret types.String
+ diags.Append(config.GetAttribute(ctx, frameworkPath.Root(h.Config.WriteOnlyAttr), &writeOnlySecret)...)
+ if diags.HasError() {
+ return "", false, diags
+ }
+
+ if !writeOnlySecret.IsNull() && !writeOnlySecret.IsUnknown() {
+ return writeOnlySecret.ValueString(), true, diags
+ }
+
+ return "", false, diags
+}
diff --git a/datadog/internal/utils/writeonly_helpers_test.go b/datadog/internal/utils/writeonly_helpers_test.go
new file mode 100644
index 0000000000..051b180972
--- /dev/null
+++ b/datadog/internal/utils/writeonly_helpers_test.go
@@ -0,0 +1,157 @@
+package utils
+
+import (
+ "testing"
+
+ "github.com/hashicorp/terraform-plugin-framework/resource/schema"
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestCreateWriteOnlySecretAttributes(t *testing.T) {
+	t.Parallel()
+
+ config := WriteOnlySecretConfig{
+ OriginalAttr: "api_key",
+ WriteOnlyAttr: "api_key_wo",
+ TriggerAttr: "api_key_wo_version",
+ OriginalDescription: "The API key for the account.",
+ WriteOnlyDescription: "Write-only API key for the account.",
+ TriggerDescription: "Version for api_key_wo rotation.",
+ }
+
+ attrs := CreateWriteOnlySecretAttributes(config)
+
+ // Verify all three attributes are created
+ require.Len(t, attrs, 3)
+ require.Contains(t, attrs, "api_key")
+ require.Contains(t, attrs, "api_key_wo")
+ require.Contains(t, attrs, "api_key_wo_version")
+
+ // Verify api_key properties
+ apiKey := attrs["api_key"].(schema.StringAttribute)
+ assert.True(t, apiKey.Optional)
+ assert.True(t, apiKey.Sensitive)
+ assert.False(t, apiKey.WriteOnly)
+ assert.Equal(t, "The API key for the account.", apiKey.Description)
+ assert.Len(t, apiKey.Validators, 2) // ExactlyOneOf + PreferWriteOnlyAttribute
+
+ // Verify api_key_wo properties
+ apiKeyWo := attrs["api_key_wo"].(schema.StringAttribute)
+ assert.True(t, apiKeyWo.Optional)
+ assert.True(t, apiKeyWo.Sensitive)
+ assert.True(t, apiKeyWo.WriteOnly)
+ assert.Equal(t, "Write-only API key for the account.", apiKeyWo.Description)
+ assert.Len(t, apiKeyWo.Validators, 2) // ExactlyOneOf + AlsoRequires
+
+ // Verify api_key_wo_version properties
+ version := attrs["api_key_wo_version"].(schema.StringAttribute)
+ assert.True(t, version.Optional)
+ assert.False(t, version.Sensitive)
+ assert.False(t, version.WriteOnly)
+ assert.Equal(t, "Version for api_key_wo rotation.", version.Description)
+ assert.Len(t, version.Validators, 2) // LengthAtLeast + AlsoRequires
+}
+
+func TestWriteOnlySecretHandler_BasicFunctionality(t *testing.T) {
+	t.Parallel()
+
+ config := WriteOnlySecretConfig{
+ OriginalAttr: "api_key",
+ WriteOnlyAttr: "api_key_wo",
+ TriggerAttr: "api_key_wo_version",
+ }
+
+ // Test that handler can be created without panicking
+ handler := WriteOnlySecretHandler{Config: config}
+ assert.Equal(t, "api_key", handler.Config.OriginalAttr)
+ assert.Equal(t, "api_key_wo", handler.Config.WriteOnlyAttr)
+ assert.Equal(t, "api_key_wo_version", handler.Config.TriggerAttr)
+}
+
+func TestWriteOnlySecretHandler_VersionComparison(t *testing.T) {
+	t.Parallel()
+
+ // Test the core logic: string version comparisons that drive update decisions
+ testCases := []struct {
+ name string
+ prior string
+ planned string
+ shouldUpdate bool
+ }{
+ {"numeric versions", "1", "2", true},
+ {"semantic versions", "v1.0", "v1.1", true},
+ {"date versions", "2024-Q1", "2024-Q2", true},
+ {"descriptive versions", "initial", "updated", true},
+ {"same version", "v1.0", "v1.0", false},
+ {"empty to version", "", "1", true},
+ {"version to empty", "1", "", true},
+ {"both empty", "", "", false},
+ }
+
+ for _, tc := range testCases {
+ t.Run(tc.name, func(t *testing.T) {
+ // Test version equality logic (core of GetSecretForUpdate)
+ versionChanged := tc.prior != tc.planned
+ assert.Equal(t, tc.shouldUpdate, versionChanged,
+ "Version comparison failed for '%s' -> '%s'", tc.prior, tc.planned)
+ })
+ }
+}
+
+func TestWriteOnlySecretConfig_EdgeCases(t *testing.T) {
+	t.Parallel()
+
+ tests := []struct {
+ name string
+ config WriteOnlySecretConfig
+ panics bool
+ }{
+ {
+ name: "valid config",
+ config: WriteOnlySecretConfig{
+ OriginalAttr: "api_key",
+ WriteOnlyAttr: "api_key_wo",
+ TriggerAttr: "api_key_wo_version",
+ },
+ panics: false,
+ },
+ {
+ name: "empty strings don't panic",
+ config: WriteOnlySecretConfig{
+ OriginalAttr: "",
+ WriteOnlyAttr: "",
+ TriggerAttr: "",
+ },
+ panics: false,
+ },
+ {
+ name: "different attribute patterns",
+ config: WriteOnlySecretConfig{
+ OriginalAttr: "client_secret",
+ WriteOnlyAttr: "client_secret_wo",
+ TriggerAttr: "client_secret_version",
+ },
+ panics: false,
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ if tt.panics {
+ assert.Panics(t, func() {
+ CreateWriteOnlySecretAttributes(tt.config)
+ })
+ } else {
+ assert.NotPanics(t, func() {
+ attrs := CreateWriteOnlySecretAttributes(tt.config)
+ assert.NotNil(t, attrs)
+
+ // Test handler creation doesn't panic
+ handler := WriteOnlySecretHandler{Config: tt.config}
+ assert.Equal(t, tt.config.OriginalAttr, handler.Config.OriginalAttr)
+ })
+ }
+ })
+ }
+}
diff --git a/datadog/tests/resource_datadog_app_key_registration_test.go b/datadog/tests/resource_datadog_app_key_registration_test.go
index f70a8fcedb..2789dd79f9 100644
--- a/datadog/tests/resource_datadog_app_key_registration_test.go
+++ b/datadog/tests/resource_datadog_app_key_registration_test.go
@@ -7,6 +7,7 @@ import (
"github.com/hashicorp/terraform-plugin-testing/helper/resource"
"github.com/hashicorp/terraform-plugin-testing/terraform"
+
"github.com/terraform-providers/terraform-provider-datadog/datadog/fwprovider"
)
diff --git a/datadog/tests/resource_datadog_integration_cloudflare_account_test.go b/datadog/tests/resource_datadog_integration_cloudflare_account_test.go
index ce0cf8c47c..c648bec79b 100644
--- a/datadog/tests/resource_datadog_integration_cloudflare_account_test.go
+++ b/datadog/tests/resource_datadog_integration_cloudflare_account_test.go
@@ -44,6 +44,38 @@ func TestAccIntegrationCloudflareAccountBasic(t *testing.T) {
})
}
+func TestAccIntegrationCloudflareAccountWriteOnly(t *testing.T) {
+ t.Parallel()
+ if !isReplaying() {
+ t.Skip("This test is replay only")
+ }
+ ctx, providers, accProviders := testAccFrameworkMuxProviders(context.Background(), t)
+ uniq := uniqueEntityName(ctx, t)
+
+ // Cloudflare-specific config generator for write-only tests
+ configGenerator := func(secret, version, uniq string) string {
+ return fmt.Sprintf(`
+resource "datadog_integration_cloudflare_account" "foo" {
+ api_key_wo = "%s"
+ api_key_wo_version = "%s"
+ name = "%s"
+ resources = ["web"]
+}`, secret, version, uniq)
+ }
+
+ resource.Test(t, resource.TestCase{
+ ProtoV5ProviderFactories: accProviders,
+ CheckDestroy: testAccCheckDatadogIntegrationCloudflareAccountDestroy(providers.frameworkProvider),
+ Steps: WriteOnlyBasicTestSteps(
+ configGenerator,
+ "datadog_integration_cloudflare_account.foo",
+ "api_key_wo",
+ "api_key_wo_version",
+ uniq,
+ ),
+ })
+}
+
func testAccCheckDatadogIntegrationCloudflareAccount(uniq string) string {
return fmt.Sprintf(`
resource "datadog_integration_cloudflare_account" "foo" {
diff --git a/datadog/tests/resource_datadog_on_call_escalation_policy_test.go b/datadog/tests/resource_datadog_on_call_escalation_policy_test.go
index ac32a00ffd..eb2ffd9a45 100644
--- a/datadog/tests/resource_datadog_on_call_escalation_policy_test.go
+++ b/datadog/tests/resource_datadog_on_call_escalation_policy_test.go
@@ -10,6 +10,7 @@ import (
"github.com/hashicorp/terraform-plugin-testing/helper/resource"
"github.com/hashicorp/terraform-plugin-testing/terraform"
+
"github.com/terraform-providers/terraform-provider-datadog/datadog/fwprovider"
"github.com/terraform-providers/terraform-provider-datadog/datadog/internal/utils"
)
diff --git a/datadog/tests/resource_datadog_on_call_team_routing_rules_test.go b/datadog/tests/resource_datadog_on_call_team_routing_rules_test.go
index 8575778b7f..ed9b76e9ac 100644
--- a/datadog/tests/resource_datadog_on_call_team_routing_rules_test.go
+++ b/datadog/tests/resource_datadog_on_call_team_routing_rules_test.go
@@ -10,6 +10,7 @@ import (
"github.com/hashicorp/terraform-plugin-testing/helper/resource"
"github.com/hashicorp/terraform-plugin-testing/terraform"
+
"github.com/terraform-providers/terraform-provider-datadog/datadog/fwprovider"
"github.com/terraform-providers/terraform-provider-datadog/datadog/internal/utils"
)
diff --git a/datadog/tests/writeonly_test_helpers.go b/datadog/tests/writeonly_test_helpers.go
new file mode 100644
index 0000000000..e2a58ff69f
--- /dev/null
+++ b/datadog/tests/writeonly_test_helpers.go
@@ -0,0 +1,57 @@
+package test
+
+import (
+ "fmt"
+
+ "github.com/hashicorp/terraform-plugin-testing/helper/resource"
+ "github.com/hashicorp/terraform-plugin-testing/terraform"
+)
+
+// TestCheckWriteOnlyNotInState verifies that write-only attributes are not stored in state
+func TestCheckWriteOnlyNotInState(resourceName, attrName string) resource.TestCheckFunc {
+ return func(s *terraform.State) error {
+ rs, ok := s.RootModule().Resources[resourceName]
+ if !ok {
+ return fmt.Errorf("Not found: %s", resourceName)
+ }
+ if _, ok := rs.Primary.Attributes[attrName]; ok {
+ return fmt.Errorf("Write-only attribute %s should not be in state", attrName)
+ }
+ return nil
+ }
+}
+
+// WriteOnlyBasicTestSteps provides the core write-only test pattern
+// Each resource implements its own config generation for flexibility
+func WriteOnlyBasicTestSteps(
+ configGen func(secret, version, uniq string) string,
+ resourceName, writeOnlyAttr, versionAttr string,
+ uniq string,
+) []resource.TestStep {
+ return []resource.TestStep{
+ // Create with write-only
+ {
+ Config: configGen("secret123", "1", uniq),
+ Check: resource.ComposeTestCheckFunc(
+ TestCheckWriteOnlyNotInState(resourceName, writeOnlyAttr),
+ resource.TestCheckResourceAttr(resourceName, versionAttr, "1"),
+ ),
+ },
+ // Update version (triggers rotation)
+ {
+ Config: configGen("newsecret456", "v2.0", uniq),
+ Check: resource.ComposeTestCheckFunc(
+ TestCheckWriteOnlyNotInState(resourceName, writeOnlyAttr),
+ resource.TestCheckResourceAttr(resourceName, versionAttr, "v2.0"),
+ ),
+ },
+ // Same version (no rotation expected)
+ {
+ Config: configGen("differentsecret", "v2.0", uniq),
+ Check: resource.ComposeTestCheckFunc(
+ TestCheckWriteOnlyNotInState(resourceName, writeOnlyAttr),
+ resource.TestCheckResourceAttr(resourceName, versionAttr, "v2.0"),
+ ),
+ },
+ }
+}
diff --git a/docs/resources/compliance_custom_framework.md b/docs/resources/compliance_custom_framework.md
index 4fa8d0d37a..52c214af55 100644
--- a/docs/resources/compliance_custom_framework.md
+++ b/docs/resources/compliance_custom_framework.md
@@ -49,15 +49,14 @@ resource "datadog_compliance_custom_framework" "framework" {
### Required
-- `handle` (String) The framework handle. String length must be at least 1. This field is immutable.
+- `handle` (String) The framework handle. String length must be at least 1.
- `name` (String) The framework name. String length must be at least 1.
-- `version` (String) The framework version. String length must be at least 1. This field is immutable.
-- `requirements` (Block List) The requirements of the framework. Length must be at least 1. (see [below for nested schema](#nestedblock--requirements))
-
+- `version` (String) The framework version. String length must be at least 1.
### Optional
- `icon_url` (String) The URL of the icon representing the framework
+- `requirements` (Block List) The requirements of the framework. (see [below for nested schema](#nestedblock--requirements))
### Read-Only
@@ -69,7 +68,10 @@ resource "datadog_compliance_custom_framework" "framework" {
Required:
- `name` (String) The name of the requirement. String length must be at least 1.
-- `controls` (Block List) The controls of the requirement. Length must be at least 1. (see [below for nested schema](#nestedblock--requirements--controls))
+
+Optional:
+
+- `controls` (Block List) The controls of the requirement. (see [below for nested schema](#nestedblock--requirements--controls))
### Nested Schema for `requirements.controls`
@@ -77,4 +79,4 @@ Required:
Required:
- `name` (String) The name of the control. String length must be at least 1.
-- `rules_id` (Set of String) The set of rules IDs for the control. Length must be at least 1.
+- `rules_id` (Set of String) The set of rules IDs for the control.
diff --git a/docs/resources/integration_aws_account.md b/docs/resources/integration_aws_account.md
index 71a7573ff4..acc7d393f9 100644
--- a/docs/resources/integration_aws_account.md
+++ b/docs/resources/integration_aws_account.md
@@ -1,23 +1,14 @@
---
-# !! This file is manually maintained. Do not overwrite with tfplugindocs output.
+# generated by https://github.com/hashicorp/terraform-plugin-docs
page_title: "datadog_integration_aws_account Resource - terraform-provider-datadog"
subcategory: ""
description: |-
- Provides a Datadog - Amazon Web Services integration resource. This can be used to create and manage Datadog - Amazon Web Services integration.
-
- The `datadog_integration_aws_account` resource encompasses configuration that was previously managed under these separate resources:
- - `datadog_integration_aws` (deprecated)
- - `datadog_integration_aws_lambda_arn` (deprecated)
- - `datadog_integration_aws_log_collection` (deprecated)
- - `datadog_integration_aws_tag_filter` (deprecated)
-
- See [Upgrading from `datadog_integration_aws` resources](#upgrading) for steps to migrate your existing resources.
-
+ Provides a Datadog-Amazon Web Services integration resource. This can be used to create and manage Datadog-Amazon Web Services integration.
---
# datadog_integration_aws_account (Resource)
-Provides a Datadog—Amazon Web Services integration resource. This can be used to create and manage Datadog—Amazon Web Services integration.
+Provides a Datadog-Amazon Web Services integration resource. This can be used to create and manage Datadog-Amazon Web Services integration.
## Example Usage
@@ -55,7 +46,7 @@ resource "datadog_integration_aws_account" "foo" {
collect_custom_metrics = true
enabled = true
namespace_filters {
- exclude_only = ["AWS/SQS", "AWS/ElasticMapReduce", "AWS/Usage]
+ exclude_only = ["AWS/SQS", "AWS/ElasticMapReduce", "AWS/Usage"]
}
tag_filters {
namespace = "AWS/EC2"
@@ -102,18 +93,18 @@ resource "datadog_integration_aws_account" "foo-defaults" {
### Required
-- `aws_account_id` (String) Your AWS Account ID without dashes.
-- `aws_partition` (String) AWS Account partition.
-- `auth_config` (Block) Configure how Datadog authenticates to your AWS account. Either `aws_auth_config_keys` or `aws_auth_config_role` block is required within. (see [below for nested schema](#nestedblock--auth_config))
-- `aws_regions` (Block) AWS regions to collect data from. Defaults to `include_all` if block is empty. (see [below for nested schema](#nestedblock--aws_regions))
-- `logs_config` (Block) Configure log autosubscription for your Datadog Forwarder Lambda functions. The `lambda_forwarder` block is required within, but may be empty to use defaults. (see [below for nested schema](#nestedblock--logs_config))
-- `metrics_config` (Block) Configure metrics collection from AWS CloudWatch. The `namespace_filters` block is required within, but may be empty to use defaults. (see [below for nested schema](#nestedblock--metrics_config))
-- `resources_config` (Block) AWS resources collection config. May be empty to use defaults. (see [below for nested schema](#nestedblock--resources_config))
-- `traces_config` (Block) AWS traces collection config. The `xray_services` block is required within, but may be empty to use defaults. (see [below for nested schema](#nestedblock--traces_config))
+- `aws_account_id` (String) Your AWS Account ID without dashes.
+- `aws_partition` (String) AWS Account partition. Valid values are `aws`, `aws-cn`, `aws-us-gov`.
### Optional
-- `account_tags` (List of String) Tags to apply to all metrics in the account. Defaults to `[]`.
+- `account_tags` (List of String) Tags to apply to all metrics in the account.
+- `auth_config` (Block, Optional) Configure how Datadog authenticates to your AWS Account. Either `aws_auth_config_keys` or `aws_auth_config_role` block is required within. (see [below for nested schema](#nestedblock--auth_config))
+- `aws_regions` (Block, Optional) AWS Regions to collect data from. Defaults to `include_all` if block is empty. (see [below for nested schema](#nestedblock--aws_regions))
+- `logs_config` (Block, Optional) Configure log autosubscription for your Datadog Forwarder Lambda functions. The `lambda_forwarder` block is required within, but may be empty to use defaults. (see [below for nested schema](#nestedblock--logs_config))
+- `metrics_config` (Block, Optional) Configure metrics collection from AWS CloudWatch. The `namespace_filters` block is required within, but may be empty to use defaults. (see [below for nested schema](#nestedblock--metrics_config))
+- `resources_config` (Block, Optional) AWS Resources Collection config. May be empty to use defaults. (see [below for nested schema](#nestedblock--resources_config))
+- `traces_config` (Block, Optional) AWS Traces Collection config. The `xray_services` block is required within, but may be empty to use defaults. (see [below for nested schema](#nestedblock--traces_config))
### Read-Only
@@ -124,30 +115,27 @@ resource "datadog_integration_aws_account" "foo-defaults" {
Optional:
-- `aws_auth_config_keys` (Block, Optional) (see [below for nested schema](#nestedblock--auth_config--aws_auth_config_keys))
+- `aws_auth_config_keys` (Block, Optional) Datadog will use the provided AWS Access Key ID and Secret Access Key to authenticate to your account. (see [below for nested schema](#nestedblock--auth_config--aws_auth_config_keys))
- `aws_auth_config_role` (Block, Optional) (see [below for nested schema](#nestedblock--auth_config--aws_auth_config_role))
### Nested Schema for `auth_config.aws_auth_config_keys`
-Required:
+Optional:
-- `access_key_id` (String) AWS Access Key ID
-- `secret_access_key` (String, Sensitive) AWS Secret Access Key. This value is write-only; changes made outside of Terraform will not be drift-detected.
+- `access_key_id` (String) AWS Access Key ID.
+- `secret_access_key` (String, Sensitive) AWS Secret Access Key. This value is write-only; changes made outside of Terraform will not be drift-detected. Must be non-empty and must not contain whitespace.
### Nested Schema for `auth_config.aws_auth_config_role`
-Required:
-
-- `role_name` (String) AWS IAM role name.
-
Optional:
-- `external_id` (String) AWS IAM external ID for associated role. If omitted, one is generated.
+- `external_id` (String) AWS IAM External ID for associated role. If omitted, one will be generated.
+- `role_name` (String) AWS IAM Role name.
+
-!> The external ID must be generated by Datadog. For more information, see the [IAM User Guide](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_common-scenarios_third-party.html).
### Nested Schema for `aws_regions`
@@ -157,12 +145,13 @@ Optional:
- `include_all` (Boolean) Include all regions. Defaults to `true`.
- `include_only` (List of String) Include only these regions.
+
### Nested Schema for `logs_config`
-Required:
+Optional:
-- `lambda_forwarder` (Block) Leave empty to omit logs config. (see [below for nested schema](#nestedblock--logs_config--lambda_forwarder))
+- `lambda_forwarder` (Block, Optional) (see [below for nested schema](#nestedblock--logs_config--lambda_forwarder))
### Nested Schema for `logs_config.lambda_forwarder`
@@ -189,12 +178,11 @@ Required:
- `tags` (List of String) The AWS resource tags to filter on for the service specified by `source`.
-
-### Nested Schema for `metrics_config`
-Required:
-- `namespace_filters` (Block) AWS metrics namespace filters. Defaults to a pre-set `exclude_only` list if block is empty. (see [below for nested schema](#nestedblock--metrics_config--namespace_filters))
+
+
+### Nested Schema for `metrics_config`
Optional:
@@ -202,6 +190,7 @@ Optional:
- `collect_cloudwatch_alarms` (Boolean) Enable CloudWatch alarms collection Defaults to `false`.
- `collect_custom_metrics` (Boolean) Enable custom metrics collection Defaults to `false`.
- `enabled` (Boolean) Enable AWS metrics collection Defaults to `true`.
+- `namespace_filters` (Block, Optional) AWS Metrics namespace filters. Defaults to a pre-set `exclude_only` list if block is empty. (see [below for nested schema](#nestedblock--metrics_config--namespace_filters))
- `tag_filters` (Block List) AWS Metrics Collection tag filters list. The array of custom AWS resource tags (in the form `key:value`) defines a filter that Datadog uses when collecting metrics from a specified service. Wildcards, such as `?` (match a single character) and `*` (match multiple characters), and exclusion using `!` before the tag are supported. For EC2, only hosts that match one of the defined tags will be imported into Datadog. The rest will be ignored. For example, `env:production,instance-type:c?.*,!region:us-east-1`. (see [below for nested schema](#nestedblock--metrics_config--tag_filters))
@@ -238,9 +227,9 @@ Optional:
### Nested Schema for `traces_config`
-Required:
+Optional:
-- `xray_services` (Block) AWS X-Ray services to collect traces from. Defaults to `include_only`. (see [below for nested schema](#nestedblock--traces_config--xray_services))
+- `xray_services` (Block, Optional) AWS X-Ray services to collect traces from. Defaults to `include_only`. (see [below for nested schema](#nestedblock--traces_config--xray_services))
### Nested Schema for `traces_config.xray_services`
@@ -250,23 +239,16 @@ Optional:
- `include_all` (Boolean) Include all services.
- `include_only` (List of String) Include only these services. Defaults to `[]`.
+## Import
-
-## Upgrading from `datadog_integration_aws` resources
+Import is supported using the following syntax:
- To migrate your account configuration from `datadog_integration_aws*` resources to `datadog_integration_aws_account`:
- 1. Import your integrated accounts into `datadog_integration_aws_account` resources using the import command below.
- 2. Once successfully imported, you can run `terraform state rm` to delete all resources of the deprecated types from state:
- - `datadog_integration_aws`
- - `datadog_integration_aws_lambda_arn`
- - `datadog_integration_aws_log_collection`
- - `datadog_integration_aws_tag_filter`
+The [`terraform import` command](https://developer.hashicorp.com/terraform/cli/commands/import) can be used, for example:
+```shell
+# AWS Account Config ID can be retrieved by using the List all AWS integrations endpoint and querying by AWS Account ID:
+# https://docs.datadoghq.com/api/latest/aws-integration/#list-all-aws-integrations
-## Import
-Import is supported using the following syntax:
-```shell
terraform import datadog_integration_aws_account.example ""
```
- AWS Account Config ID can be retrieved by using the [List all AWS integrations](https://docs.datadoghq.com/api/latest/aws-integration/#list-all-aws-integrations) endpoint and querying by AWS Account ID.
diff --git a/docs/resources/integration_cloudflare_account.md b/docs/resources/integration_cloudflare_account.md
index 0cef556ecb..d6c0e662b3 100644
--- a/docs/resources/integration_cloudflare_account.md
+++ b/docs/resources/integration_cloudflare_account.md
@@ -13,13 +13,43 @@ Provides a Datadog IntegrationCloudflareAccount resource. This can be used to cr
## Example Usage
```terraform
-# Create new integration_cloudflare_account resource
-
-resource "datadog_integration_cloudflare_account" "foo" {
+# Basic Usage
+resource "datadog_integration_cloudflare_account" "basic" {
api_key = "12345678910abc"
email = "test-email@example.com"
name = "test-name"
}
+
+# Write-Only API Key (Recommended for Terraform 1.11+)
+resource "datadog_integration_cloudflare_account" "secure" {
+ name = "prod-cloudflare"
+ email = "admin@company.com"
+
+ # Write-only API key with version trigger
+ api_key_wo = var.cloudflare_api_key
+ api_key_wo_version = "1" # Any string: "1", "v2.1", "2024-Q1", etc.
+}
+
+# Advanced: Automated Version Management
+locals {
+ cloudflare_keepers = {
+ rotation_date = "2024-02-15"
+ environment = "production"
+ security_policy = "v3.1"
+ }
+
+ # Auto-generate version from keepers
+ api_key_version = "rotation-${substr(md5(jsonencode(local.cloudflare_keepers)), 0, 8)}"
+}
+
+resource "datadog_integration_cloudflare_account" "automated" {
+ name = "prod-cloudflare"
+ email = "admin@company.com"
+
+ # Version automatically updates when any keeper changes
+ api_key_wo = var.cloudflare_api_key
+ api_key_wo_version = local.api_key_version
+}
```
@@ -27,11 +57,15 @@ resource "datadog_integration_cloudflare_account" "foo" {
### Required
-- `api_key` (String, Sensitive) The API key (or token) for the Cloudflare account.
- `name` (String) The name of the Cloudflare account.
### Optional
+> **NOTE**: [Write-only arguments](https://developer.hashicorp.com/terraform/language/resources/ephemeral#write-only-arguments) are supported in Terraform 1.11 and later.
+
+- `api_key` (String, Sensitive) The API key (or token) for the Cloudflare account.
+- `api_key_wo` (String, Sensitive, [Write-only](https://developer.hashicorp.com/terraform/language/resources/ephemeral#write-only-arguments)) Write-only API key (or token) for the Cloudflare account.
+- `api_key_wo_version` (String) Version associated with api_key_wo. Changing this triggers an update. Can be any string (e.g., '1', 'v2.1', '2024-Q1'). String length must be at least 1.
- `email` (String) The email associated with the Cloudflare account. If an API key is provided (and not a token), this field is also required.
- `resources` (Set of String) An allowlist of resources to pull metrics for. Includes `web`, `dns`, `lb` (load balancer), and `worker`).
diff --git a/docs/resources/on_call_schedule.md b/docs/resources/on_call_schedule.md
index 73524ac900..0e1b19c74e 100644
--- a/docs/resources/on_call_schedule.md
+++ b/docs/resources/on_call_schedule.md
@@ -42,12 +42,12 @@ resource "datadog_on_call_schedule" "test" {
### Required
-- `layer` (Block List) List of layers for the schedule. (see [below for nested schema](#nestedblock--layer))
- `name` (String) A human-readable name for the new schedule.
- `time_zone` (String) The time zone in which the schedule is defined.
### Optional
+- `layer` (Block List) List of layers for the schedule. (see [below for nested schema](#nestedblock--layer))
- `teams` (List of String) A list of team ids associated with the schedule.
### Read-Only
@@ -60,7 +60,6 @@ resource "datadog_on_call_schedule" "test" {
Required:
- `effective_date` (String) The date/time when this layer should become active (in ISO 8601).
-- `interval` (Block List) Rotation interval for this layer. (see [below for nested schema](#nestedblock--layer--interval))
- `name` (String) The name of this layer. Should be unique within the schedule.
- `rotation_start` (String) The date/time when the rotation for this layer starts (in ISO 8601).
- `users` (List of String) List of user IDs for the layer. Can either be a valid user id or null
@@ -68,6 +67,7 @@ Required:
Optional:
- `end_date` (String) The date/time after which this layer no longer applies (in ISO 8601).
+- `interval` (Block, Optional) (see [below for nested schema](#nestedblock--layer--interval))
- `restriction` (Block List) List of restrictions for the layer. (see [below for nested schema](#nestedblock--layer--restriction))
Read-Only:
diff --git a/docs/resources/on_call_team_routing_rules.md b/docs/resources/on_call_team_routing_rules.md
index 39654c18cb..a4c312feeb 100644
--- a/docs/resources/on_call_team_routing_rules.md
+++ b/docs/resources/on_call_team_routing_rules.md
@@ -60,7 +60,7 @@ Optional:
- `action` (Block List) Specifies the list of actions to perform when the routing rule is matched. (see [below for nested schema](#nestedblock--rule--action))
- `escalation_policy` (String) ID of the policy to be applied when this routing rule matches.
- `query` (String) Defines the query or condition that triggers this routing rule. Defaults to `""`.
-- `time_restrictions` (Block List) Holds time zone information and a list of time restrictions for a routing rule. (see [below for nested schema](#nestedblock--rule--time_restrictions))
+- `time_restrictions` (Block, Optional) Holds time zone information and a list of time restrictions for a routing rule. (see [below for nested schema](#nestedblock--rule--time_restrictions))
- `urgency` (String) Defines the urgency for pages created via this rule. Only valid if `escalation_policy` is set. Valid values are `high`, `low`, `dynamic`.
Read-Only:
@@ -72,13 +72,13 @@ Read-Only:
Optional:
-- `send_slack_message` (Block List) (see [below for nested schema](#nestedblock--rule--action--send_slack_message))
-- `send_teams_message` (Block List) (see [below for nested schema](#nestedblock--rule--action--send_teams_message))
+- `send_slack_message` (Block, Optional) (see [below for nested schema](#nestedblock--rule--action--send_slack_message))
+- `send_teams_message` (Block, Optional) (see [below for nested schema](#nestedblock--rule--action--send_teams_message))
### Nested Schema for `rule.action.send_slack_message`
-Required:
+Optional:
- `channel` (String) Slack channel ID.
- `workspace` (String) Slack workspace ID.
@@ -87,7 +87,7 @@ Required:
### Nested Schema for `rule.action.send_teams_message`
-Required:
+Optional:
- `channel` (String) Teams channel ID.
- `team` (String) Teams team ID.
@@ -98,15 +98,15 @@ Required:
### Nested Schema for `rule.time_restrictions`
-Required:
+Optional:
-- `time_zone` (String) Specifies the time zone applicable to the restrictions, e.g. `America/New_York`.
- `restriction` (Block List) List of restrictions for the rule. (see [below for nested schema](#nestedblock--rule--time_restrictions--restriction))
+- `time_zone` (String) Specifies the time zone applicable to the restrictions, e.g. `America/New_York`.
### Nested Schema for `rule.time_restrictions.restriction`
-Required:
+Optional:
- `end_day` (String) The weekday when the restriction period ends. Valid values are `monday`, `tuesday`, `wednesday`, `thursday`, `friday`, `saturday`, `sunday`.
- `end_time` (String) The time of day when the restriction ends (hh:mm:ss).
@@ -120,6 +120,6 @@ Import is supported using the following syntax:
The [`terraform import` command](https://developer.hashicorp.com/terraform/cli/commands/import) can be used, for example:
```shell
-# Import existing on_call_team_routing_rules
+# Import an existing on_call_team_routing_rules
terraform import datadog_on_call_team_routing_rules.test "b03a07d5-49da-43e9-83b4-5d84969b588b"
```
diff --git a/examples/resources/datadog_integration_cloudflare_account/resource.tf b/examples/resources/datadog_integration_cloudflare_account/resource.tf
index 8a83b51b45..e915909b88 100644
--- a/examples/resources/datadog_integration_cloudflare_account/resource.tf
+++ b/examples/resources/datadog_integration_cloudflare_account/resource.tf
@@ -1,7 +1,37 @@
-# Create new integration_cloudflare_account resource
-
-resource "datadog_integration_cloudflare_account" "foo" {
+# Basic Usage
+resource "datadog_integration_cloudflare_account" "basic" {
api_key = "12345678910abc"
email = "test-email@example.com"
name = "test-name"
-}
\ No newline at end of file
+}
+
+# Write-Only API Key (Recommended for Terraform 1.11+)
+resource "datadog_integration_cloudflare_account" "secure" {
+ name = "prod-cloudflare"
+ email = "admin@company.com"
+
+ # Write-only API key with version trigger
+ api_key_wo = var.cloudflare_api_key
+ api_key_wo_version = "1" # Any string: "1", "v2.1", "2024-Q1", etc.
+}
+
+# Advanced: Automated Version Management
+locals {
+ cloudflare_keepers = {
+ rotation_date = "2024-02-15"
+ environment = "production"
+ security_policy = "v3.1"
+ }
+
+ # Auto-generate version from keepers
+ api_key_version = "rotation-${substr(md5(jsonencode(local.cloudflare_keepers)), 0, 8)}"
+}
+
+resource "datadog_integration_cloudflare_account" "automated" {
+ name = "prod-cloudflare"
+ email = "admin@company.com"
+
+ # Version automatically updates when any keeper changes
+ api_key_wo = var.cloudflare_api_key
+ api_key_wo_version = local.api_key_version
+}
diff --git a/go.mod b/go.mod
index f6cf4d1a9b..2d05e29c2a 100644
--- a/go.mod
+++ b/go.mod
@@ -18,6 +18,7 @@ require (
github.com/hashicorp/terraform-plugin-sdk/v2 v2.37.0
github.com/hashicorp/terraform-plugin-testing v1.12.0
github.com/jonboulle/clockwork v0.2.2
+ github.com/stretchr/testify v1.8.3
github.com/zorkian/go-datadog-api v2.30.0+incompatible
gopkg.in/DataDog/dd-trace-go.v1 v1.34.0
gopkg.in/dnaeon/go-vcr.v3 v3.1.2
@@ -40,6 +41,7 @@ require (
github.com/bgentry/speakeasy v0.1.0 // indirect
github.com/cenkalti/backoff v2.2.1+incompatible // indirect
github.com/cloudflare/circl v1.6.1 // indirect
+ github.com/davecgh/go-spew v1.1.1 // indirect
github.com/fatih/color v1.18.0 // indirect
github.com/goccy/go-json v0.10.5 // indirect
github.com/golang/protobuf v1.5.4 // indirect
@@ -75,6 +77,7 @@ require (
github.com/oklog/run v1.1.0 // indirect
github.com/opentracing/opentracing-go v1.2.0 // indirect
github.com/philhofer/fwd v1.1.1 // indirect
+ github.com/pmezard/go-difflib v1.0.0 // indirect
github.com/posener/complete v1.2.3 // indirect
github.com/rogpeppe/go-internal v1.11.0 // indirect
github.com/russross/blackfriday v1.6.0 // indirect
diff --git a/go.sum b/go.sum
index 587c674125..a87b730f35 100644
--- a/go.sum
+++ b/go.sum
@@ -255,8 +255,9 @@ github.com/skeema/knownhosts v1.3.1/go.mod h1:r7KTdC8l4uxWRyK2TpQZ/1o5HaSzh06ePQ
github.com/spf13/cast v1.3.1/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE=
github.com/spf13/cast v1.5.0 h1:rj3WzYc11XZaIZMPKmwP96zkFEnnAmV8s6XbB2aY32w=
github.com/spf13/cast v1.5.0/go.mod h1:SpXXQ5YoyJw6s3/6cMTQuxvgRl3PCJiyaX9p6b155UU=
-github.com/stretchr/objx v0.1.0 h1:4G4v2dO3VZwixGIRoQ5Lfboy6nUhCyYzaqnIAPPhYs4=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
+github.com/stretchr/objx v0.5.0 h1:1zr/of2m5FGMsad5YfcqgdqdWrIhu+EBEJRhR1U7z/c=
+github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo=
github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=