2 changes: 2 additions & 0 deletions CHANGELOG.md
@@ -1,5 +1,7 @@
## [Unreleased]

- Add resource `elasticstack_elasticsearch_data_stream_lifecycle` ([#838](https://github.com/elastic/terraform-provider-elasticstack/issues/838))

## [0.11.10] - 2024-10-23

- Fix bug updating alert delay ([#859](https://github.com/elastic/terraform-provider-elasticstack/pull/859))
109 changes: 109 additions & 0 deletions docs/resources/elasticsearch_data_stream_lifecycle.md
@@ -0,0 +1,109 @@
---
subcategory: "Index"
layout: ""
page_title: "Elasticstack: elasticstack_elasticsearch_data_stream_lifecycle Resource"
description: |-
Manages Lifecycle for Elasticsearch Data Streams
---

# Resource: elasticstack_elasticsearch_data_stream_lifecycle

Configures the data stream lifecycle for the targeted data streams. See the [data stream APIs documentation](https://www.elastic.co/guide/en/elasticsearch/reference/current/data-stream-apis.html) for details.

## Example Usage

```terraform
provider "elasticstack" {
elasticsearch {}
}
// First we must have a index template created
resource "elasticstack_elasticsearch_index_template" "my_data_stream_template" {
name = "my_data_stream"
index_patterns = ["my-stream*"]
data_stream {}
}
// and now we can create data stream based on the index template
resource "elasticstack_elasticsearch_data_stream" "my_data_stream" {
name = "my-stream"
// make sure that template is created before the data stream
depends_on = [
elasticstack_elasticsearch_index_template.my_data_stream_template
]
}
// finally we can manage lifecycle of data stream
resource "elasticstack_elasticsearch_data_stream_lifecycle" "my_data_stream_lifecycle" {
name = "my-stream"
data_retention = "3d"
depends_on = [
elasticstack_elasticsearch_data_stream.my_data_stream,
]
}
// or you can use wildcards to manage multiple lifecycles at once
resource "elasticstack_elasticsearch_data_stream_lifecycle" "my_data_stream_lifecycle_multiple" {
name = "stream-*"
data_retention = "3d"
}
```

<!-- schema generated by tfplugindocs -->
## Schema

### Required

- `name` (String) Name of the data stream. Supports wildcards.

### Optional

- `data_retention` (String) Every document added to this data stream will be stored for at least this time frame. When empty, every document in this data stream will be stored indefinitely.
- `downsampling` (Attributes List) Downsampling configuration objects, each defining an `after` interval representing when the backing index is meant to be downsampled and a `fixed_interval` representing the downsampling interval. (see [below for nested schema](#nestedatt--downsampling))
- `elasticsearch_connection` (Block List, Deprecated) Elasticsearch connection configuration block. (see [below for nested schema](#nestedblock--elasticsearch_connection))
- `enabled` (Boolean) Whether the data stream lifecycle is enabled.
- `expand_wildcards` (String) Determines how wildcard patterns in the `indices` parameter match data streams and indices. Supports comma-separated values, such as `closed,hidden`.
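
For example, a lifecycle can be scoped to hidden data streams as well by widening the wildcard expansion (a minimal sketch; `open,hidden` is one of the standard comma-separated combinations):

```terraform
resource "elasticstack_elasticsearch_data_stream_lifecycle" "hidden_streams" {
  name             = "logs-*"
  data_retention   = "7d"
  expand_wildcards = "open,hidden"
}
```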

### Read-Only

- `id` (String) Internal identifier of the resource.

<a id="nestedatt--downsampling"></a>
### Nested Schema for `downsampling`

Required:

- `after` (String) Interval representing when the backing index is meant to be downsampled.
- `fixed_interval` (String) The interval at which to aggregate the original time series index.
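
As an illustration, a lifecycle that combines retention with a single downsampling round might look like the following (a minimal sketch; the `after` and `fixed_interval` values are placeholders):

```terraform
resource "elasticstack_elasticsearch_data_stream_lifecycle" "downsampled" {
  name           = "my-stream"
  data_retention = "30d"

  downsampling = [
    {
      after          = "1d"
      fixed_interval = "1h"
    },
  ]
}
```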


<a id="nestedblock--elasticsearch_connection"></a>
### Nested Schema for `elasticsearch_connection`

Optional:

- `api_key` (String, Sensitive) API Key to use for authentication to Elasticsearch
- `bearer_token` (String, Sensitive) Bearer Token to use for authentication to Elasticsearch
- `ca_data` (String) PEM-encoded custom Certificate Authority certificate
- `ca_file` (String) Path to a custom Certificate Authority certificate
- `cert_data` (String) PEM encoded certificate for client auth
- `cert_file` (String) Path to a file containing the PEM encoded certificate for client auth
- `endpoints` (List of String, Sensitive) A list of endpoints the Terraform provider will connect to; each must include the http(s) scheme and port number.
- `es_client_authentication` (String, Sensitive) ES Client Authentication field to be used with the bearer token
- `insecure` (Boolean) Disable TLS certificate validation
- `key_data` (String, Sensitive) PEM encoded private key for client auth
- `key_file` (String) Path to a file containing the PEM encoded private key for client auth
- `password` (String, Sensitive) Password to use for API authentication to Elasticsearch.
- `username` (String) Username to use for API authentication to Elasticsearch.

## Import

Import is supported using the following syntax:

```shell
terraform import elasticstack_elasticsearch_data_stream_lifecycle.my_data_stream_lifecycle <cluster_uuid>/<data_stream_name>
```
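
On Terraform 1.5 and later, the same import can also be expressed declaratively with an `import` block (a sketch; replace the placeholders with real values before applying):

```terraform
import {
  to = elasticstack_elasticsearch_data_stream_lifecycle.my_data_stream_lifecycle
  id = "<cluster_uuid>/<data_stream_name>"
}
```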
@@ -0,0 +1,2 @@
terraform import elasticstack_elasticsearch_data_stream_lifecycle.my_data_stream_lifecycle <cluster_uuid>/<data_stream_name>

@@ -0,0 +1,38 @@
provider "elasticstack" {
elasticsearch {}
}

// First we must have a index template created
resource "elasticstack_elasticsearch_index_template" "my_data_stream_template" {
name = "my_data_stream"

index_patterns = ["my-stream*"]

data_stream {}
}

// and now we can create data stream based on the index template
resource "elasticstack_elasticsearch_data_stream" "my_data_stream" {
name = "my-stream"

// make sure that template is created before the data stream
depends_on = [
elasticstack_elasticsearch_index_template.my_data_stream_template
]
}

// finally we can manage lifecycle of data stream
resource "elasticstack_elasticsearch_data_stream_lifecycle" "my_data_stream_lifecycle" {
name = "my-stream"
data_retention = "3d"

depends_on = [
elasticstack_elasticsearch_data_stream.my_data_stream,
]
}

// or you can use wildcards to manage multiple lifecycles at once
resource "elasticstack_elasticsearch_data_stream_lifecycle" "my_data_stream_lifecycle_multiple" {
name = "stream-*"
data_retention = "3d"
}
79 changes: 79 additions & 0 deletions internal/clients/elasticsearch/index.go
@@ -499,6 +499,85 @@ func DeleteDataStream(ctx context.Context, apiClient *clients.ApiClient, dataStr
return diags
}

// PutDataStreamLifecycle creates or updates the lifecycle of the targeted data streams.
func PutDataStreamLifecycle(ctx context.Context, apiClient *clients.ApiClient, dataStreamName string, expandWildcards string, lifecycle models.LifecycleSettings) fwdiags.Diagnostics {
	esClient, err := apiClient.GetESClient()
	if err != nil {
		return utils.FrameworkDiagFromError(err)
	}

	lifecycleBytes, err := json.Marshal(lifecycle)
	if err != nil {
		return utils.FrameworkDiagFromError(err)
	}

	opts := []func(*esapi.IndicesPutDataLifecycleRequest){
		esClient.Indices.PutDataLifecycle.WithBody(bytes.NewReader(lifecycleBytes)),
		esClient.Indices.PutDataLifecycle.WithContext(ctx),
		esClient.Indices.PutDataLifecycle.WithExpandWildcards(expandWildcards),
	}
	res, err := esClient.Indices.PutDataLifecycle([]string{dataStreamName}, opts...)
	if err != nil {
		return utils.FrameworkDiagFromError(err)
	}
	defer res.Body.Close()
	if diags := utils.CheckError(res, fmt.Sprintf("Unable to create DataStreamLifecycle: %s", dataStreamName)); diags.HasError() {
		return utils.FrameworkDiagsFromSDK(diags)
	}
	return nil
}

// GetDataStreamLifecycle fetches the lifecycle of the targeted data streams,
// returning nil (with no diagnostics) when the data stream is not found.
func GetDataStreamLifecycle(ctx context.Context, apiClient *clients.ApiClient, dataStreamName string, expandWildcards string) (*[]models.DataStreamLifecycle, fwdiags.Diagnostics) {
	esClient, err := apiClient.GetESClient()
	if err != nil {
		return nil, utils.FrameworkDiagFromError(err)
	}
	opts := []func(*esapi.IndicesGetDataLifecycleRequest){
		esClient.Indices.GetDataLifecycle.WithContext(ctx),
		esClient.Indices.GetDataLifecycle.WithExpandWildcards(expandWildcards),
	}
	res, err := esClient.Indices.GetDataLifecycle([]string{dataStreamName}, opts...)
	if err != nil {
		return nil, utils.FrameworkDiagFromError(err)
	}
	defer res.Body.Close()
	if res.StatusCode == http.StatusNotFound {
		return nil, nil
	}
	if diags := utils.CheckError(res, fmt.Sprintf("Unable to get requested DataStreamLifecycle: %s", dataStreamName)); diags.HasError() {
		return nil, utils.FrameworkDiagsFromSDK(diags)
	}

	dStreams := make(map[string][]models.DataStreamLifecycle)
	if err := json.NewDecoder(res.Body).Decode(&dStreams); err != nil {
		return nil, utils.FrameworkDiagFromError(err)
	}
	ds := dStreams["data_streams"]
	return &ds, nil
}

// DeleteDataStreamLifecycle removes the lifecycle configuration from the targeted data streams.
func DeleteDataStreamLifecycle(ctx context.Context, apiClient *clients.ApiClient, dataStreamName string, expandWildcards string) fwdiags.Diagnostics {
	esClient, err := apiClient.GetESClient()
	if err != nil {
		return utils.FrameworkDiagFromError(err)
	}
	opts := []func(*esapi.IndicesDeleteDataLifecycleRequest){
		esClient.Indices.DeleteDataLifecycle.WithContext(ctx),
		esClient.Indices.DeleteDataLifecycle.WithExpandWildcards(expandWildcards),
	}
	res, err := esClient.Indices.DeleteDataLifecycle([]string{dataStreamName}, opts...)
	if err != nil {
		return utils.FrameworkDiagFromError(err)
	}
	defer res.Body.Close()
	if diags := utils.CheckError(res, fmt.Sprintf("Unable to delete DataStreamLifecycle: %s", dataStreamName)); diags.HasError() {
		return utils.FrameworkDiagsFromSDK(diags)
	}

	return nil
}

func PutIngestPipeline(ctx context.Context, apiClient *clients.ApiClient, pipeline *models.IngestPipeline) diag.Diagnostics {
var diags diag.Diagnostics
pipelineBytes, err := json.Marshal(pipeline)