diff --git a/docs/configuration.md b/docs/configuration.md
index a782e1f67..be62bf0c6 100644
--- a/docs/configuration.md
+++ b/docs/configuration.md
@@ -137,6 +137,14 @@ statistics:
 # Export the metric with the original CloudWatch timestamp (General Setting for all metrics in this job)
 [ addCloudwatchTimestamp: <boolean> ]
 
+# Enables the inclusion of past metric data points from the CloudWatch response if available.
+# This is useful when a metric is configured with a 60-second period and a 300-second length, ensuring that all
+# five data points are exposed at the metrics endpoint instead of only the latest one.
+# Note: This option requires `addCloudwatchTimestamp` to be enabled.
+# The metric destination must support out-of-order timestamps, see https://prometheus.io/docs/prometheus/latest/configuration/configuration/#tsdb
+# (General Setting for all metrics in this job)
+[ exportAllDataPoints: <boolean> ]
+
 # List of metric definitions
 metrics:
   [ - <metric_config> ... ]
@@ -276,6 +284,14 @@ statistics:
 # Export the metric with the original CloudWatch timestamp (General Setting for all metrics in this job)
 [ addCloudwatchTimestamp: <boolean> ]
 
+# Enables the inclusion of past metric data points from the CloudWatch response if available.
+# This is useful when a metric is configured with a 60-second period and a 300-second length, ensuring that all
+# five data points are exposed at the metrics endpoint instead of only the latest one.
+# Note: This option requires `addCloudwatchTimestamp` to be enabled.
+# The metric destination must support out-of-order timestamps, see https://prometheus.io/docs/prometheus/latest/configuration/configuration/#tsdb
+# (General Setting for all metrics in this job)
+[ exportAllDataPoints: <boolean> ]
+
 # List of metric definitions
 metrics:
   [ - <metric_config> ... ]
@@ -333,12 +349,20 @@ statistics:
 
 # Export the metric with the original CloudWatch timestamp (Overrides job level setting)
 [ addCloudwatchTimestamp: <boolean> ]
+
+# Enables the inclusion of past metric data points from the CloudWatch response if available.
+# This is useful when a metric is configured with a 60-second period and a 300-second length, ensuring that all
+# five data points are exposed at the metrics endpoint instead of only the latest one.
+# Note: This option requires `addCloudwatchTimestamp` to be enabled.
+# The metric destination must support out-of-order timestamps, see https://prometheus.io/docs/prometheus/latest/configuration/configuration/#tsdb
+# (Overrides job level setting)
+[ exportAllDataPoints: <boolean> ]
 ```
 
 Notes:
 
 - Available statistics: `Maximum`, `Minimum`, `Sum`, `SampleCount`, `Average`, `pXX` (e.g. `p90`).
-- Watch out using `addCloudwatchTimestamp` for sparse metrics, e.g from S3, since Prometheus won't scrape metrics containing timestamps older than 2-3 hours.
+- Watch out using `addCloudwatchTimestamp` for sparse metrics, e.g. from S3, since Prometheus won't scrape metrics containing timestamps older than 2-3 hours. The same caveat applies when `exportAllDataPoints` is enabled for any metric, since the additional data points carry even older timestamps.
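For reference, the `examples/historic-data.yml` file added later in this change pairs these settings exactly as the comments above describe; abridged here to a single metric:

```yaml
apiVersion: v1alpha1
discovery:
  jobs:
    - type: AWS/SQS
      regions:
        - us-east-1
      period: 60                   # one data point per minute
      length: 300                  # five-minute window, i.e. five data points per scrape
      addCloudwatchTimestamp: true # required for exportAllDataPoints
      exportAllDataPoints: true
      metrics:
        - name: NumberOfMessagesSent
          statistics: [Sum]
```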
### `exported_tags_config` @@ -390,4 +414,4 @@ This is an example of the `dimensions_config` block: dimensions: - name: AutoScalingGroupName value: MyGroup -``` +``` \ No newline at end of file diff --git a/examples/historic-data.yml b/examples/historic-data.yml new file mode 100644 index 000000000..98467a717 --- /dev/null +++ b/examples/historic-data.yml @@ -0,0 +1,29 @@ +apiVersion: v1alpha1 +discovery: + jobs: + - type: AWS/SQS + regions: + - us-east-1 + period: 60 + length: 300 + addCloudwatchTimestamp: true + exportAllDataPoints: true + metrics: + - name: NumberOfMessagesSent + statistics: [Sum] + - name: NumberOfMessagesReceived + statistics: [Sum] + - name: NumberOfMessagesDeleted + statistics: [Sum] + - name: ApproximateAgeOfOldestMessage + statistics: [Average] + - name: NumberOfEmptyReceives + statistics: [Sum] + - name: SentMessageSize + statistics: [Average] + - name: ApproximateNumberOfMessagesNotVisible + statistics: [Sum] + - name: ApproximateNumberOfMessagesDelayed + statistics: [Sum] + - name: ApproximateNumberOfMessagesVisible + statistics: [Sum] diff --git a/pkg/clients/cloudwatch/client.go b/pkg/clients/cloudwatch/client.go index 44b06113b..9d98ecf83 100644 --- a/pkg/clients/cloudwatch/client.go +++ b/pkg/clients/cloudwatch/client.go @@ -37,7 +37,7 @@ type Client interface { GetMetricData(ctx context.Context, getMetricData []*model.CloudwatchData, namespace string, startTime time.Time, endTime time.Time) []MetricDataResult // GetMetricStatistics returns the output of the GetMetricStatistics CloudWatch API. - GetMetricStatistics(ctx context.Context, logger *slog.Logger, dimensions []model.Dimension, namespace string, metric *model.MetricConfig) []*model.Datapoint + GetMetricStatistics(ctx context.Context, logger *slog.Logger, dimensions []model.Dimension, namespace string, metric *model.MetricConfig) []*model.MetricStatisticsResult } // ConcurrencyLimiter limits the concurrency when calling AWS CloudWatch APIs. 
The functions implemented @@ -55,9 +55,12 @@ type ConcurrencyLimiter interface { } type MetricDataResult struct { - ID string - // A nil datapoint is a marker for no datapoint being found - Datapoint *float64 + ID string + DataPoints []DataPoint +} + +type DataPoint struct { + Value *float64 Timestamp time.Time } @@ -73,7 +76,7 @@ func NewLimitedConcurrencyClient(client Client, limiter ConcurrencyLimiter) Clie } } -func (c limitedConcurrencyClient) GetMetricStatistics(ctx context.Context, logger *slog.Logger, dimensions []model.Dimension, namespace string, metric *model.MetricConfig) []*model.Datapoint { +func (c limitedConcurrencyClient) GetMetricStatistics(ctx context.Context, logger *slog.Logger, dimensions []model.Dimension, namespace string, metric *model.MetricConfig) []*model.MetricStatisticsResult { c.limiter.Acquire(getMetricStatisticsCall) res := c.client.GetMetricStatistics(ctx, logger, dimensions, namespace, metric) c.limiter.Release(getMetricStatisticsCall) diff --git a/pkg/clients/cloudwatch/v1/client.go b/pkg/clients/cloudwatch/v1/client.go index 64bf2924b..d9c405d74 100644 --- a/pkg/clients/cloudwatch/v1/client.go +++ b/pkg/clients/cloudwatch/v1/client.go @@ -95,6 +95,7 @@ func toModelDimensions(dimensions []*cloudwatch.Dimension) []model.Dimension { func (c client) GetMetricData(ctx context.Context, getMetricData []*model.CloudwatchData, namespace string, startTime time.Time, endTime time.Time) []cloudwatch_client.MetricDataResult { metricDataQueries := make([]*cloudwatch.MetricDataQuery, 0, len(getMetricData)) + exportAllDataPoints := false for _, data := range getMetricData { metricStat := &cloudwatch.MetricStat{ Metric: &cloudwatch.Metric{ @@ -110,6 +111,7 @@ func (c client) GetMetricData(ctx context.Context, getMetricData []*model.Cloudw MetricStat: metricStat, ReturnData: aws.Bool(true), }) + exportAllDataPoints = exportAllDataPoints || data.MetricMigrationParams.ExportAllDataPoints } input := &cloudwatch.GetMetricDataInput{ EndTime: &endTime, @@ -137,23 +139,31 @@ func (c client) GetMetricData(ctx context.Context, getMetricData []*model.Cloudw c.logger.Error("GetMetricData error", "err", err) return nil } - return toMetricDataResult(resp) + return toMetricDataResult(resp, exportAllDataPoints) } -func toMetricDataResult(resp cloudwatch.GetMetricDataOutput) []cloudwatch_client.MetricDataResult { +func toMetricDataResult(resp cloudwatch.GetMetricDataOutput, exportAllDataPoints bool) []cloudwatch_client.MetricDataResult { output := make([]cloudwatch_client.MetricDataResult, 0, len(resp.MetricDataResults)) for _, metricDataResult := range resp.MetricDataResults { - mappedResult := cloudwatch_client.MetricDataResult{ID: *metricDataResult.Id} - if len(metricDataResult.Values) > 0 { - mappedResult.Datapoint = metricDataResult.Values[0] - mappedResult.Timestamp = *metricDataResult.Timestamps[0] + mappedResult := cloudwatch_client.MetricDataResult{ + ID: *metricDataResult.Id, + DataPoints: make([]cloudwatch_client.DataPoint, 0, len(metricDataResult.Timestamps))} + for i := 0; i < len(metricDataResult.Timestamps); i++ { + mappedResult.DataPoints = append(mappedResult.DataPoints, cloudwatch_client.DataPoint{ + Value: metricDataResult.Values[i], + Timestamp: *metricDataResult.Timestamps[i], + }) + + if !exportAllDataPoints { + break + } } output = append(output, mappedResult) } return output } -func (c client) GetMetricStatistics(ctx context.Context, logger *slog.Logger, dimensions []model.Dimension, namespace string, metric *model.MetricConfig) []*model.Datapoint { +func (c 
client) GetMetricStatistics(ctx context.Context, logger *slog.Logger, dimensions []model.Dimension, namespace string, metric *model.MetricConfig) []*model.MetricStatisticsResult { filter := createGetMetricStatisticsInput(dimensions, &namespace, metric, logger) c.logger.Debug("GetMetricStatistics", "input", filter) @@ -171,14 +181,14 @@ func (c client) GetMetricStatistics(ctx context.Context, logger *slog.Logger, di return nil } - return toModelDatapoints(resp.Datapoints) + return toModelDataPoints(resp.Datapoints) } -func toModelDatapoints(cwDatapoints []*cloudwatch.Datapoint) []*model.Datapoint { - modelDataPoints := make([]*model.Datapoint, 0, len(cwDatapoints)) +func toModelDataPoints(cwDataPoints []*cloudwatch.Datapoint) []*model.MetricStatisticsResult { + modelDataPoints := make([]*model.MetricStatisticsResult, 0, len(cwDataPoints)) - for _, cwDatapoint := range cwDatapoints { - modelDataPoints = append(modelDataPoints, &model.Datapoint{ + for _, cwDatapoint := range cwDataPoints { + modelDataPoints = append(modelDataPoints, &model.MetricStatisticsResult{ Average: cwDatapoint.Average, ExtendedStatistics: cwDatapoint.ExtendedStatistics, Maximum: cwDatapoint.Maximum, diff --git a/pkg/clients/cloudwatch/v1/client_test.go b/pkg/clients/cloudwatch/v1/client_test.go index 0d761b5f0..b3553b5a4 100644 --- a/pkg/clients/cloudwatch/v1/client_test.go +++ b/pkg/clients/cloudwatch/v1/client_test.go @@ -47,11 +47,13 @@ func Test_toMetricDataResult(t *testing.T) { name string getMetricDataOutput cloudwatch.GetMetricDataOutput expectedMetricDataResults []cloudwatch_client.MetricDataResult + exportAllDataPoints bool } testCases := []testCase{ { - name: "all metrics present", + name: "all metrics present", + exportAllDataPoints: false, getMetricDataOutput: cloudwatch.GetMetricDataOutput{ MetricDataResults: []*cloudwatch.MetricDataResult{ { @@ -67,12 +69,21 @@ func Test_toMetricDataResult(t *testing.T) { }, }, expectedMetricDataResults: []cloudwatch_client.MetricDataResult{ - {ID: "metric-1", Datapoint: aws.Float64(1.0), Timestamp: ts.Add(10 * time.Minute)}, - {ID: "metric-2", Datapoint: aws.Float64(2.0), Timestamp: ts}, + { + ID: "metric-1", DataPoints: []cloudwatch_client.DataPoint{ + {Value: aws.Float64(1.0), Timestamp: ts.Add(10 * time.Minute)}, + }, + }, + { + ID: "metric-2", DataPoints: []cloudwatch_client.DataPoint{ + {Value: aws.Float64(2.0), Timestamp: ts}, + }, + }, }, }, { - name: "metric with no values", + name: "metric with no values", + exportAllDataPoints: false, getMetricDataOutput: cloudwatch.GetMetricDataOutput{ MetricDataResults: []*cloudwatch.MetricDataResult{ { @@ -88,15 +99,54 @@ func Test_toMetricDataResult(t *testing.T) { }, }, expectedMetricDataResults: []cloudwatch_client.MetricDataResult{ - {ID: "metric-1", Datapoint: aws.Float64(1.0), Timestamp: ts.Add(10 * time.Minute)}, - {ID: "metric-2", Datapoint: nil, Timestamp: time.Time{}}, + { + ID: "metric-1", DataPoints: []cloudwatch_client.DataPoint{ + {Value: aws.Float64(1.0), Timestamp: ts.Add(10 * time.Minute)}, + }, + }, + { + ID: "metric-2", + DataPoints: []cloudwatch_client.DataPoint{}, + }, + }, + }, + { + name: "export all data points", + exportAllDataPoints: true, + getMetricDataOutput: cloudwatch.GetMetricDataOutput{ + MetricDataResults: []*cloudwatch.MetricDataResult{ + { + Id: aws.String("metric-1"), + Values: []*float64{aws.Float64(1.0), aws.Float64(2.0), aws.Float64(3.0)}, + Timestamps: []*time.Time{aws.Time(ts.Add(10 * time.Minute)), aws.Time(ts.Add(5 * time.Minute)), aws.Time(ts)}, + }, + { + Id: 
aws.String("metric-2"), + Values: []*float64{aws.Float64(2.0)}, + Timestamps: []*time.Time{aws.Time(ts)}, + }, + }, + }, + expectedMetricDataResults: []cloudwatch_client.MetricDataResult{ + { + ID: "metric-1", DataPoints: []cloudwatch_client.DataPoint{ + {Value: aws.Float64(1.0), Timestamp: ts.Add(10 * time.Minute)}, + {Value: aws.Float64(2.0), Timestamp: ts.Add(5 * time.Minute)}, + {Value: aws.Float64(3.0), Timestamp: ts}, + }, + }, + { + ID: "metric-2", DataPoints: []cloudwatch_client.DataPoint{ + {Value: aws.Float64(2.0), Timestamp: ts}, + }, + }, }, }, } for _, tc := range testCases { t.Run(tc.name, func(t *testing.T) { - metricDataResults := toMetricDataResult(tc.getMetricDataOutput) + metricDataResults := toMetricDataResult(tc.getMetricDataOutput, tc.exportAllDataPoints) require.Equal(t, tc.expectedMetricDataResults, metricDataResults) }) } diff --git a/pkg/clients/cloudwatch/v2/client.go b/pkg/clients/cloudwatch/v2/client.go index 92a263b3a..0ba6933bd 100644 --- a/pkg/clients/cloudwatch/v2/client.go +++ b/pkg/clients/cloudwatch/v2/client.go @@ -98,6 +98,7 @@ func toModelDimensions(dimensions []types.Dimension) []model.Dimension { func (c client) GetMetricData(ctx context.Context, getMetricData []*model.CloudwatchData, namespace string, startTime time.Time, endTime time.Time) []cloudwatch_client.MetricDataResult { metricDataQueries := make([]types.MetricDataQuery, 0, len(getMetricData)) + exportAllDataPoints := false for _, data := range getMetricData { metricStat := &types.MetricStat{ Metric: &types.Metric{ @@ -113,6 +114,7 @@ func (c client) GetMetricData(ctx context.Context, getMetricData []*model.Cloudw MetricStat: metricStat, ReturnData: aws.Bool(true), }) + exportAllDataPoints = exportAllDataPoints || data.MetricMigrationParams.ExportAllDataPoints } input := &cloudwatch.GetMetricDataInput{ @@ -143,23 +145,32 @@ func (c client) GetMetricData(ctx context.Context, getMetricData []*model.Cloudw c.logger.Debug("GetMetricData", "output", resp) - return toMetricDataResult(resp) + return toMetricDataResult(resp, exportAllDataPoints) } -func toMetricDataResult(resp cloudwatch.GetMetricDataOutput) []cloudwatch_client.MetricDataResult { +func toMetricDataResult(resp cloudwatch.GetMetricDataOutput, exportAllDataPoints bool) []cloudwatch_client.MetricDataResult { output := make([]cloudwatch_client.MetricDataResult, 0, len(resp.MetricDataResults)) for _, metricDataResult := range resp.MetricDataResults { - mappedResult := cloudwatch_client.MetricDataResult{ID: *metricDataResult.Id} - if len(metricDataResult.Values) > 0 { - mappedResult.Datapoint = &metricDataResult.Values[0] - mappedResult.Timestamp = metricDataResult.Timestamps[0] + mappedResult := cloudwatch_client.MetricDataResult{ + ID: *metricDataResult.Id, + DataPoints: make([]cloudwatch_client.DataPoint, 0, len(metricDataResult.Timestamps)), + } + for i := 0; i < len(metricDataResult.Timestamps); i++ { + mappedResult.DataPoints = append(mappedResult.DataPoints, cloudwatch_client.DataPoint{ + Value: &metricDataResult.Values[i], + Timestamp: metricDataResult.Timestamps[i], + }) + + if !exportAllDataPoints { + break + } } output = append(output, mappedResult) } return output } -func (c client) GetMetricStatistics(ctx context.Context, logger *slog.Logger, dimensions []model.Dimension, namespace string, metric *model.MetricConfig) []*model.Datapoint { +func (c client) GetMetricStatistics(ctx context.Context, logger *slog.Logger, dimensions []model.Dimension, namespace string, metric *model.MetricConfig) []*model.MetricStatisticsResult { 
filter := createGetMetricStatisticsInput(logger, dimensions, &namespace, metric) c.logger.Debug("GetMetricStatistics", "input", filter) @@ -181,18 +192,18 @@ func (c client) GetMetricStatistics(ctx context.Context, logger *slog.Logger, di ptrs = append(ptrs, &datapoint) } - return toModelDatapoints(ptrs) + return toModelDataPoints(ptrs) } -func toModelDatapoints(cwDatapoints []*types.Datapoint) []*model.Datapoint { - modelDataPoints := make([]*model.Datapoint, 0, len(cwDatapoints)) +func toModelDataPoints(cwDataPoints []*types.Datapoint) []*model.MetricStatisticsResult { + modelDataPoints := make([]*model.MetricStatisticsResult, 0, len(cwDataPoints)) - for _, cwDatapoint := range cwDatapoints { + for _, cwDatapoint := range cwDataPoints { extendedStats := make(map[string]*float64, len(cwDatapoint.ExtendedStatistics)) for name, value := range cwDatapoint.ExtendedStatistics { extendedStats[name] = &value } - modelDataPoints = append(modelDataPoints, &model.Datapoint{ + modelDataPoints = append(modelDataPoints, &model.MetricStatisticsResult{ Average: cwDatapoint.Average, ExtendedStatistics: extendedStats, Maximum: cwDatapoint.Maximum, diff --git a/pkg/clients/cloudwatch/v2/client_test.go b/pkg/clients/cloudwatch/v2/client_test.go index 9440fe66e..f45cfe434 100644 --- a/pkg/clients/cloudwatch/v2/client_test.go +++ b/pkg/clients/cloudwatch/v2/client_test.go @@ -30,13 +30,15 @@ func Test_toMetricDataResult(t *testing.T) { type testCase struct { name string + exportAllDataPoints bool getMetricDataOutput cloudwatch.GetMetricDataOutput expectedMetricDataResults []cloudwatch_client.MetricDataResult } testCases := []testCase{ { - name: "all metrics present", + name: "all metrics present", + exportAllDataPoints: false, getMetricDataOutput: cloudwatch.GetMetricDataOutput{ MetricDataResults: []types.MetricDataResult{ { @@ -52,12 +54,21 @@ func Test_toMetricDataResult(t *testing.T) { }, }, expectedMetricDataResults: []cloudwatch_client.MetricDataResult{ - {ID: "metric-1", Datapoint: aws.Float64(1.0), Timestamp: ts.Add(10 * time.Minute)}, - {ID: "metric-2", Datapoint: aws.Float64(2.0), Timestamp: ts}, + { + ID: "metric-1", DataPoints: []cloudwatch_client.DataPoint{ + {Value: aws.Float64(1.0), Timestamp: ts.Add(10 * time.Minute)}, + }, + }, + { + ID: "metric-2", DataPoints: []cloudwatch_client.DataPoint{ + {Value: aws.Float64(2.0), Timestamp: ts}, + }, + }, }, }, { - name: "metric with no values", + name: "metric with no values", + exportAllDataPoints: false, getMetricDataOutput: cloudwatch.GetMetricDataOutput{ MetricDataResults: []types.MetricDataResult{ { @@ -73,15 +84,54 @@ func Test_toMetricDataResult(t *testing.T) { }, }, expectedMetricDataResults: []cloudwatch_client.MetricDataResult{ - {ID: "metric-1", Datapoint: aws.Float64(1.0), Timestamp: ts.Add(10 * time.Minute)}, - {ID: "metric-2", Datapoint: nil, Timestamp: time.Time{}}, + { + ID: "metric-1", DataPoints: []cloudwatch_client.DataPoint{ + {Value: aws.Float64(1.0), Timestamp: ts.Add(10 * time.Minute)}, + }, + }, + { + ID: "metric-2", + DataPoints: []cloudwatch_client.DataPoint{}, + }, + }, + }, + { + name: "export all data points", + exportAllDataPoints: true, + getMetricDataOutput: cloudwatch.GetMetricDataOutput{ + MetricDataResults: []types.MetricDataResult{ + { + Id: aws.String("metric-1"), + Values: []float64{1.0, 2.0, 3.0}, + Timestamps: []time.Time{ts.Add(10 * time.Minute), ts.Add(5 * time.Minute), ts}, + }, + { + Id: aws.String("metric-2"), + Values: []float64{2.0}, + Timestamps: []time.Time{ts}, + }, + }, + }, + expectedMetricDataResults: 
[]cloudwatch_client.MetricDataResult{ + { + ID: "metric-1", DataPoints: []cloudwatch_client.DataPoint{ + {Value: aws.Float64(1.0), Timestamp: ts.Add(10 * time.Minute)}, + {Value: aws.Float64(2.0), Timestamp: ts.Add(5 * time.Minute)}, + {Value: aws.Float64(3.0), Timestamp: ts}, + }, + }, + { + ID: "metric-2", DataPoints: []cloudwatch_client.DataPoint{ + {Value: aws.Float64(2.0), Timestamp: ts}, + }, + }, }, }, } for _, tc := range testCases { t.Run(tc.name, func(t *testing.T) { - metricDataResults := toMetricDataResult(tc.getMetricDataOutput) + metricDataResults := toMetricDataResult(tc.getMetricDataOutput, tc.exportAllDataPoints) require.Equal(t, tc.expectedMetricDataResults, metricDataResults) }) } diff --git a/pkg/clients/v2/factory_test.go b/pkg/clients/v2/factory_test.go index 56e9d5df8..0d2f257de 100644 --- a/pkg/clients/v2/factory_test.go +++ b/pkg/clients/v2/factory_test.go @@ -496,6 +496,6 @@ func (t testClient) GetMetricData(_ context.Context, _ []*model.CloudwatchData, return nil } -func (t testClient) GetMetricStatistics(_ context.Context, _ *slog.Logger, _ []model.Dimension, _ string, _ *model.MetricConfig) []*model.Datapoint { +func (t testClient) GetMetricStatistics(_ context.Context, _ *slog.Logger, _ []model.Dimension, _ string, _ *model.MetricConfig) []*model.MetricStatisticsResult { return nil } diff --git a/pkg/config/config.go b/pkg/config/config.go index ca44ce9db..cf1a83dce 100644 --- a/pkg/config/config.go +++ b/pkg/config/config.go @@ -52,6 +52,7 @@ type JobLevelMetricFields struct { Delay int64 `yaml:"delay"` NilToZero *bool `yaml:"nilToZero"` AddCloudwatchTimestamp *bool `yaml:"addCloudwatchTimestamp"` + ExportAllDataPoints *bool `yaml:"exportAllDataPoints"` } type Job struct { @@ -99,6 +100,7 @@ type Metric struct { Delay int64 `yaml:"delay"` NilToZero *bool `yaml:"nilToZero"` AddCloudwatchTimestamp *bool `yaml:"addCloudwatchTimestamp"` + ExportAllDataPoints *bool `yaml:"exportAllDataPoints"` } type Dimension struct { @@ -154,7 +156,7 @@ func (c *ScrapeConf) Load(file string, logger *slog.Logger) (model.JobsConfig, e func (c *ScrapeConf) Validate(logger *slog.Logger) (model.JobsConfig, error) { if c.Discovery.Jobs == nil && c.Static == nil && c.CustomNamespace == nil { - return model.JobsConfig{}, fmt.Errorf("At least 1 Discovery job, 1 Static or one CustomNamespace must be defined") + return model.JobsConfig{}, fmt.Errorf("at least 1 Discovery job, 1 Static or one CustomNamespace must be defined") } if c.Discovery.Jobs != nil { @@ -387,6 +389,19 @@ func (m *Metric) validateMetric(logger *slog.Logger, metricIdx int, parent strin } } + mExportAllDataPoints := m.ExportAllDataPoints + if mExportAllDataPoints == nil { + if discovery != nil && discovery.ExportAllDataPoints != nil { + mExportAllDataPoints = discovery.ExportAllDataPoints + } else { + mExportAllDataPoints = aws.Bool(false) + } + } + + if aws.BoolValue(mExportAllDataPoints) && !aws.BoolValue(mAddCloudwatchTimestamp) { + return fmt.Errorf("Metric [%s/%d] in %v: ExportAllDataPoints can only be enabled if AddCloudwatchTimestamp is enabled", m.Name, metricIdx, parent) + } + if mLength < mPeriod { return fmt.Errorf( "Metric [%s/%d] in %v: length(%d) is smaller than period(%d). 
This can cause that the data requested is not ready and generate data gaps", @@ -398,6 +413,7 @@ func (m *Metric) validateMetric(logger *slog.Logger, metricIdx int, parent strin m.Delay = mDelay m.NilToZero = mNilToZero m.AddCloudwatchTimestamp = mAddCloudwatchTimestamp + m.ExportAllDataPoints = mExportAllDataPoints m.Statistics = mStatistics return nil @@ -519,6 +535,7 @@ func toModelMetricConfig(metrics []*Metric) []*model.MetricConfig { Delay: m.Delay, NilToZero: aws.BoolValue(m.NilToZero), AddCloudwatchTimestamp: aws.BoolValue(m.AddCloudwatchTimestamp), + ExportAllDataPoints: aws.BoolValue(m.ExportAllDataPoints), }) } return ret diff --git a/pkg/job/custom.go b/pkg/job/custom.go index 4e40fbd5e..cd4a11bdb 100644 --- a/pkg/job/custom.go +++ b/pkg/job/custom.go @@ -86,6 +86,7 @@ func getMetricDataForQueriesForCustomNamespace( MetricMigrationParams: model.MetricMigrationParams{ NilToZero: metric.NilToZero, AddCloudwatchTimestamp: metric.AddCloudwatchTimestamp, + ExportAllDataPoints: metric.ExportAllDataPoints, }, Tags: nil, GetMetricDataResult: nil, diff --git a/pkg/job/discovery.go b/pkg/job/discovery.go index 168ca1549..d0a4b8899 100644 --- a/pkg/job/discovery.go +++ b/pkg/job/discovery.go @@ -179,6 +179,7 @@ func getFilteredMetricDatas( MetricMigrationParams: model.MetricMigrationParams{ NilToZero: m.NilToZero, AddCloudwatchTimestamp: m.AddCloudwatchTimestamp, + ExportAllDataPoints: m.ExportAllDataPoints, }, Tags: metricTags, GetMetricDataResult: nil, diff --git a/pkg/job/getmetricdata/processor.go b/pkg/job/getmetricdata/processor.go index 21c8e7f6f..defd46456 100644 --- a/pkg/job/getmetricdata/processor.go +++ b/pkg/job/getmetricdata/processor.go @@ -131,10 +131,15 @@ func mapResultsToBatch(logger *slog.Logger, results []cloudwatch.MetricDataResul } if batch[id].GetMetricDataResult == nil { cloudwatchData := batch[id] + + mappedDataPoints := make([]model.DataPoint, 0, len(entry.DataPoints)) + for i := 0; i < len(entry.DataPoints); i++ { + mappedDataPoints = append(mappedDataPoints, model.DataPoint{Value: entry.DataPoints[i].Value, Timestamp: entry.DataPoints[i].Timestamp}) + } + cloudwatchData.GetMetricDataResult = &model.GetMetricDataResult{ - Statistic: cloudwatchData.GetMetricDataProcessingParams.Statistic, - Datapoint: entry.Datapoint, - Timestamp: entry.Timestamp, + Statistic: cloudwatchData.GetMetricDataProcessingParams.Statistic, + DataPoints: mappedDataPoints, } // All GetMetricData processing is done clear the params diff --git a/pkg/job/getmetricdata/processor_test.go b/pkg/job/getmetricdata/processor_test.go index 3ccca6441..d037f43d8 100644 --- a/pkg/job/getmetricdata/processor_test.go +++ b/pkg/job/getmetricdata/processor_test.go @@ -78,10 +78,10 @@ func TestProcessor_Run(t *testing.T) { {MetricName: "metric-1", GetMetricDataProcessingParams: &model.GetMetricDataProcessingParams{Statistic: "Average"}}, }, metricDataResultForMetrics: []metricDataResultForMetric{ - {MetricName: "metric-1", result: cloudwatch.MetricDataResult{Datapoint: aws.Float64(1000), Timestamp: now}}, + {MetricName: "metric-1", result: cloudwatch.MetricDataResult{DataPoints: []cloudwatch.DataPoint{{Value: aws.Float64(1000), Timestamp: now}}}}, }, want: []cloudwatchDataOutput{ - {MetricName: "metric-1", GetMetricDataResult: &model.GetMetricDataResult{Statistic: "Average", Datapoint: aws.Float64(1000), Timestamp: now}}, + {MetricName: "metric-1", GetMetricDataResult: &model.GetMetricDataResult{Statistic: "Average", DataPoints: []model.DataPoint{{Value: aws.Float64(1000), Timestamp: now}}}}, }, }, { @@ 
-90,14 +90,13 @@ func TestProcessor_Run(t *testing.T) { {GetMetricDataProcessingParams: &model.GetMetricDataProcessingParams{Statistic: "Min"}, MetricName: "MetricOne"}, }, metricDataResultForMetrics: []metricDataResultForMetric{ - {MetricName: "MetricOne", result: cloudwatch.MetricDataResult{Datapoint: aws.Float64(5), Timestamp: time.Date(2023, time.June, 7, 1, 9, 8, 0, time.UTC)}}, - {MetricName: "MetricOne", result: cloudwatch.MetricDataResult{Datapoint: aws.Float64(15), Timestamp: time.Date(2023, time.June, 7, 2, 9, 8, 0, time.UTC)}}, + {MetricName: "MetricOne", result: cloudwatch.MetricDataResult{DataPoints: []cloudwatch.DataPoint{{Value: aws.Float64(5), Timestamp: time.Date(2023, time.June, 7, 1, 9, 8, 0, time.UTC)}}}}, + {MetricName: "MetricOne", result: cloudwatch.MetricDataResult{DataPoints: []cloudwatch.DataPoint{{Value: aws.Float64(15), Timestamp: time.Date(2023, time.June, 7, 2, 9, 8, 0, time.UTC)}}}}, }, want: []cloudwatchDataOutput{ {MetricName: "MetricOne", GetMetricDataResult: &model.GetMetricDataResult{ - Statistic: "Min", - Datapoint: aws.Float64(5), - Timestamp: time.Date(2023, time.June, 7, 1, 9, 8, 0, time.UTC), + Statistic: "Min", + DataPoints: []model.DataPoint{{Value: aws.Float64(5), Timestamp: time.Date(2023, time.June, 7, 1, 9, 8, 0, time.UTC)}}, }}, }, }, @@ -108,37 +107,35 @@ func TestProcessor_Run(t *testing.T) { {MetricName: "metric-2", GetMetricDataProcessingParams: &model.GetMetricDataProcessingParams{Statistic: "Average"}}, }, metricDataResultForMetrics: []metricDataResultForMetric{ - {MetricName: "metric-1", result: cloudwatch.MetricDataResult{Datapoint: aws.Float64(1000), Timestamp: now}}, + {MetricName: "metric-1", result: cloudwatch.MetricDataResult{DataPoints: []cloudwatch.DataPoint{{Value: aws.Float64(1000), Timestamp: now}}}}, }, want: []cloudwatchDataOutput{ - {MetricName: "metric-1", GetMetricDataResult: &model.GetMetricDataResult{Statistic: "Average", Datapoint: aws.Float64(1000), Timestamp: now}}, + {MetricName: "metric-1", GetMetricDataResult: &model.GetMetricDataResult{Statistic: "Average", DataPoints: []model.DataPoint{{Value: aws.Float64(1000), Timestamp: now}}}}, }, }, { - name: "maps nil metric datapoints", + name: "maps nil metric dataPoints", requests: []*cloudwatchDataInput{ {GetMetricDataProcessingParams: &model.GetMetricDataProcessingParams{Statistic: "Min"}, MetricName: "MetricOne"}, {GetMetricDataProcessingParams: &model.GetMetricDataProcessingParams{Statistic: "Max"}, MetricName: "MetricTwo"}, }, metricDataResultForMetrics: []metricDataResultForMetric{ - {MetricName: "MetricOne", result: cloudwatch.MetricDataResult{Datapoint: aws.Float64(5), Timestamp: time.Date(2023, time.June, 7, 1, 9, 8, 0, time.UTC)}}, + {MetricName: "MetricOne", result: cloudwatch.MetricDataResult{DataPoints: []cloudwatch.DataPoint{{Value: aws.Float64(5), Timestamp: time.Date(2023, time.June, 7, 1, 9, 8, 0, time.UTC)}}}}, {MetricName: "MetricTwo"}, }, want: []cloudwatchDataOutput{ { MetricName: "MetricOne", GetMetricDataResult: &model.GetMetricDataResult{ - Statistic: "Min", - Datapoint: aws.Float64(5), - Timestamp: time.Date(2023, time.June, 7, 1, 9, 8, 0, time.UTC), + Statistic: "Min", + DataPoints: []model.DataPoint{{Value: aws.Float64(5), Timestamp: time.Date(2023, time.June, 7, 1, 9, 8, 0, time.UTC)}}, }, }, { MetricName: "MetricTwo", GetMetricDataResult: &model.GetMetricDataResult{ - Statistic: "Max", - Datapoint: nil, - Timestamp: time.Time{}, + Statistic: "Max", + DataPoints: []model.DataPoint{}, }, }, }, @@ -153,42 +150,38 @@ func TestProcessor_Run(t 
*testing.T) { {GetMetricDataProcessingParams: &model.GetMetricDataProcessingParams{Statistic: "Count"}, MetricName: "MetricFour"}, }, metricDataResultForMetrics: []metricDataResultForMetric{ - {MetricName: "MetricOne", result: cloudwatch.MetricDataResult{Datapoint: aws.Float64(5), Timestamp: time.Date(2023, time.June, 7, 1, 9, 8, 0, time.UTC)}}, - {MetricName: "MetricTwo", result: cloudwatch.MetricDataResult{Datapoint: aws.Float64(12), Timestamp: time.Date(2023, time.June, 7, 2, 9, 8, 0, time.UTC)}}, - {MetricName: "MetricThree", result: cloudwatch.MetricDataResult{Datapoint: aws.Float64(15), Timestamp: time.Date(2023, time.June, 7, 3, 9, 8, 0, time.UTC)}}, - {MetricName: "MetricFour", result: cloudwatch.MetricDataResult{Datapoint: aws.Float64(20), Timestamp: time.Date(2023, time.June, 7, 4, 9, 8, 0, time.UTC)}}, + {MetricName: "MetricOne", result: cloudwatch.MetricDataResult{DataPoints: []cloudwatch.DataPoint{{Value: aws.Float64(5), Timestamp: time.Date(2023, time.June, 7, 1, 9, 8, 0, time.UTC)}}}}, + {MetricName: "MetricTwo", result: cloudwatch.MetricDataResult{DataPoints: []cloudwatch.DataPoint{{Value: aws.Float64(12), Timestamp: time.Date(2023, time.June, 7, 2, 9, 8, 0, time.UTC)}}}}, + {MetricName: "MetricThree", result: cloudwatch.MetricDataResult{DataPoints: []cloudwatch.DataPoint{{Value: aws.Float64(15), Timestamp: time.Date(2023, time.June, 7, 3, 9, 8, 0, time.UTC)}}}}, + {MetricName: "MetricFour", result: cloudwatch.MetricDataResult{DataPoints: []cloudwatch.DataPoint{{Value: aws.Float64(20), Timestamp: time.Date(2023, time.June, 7, 4, 9, 8, 0, time.UTC)}}}}, }, want: []cloudwatchDataOutput{ { MetricName: "MetricOne", GetMetricDataResult: &model.GetMetricDataResult{ - Statistic: "Min", - Datapoint: aws.Float64(5), - Timestamp: time.Date(2023, time.June, 7, 1, 9, 8, 0, time.UTC), + Statistic: "Min", + DataPoints: []model.DataPoint{{Value: aws.Float64(5), Timestamp: time.Date(2023, time.June, 7, 1, 9, 8, 0, time.UTC)}}, }, }, { MetricName: "MetricTwo", GetMetricDataResult: &model.GetMetricDataResult{ - Statistic: "Max", - Datapoint: aws.Float64(12), - Timestamp: time.Date(2023, time.June, 7, 2, 9, 8, 0, time.UTC), + Statistic: "Max", + DataPoints: []model.DataPoint{{Value: aws.Float64(12), Timestamp: time.Date(2023, time.June, 7, 2, 9, 8, 0, time.UTC)}}, }, }, { MetricName: "MetricThree", GetMetricDataResult: &model.GetMetricDataResult{ - Statistic: "Sum", - Datapoint: aws.Float64(15), - Timestamp: time.Date(2023, time.June, 7, 3, 9, 8, 0, time.UTC), + Statistic: "Sum", + DataPoints: []model.DataPoint{{Value: aws.Float64(15), Timestamp: time.Date(2023, time.June, 7, 3, 9, 8, 0, time.UTC)}}, }, }, { MetricName: "MetricFour", GetMetricDataResult: &model.GetMetricDataResult{ - Statistic: "Count", - Datapoint: aws.Float64(20), - Timestamp: time.Date(2023, time.June, 7, 4, 9, 8, 0, time.UTC), + Statistic: "Count", + DataPoints: []model.DataPoint{{Value: aws.Float64(20), Timestamp: time.Date(2023, time.June, 7, 4, 9, 8, 0, time.UTC)}}, }, }, }, @@ -266,6 +259,7 @@ func getSampleMetricDatas(id string) *model.CloudwatchData { MetricMigrationParams: model.MetricMigrationParams{ NilToZero: false, AddCloudwatchTimestamp: false, + ExportAllDataPoints: false, }, GetMetricDataProcessingParams: &model.GetMetricDataProcessingParams{ Period: 60, @@ -317,9 +311,8 @@ func doBench(b *testing.B, metricsPerQuery, testResourcesCount int, concurrency results := make([]cloudwatch.MetricDataResult, 0, len(getMetricData)) for _, entry := range getMetricData { results = append(results, 
cloudwatch.MetricDataResult{ - ID: entry.GetMetricDataProcessingParams.QueryID, - Datapoint: aws.Float64(1), - Timestamp: time.Now(), + ID: entry.GetMetricDataProcessingParams.QueryID, + DataPoints: []cloudwatch.DataPoint{{Value: aws.Float64(1), Timestamp: time.Now()}}, }) } b.StartTimer() diff --git a/pkg/job/scraper_test.go b/pkg/job/scraper_test.go index c27cbb314..1b25f7855 100644 --- a/pkg/job/scraper_test.go +++ b/pkg/job/scraper_test.go @@ -123,7 +123,7 @@ func TestScrapeRunner_Run(t *testing.T) { Namespace: "aws-namespace", Tags: []model.Tag{{Key: "tag1", Value: "value1"}}, Dimensions: []model.Dimension{{Name: "dimension1", Value: "value1"}}, - GetMetricDataResult: &model.GetMetricDataResult{Statistic: "Maximum", Datapoint: aws.Float64(1.0), Timestamp: time.Time{}}, + GetMetricDataResult: &model.GetMetricDataResult{Statistic: "Maximum", DataPoints: []model.DataPoint{{Value: aws.Float64(1.0), Timestamp: time.Time{}}}}, }, }, nil }, @@ -145,7 +145,7 @@ func TestScrapeRunner_Run(t *testing.T) { Namespace: "aws-namespace", Tags: []model.Tag{{Key: "tag1", Value: "value1"}}, Dimensions: []model.Dimension{{Name: "dimension1", Value: "value1"}}, - GetMetricDataResult: &model.GetMetricDataResult{Statistic: "Maximum", Datapoint: aws.Float64(1.0), Timestamp: time.Time{}}, + GetMetricDataResult: &model.GetMetricDataResult{Statistic: "Maximum", DataPoints: []model.DataPoint{{Value: aws.Float64(1.0), Timestamp: time.Time{}}}}, }, }, }, @@ -178,7 +178,7 @@ func TestScrapeRunner_Run(t *testing.T) { ResourceName: "resource-2", Namespace: "custom-namespace", Dimensions: []model.Dimension{{Name: "dimension2", Value: "value2"}}, - GetMetricDataResult: &model.GetMetricDataResult{Statistic: "Minimum", Datapoint: aws.Float64(2.0), Timestamp: time.Time{}}, + GetMetricDataResult: &model.GetMetricDataResult{Statistic: "Minimum", DataPoints: []model.DataPoint{{Value: aws.Float64(2.0), Timestamp: time.Time{}}}}, }, }, nil }, @@ -191,7 +191,7 @@ func TestScrapeRunner_Run(t *testing.T) { ResourceName: "resource-2", Namespace: "custom-namespace", Dimensions: []model.Dimension{{Name: "dimension2", Value: "value2"}}, - GetMetricDataResult: &model.GetMetricDataResult{Statistic: "Minimum", Datapoint: aws.Float64(2.0), Timestamp: time.Time{}}, + GetMetricDataResult: &model.GetMetricDataResult{Statistic: "Minimum", DataPoints: []model.DataPoint{{Value: aws.Float64(2.0), Timestamp: time.Time{}}}}, }, }, }, @@ -239,7 +239,7 @@ func TestScrapeRunner_Run(t *testing.T) { ResourceName: "resource-2", Namespace: "custom-namespace", Dimensions: []model.Dimension{{Name: "dimension2", Value: "value2"}}, - GetMetricDataResult: &model.GetMetricDataResult{Statistic: "Minimum", Datapoint: aws.Float64(2.0), Timestamp: time.Time{}}, + GetMetricDataResult: &model.GetMetricDataResult{Statistic: "Minimum", DataPoints: []model.DataPoint{{Value: aws.Float64(2.0), Timestamp: time.Time{}}}}, }, }, nil } @@ -250,7 +250,7 @@ func TestScrapeRunner_Run(t *testing.T) { Namespace: "aws-namespace", Tags: []model.Tag{{Key: "tag1", Value: "value1"}}, Dimensions: []model.Dimension{{Name: "dimension1", Value: "value1"}}, - GetMetricDataResult: &model.GetMetricDataResult{Statistic: "Maximum", Datapoint: aws.Float64(1.0), Timestamp: time.Time{}}, + GetMetricDataResult: &model.GetMetricDataResult{Statistic: "Maximum", DataPoints: []model.DataPoint{{Value: aws.Float64(1.0), Timestamp: time.Time{}}}}, }, }, nil }, @@ -272,7 +272,7 @@ func TestScrapeRunner_Run(t *testing.T) { Namespace: "aws-namespace", Tags: []model.Tag{{Key: "tag1", Value: "value1"}}, 
Dimensions: []model.Dimension{{Name: "dimension1", Value: "value1"}}, - GetMetricDataResult: &model.GetMetricDataResult{Statistic: "Maximum", Datapoint: aws.Float64(1.0), Timestamp: time.Time{}}, + GetMetricDataResult: &model.GetMetricDataResult{Statistic: "Maximum", DataPoints: []model.DataPoint{{Value: aws.Float64(1.0), Timestamp: time.Time{}}}}, }, }, }, @@ -284,7 +284,7 @@ func TestScrapeRunner_Run(t *testing.T) { ResourceName: "resource-2", Namespace: "custom-namespace", Dimensions: []model.Dimension{{Name: "dimension2", Value: "value2"}}, - GetMetricDataResult: &model.GetMetricDataResult{Statistic: "Minimum", Datapoint: aws.Float64(2.0), Timestamp: time.Time{}}, + GetMetricDataResult: &model.GetMetricDataResult{Statistic: "Minimum", DataPoints: []model.DataPoint{{Value: aws.Float64(2.0), Timestamp: time.Time{}}}}, }, }, }, @@ -351,7 +351,7 @@ func TestScrapeRunner_Run(t *testing.T) { Namespace: "aws-namespace", Tags: []model.Tag{{Key: "tag1", Value: "value1"}}, Dimensions: []model.Dimension{{Name: "dimension1", Value: "value1"}}, - GetMetricDataResult: &model.GetMetricDataResult{Statistic: "Maximum", Datapoint: aws.Float64(1.0), Timestamp: time.Time{}}, + GetMetricDataResult: &model.GetMetricDataResult{Statistic: "Maximum", DataPoints: []model.DataPoint{{Value: aws.Float64(1.0), Timestamp: time.Time{}}}}, }, }, nil }, @@ -373,7 +373,7 @@ func TestScrapeRunner_Run(t *testing.T) { Namespace: "aws-namespace", Tags: []model.Tag{{Key: "tag1", Value: "value1"}}, Dimensions: []model.Dimension{{Name: "dimension1", Value: "value1"}}, - GetMetricDataResult: &model.GetMetricDataResult{Statistic: "Maximum", Datapoint: aws.Float64(1.0), Timestamp: time.Time{}}, + GetMetricDataResult: &model.GetMetricDataResult{Statistic: "Maximum", DataPoints: []model.DataPoint{{Value: aws.Float64(1.0), Timestamp: time.Time{}}}}, }, }, }, @@ -418,7 +418,7 @@ func TestScrapeRunner_Run(t *testing.T) { ResourceName: "resource-2", Namespace: "custom-namespace", Dimensions: []model.Dimension{{Name: "dimension2", Value: "value2"}}, - GetMetricDataResult: &model.GetMetricDataResult{Statistic: "Minimum", Datapoint: aws.Float64(2.0), Timestamp: time.Time{}}, + GetMetricDataResult: &model.GetMetricDataResult{Statistic: "Minimum", DataPoints: []model.DataPoint{{Value: aws.Float64(2.0), Timestamp: time.Time{}}}}, }, }, nil }, @@ -431,7 +431,7 @@ func TestScrapeRunner_Run(t *testing.T) { ResourceName: "resource-2", Namespace: "custom-namespace", Dimensions: []model.Dimension{{Name: "dimension2", Value: "value2"}}, - GetMetricDataResult: &model.GetMetricDataResult{Statistic: "Minimum", Datapoint: aws.Float64(2.0), Timestamp: time.Time{}}, + GetMetricDataResult: &model.GetMetricDataResult{Statistic: "Minimum", DataPoints: []model.DataPoint{{Value: aws.Float64(2.0), Timestamp: time.Time{}}}}, }, }, }, @@ -493,7 +493,7 @@ func TestScrapeRunner_Run(t *testing.T) { Namespace: "aws-namespace", Tags: []model.Tag{{Key: "tag1", Value: "value1"}}, Dimensions: []model.Dimension{{Name: "dimension1", Value: "value1"}}, - GetMetricDataResult: &model.GetMetricDataResult{Statistic: "Maximum", Datapoint: aws.Float64(1.0), Timestamp: time.Time{}}, + GetMetricDataResult: &model.GetMetricDataResult{Statistic: "Maximum", DataPoints: []model.DataPoint{{Value: aws.Float64(1.0), Timestamp: time.Time{}}}}, }, }, nil }, @@ -515,7 +515,7 @@ func TestScrapeRunner_Run(t *testing.T) { Namespace: "aws-namespace", Tags: []model.Tag{{Key: "tag1", Value: "value1"}}, Dimensions: []model.Dimension{{Name: "dimension1", Value: "value1"}}, - GetMetricDataResult: 
&model.GetMetricDataResult{Statistic: "Maximum", Datapoint: aws.Float64(1.0), Timestamp: time.Time{}}, + GetMetricDataResult: &model.GetMetricDataResult{Statistic: "Maximum", DataPoints: []model.DataPoint{{Value: aws.Float64(1.0), Timestamp: time.Time{}}}}, }, }, }, diff --git a/pkg/job/static.go b/pkg/job/static.go index 5b3cd4fe5..1b4cd21fb 100644 --- a/pkg/job/static.go +++ b/pkg/job/static.go @@ -53,11 +53,11 @@ func runStaticJob( } data.GetMetricStatisticsResult = &model.GetMetricStatisticsResult{ - Datapoints: clientCloudwatch.GetMetricStatistics(ctx, logger, data.Dimensions, resource.Namespace, metric), + Results: clientCloudwatch.GetMetricStatistics(ctx, logger, data.Dimensions, resource.Namespace, metric), Statistics: metric.Statistics, } - if data.GetMetricStatisticsResult.Datapoints != nil { + if data.GetMetricStatisticsResult.Results != nil { mux.Lock() cw = append(cw, &data) mux.Unlock() diff --git a/pkg/model/model.go b/pkg/model/model.go index deb2dbd53..cf9c3f698 100644 --- a/pkg/model/model.go +++ b/pkg/model/model.go @@ -80,6 +80,7 @@ type MetricConfig struct { Delay int64 NilToZero bool AddCloudwatchTimestamp bool + ExportAllDataPoints bool } type DimensionsRegexp struct { @@ -111,30 +112,6 @@ type Metric struct { Namespace string } -type Datapoint struct { - // The average of the metric values that correspond to the data point. - Average *float64 - - // The percentile statistic for the data point. - ExtendedStatistics map[string]*float64 - - // The maximum metric value for the data point. - Maximum *float64 - - // The minimum metric value for the data point. - Minimum *float64 - - // The number of metric values that contributed to the aggregate value of this - // data point. - SampleCount *float64 - - // The sum of the metric values for the data point. - Sum *float64 - - // The time stamp used for the data point. - Timestamp *time.Time -} - type CloudwatchMetricResult struct { Context *ScrapeContext Data []*CloudwatchData @@ -178,10 +155,34 @@ type CloudwatchData struct { } type GetMetricStatisticsResult struct { - Datapoints []*Datapoint + Results []*MetricStatisticsResult Statistics []string } +type MetricStatisticsResult struct { + // The average of the metric values that correspond to the data point. + Average *float64 + + // The percentile statistic for the data point. + ExtendedStatistics map[string]*float64 + + // The maximum metric value for the data point. + Maximum *float64 + + // The minimum metric value for the data point. + Minimum *float64 + + // The number of metric values that contributed to the aggregate value of this + // data point. + SampleCount *float64 + + // The sum of the metric values for the data point. + Sum *float64 + + // The time stamp used for the data point. 
+ Timestamp *time.Time } + type GetMetricDataProcessingParams struct { // QueryID is a value internal to processing used for mapping results from GetMetricData to their original request QueryID string @@ -198,11 +199,16 @@ type GetMetricDataProcessingParams struct { } type MetricMigrationParams struct { NilToZero bool AddCloudwatchTimestamp bool + ExportAllDataPoints bool } type GetMetricDataResult struct { - Statistic string - Datapoint *float64 + Statistic string + DataPoints []DataPoint +} + +type DataPoint struct { + Value *float64 Timestamp time.Time } diff --git a/pkg/promutil/migrate.go b/pkg/promutil/migrate.go index 6457d1364..4b1705af7 100644 --- a/pkg/promutil/migrate.go +++ b/pkg/promutil/migrate.go @@ -101,40 +101,59 @@ func BuildMetrics(results []model.CloudwatchMetricResult, labelsSnakeCase bool, } for _, statistic := range statisticsInCloudwatchData(metric) { - dataPoint, ts, err := getDatapoint(metric, statistic) - if err != nil { - return nil, nil, err + dataPoints, err := getDataPoints(metric, statistic) + if err != nil { + return nil, nil, err + } + for _, dataPoint := range dataPoints { + ts := dataPoint.Timestamp + dataPoint := dataPoint.Value + var exportedDatapoint float64 + if dataPoint == nil && metric.MetricMigrationParams.AddCloudwatchTimestamp { + // If we did not get a datapoint then the timestamp is a default value making it unusable in the + // exported metric. Attempting to put a fake timestamp on the metric will likely conflict with + // future CloudWatch timestamps which are always in the past. + if metric.MetricMigrationParams.ExportAllDataPoints { + // If we're exporting all data points, we can skip this one and check for a historical datapoint + continue + } else { + // If we are not exporting all data points, export nothing rather than guess at a timestamp + break + } + } + if dataPoint == nil { + exportedDatapoint = math.NaN() + } else { + exportedDatapoint = *dataPoint + } + + if metric.MetricMigrationParams.NilToZero && math.IsNaN(exportedDatapoint) { + exportedDatapoint = 0 + } + + name := BuildMetricName(metric.Namespace, metric.MetricName, statistic) + + promLabels := createPrometheusLabels(metric, labelsSnakeCase, contextLabels, logger) + observedMetricLabels = recordLabelsForMetric(name, promLabels, observedMetricLabels) + + if !metric.MetricMigrationParams.AddCloudwatchTimestamp { + // If we're not adding the original timestamp, we have to zero it so we can validate the data in the exporter via EnsureLabelConsistencyAndRemoveDuplicates + ts = time.Time{} + } + + output = append(output, &PrometheusMetric{ + Name: name, + Labels: promLabels, + Value: exportedDatapoint, + Timestamp: ts, + IncludeTimestamp: metric.MetricMigrationParams.AddCloudwatchTimestamp, + }) + + if !metric.MetricMigrationParams.ExportAllDataPoints { + // If we're not exporting all data points, we can skip the rest of the data points for this metric + break + } } - var exportedDatapoint float64 - if dataPoint == nil && metric.MetricMigrationParams.AddCloudwatchTimestamp { - // If we did not get a datapoint then the timestamp is a default value making it unusable in the - // exported metric. Attempting to put a fake timestamp on the metric will likely conflict with - // future CloudWatch timestamps which are always in the past.
It's safer to skip here than guess - continue } - if dataPoint == nil { - exportedDatapoint = math.NaN() - } else { - exportedDatapoint = *dataPoint - } - - if metric.MetricMigrationParams.NilToZero && math.IsNaN(exportedDatapoint) { - exportedDatapoint = 0 - } - - name := BuildMetricName(metric.Namespace, metric.MetricName, statistic) - - promLabels := createPrometheusLabels(metric, labelsSnakeCase, contextLabels, logger) - observedMetricLabels = recordLabelsForMetric(name, promLabels, observedMetricLabels) - - output = append(output, &PrometheusMetric{ - Name: name, - Labels: promLabels, - Value: exportedDatapoint, - Timestamp: ts, - IncludeTimestamp: metric.MetricMigrationParams.AddCloudwatchTimestamp, - }) - } } } @@ -152,37 +171,45 @@ func statisticsInCloudwatchData(d *model.CloudwatchData) []string { return []string{} } -func getDatapoint(cwd *model.CloudwatchData, statistic string) (*float64, time.Time, error) { +func getDataPoints(cwd *model.CloudwatchData, statistic string) ([]model.DataPoint, error) { // Not possible but for sanity if cwd.GetMetricStatisticsResult == nil && cwd.GetMetricDataResult == nil { - return nil, time.Time{}, fmt.Errorf("cannot map a data point with no results on %s", cwd.MetricName) + return nil, fmt.Errorf("cannot map a data point with no results on %s", cwd.MetricName) } if cwd.GetMetricDataResult != nil { - return cwd.GetMetricDataResult.Datapoint, cwd.GetMetricDataResult.Timestamp, nil + // If we have no dataPoints, we should return a single nil datapoint, which is then either dropped or converted to 0 + if len(cwd.GetMetricDataResult.DataPoints) == 0 && !cwd.MetricMigrationParams.AddCloudwatchTimestamp { + return []model.DataPoint{{ + Value: nil, + Timestamp: time.Time{}, + }}, nil + } + + return cwd.GetMetricDataResult.DataPoints, nil } - var averageDataPoints []*model.Datapoint + var averageDataPoints []*model.MetricStatisticsResult // sorting by timestamps so we can consistently export the most updated datapoint - // assuming Timestamp field in cloudwatch.Datapoint struct is never nil - for _, datapoint := range sortByTimestamp(cwd.GetMetricStatisticsResult.Datapoints) { + // assuming Timestamp field in model.MetricStatisticsResult struct is never nil + for _, datapoint := range sortByTimestamp(cwd.GetMetricStatisticsResult.Results) { switch { case statistic == "Maximum": if datapoint.Maximum != nil { - return datapoint.Maximum, *datapoint.Timestamp, nil + return []model.DataPoint{{Value: datapoint.Maximum, Timestamp: *datapoint.Timestamp}}, nil } case statistic == "Minimum": if datapoint.Minimum != nil { - return datapoint.Minimum, *datapoint.Timestamp, nil + return []model.DataPoint{{Value: datapoint.Minimum, Timestamp: *datapoint.Timestamp}}, nil } case statistic == "Sum": if datapoint.Sum != nil { - return datapoint.Sum, *datapoint.Timestamp, nil + return []model.DataPoint{{Value: datapoint.Sum, Timestamp: *datapoint.Timestamp}}, nil } case statistic == "SampleCount": if datapoint.SampleCount != nil { - return datapoint.SampleCount, *datapoint.Timestamp, nil + return []model.DataPoint{{Value: datapoint.SampleCount, Timestamp: *datapoint.Timestamp}}, nil } case statistic == "Average": if datapoint.Average != nil { @@ -190,10 +217,10 @@ func getDatapoint(cwd *model.CloudwatchData, statistic string) (*float64, time.T } case Percentile.MatchString(statistic): if data, ok := datapoint.ExtendedStatistics[statistic]; ok { - return data, *datapoint.Timestamp, nil + return []model.DataPoint{{Value: data, Timestamp: *datapoint.Timestamp}}, nil } default: - return
nil, time.Time{}, fmt.Errorf("invalid statistic requested on metric %s: %s", cwd.MetricName, statistic) + return nil, fmt.Errorf("invalid statistic requested on metric %s: %s", cwd.MetricName, statistic) } } @@ -208,17 +235,17 @@ func getDatapoint(cwd *model.CloudwatchData, statistic string) (*float64, time.T total += *p.Average } average := total / float64(len(averageDataPoints)) - return &average, timestamp, nil + return []model.DataPoint{{Value: &average, Timestamp: timestamp}}, nil } - return nil, time.Time{}, nil + return nil, nil } -func sortByTimestamp(datapoints []*model.Datapoint) []*model.Datapoint { - sort.Slice(datapoints, func(i, j int) bool { - jTimestamp := *datapoints[j].Timestamp - return datapoints[i].Timestamp.After(jTimestamp) +func sortByTimestamp(dataPoints []*model.MetricStatisticsResult) []*model.MetricStatisticsResult { + sort.Slice(dataPoints, func(i, j int) bool { + jTimestamp := *dataPoints[j].Timestamp + return dataPoints[i].Timestamp.After(jTimestamp) }) - return datapoints + return dataPoints } func createPrometheusLabels(cwd *model.CloudwatchData, labelsSnakeCase bool, contextLabels map[string]string, logger *slog.Logger) map[string]string { @@ -303,7 +330,10 @@ } } - metricKey := fmt.Sprintf("%s-%d", metric.Name, prom_model.LabelsToSignature(metric.Labels)) + // We include the timestamp in the metric key so that deduplication does not drop legitimate data points: + // with AddCloudwatchTimestamp enabled it's the real CloudWatch timestamp, otherwise it's a zero value. + // The timestamp is what keeps the multiple data points emitted by ExportAllDataPoints distinct. + metricKey := fmt.Sprintf("%s-%d-%d", metric.Name, prom_model.LabelsToSignature(metric.Labels), metric.Timestamp.Unix()) if _, exists := metricKeys[metricKey]; !exists { metricKeys[metricKey] = struct{}{} output = append(output, metric) diff --git a/pkg/promutil/migrate_test.go b/pkg/promutil/migrate_test.go index f617007d3..1b93148c5 100644 --- a/pkg/promutil/migrate_test.go +++ b/pkg/promutil/migrate_test.go @@ -287,6 +287,7 @@ func TestBuildNamespaceInfoMetrics(t *testing.T) { func TestBuildMetrics(t *testing.T) { ts := time.Date(2024, time.January, 1, 0, 0, 0, 0, time.UTC) + nullTs := time.Time{} type testCase struct { name string @@ -315,9 +316,8 @@ func TestBuildMetrics(t *testing.T) { }, Namespace: "AWS/ElastiCache", GetMetricDataResult: &model.GetMetricDataResult{ - Statistic: "Average", - Datapoint: aws.Float64(1), - Timestamp: ts, + Statistic: "Average", + DataPoints: []model.DataPoint{{Value: aws.Float64(1), Timestamp: ts}}, }, Dimensions: []model.Dimension{ { @@ -341,9 +341,8 @@ func TestBuildMetrics(t *testing.T) { }, }, GetMetricDataResult: &model.GetMetricDataResult{ - Statistic: "Average", - Datapoint: aws.Float64(2), - Timestamp: ts, + Statistic: "Average", + DataPoints: []model.DataPoint{{Value: aws.Float64(2), Timestamp: ts}}, }, ResourceName: "arn:aws:elasticache:us-east-1:123456789012:cluster:redis-cluster", }, @@ -361,9 +360,8 @@ func TestBuildMetrics(t *testing.T) { }, }, GetMetricDataResult: &model.GetMetricDataResult{ - Statistic: "Average", - Datapoint: aws.Float64(3), - Timestamp: ts, + Statistic: "Average", + DataPoints: []model.DataPoint{{Value: aws.Float64(3), Timestamp: ts}}, }, ResourceName: "arn:aws:elasticache:us-east-1:123456789012:cluster:redis-cluster", }, @@ -380,10 +378,81 @@ func TestBuildMetrics(t *testing.T) { Value: "redis-cluster", }, }, + GetMetricDataResult: &model.GetMetricDataResult{ + Statistic:
"Average", + DataPoints: []model.DataPoint{{Value: aws.Float64(4), Timestamp: ts}}, + }, + ResourceName: "arn:aws:elasticache:us-east-1:123456789012:cluster:redis-cluster", + }, + { + MetricName: "NetworkPacketsIn", + MetricMigrationParams: model.MetricMigrationParams{ + NilToZero: true, + AddCloudwatchTimestamp: true, + ExportAllDataPoints: true, + }, + Namespace: "AWS/ElastiCache", + Dimensions: []model.Dimension{ + { + Name: "CacheClusterId", + Value: "redis-cluster", + }, + }, + GetMetricDataResult: &model.GetMetricDataResult{ + Statistic: "Average", + DataPoints: []model.DataPoint{ + {Value: aws.Float64(4), Timestamp: ts}, + {Value: aws.Float64(5), Timestamp: ts.Add(-1 * time.Minute)}, + {Value: aws.Float64(6), Timestamp: ts.Add(-2 * time.Minute)}, + }, + }, + ResourceName: "arn:aws:elasticache:us-east-1:123456789012:cluster:redis-cluster", + }, + { + MetricName: "NetworkPacketsOut", + MetricMigrationParams: model.MetricMigrationParams{ + NilToZero: true, + AddCloudwatchTimestamp: true, + ExportAllDataPoints: true, + }, + Namespace: "AWS/ElastiCache", + Dimensions: []model.Dimension{ + { + Name: "CacheClusterId", + Value: "redis-cluster", + }, + }, GetMetricDataResult: &model.GetMetricDataResult{ Statistic: "Average", - Datapoint: aws.Float64(4), - Timestamp: ts, + DataPoints: []model.DataPoint{ + {Value: nil, Timestamp: ts}, + {Value: aws.Float64(5), Timestamp: ts.Add(-1 * time.Minute)}, + {Value: aws.Float64(6), Timestamp: ts.Add(-2 * time.Minute)}, + }, + }, + ResourceName: "arn:aws:elasticache:us-east-1:123456789012:cluster:redis-cluster", + }, + { + MetricName: "NetworkMaxBytesIn", + MetricMigrationParams: model.MetricMigrationParams{ + NilToZero: true, + AddCloudwatchTimestamp: true, + ExportAllDataPoints: false, + }, + Namespace: "AWS/ElastiCache", + Dimensions: []model.Dimension{ + { + Name: "CacheClusterId", + Value: "redis-cluster", + }, + }, + GetMetricDataResult: &model.GetMetricDataResult{ + Statistic: "Average", + DataPoints: []model.DataPoint{ + {Value: nil, Timestamp: ts}, + {Value: aws.Float64(5), Timestamp: ts.Add(-1 * time.Minute)}, + {Value: aws.Float64(6), Timestamp: ts.Add(-2 * time.Minute)}, + }, }, ResourceName: "arn:aws:elasticache:us-east-1:123456789012:cluster:redis-cluster", }, @@ -394,7 +463,7 @@ func TestBuildMetrics(t *testing.T) { { Name: "aws_elasticache_cpuutilization_average", Value: 1, - Timestamp: ts, + Timestamp: nullTs, Labels: map[string]string{ "account_id": "123456789012", "name": "arn:aws:elasticache:us-east-1:123456789012:cluster:redis-cluster", @@ -405,7 +474,7 @@ func TestBuildMetrics(t *testing.T) { { Name: "aws_elasticache_freeable_memory_average", Value: 2, - Timestamp: ts, + Timestamp: nullTs, Labels: map[string]string{ "account_id": "123456789012", "name": "arn:aws:elasticache:us-east-1:123456789012:cluster:redis-cluster", @@ -416,7 +485,7 @@ func TestBuildMetrics(t *testing.T) { { Name: "aws_elasticache_network_bytes_in_average", Value: 3, - Timestamp: ts, + Timestamp: nullTs, Labels: map[string]string{ "account_id": "123456789012", "name": "arn:aws:elasticache:us-east-1:123456789012:cluster:redis-cluster", @@ -436,6 +505,66 @@ func TestBuildMetrics(t *testing.T) { "dimension_CacheClusterId": "redis-cluster", }, }, + { + Name: "aws_elasticache_network_packets_in_average", + Value: 4, + Timestamp: ts, + IncludeTimestamp: true, + Labels: map[string]string{ + "account_id": "123456789012", + "name": "arn:aws:elasticache:us-east-1:123456789012:cluster:redis-cluster", + "region": "us-east-1", + "dimension_CacheClusterId": "redis-cluster", 
+ }, + }, + { + Name: "aws_elasticache_network_packets_in_average", + Value: 5, + Timestamp: ts.Add(-1 * time.Minute), + IncludeTimestamp: true, + Labels: map[string]string{ + "account_id": "123456789012", + "name": "arn:aws:elasticache:us-east-1:123456789012:cluster:redis-cluster", + "region": "us-east-1", + "dimension_CacheClusterId": "redis-cluster", + }, + }, + { + Name: "aws_elasticache_network_packets_in_average", + Value: 6, + Timestamp: ts.Add(-2 * time.Minute), + IncludeTimestamp: true, + Labels: map[string]string{ + "account_id": "123456789012", + "name": "arn:aws:elasticache:us-east-1:123456789012:cluster:redis-cluster", + "region": "us-east-1", + "dimension_CacheClusterId": "redis-cluster", + }, + }, + { + Name: "aws_elasticache_network_packets_out_average", + Value: 5, + Timestamp: ts.Add(-1 * time.Minute), + IncludeTimestamp: true, + Labels: map[string]string{ + "account_id": "123456789012", + "name": "arn:aws:elasticache:us-east-1:123456789012:cluster:redis-cluster", + "region": "us-east-1", + "dimension_CacheClusterId": "redis-cluster", + }, + }, + { + Name: "aws_elasticache_network_packets_out_average", + Value: 6, + Timestamp: ts.Add(-2 * time.Minute), + IncludeTimestamp: true, + Labels: map[string]string{ + "account_id": "123456789012", + "name": "arn:aws:elasticache:us-east-1:123456789012:cluster:redis-cluster", + "region": "us-east-1", + "dimension_CacheClusterId": "redis-cluster", + }, + }, }, expectedLabels: map[string]model.LabelSet{ "aws_elasticache_cpuutilization_average": { @@ -462,6 +591,18 @@ func TestBuildMetrics(t *testing.T) { "region": {}, "dimension_CacheClusterId": {}, }, + "aws_elasticache_network_packets_in_average": { + "account_id": {}, + "name": {}, + "region": {}, + "dimension_CacheClusterId": {}, + }, + "aws_elasticache_network_packets_out_average": { + "account_id": {}, + "name": {}, + "region": {}, + "dimension_CacheClusterId": {}, + }, }, expectedErr: nil, }, @@ -488,9 +629,8 @@ func TestBuildMetrics(t *testing.T) { }, }, GetMetricDataResult: &model.GetMetricDataResult{ - Statistic: "Average", - Datapoint: nil, - Timestamp: ts, + Statistic: "Average", + DataPoints: []model.DataPoint{{Value: nil, Timestamp: ts}}, }, ResourceName: "arn:aws:elasticache:us-east-1:123456789012:cluster:redis-cluster", }, @@ -509,9 +649,8 @@ func TestBuildMetrics(t *testing.T) { }, }, GetMetricDataResult: &model.GetMetricDataResult{ - Statistic: "Average", - Datapoint: nil, - Timestamp: ts, + Statistic: "Average", + DataPoints: []model.DataPoint{{Value: nil, Timestamp: ts}}, }, ResourceName: "arn:aws:elasticache:us-east-1:123456789012:cluster:redis-cluster", }, @@ -529,9 +668,8 @@ func TestBuildMetrics(t *testing.T) { }, }, GetMetricDataResult: &model.GetMetricDataResult{ - Statistic: "Average", - Datapoint: nil, - Timestamp: ts, + Statistic: "Average", + DataPoints: []model.DataPoint{{Value: nil, Timestamp: ts}}, }, ResourceName: "arn:aws:elasticache:us-east-1:123456789012:cluster:redis-cluster", }, @@ -549,9 +687,8 @@ func TestBuildMetrics(t *testing.T) { }, }, GetMetricDataResult: &model.GetMetricDataResult{ - Statistic: "Average", - Datapoint: nil, - Timestamp: ts, + Statistic: "Average", + DataPoints: []model.DataPoint{{Value: nil, Timestamp: ts}}, }, ResourceName: "arn:aws:elasticache:us-east-1:123456789012:cluster:redis-cluster", }, @@ -562,7 +699,7 @@ func TestBuildMetrics(t *testing.T) { { Name: "aws_elasticache_cpuutilization_average", Value: 0, - Timestamp: ts, + Timestamp: nullTs, Labels: map[string]string{ "account_id": "123456789012", "name": 
"arn:aws:elasticache:us-east-1:123456789012:cluster:redis-cluster", @@ -574,7 +711,7 @@ func TestBuildMetrics(t *testing.T) { { Name: "aws_elasticache_freeable_memory_average", Value: math.NaN(), - Timestamp: ts, + Timestamp: nullTs, Labels: map[string]string{ "account_id": "123456789012", "name": "arn:aws:elasticache:us-east-1:123456789012:cluster:redis-cluster", @@ -586,7 +723,7 @@ func TestBuildMetrics(t *testing.T) { { Name: "aws_elasticache_network_bytes_in_average", Value: 0, - Timestamp: ts, + Timestamp: nullTs, Labels: map[string]string{ "account_id": "123456789012", "name": "arn:aws:elasticache:us-east-1:123456789012:cluster:redis-cluster", @@ -635,9 +772,8 @@ func TestBuildMetrics(t *testing.T) { }, Namespace: "AWS/ElastiCache", GetMetricDataResult: &model.GetMetricDataResult{ - Statistic: "Average", - Datapoint: aws.Float64(1), - Timestamp: ts, + Statistic: "Average", + DataPoints: []model.DataPoint{{Value: aws.Float64(1), Timestamp: ts}}, }, Dimensions: []model.Dimension{ { @@ -654,7 +790,7 @@ func TestBuildMetrics(t *testing.T) { { Name: "aws_elasticache_cpuutilization_average", Value: 1, - Timestamp: ts, + Timestamp: nullTs, Labels: map[string]string{ "account_id": "123456789012", "name": "arn:aws:elasticache:us-east-1:123456789012:cluster:redis-cluster", @@ -690,9 +826,8 @@ func TestBuildMetrics(t *testing.T) { }, Namespace: "/aws/sagemaker/TrainingJobs", GetMetricDataResult: &model.GetMetricDataResult{ - Statistic: "Average", - Datapoint: aws.Float64(1), - Timestamp: ts, + Statistic: "Average", + DataPoints: []model.DataPoint{{Value: aws.Float64(1), Timestamp: ts}}, }, Dimensions: []model.Dimension{ { @@ -709,7 +844,7 @@ func TestBuildMetrics(t *testing.T) { { Name: "aws_sagemaker_trainingjobs_cpuutilization_average", Value: 1, - Timestamp: ts, + Timestamp: nullTs, Labels: map[string]string{ "account_id": "123456789012", "name": "arn:aws:sagemaker:us-east-1:123456789012:training-job/sagemaker-xgboost", @@ -745,9 +880,8 @@ func TestBuildMetrics(t *testing.T) { }, Namespace: "Glue", GetMetricDataResult: &model.GetMetricDataResult{ - Statistic: "Average", - Datapoint: aws.Float64(1), - Timestamp: ts, + Statistic: "Average", + DataPoints: []model.DataPoint{{Value: aws.Float64(1), Timestamp: ts}}, }, Dimensions: []model.Dimension{ { @@ -764,7 +898,7 @@ func TestBuildMetrics(t *testing.T) { { Name: "aws_glue_driver_aggregate_bytes_read_average", Value: 1, - Timestamp: ts, + Timestamp: nullTs, Labels: map[string]string{ "account_id": "123456789012", "name": "arn:aws:glue:us-east-1:123456789012:job/test-job", @@ -800,9 +934,8 @@ func TestBuildMetrics(t *testing.T) { }, Namespace: "Glue", GetMetricDataResult: &model.GetMetricDataResult{ - Statistic: "Average", - Datapoint: aws.Float64(1), - Timestamp: ts, + Statistic: "Average", + DataPoints: []model.DataPoint{{Value: aws.Float64(1), Timestamp: ts}}, }, Dimensions: []model.Dimension{ { @@ -819,7 +952,7 @@ func TestBuildMetrics(t *testing.T) { { Name: "aws_glue_aggregate_glue_jobs_bytes_read_average", Value: 1, - Timestamp: ts, + Timestamp: nullTs, Labels: map[string]string{ "account_id": "123456789012", "name": "arn:aws:glue:us-east-1:123456789012:job/test-job", @@ -858,9 +991,8 @@ func TestBuildMetrics(t *testing.T) { }, Namespace: "AWS/ElastiCache", GetMetricDataResult: &model.GetMetricDataResult{ - Statistic: "Average", - Datapoint: aws.Float64(1), - Timestamp: ts, + Statistic: "Average", + DataPoints: []model.DataPoint{{Value: aws.Float64(1), Timestamp: ts}}, }, Dimensions: []model.Dimension{ { @@ -877,7 +1009,7 @@ func 
 				},
 				Dimensions: []model.Dimension{
 					{
@@ -877,7 +1009,7 @@ func TestBuildMetrics(t *testing.T) {
 				{
 					Name: "aws_elasticache_cpuutilization_average",
 					Value: 1,
-					Timestamp: ts,
+					Timestamp: nullTs,
 					Labels: map[string]string{
 						"account_id": "123456789012",
 						"name": "arn:aws:elasticache:us-east-1:123456789012:cluster:redis-cluster",
@@ -915,9 +1047,8 @@ func TestBuildMetrics(t *testing.T) {
 				},
 				Namespace: "AWS/ElastiCache",
 				GetMetricDataResult: &model.GetMetricDataResult{
-					Statistic: "Average",
-					Datapoint: aws.Float64(1),
-					Timestamp: ts,
+					Statistic:  "Average",
+					DataPoints: []model.DataPoint{{Value: aws.Float64(1), Timestamp: ts}},
 				},
 				Dimensions: []model.Dimension{
 					{
@@ -934,7 +1065,7 @@ func TestBuildMetrics(t *testing.T) {
 				{
 					Name: "aws_elasticache_cpuutilization_average",
 					Value: 1,
-					Timestamp: ts,
+					Timestamp: nullTs,
 					Labels: map[string]string{
 						"account_id": "123456789012",
 						"account_alias": "billingacct",
@@ -989,9 +1120,8 @@ func Benchmark_BuildMetrics(b *testing.B) {
 			},
 			Namespace: "AWS/ElastiCache",
 			GetMetricDataResult: &model.GetMetricDataResult{
-				Statistic: "Average",
-				Datapoint: aws.Float64(1),
-				Timestamp: ts,
+				Statistic:  "Average",
+				DataPoints: []model.DataPoint{{Value: aws.Float64(1), Timestamp: ts}},
 			},
 			Dimensions: []model.Dimension{
 				{
@@ -1019,9 +1149,8 @@ func Benchmark_BuildMetrics(b *testing.B) {
 				},
 			},
 			GetMetricDataResult: &model.GetMetricDataResult{
-				Statistic: "Average",
-				Datapoint: aws.Float64(2),
-				Timestamp: ts,
+				Statistic:  "Average",
+				DataPoints: []model.DataPoint{{Value: aws.Float64(2), Timestamp: ts}},
 			},
 			ResourceName: "arn:aws:elasticache:us-east-1:123456789012:cluster:redis-cluster",
 			Tags: []model.Tag{{
@@ -1043,9 +1172,8 @@ func Benchmark_BuildMetrics(b *testing.B) {
 				},
 			},
 			GetMetricDataResult: &model.GetMetricDataResult{
-				Statistic: "Average",
-				Datapoint: aws.Float64(3),
-				Timestamp: ts,
+				Statistic:  "Average",
+				DataPoints: []model.DataPoint{{Value: aws.Float64(3), Timestamp: ts}},
 			},
 			ResourceName: "arn:aws:elasticache:us-east-1:123456789012:cluster:redis-cluster",
 			Tags: []model.Tag{{
@@ -1067,9 +1195,8 @@ func Benchmark_BuildMetrics(b *testing.B) {
 				},
 			},
 			GetMetricDataResult: &model.GetMetricDataResult{
-				Statistic: "Average",
-				Datapoint: aws.Float64(4),
-				Timestamp: ts,
+				Statistic:  "Average",
+				DataPoints: []model.DataPoint{{Value: aws.Float64(4), Timestamp: ts}},
 			},
 			ResourceName: "arn:aws:elasticache:us-east-1:123456789012:cluster:redis-cluster",
 			Tags: []model.Tag{{
@@ -1139,22 +1266,22 @@ func replaceNaNValues(metrics []*PrometheusMetric) []*PrometheusMetric {
 // TestSortByTimeStamp validates that sortByTimestamp() sorts in descending order.
 func TestSortByTimeStamp(t *testing.T) {
 	ts := time.Date(2024, time.January, 1, 0, 0, 0, 0, time.UTC)
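+	// The Maximum values only label the datapoints; sortByTimestamp orders by Timestamp, newest first.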
-	dataPointMiddle := &model.Datapoint{
+	dataPointMiddle := &model.MetricStatisticsResult{
 		Timestamp: aws.Time(ts.Add(time.Minute * 2 * -1)),
 		Maximum:   aws.Float64(2),
 	}
 
-	dataPointNewest := &model.Datapoint{
+	dataPointNewest := &model.MetricStatisticsResult{
 		Timestamp: aws.Time(ts.Add(time.Minute * -1)),
 		Maximum:   aws.Float64(1),
 	}
 
-	dataPointOldest := &model.Datapoint{
+	dataPointOldest := &model.MetricStatisticsResult{
 		Timestamp: aws.Time(ts.Add(time.Minute * 3 * -1)),
 		Maximum:   aws.Float64(3),
 	}
 
-	cloudWatchDataPoints := []*model.Datapoint{
+	cloudWatchDataPoints := []*model.MetricStatisticsResult{
 		dataPointMiddle,
 		dataPointNewest,
 		dataPointOldest,
@@ -1162,7 +1289,7 @@ func TestSortByTimeStamp(t *testing.T) {
 
 	sortedDataPoints := sortByTimestamp(cloudWatchDataPoints)
 
-	expectedDataPoints := []*model.Datapoint{
+	expectedDataPoints := []*model.MetricStatisticsResult{
 		dataPointNewest,
 		dataPointMiddle,
 		dataPointOldest,