Commit ca5c28d

mmorel-35 authored and dmathieu committed

[chore]: fix staticcheck rules (exporter) (open-telemetry#39192)

Description: This fixes staticcheck issues in the exporter components discovered after the golangci-lint@v2 upgrade.

Signed-off-by: Matthieu MOREL <[email protected]>
1 parent 5fb5f1f commit ca5c28d
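
Most of the changes below are the same mechanical simplification: staticcheck's embedded-field check (QF1008) flags selectors that spell out the embedding struct of a promoted field, e.g. cfg.ClientConfig.Endpoint where cfg.Endpoint resolves to the same field. Here is a minimal, self-contained sketch of that pattern; the Config and ClientConfig types are simplified stand-ins, not the exporter's real config structs:

package main

import "fmt"

// Simplified stand-ins: Config embeds ClientConfig, so ClientConfig's fields
// are promoted onto Config and can be selected without naming the embedding.
type ClientConfig struct {
    Endpoint string
}

type Config struct {
    ClientConfig
    APIVersion string
}

func main() {
    cfg := Config{ClientConfig: ClientConfig{Endpoint: "http://localhost:9093"}, APIVersion: "v2"}

    // Before: the embedded field is named explicitly in the selector.
    before := fmt.Sprintf("%s/api/%s/alerts", cfg.ClientConfig.Endpoint, cfg.APIVersion)

    // After: the promoted field is selected directly, as the lint suggests.
    after := fmt.Sprintf("%s/api/%s/alerts", cfg.Endpoint, cfg.APIVersion)

    fmt.Println(before == after) // true: both selectors reach the same field
}

The dps.NumberDataPointSlice.At(i) → dps.At(i) and conf.Encoding.Name → conf.Name rewrites below are the same simplification applied to other embedded types.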

File tree

92 files changed: +273 additions, -275 deletions

Large commits have some content hidden by default; only a subset of the 92 changed files is shown below.

exporter/alertmanagerexporter/alertmanager_exporter.go

Lines changed: 2 additions & 2 deletions
@@ -189,7 +189,7 @@ func (s *alertmanagerExporter) pushTraces(ctx context.Context, td ptrace.Traces)
 }

 func (s *alertmanagerExporter) start(ctx context.Context, host component.Host) error {
-    client, err := s.config.ClientConfig.ToClient(ctx, host, s.settings)
+    client, err := s.config.ToClient(ctx, host, s.settings)
     if err != nil {
         return fmt.Errorf("failed to create HTTP Client: %w", err)
     }
@@ -209,7 +209,7 @@ func newAlertManagerExporter(cfg *Config, set component.TelemetrySettings) *aler
         config:            cfg,
         settings:          set,
         tracesMarshaler:   &ptrace.JSONMarshaler{},
-        endpoint:          fmt.Sprintf("%s/api/%s/alerts", cfg.ClientConfig.Endpoint, cfg.APIVersion),
+        endpoint:          fmt.Sprintf("%s/api/%s/alerts", cfg.Endpoint, cfg.APIVersion),
         generatorURL:      cfg.GeneratorURL,
         defaultSeverity:   cfg.DefaultSeverity,
         severityAttribute: cfg.SeverityAttribute,

exporter/alertmanagerexporter/config.go

Lines changed: 1 addition & 1 deletion
@@ -30,7 +30,7 @@ var _ component.Config = (*Config)(nil)

 // Validate checks if the exporter configuration is valid
 func (cfg *Config) Validate() error {
-    if cfg.ClientConfig.Endpoint == "" {
+    if cfg.Endpoint == "" {
         return errors.New("endpoint must be non-empty")
     }
     if cfg.DefaultSeverity == "" {

exporter/alertmanagerexporter/config_test.go

Lines changed: 1 addition & 1 deletion
@@ -112,7 +112,7 @@ func TestConfig_Validate(t *testing.T) {
             name: "NoEndpoint",
             cfg: func() *Config {
                 cfg := createDefaultConfig().(*Config)
-                cfg.ClientConfig.Endpoint = ""
+                cfg.Endpoint = ""
                 return cfg
             }(),
             wantErr: "endpoint must be non-empty",

exporter/awsemfexporter/datapoint.go

Lines changed: 8 additions & 8 deletions
@@ -138,7 +138,7 @@ func (split *dataPointSplit) appendMetricData(metricVal float64, count uint64) {

 // CalculateDeltaDatapoints retrieves the NumberDataPoint at the given index and performs rate/delta calculation if necessary.
 func (dps numberDataPointSlice) CalculateDeltaDatapoints(i int, instrumentationScopeName string, _ bool, calculators *emfCalculators) ([]dataPoint, bool) {
-    metric := dps.NumberDataPointSlice.At(i)
+    metric := dps.At(i)
     labels := createLabels(metric.Attributes(), instrumentationScopeName)
     timestampMs := unixNanoToMilliseconds(metric.Timestamp())

@@ -177,7 +177,7 @@ func (dps numberDataPointSlice) CalculateDeltaDatapoints(i int, instrumentationS
 }

 func (dps numberDataPointSlice) IsStaleNaNInf(i int) (bool, pcommon.Map) {
-    metric := dps.NumberDataPointSlice.At(i)
+    metric := dps.At(i)
     if metric.Flags().NoRecordedValue() {
         return true, metric.Attributes()
     }
@@ -189,7 +189,7 @@ func (dps numberDataPointSlice) IsStaleNaNInf(i int) (bool, pcommon.Map) {

 // CalculateDeltaDatapoints retrieves the HistogramDataPoint at the given index.
 func (dps histogramDataPointSlice) CalculateDeltaDatapoints(i int, instrumentationScopeName string, _ bool, _ *emfCalculators) ([]dataPoint, bool) {
-    metric := dps.HistogramDataPointSlice.At(i)
+    metric := dps.At(i)
     labels := createLabels(metric.Attributes(), instrumentationScopeName)
     timestamp := unixNanoToMilliseconds(metric.Timestamp())

@@ -207,7 +207,7 @@ func (dps histogramDataPointSlice) CalculateDeltaDatapoints(i int, instrumentati
 }

 func (dps histogramDataPointSlice) IsStaleNaNInf(i int) (bool, pcommon.Map) {
-    metric := dps.HistogramDataPointSlice.At(i)
+    metric := dps.At(i)
     if metric.Flags().NoRecordedValue() {
         return true, metric.Attributes()
     }
@@ -229,7 +229,7 @@ func (dps histogramDataPointSlice) IsStaleNaNInf(i int) (bool, pcommon.Map) {
 // - Sum is only assigned to the first split to ensure the total sum of the datapoints after aggregation is correct.
 // - Count is accumulated based on the bucket counts within each split.
 func (dps exponentialHistogramDataPointSlice) CalculateDeltaDatapoints(idx int, instrumentationScopeName string, _ bool, _ *emfCalculators) ([]dataPoint, bool) {
-    metric := dps.ExponentialHistogramDataPointSlice.At(idx)
+    metric := dps.At(idx)

     const splitThreshold = 100
     currentBucketIndex := 0
@@ -412,7 +412,7 @@ func collectDatapointsWithNegativeBuckets(split *dataPointSplit, metric pmetric.
 }

 func (dps exponentialHistogramDataPointSlice) IsStaleNaNInf(i int) (bool, pcommon.Map) {
-    metric := dps.ExponentialHistogramDataPointSlice.At(i)
+    metric := dps.At(i)
     if metric.Flags().NoRecordedValue() {
         return true, metric.Attributes()
     }
@@ -430,7 +430,7 @@ func (dps exponentialHistogramDataPointSlice) IsStaleNaNInf(i int) (bool, pcommo

 // CalculateDeltaDatapoints retrieves the SummaryDataPoint at the given index and perform calculation with sum and count while retain the quantile value.
 func (dps summaryDataPointSlice) CalculateDeltaDatapoints(i int, instrumentationScopeName string, detailedMetrics bool, calculators *emfCalculators) ([]dataPoint, bool) {
-    metric := dps.SummaryDataPointSlice.At(i)
+    metric := dps.At(i)
     labels := createLabels(metric.Attributes(), instrumentationScopeName)
     timestampMs := unixNanoToMilliseconds(metric.Timestamp())

@@ -485,7 +485,7 @@ func (dps summaryDataPointSlice) CalculateDeltaDatapoints(i int, instrumentation
 }

 func (dps summaryDataPointSlice) IsStaleNaNInf(i int) (bool, pcommon.Map) {
-    metric := dps.SummaryDataPointSlice.At(i)
+    metric := dps.At(i)
     if metric.Flags().NoRecordedValue() {
         return true, metric.Attributes()
     }

exporter/awsemfexporter/datapoint_test.go

Lines changed: 4 additions & 4 deletions
@@ -2052,7 +2052,7 @@ func TestGetDataPoints(t *testing.T) {
             expectedDPS := tc.expectedDatapointSlice.(numberDataPointSlice)
             assert.Equal(t, expectedDPS.deltaMetricMetadata, convertedDPS.deltaMetricMetadata)
             assert.Equal(t, 1, convertedDPS.Len())
-            dp := convertedDPS.NumberDataPointSlice.At(0)
+            dp := convertedDPS.At(0)
             switch dp.ValueType() {
             case pmetric.NumberDataPointValueTypeDouble:
                 assert.Equal(t, 0.1, dp.DoubleValue())
@@ -2062,14 +2062,14 @@ func TestGetDataPoints(t *testing.T) {
             assert.Equal(t, tc.expectedAttributes, dp.Attributes().AsRaw())
         case histogramDataPointSlice:
             assert.Equal(t, 1, convertedDPS.Len())
-            dp := convertedDPS.HistogramDataPointSlice.At(0)
+            dp := convertedDPS.At(0)
             assert.Equal(t, 35.0, dp.Sum())
             assert.Equal(t, uint64(18), dp.Count())
             assert.Equal(t, []float64{0, 10}, dp.ExplicitBounds().AsRaw())
             assert.Equal(t, tc.expectedAttributes, dp.Attributes().AsRaw())
         case exponentialHistogramDataPointSlice:
             assert.Equal(t, 1, convertedDPS.Len())
-            dp := convertedDPS.ExponentialHistogramDataPointSlice.At(0)
+            dp := convertedDPS.At(0)
             assert.Equal(t, float64(0), dp.Sum())
             assert.Equal(t, uint64(4), dp.Count())
             assert.Equal(t, []uint64{1, 0, 1}, dp.Positive().BucketCounts().AsRaw())
@@ -2080,7 +2080,7 @@ func TestGetDataPoints(t *testing.T) {
             expectedDPS := tc.expectedDatapointSlice.(summaryDataPointSlice)
             assert.Equal(t, expectedDPS.deltaMetricMetadata, convertedDPS.deltaMetricMetadata)
             assert.Equal(t, 1, convertedDPS.Len())
-            dp := convertedDPS.SummaryDataPointSlice.At(0)
+            dp := convertedDPS.At(0)
             assert.Equal(t, 15.0, dp.Sum())
             assert.Equal(t, uint64(5), dp.Count())
             assert.Equal(t, 2, dp.QuantileValues().Len())

exporter/awsemfexporter/grouped_metric.go

Lines changed: 1 addition & 1 deletion
@@ -86,7 +86,7 @@ func addToGroupedMetric(
     }

     // Extra params to use when grouping metrics
-    metadata.groupedMetricMetadata.batchIndex = i
+    metadata.batchIndex = i
     groupKey := aws.NewKey(metadata.groupedMetricMetadata, labels)
     if _, ok := groupedMetrics[groupKey]; ok {
         // if MetricName already exists in metrics map, print warning log

exporter/awsemfexporter/grouped_metric_test.go

Lines changed: 6 additions & 5 deletions
@@ -309,9 +309,10 @@ func TestAddToGroupedMetric(t *testing.T) {
             }
             assert.Equal(t, expectedLabels, group.labels)

-            if group.metadata.logGroup == "log-group-2" {
+            switch group.metadata.logGroup {
+            case "log-group-2":
                 seenLogGroup2 = true
-            } else if group.metadata.logGroup == "log-group-1" {
+            case "log-group-1":
                 seenLogGroup1 = true
             }
         }
@@ -441,10 +442,10 @@ func TestAddToGroupedMetric(t *testing.T) {
         for _, v := range groupedMetrics {
             assert.Len(t, v.metrics, 1)
             assert.Len(t, v.labels, 2)
-            assert.Contains(t, expectedMetadata, v.metadata.groupedMetricMetadata.batchIndex)
-            assert.Equal(t, expectedMetadata[v.metadata.groupedMetricMetadata.batchIndex], v.metadata)
+            assert.Contains(t, expectedMetadata, v.metadata.batchIndex)
+            assert.Equal(t, expectedMetadata[v.metadata.batchIndex], v.metadata)
             assert.Equal(t, expectedLabels, v.labels)
-            delete(expectedMetadata, v.metadata.groupedMetricMetadata.batchIndex)
+            delete(expectedMetadata, v.metadata.batchIndex)
         }
     })
 }

exporter/awsemfexporter/metric_translator_test.go

Lines changed: 4 additions & 4 deletions
@@ -348,16 +348,16 @@ func TestTranslateOtToGroupedMetric(t *testing.T) {

         for _, v := range groupedMetrics {
             assert.Equal(t, tc.expectedNamespace, v.metadata.namespace)
-            switch {
-            case v.metadata.metricDataType == pmetric.MetricTypeSum:
+            switch v.metadata.metricDataType {
+            case pmetric.MetricTypeSum:
                 assert.Len(t, v.metrics, 2)
                 assert.Equal(t, tc.counterLabels, v.labels)
                 assert.Equal(t, counterSumMetrics, v.metrics)
-            case v.metadata.metricDataType == pmetric.MetricTypeGauge:
+            case pmetric.MetricTypeGauge:
                 assert.Len(t, v.metrics, 2)
                 assert.Equal(t, tc.counterLabels, v.labels)
                 assert.Equal(t, counterGaugeMetrics, v.metrics)
-            case v.metadata.metricDataType == pmetric.MetricTypeHistogram:
+            case pmetric.MetricTypeHistogram:
                 assert.Len(t, v.metrics, 1)
                 assert.Equal(t, tc.timerLabels, v.labels)
                 assert.Equal(t, timerMetrics, v.metrics)

exporter/awskinesisexporter/exporter.go

Lines changed: 2 additions & 2 deletions
@@ -90,7 +90,7 @@ func createExporter(ctx context.Context, c component.Config, log *zap.Logger, op
     }

     encoder, err := batch.NewEncoder(
-        conf.Encoding.Name,
+        conf.Name,
         batch.WithMaxRecordSize(conf.MaxRecordSize),
         batch.WithMaxRecordsPerBatch(conf.MaxRecordsPerBatch),
         batch.WithCompressionType(conf.Compression),
@@ -99,7 +99,7 @@ func createExporter(ctx context.Context, c component.Config, log *zap.Logger, op
         return nil, err
     }

-    if conf.Encoding.Name == "otlp_json" {
+    if conf.Name == "otlp_json" {
         log.Info("otlp_json is considered experimental and should not be used in a production environment")
     }

exporter/awss3exporter/sumo_marshaler.go

Lines changed: 1 addition & 1 deletion
@@ -32,7 +32,7 @@ func newSumoICMarshaler() sumoMarshaler {
 }

 func logEntry(buf *bytes.Buffer, format string, a ...any) {
-    buf.WriteString(fmt.Sprintf(format, a...))
+    fmt.Fprintf(buf, format, a...)
     buf.WriteString("\n")
 }
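
This hunk replaces buf.WriteString(fmt.Sprintf(...)) with fmt.Fprintf(buf, ...), writing the formatted output straight into the buffer instead of building an intermediate string, which is one of the staticcheck quickfix suggestions. A standalone sketch using only the standard library, showing that the two forms produce identical output:

package main

import (
    "bytes"
    "fmt"
)

func main() {
    var viaSprintf, viaFprintf bytes.Buffer

    // Before: format into a temporary string, then copy it into the buffer.
    viaSprintf.WriteString(fmt.Sprintf("entry %d: %s\n", 1, "ok"))

    // After: *bytes.Buffer implements io.Writer, so the formatted output can
    // be written directly, skipping the temporary string.
    fmt.Fprintf(&viaFprintf, "entry %d: %s\n", 1, "ok")

    fmt.Println(viaSprintf.String() == viaFprintf.String()) // true
}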
