Commit f91b27f

skip prometheus histograms (#266)
* skip prometheus histograms
1 parent: bb732ca

File tree

src/main/docker/kafka/Dockerfile
src/main/docker/mad/Dockerfile
src/main/java/com/arpnetworking/metrics/mad/parsers/PrometheusToRecordParser.java

3 files changed: +28 -14 lines

src/main/docker/kafka/Dockerfile

Lines changed: 1 addition & 1 deletion
@@ -15,7 +15,7 @@
 # Original: https://github.com/spotify/docker-kafka

 # Kafka and Zookeeper
-FROM openjdk:8-jre-buster
+FROM openjdk:17-slim-bullseye

 ENV SCALA_VERSION 2.13
 ENV KAFKA_VERSION 2.7.1

src/main/docker/mad/Dockerfile

Lines changed: 2 additions & 0 deletions
@@ -32,6 +32,8 @@ ENV JAVA_OPTS=""
3232
# Build
3333
RUN apt update && \
3434
apt install -y gosu openssl && \
35+
rm -rf /var/lib/apt/lists/* && \
36+
apt-get clean && \
3537
mkdir -p /opt/mad/lib/ext && \
3638
mkdir -p /opt/mad/logs && \
3739
mkdir -p /opt/mad/config/pipelines && \

src/main/java/com/arpnetworking/metrics/mad/parsers/PrometheusToRecordParser.java

Lines changed: 25 additions & 13 deletions
@@ -114,32 +114,27 @@ ParseResult parseNameAndUnit(final String name) {
     @Override
     public List<Record> parse(final HttpRequest data) throws ParsingException {
         final List<Record> records = Lists.newArrayList();
-        final byte[] uncompressed;
-        try {
-            final byte[] input = data.getBody().toArray();
-            if (_outputDebugInfo) {
-                final int outputFile = _outputFileNumber.incrementAndGet();
-                if (outputFile < 10) {
-                    Files.write(Paths.get("prometheus_debug_" + outputFile), input);
-                }
-            }
-            uncompressed = Snappy.uncompress(input);
-        } catch (final IOException e) {
-            throw new ParsingException("Failed to decompress snappy stream", data.getBody().toArray(), e);
-        }
+        final byte[] uncompressed = decompress(data);
         try {
             final Remote.WriteRequest writeRequest = Remote.WriteRequest.parseFrom(uncompressed);
             for (final TimeSeries timeSeries : writeRequest.getTimeseriesList()) {
+                boolean skipSeries = false;
                 Optional<String> nameOpt = Optional.empty();
                 final ImmutableMap.Builder<String, String> dimensionsBuilder = ImmutableMap.builder();
                 for (final Types.Label label : timeSeries.getLabelsList()) {
                     if ("__name__".equals(label.getName())) {
                         final String value = label.getValue();
                         nameOpt = Optional.ofNullable(value);
+                    } else if ("le".equals(label.getName())) {
+                        skipSeries = true;
                     } else {
                         dimensionsBuilder.put(label.getName(), label.getValue());
                     }
                 }
+
+                if (skipSeries) {
+                    continue;
+                }
                 final ParseResult result = parseNameAndUnit(nameOpt.orElse("").trim());
                 final String metricName = result.getName();
                 if (metricName.isEmpty()) {
@@ -172,6 +167,23 @@ public List<Record> parse(final HttpRequest data) throws ParsingException {
         return records;
     }

+    private byte[] decompress(final HttpRequest data) throws ParsingException {
+        final byte[] uncompressed;
+        try {
+            final byte[] input = data.getBody().toArray();
+            if (_outputDebugInfo) {
+                final int outputFile = _outputFileNumber.incrementAndGet();
+                if (outputFile < 10) {
+                    Files.write(Paths.get("prometheus_debug_" + outputFile), input);
+                }
+            }
+            uncompressed = Snappy.uncompress(input);
+        } catch (final IOException e) {
+            throw new ParsingException("Failed to decompress snappy stream", data.getBody().toArray(), e);
+        }
+        return uncompressed;
+    }
+
     private ImmutableMap<String, ? extends Metric> createMetric(final String name, final Types.Sample sample, final Optional<Unit> unit) {
         final Metric metric = ThreadLocalBuilder.build(
                 DefaultMetric.Builder.class,

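Note on the parser change: Prometheus represents each bucket of a classic histogram as its own time series tagged with an "le" (less-than-or-equal bucket boundary) label, alongside separate "_sum" and "_count" series. The new check drops any series carrying an "le" label so per-bucket counters are not ingested as ordinary metrics, while the sum and count series still flow through. The second hunk only extracts the existing Snappy decompression and debug-dump code into a private decompress helper, leaving its behavior unchanged. Below is a minimal, self-contained sketch of the bucket filter, using a plain label map instead of the generated protobuf types; the class and method names are illustrative and not part of this codebase.

    import java.util.Map;

    // Illustrative sketch only: mirrors the "le" label check added above,
    // but against a plain Map instead of prometheus Types.Label objects.
    final class HistogramBucketFilter {

        // Classic Prometheus histograms publish one "<name>_bucket" series per
        // bucket boundary, each carrying an "le" label; series without "le"
        // (including "<name>_sum" and "<name>_count") are kept.
        static boolean isHistogramBucketSeries(final Map<String, String> labels) {
            return labels.containsKey("le");
        }

        public static void main(final String[] args) {
            System.out.println(isHistogramBucketSeries(Map.of(
                    "__name__", "http_request_duration_seconds_bucket",
                    "le", "0.5")));                                     // true  -> series skipped
            System.out.println(isHistogramBucketSeries(Map.of(
                    "__name__", "http_request_duration_seconds_sum"))); // false -> series kept
        }
    }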