@@ -18,20 +18,24 @@ package kamon.otel
 import io.opentelemetry.sdk.common.InstrumentationScopeInfo
 import io.opentelemetry.sdk.metrics.data._
 import io.opentelemetry.sdk.metrics.internal.data._
-import io.opentelemetry.sdk.metrics.internal.data.exponentialhistogram.{ExponentialHistogramData, ExponentialHistogramPointData, ImmutableExponentialHistogramData}
+import io.opentelemetry.sdk.metrics.internal.data.exponentialhistogram.{ExponentialHistogramBuckets, ExponentialHistogramData, ExponentialHistogramPointData, ImmutableExponentialHistogramData}
 import io.opentelemetry.sdk.resources.Resource
 import kamon.metric.Instrument.Snapshot
 import kamon.metric.{Distribution, MeasurementUnit, MetricSnapshot, PeriodSnapshot}
 import kamon.otel.HistogramFormat.{Explicit, Exponential, HistogramFormat}
+import kamon.otel.MetricsConverter.{ExplBucketFn, ExpoBucketFn}
 import org.slf4j.LoggerFactory

 import java.lang.{Double => JDouble, Long => JLong}
 import java.time.Instant
-import java.util.{Collection => JCollection, ArrayList => JArrayList}
+import java.util
+import java.util.{ArrayList => JArrayList, Collection => JCollection}
 import scala.collection.JavaConverters._
 import scala.collection.mutable.ArrayBuffer

-class WithResourceMetricsConverter(resource: Resource, kamonVersion: String, from: Instant, to: Instant) {
+class WithResourceMetricsConverter(resource: Resource, kamonVersion: String, from: Instant, to: Instant,
+                                   explBucketConfig: ExplBucketFn, expoBucketConfig: ExpoBucketFn) {
+  private val maxDouble: JDouble = JDouble.valueOf(JDouble.MAX_VALUE)
   private val logger = LoggerFactory.getLogger(getClass)
   private val fromNs = from.toEpochMilli * 1000000
   private val toNs = to.toEpochMilli * 1000000
@@ -54,71 +58,212 @@ class WithResourceMetricsConverter(resource: Resource, kamonVersion: String, fro
       toString(gauge.settings.unit),
       toGaugeData(gauge.instruments))

-  private def toExplicitHistogramDatum(s: Snapshot[Distribution]): HistogramPointData = {
-    val boundaries = ArrayBuffer.newBuilder[JDouble]
+  private def getExplBucketCounts(bucketConfiguration: Seq[JDouble])(s: Snapshot[Distribution]) = {
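+    // folds Kamon's internal distribution buckets into the configured explicit boundaries;
+    // values above the last configured boundary end up in a final overflow slot (guarded by maxDouble)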
     val counts = ArrayBuffer.newBuilder[JLong]
+    val boundaryIterator: Iterator[JDouble] = (bucketConfiguration :+ maxDouble).iterator
+    var nextBoundary = boundaryIterator.next()
+    var inBucketCount = 0L
     for (el <- s.value.bucketsIterator) {
-      counts += el.frequency
-      boundaries += el.value.toDouble
+      while (el.value > nextBoundary) {
+        nextBoundary = boundaryIterator.next()
+        counts += inBucketCount
+        inBucketCount = 0L
+      }
+      inBucketCount += el.frequency
     }
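+    // all recorded buckets consumed: flush the pending count, zero-fill any remaining boundaries and append the overflow bucket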
+    while (boundaryIterator.hasNext) {
+      counts += inBucketCount
+      boundaryIterator.next()
+      inBucketCount = 0L
+    }
+    counts += inBucketCount
+    counts
+  }
+
+  private def toExplicitHistogramDatum(bucketConfiguration: Seq[JDouble])(s: Snapshot[Distribution]): HistogramPointData = {
+    val counts = getExplBucketCounts(bucketConfiguration)(s)
     ImmutableHistogramPointData.create(
       fromNs,
       toNs,
       SpanConverter.toAttributes(s.tags),
       JDouble valueOf s.value.sum.toDouble,
       JDouble valueOf s.value.min.toDouble,
       JDouble valueOf s.value.max.toDouble,
-      boundaries.result().dropRight(1).asJava,
+      bucketConfiguration.asJava,
       counts.result().asJava
     )
   }

-  private def toExplicitHistogramData(distributions: Seq[Snapshot[Distribution]]): Option[HistogramData] =
+  private def toExplicitHistogramData(bucketConfiguration: Seq[JDouble], distributions: Seq[Snapshot[Distribution]]): Option[HistogramData] =
     distributions.filter(_.value.buckets.nonEmpty) match {
       case Nil => None
-      case nonEmpty => Some(ImmutableHistogramData.create(AggregationTemporality.DELTA, nonEmpty.map(toExplicitHistogramDatum).asJava))
+      case nonEmpty => Some(ImmutableHistogramData.create(AggregationTemporality.DELTA, nonEmpty.map(toExplicitHistogramDatum(bucketConfiguration)).asJava))
     }

-  def convertExplicitHistogram(histogram: MetricSnapshot.Distributions): Option[MetricData] =
-    toExplicitHistogramData(histogram.instruments).map(d =>
+  def convertExplicitHistogram(histogram: MetricSnapshot.Distributions): Option[MetricData] = {
+    val bucketConfiguration = explBucketConfig(histogram.name, histogram.settings.unit)
+    toExplicitHistogramData(bucketConfiguration, histogram.instruments).map(d =>
       ImmutableMetricData.createDoubleHistogram(
         resource,
         instrumentationScopeInfo(histogram),
         histogram.name,
         histogram.description,
         toString(histogram.settings.unit),
         d))
+  }
+
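+  /** Iterator that yields every element of `it` followed by `last` exactly once; used below to append a sentinel bucket to the Kamon bucket stream. */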
+  class ItWithLast[T](it: Iterator[T], last: T) extends Iterator[T] {
+    private var showedLast: Boolean = false
+
+    def hasNext: Boolean = it.hasNext || !showedLast
+
+    def next(): T = if (it.hasNext) it.next() else if (!showedLast) {
+      showedLast = true
+      last
+    } else throw new RuntimeException("Next on empty Iterator")
+  }

-  private def toExponentialHistogramData(distributions: Seq[Snapshot[Distribution]]): Option[ExponentialHistogramData] =
+  private def getExpoBucketCounts(scale: Int, maxBucketCount: Int)(s: Snapshot[Distribution]) = {
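+    // maps Kamon's recorded buckets onto exponential target buckets with boundaries base^i, where base = 2^(2^-scale);
+    // target buckets with a lower bound below 1 feed the negative-range counts, the bucket at 1 the zero count, and those above 1 the positive-range counts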
+    val base = Math.pow(2, Math.pow(2, -scale))
+    val lowerBoundaryIterator: Iterator[Double] = ((-maxBucketCount to maxBucketCount).map(i => Math.pow(base, i)) :+ Double.MaxValue).iterator
+    val valuesIterator = new ItWithLast[Distribution.Bucket](s.value.bucketsIterator, new Distribution.Bucket {
+      def value: Long = Long.MaxValue
+
+      def frequency: Long = 0
+    })
+    var fromLowerBound = valuesIterator.next()
+    var fromUpperBound = valuesIterator.next()
+    var toLowerBound = lowerBoundaryIterator.next()
+    var toUpperBound = lowerBoundaryIterator.next()
+    var zeroCount: JLong = 0L
+    var countInBucket = 0L
+
+    val negativeCounts = ArrayBuffer.newBuilder[JLong]
+    val positiveCounts = ArrayBuffer.newBuilder[JLong]
+
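+    // iterFrom returns the current source (Kamon) bucket's frequency and advances to the next one;
+    // iterTo closes the current target (OTel) bucket, returning and resetting the count accumulated for it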
+    def iterFrom: JLong = {
+      val d = fromLowerBound.frequency
+      fromLowerBound = fromUpperBound
+      fromUpperBound = valuesIterator.next()
+      d
+    }
+
+    def iterTo: JLong = {
+      toLowerBound = toUpperBound
+      toUpperBound = lowerBoundaryIterator.next()
+      val res = countInBucket
+      countInBucket = 0
+      res
+    }
+    // normal case: walk the source and target buckets in lockstep, routing each count into the
+    // negative, zero or positive range according to the current target bucket's lower bound
+    while (lowerBoundaryIterator.hasNext && valuesIterator.hasNext) {
+      if (fromUpperBound.value <= toLowerBound) {
+        countInBucket += iterFrom // Or drop?
+      } else if (fromLowerBound.value >= toUpperBound) toLowerBound match {
+        case 1 => zeroCount += iterTo
+        case b if b < 1 => negativeCounts += iterTo
+        case b if b > 1 => positiveCounts += iterTo
+      } else if (fromUpperBound.value == toUpperBound) toLowerBound match {
+        case 1 =>
+          zeroCount += iterFrom
+          iterTo
+        case b if b < 1 =>
+          countInBucket += iterFrom
+          negativeCounts += iterTo
+        case b if b > 1 =>
+          countInBucket += iterFrom
+          positiveCounts += iterTo
+      } else if (fromUpperBound.value > toUpperBound) {
+        val firstBonus: JLong = countInBucket
+        var negBuckets = 0
+        var zeroBuckets = 0
+        var posBuckets = 0
+        while (fromUpperBound.value > toUpperBound && lowerBoundaryIterator.hasNext) {
+          if (toLowerBound < 1) negBuckets += 1
+          else if (toLowerBound == 1) zeroBuckets += 1
+          else if (toLowerBound >= 1) posBuckets += 1
+          toLowerBound = toUpperBound
+          toUpperBound = lowerBoundaryIterator.next()
+        }
+        val total = iterFrom
+        // Not sure about this... everything's going into the first bucket, even though we might be spanning multiple target buckets.
+        // Might be better to do something like push the avg.floor into each bucket, interpolating the remainder.
+        // OTOH it may not really come up much in practice, since the internal histos are likely to have similar or finer granularity
+        negativeCounts ++= (if (negBuckets > 0) JLong.valueOf(firstBonus + total) +: Array.fill(negBuckets - 1)(JLong.valueOf(0)) else Nil)
+        zeroCount += (if (negBuckets == 0 && zeroBuckets == 1) JLong.valueOf(firstBonus + total) else JLong.valueOf(0))
+        positiveCounts ++= (
+          if (negBuckets == 0 && zeroBuckets == 0 && posBuckets > 0)
+            JLong.valueOf(firstBonus + total) +: Array.fill(posBuckets - 1)(JLong.valueOf(0))
+          else Array.fill(posBuckets)(JLong.valueOf(0)))
+      } else /* if (fromUpperBound.value < toUpperBound) */ toLowerBound match {
+        case 1 => zeroCount += iterFrom
+        case _ => countInBucket += iterFrom
+      }
+    }
+    var usedLastValue = false
+    // more buckets left to fill but only one unused value, sitting in fromLowerBound.
+    while (lowerBoundaryIterator.hasNext) {
+      if (fromLowerBound.value > toLowerBound && fromLowerBound.value < toUpperBound) {
+        usedLastValue = true
+        countInBucket += fromLowerBound.frequency
+      }
+      toLowerBound match {
+        case 1 => zeroCount += iterTo
+        case b if b < 1 => negativeCounts += iterTo
+        case b if b > 1 => positiveCounts += iterTo
+      }
+    }
+    // more values left, but only one unfilled bucket, sitting in toLowerBound
+    while (valuesIterator.hasNext) {
+      countInBucket += iterFrom
+    }
+    if (!usedLastValue) countInBucket += fromLowerBound.frequency
+    positiveCounts += countInBucket
+
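+    // expose the accumulated counts through OTel's ExponentialHistogramBuckets view: the negative-range
+    // counts start at offset -maxBucketCount and the positive-range counts at offset 1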
+    val negBucket: ExponentialHistogramBuckets = new ExponentialHistogramBuckets {
+      val getOffset: Int = -maxBucketCount
+      private val longs: ArrayBuffer[JLong] = negativeCounts.result()
+      val getBucketCounts: util.List[JLong] = new JArrayList(longs.asJava)
+      val getTotalCount: Long = longs.foldLeft(0L)(_ + _)
+    }
+    val posBucket: ExponentialHistogramBuckets = new ExponentialHistogramBuckets {
+      val getOffset: Int = 1
+      private val longs: ArrayBuffer[JLong] = positiveCounts.result()
+      val getBucketCounts: util.List[JLong] = new JArrayList(longs.asJava)
+      val getTotalCount: Long = longs.foldLeft(0L)(_ + _)
+    }
+    (negBucket, zeroCount, posBucket)
+  }
+
+  private def toExponentialHistogramData(maxBucketCount: Int, distributions: Seq[Snapshot[Distribution]]): Option[ExponentialHistogramData] =
     distributions.filter(_.value.buckets.nonEmpty) match {
       case Nil => None
       case nonEmpty =>
         val mapped = nonEmpty.flatMap { s =>
-          s.value match {
-            case zigZag: Distribution.ZigZagCounts =>
-              logger.error("Unable to construct exponential histogram data - Unimplemented")
-              None
-            // Some(ExponentialHistogramPointData.create(
-            //   ???, zigZag.sum, ???, ???, ???, fromNs, toNs, SpanConverter.toAttributes(s.tags), new JArrayList[DoubleExemplarData]()
-            // ))
-            case _ =>
-              logger.error("Unable to construct exponential histogram data - only ZigZagCounts distribution can be converted")
-              None
-          }
+          def maxScale(v: JDouble): Int = MetricsConverter.maxScale(maxBucketCount)(v)
+
+          // Could also calculate an 'offset' here, but defaulting to offset = 1 for simplicity
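+          // pick the largest scale (finest buckets) at which maxBucketCount buckets still cover both the snapshot's min and max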
+          val scale = Math.min(maxScale(s.value.min.toDouble), maxScale(s.value.max.toDouble))
+          val (neg, zero, pos) = getExpoBucketCounts(scale, maxBucketCount)(s)
+          Some(ExponentialHistogramPointData.create(
+            scale, s.value.sum, zero, pos, neg, fromNs, toNs, SpanConverter.toAttributes(s.tags), new JArrayList[DoubleExemplarData]()
+          ))
         }
         if (mapped.nonEmpty) Some(ImmutableExponentialHistogramData.create(AggregationTemporality.DELTA, mapped.asJava))
         else None
     }

-  def convertExponentialHistogram(histogram: MetricSnapshot.Distributions): Option[MetricData] =
-    toExponentialHistogramData(histogram.instruments).map(d =>
+  def convertExponentialHistogram(histogram: MetricSnapshot.Distributions): Option[MetricData] = {
+    val maxBucketCount = expoBucketConfig(histogram.name, histogram.settings.unit)
+    toExponentialHistogramData(maxBucketCount, histogram.instruments).map(d =>
       ImmutableMetricData.createExponentialHistogram(
         resource,
         instrumentationScopeInfo(histogram),
         histogram.name,
         histogram.description,
         toString(histogram.settings.unit),
         d))
+  }

   def convertHistogram(histogramFormat: HistogramFormat)(histogram: MetricSnapshot.Distributions): Option[MetricData] = histogramFormat match {
     case Explicit => convertExplicitHistogram(histogram)
@@ -146,13 +291,26 @@ class WithResourceMetricsConverter(resource: Resource, kamonVersion: String, fro
  * Converts Kamon metrics to OpenTelemetry [[MetricData]]s
  */
 private[otel] object MetricsConverter {
-  def convert(resource: Resource, kamonVersion: String, histogramFormat: HistogramFormat)(metrics: PeriodSnapshot): JCollection[MetricData] = {
-    val converter = new WithResourceMetricsConverter(resource, kamonVersion, metrics.from, metrics.to)
+  type ExplBucketFn = (String, MeasurementUnit) => Seq[JDouble]
+  type ExpoBucketFn = (String, MeasurementUnit) => Int
+  private val minScale = -10
+  private val maxScale = 20
+
+  def convert(resource: Resource, kamonVersion: String, histogramFormat: HistogramFormat,
+              explicitBucketConfig: ExplBucketFn, exponentialBucketConfig: ExpoBucketFn)(metrics: PeriodSnapshot): JCollection[MetricData] = {
+    val converter = new WithResourceMetricsConverter(resource, kamonVersion, metrics.from, metrics.to, explicitBucketConfig, exponentialBucketConfig)
     val gauges = metrics.gauges.filter(_.instruments.nonEmpty).map(converter.convertGauge)
     val histograms = (metrics.histograms ++ metrics.timers ++ metrics.rangeSamplers).filter(_.instruments.nonEmpty)
       .flatMap(converter.convertHistogram(histogramFormat))
     val counters = metrics.counters.filter(_.instruments.nonEmpty).map(converter.convertCounter)

     (gauges ++ histograms ++ counters).asJava
   }
+
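+  // pre-computed (scale, base) pairs with base = 2^(2^-scale), ordered from the finest (scale 20) to the coarsest (scale -10) resolution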
+  private val bases = (maxScale to minScale by -1).map(scale => (scale, Math.pow(2, Math.pow(2, -scale)))).toArray
+
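+  // largest scale at which maxBucketCount exponential buckets, counted from 1, still reach v; falls back to minScale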
+  def maxScale(maxBucketCount: Int)(v: JDouble): Int = {
+    if (v >= 1) bases.collectFirst { case (scale, base) if Math.pow(base, maxBucketCount) >= v => scale }.getOrElse(minScale)
+    else bases.collectFirst { case (scale, base) if Math.pow(base, -maxBucketCount) <= v => scale }.getOrElse(minScale)
+  }
 }