@@ -18,20 +18,24 @@ package kamon.otel
 import io.opentelemetry.sdk.common.InstrumentationScopeInfo
 import io.opentelemetry.sdk.metrics.data._
 import io.opentelemetry.sdk.metrics.internal.data._
-import io.opentelemetry.sdk.metrics.internal.data.exponentialhistogram.{ExponentialHistogramData, ExponentialHistogramPointData, ImmutableExponentialHistogramData}
+import io.opentelemetry.sdk.metrics.internal.data.exponentialhistogram.{ExponentialHistogramBuckets, ExponentialHistogramData, ExponentialHistogramPointData, ImmutableExponentialHistogramData}
 import io.opentelemetry.sdk.resources.Resource
 import kamon.metric.Instrument.Snapshot
 import kamon.metric.{Distribution, MeasurementUnit, MetricSnapshot, PeriodSnapshot}
 import kamon.otel.HistogramFormat.{Explicit, Exponential, HistogramFormat}
+import kamon.otel.MetricsConverter.{ExplBucketFn, ExpoBucketFn}
 import org.slf4j.LoggerFactory

 import java.lang.{Double => JDouble, Long => JLong}
 import java.time.Instant
-import java.util.{Collection => JCollection, ArrayList => JArrayList}
+import java.util
+import java.util.{ArrayList => JArrayList, Collection => JCollection}
 import scala.collection.JavaConverters._
 import scala.collection.mutable.ArrayBuffer

-class WithResourceMetricsConverter(resource: Resource, kamonVersion: String, from: Instant, to: Instant) {
+class WithResourceMetricsConverter(resource: Resource, kamonVersion: String, from: Instant, to: Instant,
+                                   explBucketConfig: ExplBucketFn, expoBucketConfig: ExpoBucketFn) {
+  private val maxDouble: JDouble = JDouble.valueOf(JDouble.MAX_VALUE)
   private val logger = LoggerFactory.getLogger(getClass)
   private val fromNs = from.toEpochMilli * 1000000
   private val toNs = to.toEpochMilli * 1000000
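// Illustrative note (not part of this commit): explBucketConfig and expoBucketConfig are per-metric
// lookups from (metric name, unit) to bucket configuration; for example explBucketConfig might return
// explicit boundaries such as Seq(0.005, 0.01, 0.025) for a timer, and expoBucketConfig a bucket cap
// such as 160. All concrete values here are assumptions for illustration.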
@@ -54,71 +58,212 @@ class WithResourceMetricsConverter(resource: Resource, kamonVersion: String, fro
       toString(gauge.settings.unit),
       toGaugeData(gauge.instruments))

-  private def toExplicitHistogramDatum(s: Snapshot[Distribution]): HistogramPointData = {
-    val boundaries = ArrayBuffer.newBuilder[JDouble]
+  private def getExplBucketCounts(bucketConfiguration: Seq[JDouble])(s: Snapshot[Distribution]) = {
     val counts = ArrayBuffer.newBuilder[JLong]
+    val boundaryIterator: Iterator[JDouble] = (bucketConfiguration :+ maxDouble).iterator
+    var nextBoundary = boundaryIterator.next()
+    var inBucketCount = 0L
     for (el <- s.value.bucketsIterator) {
-      counts += el.frequency
-      boundaries += el.value.toDouble
+      while (el.value > nextBoundary) {
+        nextBoundary = boundaryIterator.next()
+        counts += inBucketCount
+        inBucketCount = 0L
+      }
+      inBucketCount += el.frequency
     }
+    while (boundaryIterator.hasNext) {
+      counts += inBucketCount
+      boundaryIterator.next()
+      inBucketCount = 0L
+    }
+    counts += inBucketCount
+    counts
+  }
+
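// Worked example (illustrative, not part of this commit): with bucketConfiguration Seq(5.0, 10.0),
// a snapshot whose Kamon buckets are (value 1, freq 2), (value 7, freq 3), (value 12, freq 1)
// yields counts [2, 3, 1]: values <= 5, values in (5, 10], and everything above 10 (capped by maxDouble).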
+  private def toExplicitHistogramDatum(bucketConfiguration: Seq[JDouble])(s: Snapshot[Distribution]): HistogramPointData = {
+    val counts = getExplBucketCounts(bucketConfiguration)(s)
     ImmutableHistogramPointData.create(
       fromNs,
       toNs,
       SpanConverter.toAttributes(s.tags),
       JDouble valueOf s.value.sum.toDouble,
       JDouble valueOf s.value.min.toDouble,
       JDouble valueOf s.value.max.toDouble,
-      boundaries.result().dropRight(1).asJava,
+      bucketConfiguration.asJava,
       counts.result().asJava
     )
   }

-  private def toExplicitHistogramData(distributions: Seq[Snapshot[Distribution]]): Option[HistogramData] =
+  private def toExplicitHistogramData(bucketConfiguration: Seq[JDouble], distributions: Seq[Snapshot[Distribution]]): Option[HistogramData] =
     distributions.filter(_.value.buckets.nonEmpty) match {
       case Nil => None
-      case nonEmpty => Some(ImmutableHistogramData.create(AggregationTemporality.DELTA, nonEmpty.map(toExplicitHistogramDatum).asJava))
+      case nonEmpty => Some(ImmutableHistogramData.create(AggregationTemporality.DELTA, nonEmpty.map(toExplicitHistogramDatum(bucketConfiguration)).asJava))
     }

-  def convertExplicitHistogram(histogram: MetricSnapshot.Distributions): Option[MetricData] =
-    toExplicitHistogramData(histogram.instruments).map(d =>
+  def convertExplicitHistogram(histogram: MetricSnapshot.Distributions): Option[MetricData] = {
+    val bucketConfiguration = explBucketConfig(histogram.name, histogram.settings.unit)
+    toExplicitHistogramData(bucketConfiguration, histogram.instruments).map(d =>
       ImmutableMetricData.createDoubleHistogram(
         resource,
         instrumentationScopeInfo(histogram),
         histogram.name,
         histogram.description,
         toString(histogram.settings.unit),
         d))
+  }
+
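// Note with an example (illustrative, not part of this commit): boundaries now come from the
// configuration rather than from the snapshot, so the old dropRight(1) is gone; for N configured
// boundaries getExplBucketCounts produces N + 1 counts, e.g. Seq(5.0, 10.0) pairs with three counts.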
+  class ItWithLast[T](it: Iterator[T], last: T) extends Iterator[T] {
+    private var showedLast: Boolean = false
+
+    def hasNext: Boolean = it.hasNext || !showedLast
+
+    def next(): T = if (it.hasNext) it.next() else if (!showedLast) {
+      showedLast = true
+      last
+    } else throw new RuntimeException("Next on empty Iterator")
+  }
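// Illustrative usage (not part of this commit): new ItWithLast(Iterator(1, 2), 3).toList == List(1, 2, 3);
// getExpoBucketCounts below uses it to append a sentinel bucket (value Long.MaxValue, frequency 0)
// so the merge loop always sees one final upper bound.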

-  private def toExponentialHistogramData(distributions: Seq[Snapshot[Distribution]]): Option[ExponentialHistogramData] =
+  private def getExpoBucketCounts(scale: Int, maxBucketCount: Int)(s: Snapshot[Distribution]) = {
+    val base = Math.pow(2, Math.pow(2, -scale))
+    val lowerBoundaryIterator: Iterator[Double] = ((-maxBucketCount to maxBucketCount).map(i => Math.pow(base, i)) :+ Double.MaxValue).iterator
+    val valuesIterator = new ItWithLast[Distribution.Bucket](s.value.bucketsIterator, new Distribution.Bucket {
+      def value: Long = Long.MaxValue
+
+      def frequency: Long = 0
+    })
+    var fromLowerBound = valuesIterator.next()
+    var fromUpperBound = valuesIterator.next()
+    var toLowerBound = lowerBoundaryIterator.next()
+    var toUpperBound = lowerBoundaryIterator.next()
+    var zeroCount: JLong = 0L
+    var countInBucket = 0L
+
+    val negativeCounts = ArrayBuffer.newBuilder[JLong]
+    val positiveCounts = ArrayBuffer.newBuilder[JLong]
+
+    def iterFrom: JLong = {
+      val d = fromLowerBound.frequency
+      fromLowerBound = fromUpperBound
+      fromUpperBound = valuesIterator.next()
+      d
+    }
+
+    def iterTo: JLong = {
+      toLowerBound = toUpperBound
+      toUpperBound = lowerBoundaryIterator.next()
+      val res = countInBucket
+      countInBucket = 0
+      res
+    }
+    // normal case
+    while (lowerBoundaryIterator.hasNext && valuesIterator.hasNext) {
+      if (fromUpperBound.value <= toLowerBound) {
+        countInBucket += iterFrom // Or drop?
+      } else if (fromLowerBound.value >= toUpperBound) toLowerBound match {
+        case 1 => zeroCount += iterTo
+        case b if b < 1 => negativeCounts += iterTo
+        case b if b > 1 => positiveCounts += iterTo
+      } else if (fromUpperBound.value == toUpperBound) toLowerBound match {
+        case 1 =>
+          zeroCount += iterFrom
+          iterTo
+        case b if b < 1 =>
+          countInBucket += iterFrom
+          negativeCounts += iterTo
+        case b if b > 1 =>
+          countInBucket += iterFrom
+          positiveCounts += iterTo
+      } else if (fromUpperBound.value > toUpperBound) {
+        val firstBonus: JLong = countInBucket
+        var negBuckets = 0
+        var zeroBuckets = 0
+        var posBuckets = 0
+        while (fromUpperBound.value > toUpperBound && lowerBoundaryIterator.hasNext) {
+          if (toLowerBound < 1) negBuckets += 1
+          else if (toLowerBound == 1) zeroBuckets += 1
+          else if (toLowerBound >= 1) posBuckets += 1
+          toLowerBound = toUpperBound
+          toUpperBound = lowerBoundaryIterator.next()
+        }
+        val total = iterFrom
+        // Not sure about this... everything's going into the first bucket, even though we might be spanning multiple target buckets.
+        // Might be better to do something like push the avg.floor into each bucket, interpolating the remainder.
+        // OTOH it may not really come up much in practice, since the internal histos are likely to have similar or finer granularity
+        negativeCounts ++= (if (negBuckets > 0) JLong.valueOf(firstBonus + total) +: Array.fill(negBuckets - 1)(JLong.valueOf(0)) else Nil)
+        zeroCount += (if (negBuckets == 0 && zeroBuckets == 1) JLong.valueOf(firstBonus + total) else JLong.valueOf(0))
+        positiveCounts ++= (
+          if (negBuckets == 0 && zeroBuckets == 0 && posBuckets > 0)
+            JLong.valueOf(firstBonus + total) +: Array.fill(posBuckets - 1)(JLong.valueOf(0))
+          else Array.fill(posBuckets)(JLong.valueOf(0)))
+      } else /* if (fromUpperBound.value < toUpperBound) */ toLowerBound match {
+        case 1 => zeroCount += iterFrom
+        case _ => countInBucket += iterFrom
+      }
+    }
+    var usedLastValue = false
+    // more buckets left to fill but only one unused value, sitting in fromLowerBound.
+    while (lowerBoundaryIterator.hasNext) {
+      if (fromLowerBound.value > toLowerBound && fromLowerBound.value < toUpperBound) {
+        usedLastValue = true
+        countInBucket += fromLowerBound.frequency
+      }
+      toLowerBound match {
+        case 1 => zeroCount += iterTo
+        case b if b < 1 => negativeCounts += iterTo
+        case b if b > 1 => positiveCounts += iterTo
+      }
+    }
+    // more values left, but only one unfilled bucket, sitting in toLowerBound
+    while (valuesIterator.hasNext) {
+      countInBucket += iterFrom
+    }
+    if (!usedLastValue) countInBucket += fromLowerBound.frequency
+    positiveCounts += countInBucket
+
+    val negBucket: ExponentialHistogramBuckets = new ExponentialHistogramBuckets {
+      val getOffset: Int = -maxBucketCount
+      private val longs: ArrayBuffer[JLong] = negativeCounts.result()
+      val getBucketCounts: util.List[JLong] = new JArrayList(longs.asJava)
+      val getTotalCount: Long = longs.foldLeft(0L)(_ + _)
+    }
+    val posBucket: ExponentialHistogramBuckets = new ExponentialHistogramBuckets {
+      val getOffset: Int = 1
+      private val longs: ArrayBuffer[JLong] = positiveCounts.result()
+      val getBucketCounts: util.List[JLong] = new JArrayList(longs.asJava)
+      val getTotalCount: Long = longs.foldLeft(0L)(_ + _)
+    }
+    (negBucket, zeroCount, posBucket)
+  }
+
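// Worked numbers (illustrative, not part of this commit): base = 2^(2^-scale), so scale 0 gives
// base 2, scale 1 gives base ~1.414 and scale -1 gives base 4; the candidate lower bounds above are
// base^i for i in [-maxBucketCount, maxBucketCount], so a larger scale means finer buckets over a
// narrower value range.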
+  private def toExponentialHistogramData(maxBucketCount: Int, distributions: Seq[Snapshot[Distribution]]): Option[ExponentialHistogramData] =
     distributions.filter(_.value.buckets.nonEmpty) match {
       case Nil => None
       case nonEmpty =>
         val mapped = nonEmpty.flatMap { s =>
-          s.value match {
-            case zigZag: Distribution.ZigZagCounts =>
-              logger.error("Unable to construct exponential histogram data - Unimplemented")
-              None
-            // Some(ExponentialHistogramPointData.create(
-            //   ???, zigZag.sum, ???, ???, ???, fromNs, toNs, SpanConverter.toAttributes(s.tags), new JArrayList[DoubleExemplarData]()
-            // ))
-            case _ =>
-              logger.error("Unable to construct exponential histogram data - only ZigZagCounts distribution can be converted")
-              None
-          }
+          def maxScale(v: JDouble): Int = MetricsConverter.maxScale(maxBucketCount)(v)
+
+          // Could also calculate an 'offset' here, but defaulting to offset = 1 for simplicity
+          val scale = Math.min(maxScale(s.value.min.toDouble), maxScale(s.value.max.toDouble))
+          val (neg, zero, pos) = getExpoBucketCounts(scale, maxBucketCount)(s)
+          Some(ExponentialHistogramPointData.create(
+            scale, s.value.sum, zero, pos, neg, fromNs, toNs, SpanConverter.toAttributes(s.tags), new JArrayList[DoubleExemplarData]()
+          ))
         }
         if (mapped.nonEmpty) Some(ImmutableExponentialHistogramData.create(AggregationTemporality.DELTA, mapped.asJava))
         else None
     }

-  def convertExponentialHistogram(histogram: MetricSnapshot.Distributions): Option[MetricData] =
-    toExponentialHistogramData(histogram.instruments).map(d =>
+  def convertExponentialHistogram(histogram: MetricSnapshot.Distributions): Option[MetricData] = {
+    val maxBucketCount = expoBucketConfig(histogram.name, histogram.settings.unit)
+    toExponentialHistogramData(maxBucketCount, histogram.instruments).map(d =>
       ImmutableMetricData.createExponentialHistogram(
         resource,
         instrumentationScopeInfo(histogram),
         histogram.name,
         histogram.description,
         toString(histogram.settings.unit),
         d))
+  }

   def convertHistogram(histogramFormat: HistogramFormat)(histogram: MetricSnapshot.Distributions): Option[MetricData] = histogramFormat match {
     case Explicit => convertExplicitHistogram(histogram)
@@ -146,13 +291,26 @@ class WithResourceMetricsConverter(resource: Resource, kamonVersion: String, fro
  * Converts Kamon metrics to OpenTelemetry [[MetricData]]s
  */
 private[otel] object MetricsConverter {
-  def convert(resource: Resource, kamonVersion: String, histogramFormat: HistogramFormat)(metrics: PeriodSnapshot): JCollection[MetricData] = {
-    val converter = new WithResourceMetricsConverter(resource, kamonVersion, metrics.from, metrics.to)
+  type ExplBucketFn = (String, MeasurementUnit) => Seq[JDouble]
+  type ExpoBucketFn = (String, MeasurementUnit) => Int
+  private val minScale = -10
+  private val maxScale = 20
+
+  def convert(resource: Resource, kamonVersion: String, histogramFormat: HistogramFormat,
+              explicitBucketConfig: ExplBucketFn, exponentialBucketConfig: ExpoBucketFn)(metrics: PeriodSnapshot): JCollection[MetricData] = {
+    val converter = new WithResourceMetricsConverter(resource, kamonVersion, metrics.from, metrics.to, explicitBucketConfig, exponentialBucketConfig)
     val gauges = metrics.gauges.filter(_.instruments.nonEmpty).map(converter.convertGauge)
     val histograms = (metrics.histograms ++ metrics.timers ++ metrics.rangeSamplers).filter(_.instruments.nonEmpty)
       .flatMap(converter.convertHistogram(histogramFormat))
     val counters = metrics.counters.filter(_.instruments.nonEmpty).map(converter.convertCounter)

     (gauges ++ histograms ++ counters).asJava
   }
+
+  private val bases = (maxScale to minScale by -1).map(scale => (scale, Math.pow(2, Math.pow(2, -scale)))).toArray
+
+  def maxScale(maxBucketCount: Int)(v: JDouble): Int = {
+    if (v >= 1) bases.collectFirst { case (scale, base) if Math.pow(base, maxBucketCount) >= v => scale }.getOrElse(minScale)
+    else bases.collectFirst { case (scale, base) if Math.pow(base, -maxBucketCount) <= v => scale }.getOrElse(minScale)
+  }
 }
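A minimal usage sketch of the new configuration hooks follows; the object name, the bucket boundaries and the 160-bucket cap are illustrative assumptions, not part of this commit.

package kamon.otel

import java.lang.{Double => JDouble}

import kamon.otel.MetricsConverter.{ExplBucketFn, ExpoBucketFn}

object BucketConfigSketch {
  // Hypothetical per-metric bucket configuration: here the same values for every metric and unit.
  val explicitBuckets: ExplBucketFn = (_, _) => Seq[JDouble](1.0, 10.0, 100.0)
  val exponentialBuckets: ExpoBucketFn = (_, _) => 160

  // Wiring (resource, kamonVersion and periodSnapshot come from the exporter):
  //   MetricsConverter.convert(resource, kamonVersion, HistogramFormat.Explicit,
  //     explicitBuckets, exponentialBuckets)(periodSnapshot)

  // maxScale picks the largest scale whose base, raised to maxBucketCount, still reaches v:
  // with 10 buckets and v = 1000, scale 0 works (2^10 = 1024 >= 1000) but scale 1 does not (sqrt(2)^10 = 32).
  val chosenScale: Int = MetricsConverter.maxScale(10)(1000.0) // == 0
}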