
Commit

fix histograms & add tests
hughsimpson committed Aug 4, 2023
1 parent 10b3dc8 commit dcd8938
Showing 2 changed files with 84 additions and 3 deletions.
@@ -28,6 +28,7 @@ import java.lang.{Double => JDouble, Long => JLong}
import java.time.Instant
import java.util.{Collection => JCollection}
import scala.collection.JavaConverters._
import scala.collection.mutable.ArrayBuffer

class WithResourceMetricsConverter(resource: Resource, kamonVersion: String, from: Instant, to: Instant) {
private val fromNs = from.toEpochMilli * 1000000
@@ -53,17 +54,24 @@ class WithResourceMetricsConverter(resource: Resource, kamonVersion: String, fro
toString(gauge.settings.unit),
toGaugeData(gauge.instruments))

def toHistogramDatum(s: Snapshot[Distribution]): HistogramPointData =
def toHistogramDatum(s: Snapshot[Distribution]): HistogramPointData = {
val boundaries = ArrayBuffer.newBuilder[JDouble]
val counts = ArrayBuffer.newBuilder[JLong]
for (el <- s.value.bucketsIterator) {
counts += el.frequency
boundaries += el.value.toDouble
}
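// OTel explicit-bucket points take one count per bucket but one fewer boundary (the last count is the overflow bucket), hence the dropRight(1) on the boundaries below.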
ImmutableHistogramPointData.create(
fromNs,
toNs,
SpanConverter.toAttributes(s.tags),
JDouble valueOf s.value.sum.toDouble,
JDouble valueOf s.value.min.toDouble,
JDouble valueOf s.value.max.toDouble,
s.value.buckets.map(JDouble valueOf _.value.toDouble).asJava,
s.value.buckets.map(JLong valueOf _.frequency).asJava
boundaries.result().dropRight(1).asJava,
counts.result().asJava
)
}

def toHistogramData(distributions: Seq[Snapshot[Distribution]]): Option[HistogramData] =
distributions.filter(_.value.buckets.nonEmpty) match {
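The key change in the converter above is that bucket boundaries and counts are now collected in one pass over bucketsIterator, and the last boundary is dropped: OpenTelemetry explicit-bucket histogram points carry one more count than boundary, with the final count covering everything above the last bound. A minimal sketch of that conversion over plain (upper bound, frequency) pairs, where toBoundariesAndCounts is a hypothetical helper and not part of this reporter:

// Sketch only: mirrors the boundary/count shape built in toHistogramDatum.
def toBoundariesAndCounts(buckets: Seq[(Long, Long)]): (Seq[Double], Seq[Long]) = {
  // OTel explicit-bucket histograms require counts.size == boundaries.size + 1,
  // so the last bucket's upper bound is dropped and its count becomes the
  // overflow bucket above the final boundary.
  val boundaries = buckets.map(_._1.toDouble).dropRight(1)
  val counts = buckets.map(_._2)
  (boundaries, counts)
}

// e.g. toBoundariesAndCounts(Seq(1L -> 2L, 2L -> 2L, 3L -> 3L))
//      == (Seq(1.0, 2.0), Seq(2L, 2L, 3L))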
@@ -27,6 +27,7 @@ import kamon.testkit.Reconfigure
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

import java.lang.{Double => JDouble, Long => JLong}
import java.time.Instant
import java.util.{Collection => JCollection}
import scala.collection.JavaConverters._
@@ -103,8 +104,80 @@ class OpenTelemetryMetricReporterSpec extends AnyWordSpec
points.head.getAttributes.get(AttributeKey.stringKey("tag1")) should equal("value1")
points.head.getValue shouldEqual 42L
}
"send histogram metrics" in {
val (reporter, mockService) = openTelemetryMetricsReporter()
val now = Instant.now()
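// Report a single distribution snapshot, tagged tag1=value1, covering the second leading up to now.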
reporter.reportPeriodSnapshot(
PeriodSnapshot.apply(
now.minusMillis(1000),
now,
Nil,
Nil,
MetricSnapshot.ofDistributions(
"test.histogram",
"test",
Metric.Settings.ForDistributionInstrument(MeasurementUnit.none, java.time.Duration.ZERO, DynamicRange.Default),
Instrument.Snapshot(
TagSet.from(Map("tag1" -> "value1")),
buildHistogramDist(Seq(1L -> 2L, 2L -> 2L, 3L -> 3L))
) :: Nil) :: Nil,
Nil,
Nil
)
)
// basic sanity
mockService.exportMetricsServiceRequest should not be empty
mockService.exportMetricsServiceRequest.get should have size 1
val exportedMetrics: Seq[MetricData] = mockService.exportMetricsServiceRequest.get.asScala.toSeq
exportedMetrics should have size 1
val metricData = exportedMetrics.head


// check value
metricData.getName should equal("test.histogram")
metricData.getDescription should equal("test")
val sumData = metricData.getHistogramData
val points = sumData.getPoints.asScala.toSeq
points should have size 1
points.head.getAttributes should have size 1
points.head.getAttributes.get(AttributeKey.stringKey("tag1")) should equal("value1")
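// For buckets (1 -> 2, 2 -> 2, 3 -> 3): min = 1, max = 3, sum = 1*2 + 2*2 + 3*3 = 15, count = 2 + 2 + 3 = 7.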
points.head.getMin shouldEqual 1L
points.head.getMax shouldEqual 3L
points.head.getSum shouldEqual 15L
points.head.getCount shouldEqual 7L
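// Two boundaries (1, 2) delimit three counts; the last Kamon bucket (value 3, frequency 3) falls in the overflow bucket above the final boundary.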
points.head.getBoundaries.asScala shouldEqual Seq[JDouble](1d, 2d)
points.head.getCounts.asScala shouldEqual Seq[JLong](2L, 2L, 3L)
}
}

private def buildHistogramDist(_buckets: Seq[(Long, Long)]): Distribution = {

val distribution: Distribution = new Distribution() {
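// Minimal stub: only the members exercised by the converter and this spec (min, max, sum, count, bucketsIterator) are implemented.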
override def dynamicRange: DynamicRange = DynamicRange.Default

override def min: Long = _buckets.minBy(_._1)._1

override def max: Long = _buckets.maxBy(_._1)._1

override def sum: Long = _buckets.foldLeft(0L) { case (a, (v, f)) => a + (v * f) }

override def count: Long = _buckets.foldLeft(0L) { case (a, (_, f)) => a + f }

override def percentile(rank: Double): Distribution.Percentile = ??? //percentileValues.get(rank).map(r => r.toPercentile).orNull

override def percentiles: Seq[Distribution.Percentile] = ??? //Seq(perc.toPercentile)

override def percentilesIterator: Iterator[Distribution.Percentile] = null

override def buckets: Seq[Distribution.Bucket] = bucketsIterator.toSeq

override def bucketsIterator: Iterator[Distribution.Bucket] = _buckets.iterator.map { case (v, f) => PureDistributionBucket(v, f) }
}
distribution
}

case class PureDistributionBucket(value: Long, frequency: Long) extends Distribution.Bucket

private class MockMetricsService extends MetricsService {
var exportMetricsServiceRequest: Option[JCollection[MetricData]] = None
var hasBeenClosed = false
