update oc internal, prometheus exporters to use attributes (#3770)
* update oc internal, prometheus exporters to use attributes

* rename addAttributesToDoubleHistogramDataPoints

* cleaned up TODOs, mentioned the code in the issue instead
alrex committed Aug 11, 2021
1 parent a360414 commit 632ed32
Showing 17 changed files with 187 additions and 204 deletions.
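
The changes below migrate call sites from the deprecated metric-label API (pdata.StringMap via LabelsMap()) to the attribute API (pdata.AttributeMap via Attributes()). For orientation, a minimal sketch of the two map shapes as they are used in this commit, assuming the same model package the changed files import (go.opentelemetry.io/collector/model/pdata) at a version contemporary with this commit; the key and value are hypothetical:

package main

import (
	"fmt"

	"go.opentelemetry.io/collector/model/pdata"
)

func main() {
	// Old shape: labels are a string-to-string map.
	labels := pdata.NewStringMap()
	labels.Upsert("service.name", "checkout")
	labels.Range(func(k, v string) bool { // values are plain strings
		fmt.Println(k, v)
		return true
	})

	// New shape: attributes are a typed map of pdata.AttributeValue.
	attrs := pdata.NewAttributeMap()
	attrs.InsertString("service.name", "checkout")
	attrs.Range(func(k string, v pdata.AttributeValue) bool {
		fmt.Println(k, v.StringVal())
		return true
	})
}
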
2 changes: 1 addition & 1 deletion exporter/exporterhelper/metrics.go
@@ -91,7 +91,7 @@ func NewMetricsExporter(

mc, err := consumerhelper.NewMetrics(func(ctx context.Context, md pdata.Metrics) error {
if bs.ResourceToTelemetrySettings.Enabled {
md = convertResourceToLabels(md)
md = convertResourceToAttributes(md)
}
req := newMetricsRequest(ctx, md, pusher)
err := be.sender.send(req)
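
The only functional change in metrics.go is the renamed helper; the surrounding shape is a conversion step applied to the metrics payload before it reaches the push function. A sketch of that shape, with hypothetical names (wrapWithConversion and its convert/push parameters), since the exporterhelper internals above are unexported:

package sketch // hypothetical package name

import (
	"context"

	"go.opentelemetry.io/collector/model/pdata"
)

// wrapWithConversion is a hypothetical stand-in for the pattern used in
// NewMetricsExporter above: optionally rewrite the metrics, then delegate to
// the real push function.
func wrapWithConversion(
	enabled bool,
	convert func(pdata.Metrics) pdata.Metrics,
	push func(context.Context, pdata.Metrics) error,
) func(context.Context, pdata.Metrics) error {
	return func(ctx context.Context, md pdata.Metrics) error {
		if enabled {
			md = convert(md)
		}
		return push(ctx, md)
	}
}
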
45 changes: 14 additions & 31 deletions exporter/exporterhelper/resource_to_label.go
@@ -16,7 +16,6 @@ package exporterhelper

import (
"go.opentelemetry.io/collector/model/pdata"
tracetranslator "go.opentelemetry.io/collector/translator/trace"
)

// ResourceToTelemetrySettings defines configuration for converting resource attributes to metric labels.
@@ -32,68 +31,52 @@ func defaultResourceToTelemetrySettings() ResourceToTelemetrySettings {
}
}

// convertResourceToLabels converts all resource attributes to metric labels
func convertResourceToLabels(md pdata.Metrics) pdata.Metrics {
// convertResourceToAttributes converts all resource attributes to metric labels
func convertResourceToAttributes(md pdata.Metrics) pdata.Metrics {
cloneMd := md.Clone()
rms := cloneMd.ResourceMetrics()
for i := 0; i < rms.Len(); i++ {
resource := rms.At(i).Resource()

labelMap := extractLabelsFromResource(&resource)

ilms := rms.At(i).InstrumentationLibraryMetrics()
for j := 0; j < ilms.Len(); j++ {
ilm := ilms.At(j)
metricSlice := ilm.Metrics()
for k := 0; k < metricSlice.Len(); k++ {
metric := metricSlice.At(k)
addLabelsToMetric(&metric, labelMap)
addAttributesToMetric(&metric, resource.Attributes())
}
}
}
return cloneMd
}

// extractAttributesFromResource extracts the attributes from a given resource and
// returns them as a StringMap.
func extractLabelsFromResource(resource *pdata.Resource) pdata.StringMap {
labelMap := pdata.NewStringMap()

attrMap := resource.Attributes()
attrMap.Range(func(k string, av pdata.AttributeValue) bool {
stringLabel := tracetranslator.AttributeValueToString(av)
labelMap.Upsert(k, stringLabel)
return true
})
return labelMap
}

// addLabelsToMetric adds additional labels to the given metric
func addLabelsToMetric(metric *pdata.Metric, labelMap pdata.StringMap) {
// addAttributesToMetric adds additional labels to the given metric
func addAttributesToMetric(metric *pdata.Metric, labelMap pdata.AttributeMap) {
switch metric.DataType() {
case pdata.MetricDataTypeGauge:
addLabelsToNumberDataPoints(metric.Gauge().DataPoints(), labelMap)
addAttributesToNumberDataPoints(metric.Gauge().DataPoints(), labelMap)
case pdata.MetricDataTypeSum:
addLabelsToNumberDataPoints(metric.Sum().DataPoints(), labelMap)
addAttributesToNumberDataPoints(metric.Sum().DataPoints(), labelMap)
case pdata.MetricDataTypeHistogram:
addLabelsToDoubleHistogramDataPoints(metric.Histogram().DataPoints(), labelMap)
addAttributesToHistogramDataPoints(metric.Histogram().DataPoints(), labelMap)
}
}

func addLabelsToNumberDataPoints(ps pdata.NumberDataPointSlice, newLabelMap pdata.StringMap) {
func addAttributesToNumberDataPoints(ps pdata.NumberDataPointSlice, newAttributeMap pdata.AttributeMap) {
for i := 0; i < ps.Len(); i++ {
joinStringMaps(newLabelMap, ps.At(i).LabelsMap())
joinAttributeMaps(newAttributeMap, ps.At(i).Attributes())
}
}

func addLabelsToDoubleHistogramDataPoints(ps pdata.HistogramDataPointSlice, newLabelMap pdata.StringMap) {
func addAttributesToHistogramDataPoints(ps pdata.HistogramDataPointSlice, newAttributeMap pdata.AttributeMap) {
for i := 0; i < ps.Len(); i++ {
joinStringMaps(newLabelMap, ps.At(i).LabelsMap())
joinAttributeMaps(newAttributeMap, ps.At(i).Attributes())
}
}

func joinStringMaps(from, to pdata.StringMap) {
from.Range(func(k, v string) bool {
func joinAttributeMaps(from, to pdata.AttributeMap) {
from.Range(func(k string, v pdata.AttributeValue) bool {
to.Upsert(k, v)
return true
})
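
The net effect of the rewritten file: every resource attribute is upserted into each gauge, sum, and histogram data point's attribute map, so on a key collision the resource value overwrites the point's own value. A runnable sketch of that merge rule, using the same Range/Upsert pattern as joinAttributeMaps above (keys and values are hypothetical):

package main

import (
	"fmt"

	"go.opentelemetry.io/collector/model/pdata"
)

func main() {
	resourceAttrs := pdata.NewAttributeMap()
	resourceAttrs.InsertString("service.name", "checkout")
	resourceAttrs.InsertString("env", "prod")

	pointAttrs := pdata.NewAttributeMap()
	pointAttrs.InsertString("env", "staging") // collides with the resource key

	// Same pattern as joinAttributeMaps(from=resourceAttrs, to=pointAttrs).
	resourceAttrs.Range(func(k string, v pdata.AttributeValue) bool {
		pointAttrs.Upsert(k, v)
		return true
	})

	pointAttrs.Range(func(k string, v pdata.AttributeValue) bool {
		fmt.Println(k, "=", v.StringVal())
		return true
	})
	// Prints env = prod and service.name = checkout: the resource value won.
}
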
44 changes: 22 additions & 22 deletions exporter/exporterhelper/resource_to_label_test.go
@@ -21,52 +21,52 @@ import (
"go.opentelemetry.io/collector/internal/testdata"
)

func TestConvertResourceToLabels(t *testing.T) {
func TestConvertResourceToAttributes(t *testing.T) {
md := testdata.GenerateMetricsOneMetric()
assert.NotNil(t, md)

// Before converting resource to labels
assert.Equal(t, 1, md.ResourceMetrics().At(0).Resource().Attributes().Len())
assert.Equal(t, 1, md.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics().At(0).Sum().DataPoints().At(0).LabelsMap().Len())
assert.Equal(t, 1, md.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics().At(0).Sum().DataPoints().At(0).Attributes().Len())

cloneMd := convertResourceToLabels(md)
cloneMd := convertResourceToAttributes(md)

// After converting resource to labels
assert.Equal(t, 1, cloneMd.ResourceMetrics().At(0).Resource().Attributes().Len())
assert.Equal(t, 2, cloneMd.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics().At(0).Sum().DataPoints().At(0).LabelsMap().Len())
assert.Equal(t, 2, cloneMd.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics().At(0).Sum().DataPoints().At(0).Attributes().Len())

assert.Equal(t, 1, md.ResourceMetrics().At(0).Resource().Attributes().Len())
assert.Equal(t, 1, md.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics().At(0).Sum().DataPoints().At(0).LabelsMap().Len())
assert.Equal(t, 1, md.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics().At(0).Sum().DataPoints().At(0).Attributes().Len())

}

func TestConvertResourceToLabelsAllDataTypesEmptyDataPoint(t *testing.T) {
func TestConvertResourceToAttributesAllDataTypesEmptyDataPoint(t *testing.T) {
md := testdata.GenerateMetricsAllTypesEmptyDataPoint()
assert.NotNil(t, md)

// Before converting resource to labels
assert.Equal(t, 1, md.ResourceMetrics().At(0).Resource().Attributes().Len())
assert.Equal(t, 0, md.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics().At(0).Gauge().DataPoints().At(0).LabelsMap().Len())
assert.Equal(t, 0, md.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics().At(1).Gauge().DataPoints().At(0).LabelsMap().Len())
assert.Equal(t, 0, md.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics().At(2).Sum().DataPoints().At(0).LabelsMap().Len())
assert.Equal(t, 0, md.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics().At(3).Sum().DataPoints().At(0).LabelsMap().Len())
assert.Equal(t, 0, md.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics().At(4).Histogram().DataPoints().At(0).LabelsMap().Len())
assert.Equal(t, 0, md.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics().At(0).Gauge().DataPoints().At(0).Attributes().Len())
assert.Equal(t, 0, md.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics().At(1).Gauge().DataPoints().At(0).Attributes().Len())
assert.Equal(t, 0, md.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics().At(2).Sum().DataPoints().At(0).Attributes().Len())
assert.Equal(t, 0, md.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics().At(3).Sum().DataPoints().At(0).Attributes().Len())
assert.Equal(t, 0, md.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics().At(4).Histogram().DataPoints().At(0).Attributes().Len())

cloneMd := convertResourceToLabels(md)
cloneMd := convertResourceToAttributes(md)

// After converting resource to labels
assert.Equal(t, 1, cloneMd.ResourceMetrics().At(0).Resource().Attributes().Len())
assert.Equal(t, 1, cloneMd.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics().At(0).Gauge().DataPoints().At(0).LabelsMap().Len())
assert.Equal(t, 1, cloneMd.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics().At(1).Gauge().DataPoints().At(0).LabelsMap().Len())
assert.Equal(t, 1, cloneMd.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics().At(2).Sum().DataPoints().At(0).LabelsMap().Len())
assert.Equal(t, 1, cloneMd.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics().At(3).Sum().DataPoints().At(0).LabelsMap().Len())
assert.Equal(t, 1, cloneMd.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics().At(4).Histogram().DataPoints().At(0).LabelsMap().Len())
assert.Equal(t, 1, cloneMd.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics().At(0).Gauge().DataPoints().At(0).Attributes().Len())
assert.Equal(t, 1, cloneMd.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics().At(1).Gauge().DataPoints().At(0).Attributes().Len())
assert.Equal(t, 1, cloneMd.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics().At(2).Sum().DataPoints().At(0).Attributes().Len())
assert.Equal(t, 1, cloneMd.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics().At(3).Sum().DataPoints().At(0).Attributes().Len())
assert.Equal(t, 1, cloneMd.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics().At(4).Histogram().DataPoints().At(0).Attributes().Len())

assert.Equal(t, 1, md.ResourceMetrics().At(0).Resource().Attributes().Len())
assert.Equal(t, 0, md.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics().At(0).Gauge().DataPoints().At(0).LabelsMap().Len())
assert.Equal(t, 0, md.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics().At(1).Gauge().DataPoints().At(0).LabelsMap().Len())
assert.Equal(t, 0, md.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics().At(2).Sum().DataPoints().At(0).LabelsMap().Len())
assert.Equal(t, 0, md.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics().At(3).Sum().DataPoints().At(0).LabelsMap().Len())
assert.Equal(t, 0, md.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics().At(4).Histogram().DataPoints().At(0).LabelsMap().Len())
assert.Equal(t, 0, md.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics().At(0).Gauge().DataPoints().At(0).Attributes().Len())
assert.Equal(t, 0, md.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics().At(1).Gauge().DataPoints().At(0).Attributes().Len())
assert.Equal(t, 0, md.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics().At(2).Sum().DataPoints().At(0).Attributes().Len())
assert.Equal(t, 0, md.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics().At(3).Sum().DataPoints().At(0).Attributes().Len())
assert.Equal(t, 0, md.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics().At(4).Histogram().DataPoints().At(0).Attributes().Len())

}
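
The counts asserted above are worth spelling out: GenerateMetricsOneMetric produces one resource attribute and one data-point attribute, and the conversion merges the former into the latter, so the clone ends up with two while the original input stays untouched (it is cloned first). A pared-down test in the same spirit; it would have to live in this package, since both convertResourceToAttributes and the internal testdata package are not importable from outside, and assert is assumed to be the testify package already used above:

package exporterhelper

import (
	"testing"

	"github.com/stretchr/testify/assert"

	"go.opentelemetry.io/collector/internal/testdata"
)

func TestResourceAttributeMergedIntoDataPoint(t *testing.T) {
	md := testdata.GenerateMetricsOneMetric()
	dp := md.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics().At(0).Sum().DataPoints().At(0)
	assert.Equal(t, 1, dp.Attributes().Len()) // one attribute of its own

	cloneMd := convertResourceToAttributes(md)
	cloneDP := cloneMd.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics().At(0).Sum().DataPoints().At(0)
	assert.Equal(t, 2, cloneDP.Attributes().Len()) // plus the single resource attribute

	// The input was cloned before conversion, so the original is unchanged.
	assert.Equal(t, 1, dp.Attributes().Len())
}
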
14 changes: 7 additions & 7 deletions exporter/prometheusexporter/accumulator.go
@@ -105,7 +105,7 @@ func (a *lastValueAccumulator) accumulateSummary(metric pdata.Metric, il pdata.I
for i := 0; i < dps.Len(); i++ {
ip := dps.At(i)

signature := timeseriesSignature(il.Name(), metric, ip.LabelsMap())
signature := timeseriesSignature(il.Name(), metric, ip.Attributes())

v, ok := a.registeredMetrics.Load(signature)
stalePoint := ok &&
@@ -130,7 +130,7 @@ func (a *lastValueAccumulator) accumulateGauge(metric pdata.Metric, il pdata.Ins
for i := 0; i < dps.Len(); i++ {
ip := dps.At(i)

signature := timeseriesSignature(il.Name(), metric, ip.LabelsMap())
signature := timeseriesSignature(il.Name(), metric, ip.Attributes())

v, ok := a.registeredMetrics.Load(signature)
if !ok {
@@ -167,7 +167,7 @@ func (a *lastValueAccumulator) accumulateSum(metric pdata.Metric, il pdata.Instr
for i := 0; i < dps.Len(); i++ {
ip := dps.At(i)

signature := timeseriesSignature(il.Name(), metric, ip.LabelsMap())
signature := timeseriesSignature(il.Name(), metric, ip.Attributes())

v, ok := a.registeredMetrics.Load(signature)
if !ok {
@@ -208,7 +208,7 @@ func (a *lastValueAccumulator) accumulateDoubleHistogram(metric pdata.Metric, il
for i := 0; i < dps.Len(); i++ {
ip := dps.At(i)

signature := timeseriesSignature(il.Name(), metric, ip.LabelsMap())
signature := timeseriesSignature(il.Name(), metric, ip.Attributes())

v, ok := a.registeredMetrics.Load(signature)
if !ok {
@@ -256,13 +256,13 @@ func (a *lastValueAccumulator) Collect() []pdata.Metric {
return res
}

func timeseriesSignature(ilmName string, metric pdata.Metric, labels pdata.StringMap) string {
func timeseriesSignature(ilmName string, metric pdata.Metric, attributes pdata.AttributeMap) string {
var b strings.Builder
b.WriteString(metric.DataType().String())
b.WriteString("*" + ilmName)
b.WriteString("*" + metric.Name())
labels.Sort().Range(func(k string, v string) bool {
b.WriteString("*" + k + "*" + v)
attributes.Sort().Range(func(k string, v pdata.AttributeValue) bool {
b.WriteString("*" + k + "*" + v.StringVal())
return true
})
return b.String()
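
With attributes in place of labels, the time-series signature is still a "*"-joined string: data type, instrumentation library name, metric name, then each sorted attribute key and value, with values rendered via StringVal(). A small sketch of the attribute portion; the leading "Sum", "mylib" and "http_requests" literals stand in for the values the real function takes from the metric and library, and the attribute set is hypothetical:

package main

import (
	"fmt"
	"strings"

	"go.opentelemetry.io/collector/model/pdata"
)

func main() {
	attrs := pdata.NewAttributeMap()
	attrs.InsertString("method", "GET")
	attrs.InsertString("code", "200")

	var b strings.Builder
	// timeseriesSignature first writes metric.DataType().String(), "*"+ilmName
	// and "*"+metric.Name(); literals stand in for those here.
	b.WriteString("Sum")
	b.WriteString("*" + "mylib")
	b.WriteString("*" + "http_requests")
	attrs.Sort().Range(func(k string, v pdata.AttributeValue) bool {
		b.WriteString("*" + k + "*" + v.StringVal())
		return true
	})
	fmt.Println(b.String()) // Sum*mylib*http_requests*code*200*method*GET
}

If StringVal() behaves here as for other non-string pdata values (returning an empty string), non-string attributes would contribute only their key to the signature.
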
52 changes: 26 additions & 26 deletions exporter/prometheusexporter/accumulator_test.go
@@ -46,8 +46,8 @@ func TestAccumulateDeltaAggregation(t *testing.T) {
metric.Sum().SetAggregationTemporality(pdata.AggregationTemporalityDelta)
dp := metric.Sum().DataPoints().AppendEmpty()
dp.SetIntVal(42)
dp.LabelsMap().Insert("label_1", "1")
dp.LabelsMap().Insert("label_2", "2")
dp.Attributes().InsertString("label_1", "1")
dp.Attributes().InsertString("label_2", "2")
dp.SetTimestamp(pdata.TimestampFromTime(ts))
},
},
@@ -59,8 +59,8 @@ func TestAccumulateDeltaAggregation(t *testing.T) {
metric.Sum().SetAggregationTemporality(pdata.AggregationTemporalityDelta)
dp := metric.Sum().DataPoints().AppendEmpty()
dp.SetDoubleVal(42.42)
dp.LabelsMap().Insert("label_1", "1")
dp.LabelsMap().Insert("label_2", "2")
dp.Attributes().InsertString("label_1", "1")
dp.Attributes().InsertString("label_2", "2")
dp.SetTimestamp(pdata.TimestampFromTime(ts))
},
},
@@ -76,8 +76,8 @@ func TestAccumulateDeltaAggregation(t *testing.T) {
dp.SetCount(7)
dp.SetExplicitBounds([]float64{3.5, 10.0})
dp.SetSum(42.42)
dp.LabelsMap().Insert("label_1", "1")
dp.LabelsMap().Insert("label_2", "2")
dp.Attributes().InsertString("label_1", "1")
dp.Attributes().InsertString("label_2", "2")
dp.SetTimestamp(pdata.TimestampFromTime(ts))
},
},
@@ -94,7 +94,7 @@ func TestAccumulateDeltaAggregation(t *testing.T) {
n := a.Accumulate(resourceMetrics)
require.Equal(t, 0, n)

signature := timeseriesSignature(ilm.InstrumentationLibrary().Name(), ilm.Metrics().At(0), pdata.NewStringMap())
signature := timeseriesSignature(ilm.InstrumentationLibrary().Name(), ilm.Metrics().At(0), pdata.NewAttributeMap())
v, ok := a.registeredMetrics.Load(signature)
require.False(t, ok)
require.Nil(t, v)
@@ -116,8 +116,8 @@ func TestAccumulateMetrics(t *testing.T) {
metric.SetDescription("test description")
dp := metric.Gauge().DataPoints().AppendEmpty()
dp.SetIntVal(int64(v))
dp.LabelsMap().Insert("label_1", "1")
dp.LabelsMap().Insert("label_2", "2")
dp.Attributes().InsertString("label_1", "1")
dp.Attributes().InsertString("label_2", "2")
dp.SetTimestamp(pdata.TimestampFromTime(ts))
},
},
@@ -130,8 +130,8 @@ func TestAccumulateMetrics(t *testing.T) {
metric.SetDescription("test description")
dp := metric.Gauge().DataPoints().AppendEmpty()
dp.SetDoubleVal(v)
dp.LabelsMap().Insert("label_1", "1")
dp.LabelsMap().Insert("label_2", "2")
dp.Attributes().InsertString("label_1", "1")
dp.Attributes().InsertString("label_2", "2")
dp.SetTimestamp(pdata.TimestampFromTime(ts))
},
},
@@ -146,8 +146,8 @@ func TestAccumulateMetrics(t *testing.T) {
metric.Sum().SetAggregationTemporality(pdata.AggregationTemporalityCumulative)
dp := metric.Sum().DataPoints().AppendEmpty()
dp.SetIntVal(int64(v))
dp.LabelsMap().Insert("label_1", "1")
dp.LabelsMap().Insert("label_2", "2")
dp.Attributes().InsertString("label_1", "1")
dp.Attributes().InsertString("label_2", "2")
dp.SetTimestamp(pdata.TimestampFromTime(ts))
},
},
@@ -162,8 +162,8 @@ func TestAccumulateMetrics(t *testing.T) {
metric.Sum().SetAggregationTemporality(pdata.AggregationTemporalityCumulative)
dp := metric.Sum().DataPoints().AppendEmpty()
dp.SetDoubleVal(v)
dp.LabelsMap().Insert("label_1", "1")
dp.LabelsMap().Insert("label_2", "2")
dp.Attributes().InsertString("label_1", "1")
dp.Attributes().InsertString("label_2", "2")
dp.SetTimestamp(pdata.TimestampFromTime(ts))
},
},
@@ -178,8 +178,8 @@ func TestAccumulateMetrics(t *testing.T) {
metric.Sum().SetAggregationTemporality(pdata.AggregationTemporalityCumulative)
dp := metric.Sum().DataPoints().AppendEmpty()
dp.SetIntVal(int64(v))
dp.LabelsMap().Insert("label_1", "1")
dp.LabelsMap().Insert("label_2", "2")
dp.Attributes().InsertString("label_1", "1")
dp.Attributes().InsertString("label_2", "2")
dp.SetTimestamp(pdata.TimestampFromTime(ts))
},
},
@@ -194,8 +194,8 @@ func TestAccumulateMetrics(t *testing.T) {
metric.SetDescription("test description")
dp := metric.Sum().DataPoints().AppendEmpty()
dp.SetDoubleVal(v)
dp.LabelsMap().Insert("label_1", "1")
dp.LabelsMap().Insert("label_2", "2")
dp.Attributes().InsertString("label_1", "1")
dp.Attributes().InsertString("label_2", "2")
dp.SetTimestamp(pdata.TimestampFromTime(ts))
},
},
@@ -212,8 +212,8 @@ func TestAccumulateMetrics(t *testing.T) {
dp.SetCount(7)
dp.SetExplicitBounds([]float64{3.5, 10.0})
dp.SetSum(v)
dp.LabelsMap().Insert("label_1", "1")
dp.LabelsMap().Insert("label_2", "2")
dp.Attributes().InsertString("label_1", "1")
dp.Attributes().InsertString("label_2", "2")
dp.SetTimestamp(pdata.TimestampFromTime(ts))
},
},
@@ -249,7 +249,7 @@ func TestAccumulateMetrics(t *testing.T) {

require.Equal(t, v.instrumentationLibrary.Name(), "test")
require.Equal(t, v.value.DataType(), ilm2.Metrics().At(0).DataType())
vLabels.Range(func(k, v string) bool {
vLabels.Range(func(k string, v pdata.AttributeValue) bool {
r, _ := m2Labels.Get(k)
require.Equal(t, r, v)
return true
@@ -286,15 +286,15 @@ }
}

func getMetricProperties(metric pdata.Metric) (
labels pdata.StringMap,
attributes pdata.AttributeMap,
ts time.Time,
value float64,
temporality pdata.AggregationTemporality,
isMonotonic bool,
) {
switch metric.DataType() {
case pdata.MetricDataTypeGauge:
labels = metric.Gauge().DataPoints().At(0).LabelsMap()
attributes = metric.Gauge().DataPoints().At(0).Attributes()
ts = metric.Gauge().DataPoints().At(0).Timestamp().AsTime()
dp := metric.Gauge().DataPoints().At(0)
switch dp.Type() {
@@ -306,7 +306,7 @@ func getMetricProperties(metric pdata.Metric) (
temporality = pdata.AggregationTemporalityUnspecified
isMonotonic = false
case pdata.MetricDataTypeSum:
labels = metric.Sum().DataPoints().At(0).LabelsMap()
attributes = metric.Sum().DataPoints().At(0).Attributes()
ts = metric.Sum().DataPoints().At(0).Timestamp().AsTime()
dp := metric.Sum().DataPoints().At(0)
switch dp.Type() {
@@ -318,7 +318,7 @@ func getMetricProperties(metric pdata.Metric) (
temporality = metric.Sum().AggregationTemporality()
isMonotonic = metric.Sum().IsMonotonic()
case pdata.MetricDataTypeHistogram:
labels = metric.Histogram().DataPoints().At(0).LabelsMap()
attributes = metric.Histogram().DataPoints().At(0).Attributes()
ts = metric.Histogram().DataPoints().At(0).Timestamp().AsTime()
value = metric.Histogram().DataPoints().At(0).Sum()
temporality = metric.Histogram().AggregationTemporality()
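
The test updates above are mechanical: the call-site migration is from dp.LabelsMap().Insert(k, v) to dp.Attributes().InsertString(k, v), and nothing else about building a data point changes. A sketch of the migrated setup for a gauge point; fillGaugePoint is a hypothetical helper, and metric is assumed to already carry the Gauge data type, as in the tests:

package sketch // hypothetical package name

import (
	"time"

	"go.opentelemetry.io/collector/model/pdata"
)

// fillGaugePoint mirrors the post-migration test setup: values and timestamps
// are set as before, only the label calls become attribute calls.
func fillGaugePoint(metric pdata.Metric) {
	dp := metric.Gauge().DataPoints().AppendEmpty()
	dp.SetIntVal(42)
	dp.Attributes().InsertString("label_1", "1") // was dp.LabelsMap().Insert("label_1", "1")
	dp.Attributes().InsertString("label_2", "2") // was dp.LabelsMap().Insert("label_2", "2")
	dp.SetTimestamp(pdata.TimestampFromTime(time.Now()))
}
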