From 632ed32b5f8515da8da377d8ae34215b30301c24 Mon Sep 17 00:00:00 2001
From: alrex
Date: Wed, 11 Aug 2021 10:17:12 -0700
Subject: [PATCH] update oc internal, prometheus exporters to use attributes
 (#3770)

* update oc internal, prometheus exporters to use attributes
* rename addAttributesToDoubleHistogramDataPoints
* cleaned up TODOs, mentioned the code in the issue instead
---
 exporter/exporterhelper/metrics.go             |  2 +-
 exporter/exporterhelper/resource_to_label.go   | 45 +++++-----------
 .../exporterhelper/resource_to_label_test.go   | 44 ++++++++--------
 exporter/prometheusexporter/accumulator.go     | 14 ++---
 .../prometheusexporter/accumulator_test.go     | 52 +++++++++----------
 exporter/prometheusexporter/collector.go       | 26 +++++-----
 exporter/prometheusexporter/collector_test.go  | 36 ++++++-------
 .../prometheusexporter/prometheus_test.go      |  8 +--
 .../prometheusremotewriteexporter/helper.go    | 24 ++++-----
 .../helper_test.go                             |  6 +--
 .../testutil_test.go                           | 44 ++++++++--------
 internal/testdata/common.go                    | 20 +++----
 internal/testdata/metric.go                    | 30 +++++------
 translator/internaldata/metrics_to_oc.go       | 22 ++++----
 translator/internaldata/metrics_to_oc_test.go  |  2 +-
 translator/internaldata/oc_to_metrics.go       | 14 ++---
 translator/internaldata/oc_to_metrics_test.go  |  2 +-
 17 files changed, 187 insertions(+), 204 deletions(-)

diff --git a/exporter/exporterhelper/metrics.go b/exporter/exporterhelper/metrics.go
index 5b5b38cd2ea..bbe8174fba7 100644
--- a/exporter/exporterhelper/metrics.go
+++ b/exporter/exporterhelper/metrics.go
@@ -91,7 +91,7 @@ func NewMetricsExporter(
 
 	mc, err := consumerhelper.NewMetrics(func(ctx context.Context, md pdata.Metrics) error {
 		if bs.ResourceToTelemetrySettings.Enabled {
-			md = convertResourceToLabels(md)
+			md = convertResourceToAttributes(md)
 		}
 		req := newMetricsRequest(ctx, md, pusher)
 		err := be.sender.send(req)
diff --git a/exporter/exporterhelper/resource_to_label.go b/exporter/exporterhelper/resource_to_label.go
index d2457c1105e..ec7a9a1d8d0 100644
--- a/exporter/exporterhelper/resource_to_label.go
+++ b/exporter/exporterhelper/resource_to_label.go
@@ -16,7 +16,6 @@ package exporterhelper
 
 import (
 	"go.opentelemetry.io/collector/model/pdata"
-	tracetranslator "go.opentelemetry.io/collector/translator/trace"
 )
 
 // ResourceToTelemetrySettings defines configuration for converting resource attributes to metric labels.
@@ -32,68 +31,52 @@ func defaultResourceToTelemetrySettings() ResourceToTelemetrySettings {
 	}
 }
 
-// convertResourceToLabels converts all resource attributes to metric labels
-func convertResourceToLabels(md pdata.Metrics) pdata.Metrics {
+// convertResourceToAttributes converts all resource attributes to metric labels
+func convertResourceToAttributes(md pdata.Metrics) pdata.Metrics {
 	cloneMd := md.Clone()
 	rms := cloneMd.ResourceMetrics()
 	for i := 0; i < rms.Len(); i++ {
 		resource := rms.At(i).Resource()
-		labelMap := extractLabelsFromResource(&resource)
-
 		ilms := rms.At(i).InstrumentationLibraryMetrics()
 		for j := 0; j < ilms.Len(); j++ {
 			ilm := ilms.At(j)
 			metricSlice := ilm.Metrics()
 			for k := 0; k < metricSlice.Len(); k++ {
 				metric := metricSlice.At(k)
-				addLabelsToMetric(&metric, labelMap)
+				addAttributesToMetric(&metric, resource.Attributes())
 			}
 		}
 	}
 	return cloneMd
 }
 
-// extractAttributesFromResource extracts the attributes from a given resource and
-// returns them as a StringMap.
-func extractLabelsFromResource(resource *pdata.Resource) pdata.StringMap { - labelMap := pdata.NewStringMap() - - attrMap := resource.Attributes() - attrMap.Range(func(k string, av pdata.AttributeValue) bool { - stringLabel := tracetranslator.AttributeValueToString(av) - labelMap.Upsert(k, stringLabel) - return true - }) - return labelMap -} - -// addLabelsToMetric adds additional labels to the given metric -func addLabelsToMetric(metric *pdata.Metric, labelMap pdata.StringMap) { +// addAttributesToMetric adds additional labels to the given metric +func addAttributesToMetric(metric *pdata.Metric, labelMap pdata.AttributeMap) { switch metric.DataType() { case pdata.MetricDataTypeGauge: - addLabelsToNumberDataPoints(metric.Gauge().DataPoints(), labelMap) + addAttributesToNumberDataPoints(metric.Gauge().DataPoints(), labelMap) case pdata.MetricDataTypeSum: - addLabelsToNumberDataPoints(metric.Sum().DataPoints(), labelMap) + addAttributesToNumberDataPoints(metric.Sum().DataPoints(), labelMap) case pdata.MetricDataTypeHistogram: - addLabelsToDoubleHistogramDataPoints(metric.Histogram().DataPoints(), labelMap) + addAttributesToHistogramDataPoints(metric.Histogram().DataPoints(), labelMap) } } -func addLabelsToNumberDataPoints(ps pdata.NumberDataPointSlice, newLabelMap pdata.StringMap) { +func addAttributesToNumberDataPoints(ps pdata.NumberDataPointSlice, newAttributeMap pdata.AttributeMap) { for i := 0; i < ps.Len(); i++ { - joinStringMaps(newLabelMap, ps.At(i).LabelsMap()) + joinAttributeMaps(newAttributeMap, ps.At(i).Attributes()) } } -func addLabelsToDoubleHistogramDataPoints(ps pdata.HistogramDataPointSlice, newLabelMap pdata.StringMap) { +func addAttributesToHistogramDataPoints(ps pdata.HistogramDataPointSlice, newAttributeMap pdata.AttributeMap) { for i := 0; i < ps.Len(); i++ { - joinStringMaps(newLabelMap, ps.At(i).LabelsMap()) + joinAttributeMaps(newAttributeMap, ps.At(i).Attributes()) } } -func joinStringMaps(from, to pdata.StringMap) { - from.Range(func(k, v string) bool { +func joinAttributeMaps(from, to pdata.AttributeMap) { + from.Range(func(k string, v pdata.AttributeValue) bool { to.Upsert(k, v) return true }) diff --git a/exporter/exporterhelper/resource_to_label_test.go b/exporter/exporterhelper/resource_to_label_test.go index 00036568a02..540fcd8040d 100644 --- a/exporter/exporterhelper/resource_to_label_test.go +++ b/exporter/exporterhelper/resource_to_label_test.go @@ -21,52 +21,52 @@ import ( "go.opentelemetry.io/collector/internal/testdata" ) -func TestConvertResourceToLabels(t *testing.T) { +func TestConvertResourceToAttributes(t *testing.T) { md := testdata.GenerateMetricsOneMetric() assert.NotNil(t, md) // Before converting resource to labels assert.Equal(t, 1, md.ResourceMetrics().At(0).Resource().Attributes().Len()) - assert.Equal(t, 1, md.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics().At(0).Sum().DataPoints().At(0).LabelsMap().Len()) + assert.Equal(t, 1, md.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics().At(0).Sum().DataPoints().At(0).Attributes().Len()) - cloneMd := convertResourceToLabels(md) + cloneMd := convertResourceToAttributes(md) // After converting resource to labels assert.Equal(t, 1, cloneMd.ResourceMetrics().At(0).Resource().Attributes().Len()) - assert.Equal(t, 2, cloneMd.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics().At(0).Sum().DataPoints().At(0).LabelsMap().Len()) + assert.Equal(t, 2, 
cloneMd.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics().At(0).Sum().DataPoints().At(0).Attributes().Len()) assert.Equal(t, 1, md.ResourceMetrics().At(0).Resource().Attributes().Len()) - assert.Equal(t, 1, md.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics().At(0).Sum().DataPoints().At(0).LabelsMap().Len()) + assert.Equal(t, 1, md.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics().At(0).Sum().DataPoints().At(0).Attributes().Len()) } -func TestConvertResourceToLabelsAllDataTypesEmptyDataPoint(t *testing.T) { +func TestConvertResourceToAttributesAllDataTypesEmptyDataPoint(t *testing.T) { md := testdata.GenerateMetricsAllTypesEmptyDataPoint() assert.NotNil(t, md) // Before converting resource to labels assert.Equal(t, 1, md.ResourceMetrics().At(0).Resource().Attributes().Len()) - assert.Equal(t, 0, md.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics().At(0).Gauge().DataPoints().At(0).LabelsMap().Len()) - assert.Equal(t, 0, md.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics().At(1).Gauge().DataPoints().At(0).LabelsMap().Len()) - assert.Equal(t, 0, md.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics().At(2).Sum().DataPoints().At(0).LabelsMap().Len()) - assert.Equal(t, 0, md.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics().At(3).Sum().DataPoints().At(0).LabelsMap().Len()) - assert.Equal(t, 0, md.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics().At(4).Histogram().DataPoints().At(0).LabelsMap().Len()) + assert.Equal(t, 0, md.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics().At(0).Gauge().DataPoints().At(0).Attributes().Len()) + assert.Equal(t, 0, md.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics().At(1).Gauge().DataPoints().At(0).Attributes().Len()) + assert.Equal(t, 0, md.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics().At(2).Sum().DataPoints().At(0).Attributes().Len()) + assert.Equal(t, 0, md.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics().At(3).Sum().DataPoints().At(0).Attributes().Len()) + assert.Equal(t, 0, md.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics().At(4).Histogram().DataPoints().At(0).Attributes().Len()) - cloneMd := convertResourceToLabels(md) + cloneMd := convertResourceToAttributes(md) // After converting resource to labels assert.Equal(t, 1, cloneMd.ResourceMetrics().At(0).Resource().Attributes().Len()) - assert.Equal(t, 1, cloneMd.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics().At(0).Gauge().DataPoints().At(0).LabelsMap().Len()) - assert.Equal(t, 1, cloneMd.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics().At(1).Gauge().DataPoints().At(0).LabelsMap().Len()) - assert.Equal(t, 1, cloneMd.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics().At(2).Sum().DataPoints().At(0).LabelsMap().Len()) - assert.Equal(t, 1, cloneMd.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics().At(3).Sum().DataPoints().At(0).LabelsMap().Len()) - assert.Equal(t, 1, cloneMd.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics().At(4).Histogram().DataPoints().At(0).LabelsMap().Len()) + assert.Equal(t, 1, cloneMd.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics().At(0).Gauge().DataPoints().At(0).Attributes().Len()) + assert.Equal(t, 1, 
cloneMd.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics().At(1).Gauge().DataPoints().At(0).Attributes().Len()) + assert.Equal(t, 1, cloneMd.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics().At(2).Sum().DataPoints().At(0).Attributes().Len()) + assert.Equal(t, 1, cloneMd.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics().At(3).Sum().DataPoints().At(0).Attributes().Len()) + assert.Equal(t, 1, cloneMd.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics().At(4).Histogram().DataPoints().At(0).Attributes().Len()) assert.Equal(t, 1, md.ResourceMetrics().At(0).Resource().Attributes().Len()) - assert.Equal(t, 0, md.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics().At(0).Gauge().DataPoints().At(0).LabelsMap().Len()) - assert.Equal(t, 0, md.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics().At(1).Gauge().DataPoints().At(0).LabelsMap().Len()) - assert.Equal(t, 0, md.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics().At(2).Sum().DataPoints().At(0).LabelsMap().Len()) - assert.Equal(t, 0, md.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics().At(3).Sum().DataPoints().At(0).LabelsMap().Len()) - assert.Equal(t, 0, md.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics().At(4).Histogram().DataPoints().At(0).LabelsMap().Len()) + assert.Equal(t, 0, md.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics().At(0).Gauge().DataPoints().At(0).Attributes().Len()) + assert.Equal(t, 0, md.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics().At(1).Gauge().DataPoints().At(0).Attributes().Len()) + assert.Equal(t, 0, md.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics().At(2).Sum().DataPoints().At(0).Attributes().Len()) + assert.Equal(t, 0, md.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics().At(3).Sum().DataPoints().At(0).Attributes().Len()) + assert.Equal(t, 0, md.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics().At(4).Histogram().DataPoints().At(0).Attributes().Len()) } diff --git a/exporter/prometheusexporter/accumulator.go b/exporter/prometheusexporter/accumulator.go index 4916ced4024..ecaa7da6c4b 100644 --- a/exporter/prometheusexporter/accumulator.go +++ b/exporter/prometheusexporter/accumulator.go @@ -105,7 +105,7 @@ func (a *lastValueAccumulator) accumulateSummary(metric pdata.Metric, il pdata.I for i := 0; i < dps.Len(); i++ { ip := dps.At(i) - signature := timeseriesSignature(il.Name(), metric, ip.LabelsMap()) + signature := timeseriesSignature(il.Name(), metric, ip.Attributes()) v, ok := a.registeredMetrics.Load(signature) stalePoint := ok && @@ -130,7 +130,7 @@ func (a *lastValueAccumulator) accumulateGauge(metric pdata.Metric, il pdata.Ins for i := 0; i < dps.Len(); i++ { ip := dps.At(i) - signature := timeseriesSignature(il.Name(), metric, ip.LabelsMap()) + signature := timeseriesSignature(il.Name(), metric, ip.Attributes()) v, ok := a.registeredMetrics.Load(signature) if !ok { @@ -167,7 +167,7 @@ func (a *lastValueAccumulator) accumulateSum(metric pdata.Metric, il pdata.Instr for i := 0; i < dps.Len(); i++ { ip := dps.At(i) - signature := timeseriesSignature(il.Name(), metric, ip.LabelsMap()) + signature := timeseriesSignature(il.Name(), metric, ip.Attributes()) v, ok := a.registeredMetrics.Load(signature) if !ok { @@ -208,7 +208,7 @@ func (a *lastValueAccumulator) accumulateDoubleHistogram(metric pdata.Metric, il for i := 0; 
i < dps.Len(); i++ { ip := dps.At(i) - signature := timeseriesSignature(il.Name(), metric, ip.LabelsMap()) + signature := timeseriesSignature(il.Name(), metric, ip.Attributes()) v, ok := a.registeredMetrics.Load(signature) if !ok { @@ -256,13 +256,13 @@ func (a *lastValueAccumulator) Collect() []pdata.Metric { return res } -func timeseriesSignature(ilmName string, metric pdata.Metric, labels pdata.StringMap) string { +func timeseriesSignature(ilmName string, metric pdata.Metric, attributes pdata.AttributeMap) string { var b strings.Builder b.WriteString(metric.DataType().String()) b.WriteString("*" + ilmName) b.WriteString("*" + metric.Name()) - labels.Sort().Range(func(k string, v string) bool { - b.WriteString("*" + k + "*" + v) + attributes.Sort().Range(func(k string, v pdata.AttributeValue) bool { + b.WriteString("*" + k + "*" + v.StringVal()) return true }) return b.String() diff --git a/exporter/prometheusexporter/accumulator_test.go b/exporter/prometheusexporter/accumulator_test.go index 0f8ffae399c..442fcae71f9 100644 --- a/exporter/prometheusexporter/accumulator_test.go +++ b/exporter/prometheusexporter/accumulator_test.go @@ -46,8 +46,8 @@ func TestAccumulateDeltaAggregation(t *testing.T) { metric.Sum().SetAggregationTemporality(pdata.AggregationTemporalityDelta) dp := metric.Sum().DataPoints().AppendEmpty() dp.SetIntVal(42) - dp.LabelsMap().Insert("label_1", "1") - dp.LabelsMap().Insert("label_2", "2") + dp.Attributes().InsertString("label_1", "1") + dp.Attributes().InsertString("label_2", "2") dp.SetTimestamp(pdata.TimestampFromTime(ts)) }, }, @@ -59,8 +59,8 @@ func TestAccumulateDeltaAggregation(t *testing.T) { metric.Sum().SetAggregationTemporality(pdata.AggregationTemporalityDelta) dp := metric.Sum().DataPoints().AppendEmpty() dp.SetDoubleVal(42.42) - dp.LabelsMap().Insert("label_1", "1") - dp.LabelsMap().Insert("label_2", "2") + dp.Attributes().InsertString("label_1", "1") + dp.Attributes().InsertString("label_2", "2") dp.SetTimestamp(pdata.TimestampFromTime(ts)) }, }, @@ -76,8 +76,8 @@ func TestAccumulateDeltaAggregation(t *testing.T) { dp.SetCount(7) dp.SetExplicitBounds([]float64{3.5, 10.0}) dp.SetSum(42.42) - dp.LabelsMap().Insert("label_1", "1") - dp.LabelsMap().Insert("label_2", "2") + dp.Attributes().InsertString("label_1", "1") + dp.Attributes().InsertString("label_2", "2") dp.SetTimestamp(pdata.TimestampFromTime(ts)) }, }, @@ -94,7 +94,7 @@ func TestAccumulateDeltaAggregation(t *testing.T) { n := a.Accumulate(resourceMetrics) require.Equal(t, 0, n) - signature := timeseriesSignature(ilm.InstrumentationLibrary().Name(), ilm.Metrics().At(0), pdata.NewStringMap()) + signature := timeseriesSignature(ilm.InstrumentationLibrary().Name(), ilm.Metrics().At(0), pdata.NewAttributeMap()) v, ok := a.registeredMetrics.Load(signature) require.False(t, ok) require.Nil(t, v) @@ -116,8 +116,8 @@ func TestAccumulateMetrics(t *testing.T) { metric.SetDescription("test description") dp := metric.Gauge().DataPoints().AppendEmpty() dp.SetIntVal(int64(v)) - dp.LabelsMap().Insert("label_1", "1") - dp.LabelsMap().Insert("label_2", "2") + dp.Attributes().InsertString("label_1", "1") + dp.Attributes().InsertString("label_2", "2") dp.SetTimestamp(pdata.TimestampFromTime(ts)) }, }, @@ -130,8 +130,8 @@ func TestAccumulateMetrics(t *testing.T) { metric.SetDescription("test description") dp := metric.Gauge().DataPoints().AppendEmpty() dp.SetDoubleVal(v) - dp.LabelsMap().Insert("label_1", "1") - dp.LabelsMap().Insert("label_2", "2") + dp.Attributes().InsertString("label_1", "1") + 
dp.Attributes().InsertString("label_2", "2") dp.SetTimestamp(pdata.TimestampFromTime(ts)) }, }, @@ -146,8 +146,8 @@ func TestAccumulateMetrics(t *testing.T) { metric.Sum().SetAggregationTemporality(pdata.AggregationTemporalityCumulative) dp := metric.Sum().DataPoints().AppendEmpty() dp.SetIntVal(int64(v)) - dp.LabelsMap().Insert("label_1", "1") - dp.LabelsMap().Insert("label_2", "2") + dp.Attributes().InsertString("label_1", "1") + dp.Attributes().InsertString("label_2", "2") dp.SetTimestamp(pdata.TimestampFromTime(ts)) }, }, @@ -162,8 +162,8 @@ func TestAccumulateMetrics(t *testing.T) { metric.Sum().SetAggregationTemporality(pdata.AggregationTemporalityCumulative) dp := metric.Sum().DataPoints().AppendEmpty() dp.SetDoubleVal(v) - dp.LabelsMap().Insert("label_1", "1") - dp.LabelsMap().Insert("label_2", "2") + dp.Attributes().InsertString("label_1", "1") + dp.Attributes().InsertString("label_2", "2") dp.SetTimestamp(pdata.TimestampFromTime(ts)) }, }, @@ -178,8 +178,8 @@ func TestAccumulateMetrics(t *testing.T) { metric.Sum().SetAggregationTemporality(pdata.AggregationTemporalityCumulative) dp := metric.Sum().DataPoints().AppendEmpty() dp.SetIntVal(int64(v)) - dp.LabelsMap().Insert("label_1", "1") - dp.LabelsMap().Insert("label_2", "2") + dp.Attributes().InsertString("label_1", "1") + dp.Attributes().InsertString("label_2", "2") dp.SetTimestamp(pdata.TimestampFromTime(ts)) }, }, @@ -194,8 +194,8 @@ func TestAccumulateMetrics(t *testing.T) { metric.SetDescription("test description") dp := metric.Sum().DataPoints().AppendEmpty() dp.SetDoubleVal(v) - dp.LabelsMap().Insert("label_1", "1") - dp.LabelsMap().Insert("label_2", "2") + dp.Attributes().InsertString("label_1", "1") + dp.Attributes().InsertString("label_2", "2") dp.SetTimestamp(pdata.TimestampFromTime(ts)) }, }, @@ -212,8 +212,8 @@ func TestAccumulateMetrics(t *testing.T) { dp.SetCount(7) dp.SetExplicitBounds([]float64{3.5, 10.0}) dp.SetSum(v) - dp.LabelsMap().Insert("label_1", "1") - dp.LabelsMap().Insert("label_2", "2") + dp.Attributes().InsertString("label_1", "1") + dp.Attributes().InsertString("label_2", "2") dp.SetTimestamp(pdata.TimestampFromTime(ts)) }, }, @@ -249,7 +249,7 @@ func TestAccumulateMetrics(t *testing.T) { require.Equal(t, v.instrumentationLibrary.Name(), "test") require.Equal(t, v.value.DataType(), ilm2.Metrics().At(0).DataType()) - vLabels.Range(func(k, v string) bool { + vLabels.Range(func(k string, v pdata.AttributeValue) bool { r, _ := m2Labels.Get(k) require.Equal(t, r, v) return true @@ -286,7 +286,7 @@ func TestAccumulateMetrics(t *testing.T) { } func getMetricProperties(metric pdata.Metric) ( - labels pdata.StringMap, + attributes pdata.AttributeMap, ts time.Time, value float64, temporality pdata.AggregationTemporality, @@ -294,7 +294,7 @@ func getMetricProperties(metric pdata.Metric) ( ) { switch metric.DataType() { case pdata.MetricDataTypeGauge: - labels = metric.Gauge().DataPoints().At(0).LabelsMap() + attributes = metric.Gauge().DataPoints().At(0).Attributes() ts = metric.Gauge().DataPoints().At(0).Timestamp().AsTime() dp := metric.Gauge().DataPoints().At(0) switch dp.Type() { @@ -306,7 +306,7 @@ func getMetricProperties(metric pdata.Metric) ( temporality = pdata.AggregationTemporalityUnspecified isMonotonic = false case pdata.MetricDataTypeSum: - labels = metric.Sum().DataPoints().At(0).LabelsMap() + attributes = metric.Sum().DataPoints().At(0).Attributes() ts = metric.Sum().DataPoints().At(0).Timestamp().AsTime() dp := metric.Sum().DataPoints().At(0) switch dp.Type() { @@ -318,7 +318,7 @@ func 
getMetricProperties(metric pdata.Metric) ( temporality = metric.Sum().AggregationTemporality() isMonotonic = metric.Sum().IsMonotonic() case pdata.MetricDataTypeHistogram: - labels = metric.Histogram().DataPoints().At(0).LabelsMap() + attributes = metric.Histogram().DataPoints().At(0).Attributes() ts = metric.Histogram().DataPoints().At(0).Timestamp().AsTime() value = metric.Histogram().DataPoints().At(0).Sum() temporality = metric.Histogram().AggregationTemporality() diff --git a/exporter/prometheusexporter/collector.go b/exporter/prometheusexporter/collector.go index dece04a585f..55fc06fa41c 100644 --- a/exporter/prometheusexporter/collector.go +++ b/exporter/prometheusexporter/collector.go @@ -78,13 +78,13 @@ func metricName(namespace string, metric pdata.Metric) string { return sanitize(metric.Name()) } -func (c *collector) getMetricMetadata(metric pdata.Metric, labels pdata.StringMap) (*prometheus.Desc, []string) { - keys := make([]string, 0, labels.Len()) - values := make([]string, 0, labels.Len()) +func (c *collector) getMetricMetadata(metric pdata.Metric, attributes pdata.AttributeMap) (*prometheus.Desc, []string) { + keys := make([]string, 0, attributes.Len()) + values := make([]string, 0, attributes.Len()) - labels.Range(func(k string, v string) bool { + attributes.Range(func(k string, v pdata.AttributeValue) bool { keys = append(keys, sanitize(k)) - values = append(values, v) + values = append(values, v.StringVal()) return true }) @@ -99,7 +99,7 @@ func (c *collector) getMetricMetadata(metric pdata.Metric, labels pdata.StringMa func (c *collector) convertGauge(metric pdata.Metric) (prometheus.Metric, error) { ip := metric.Gauge().DataPoints().At(0) - desc, labels := c.getMetricMetadata(metric, ip.LabelsMap()) + desc, attributes := c.getMetricMetadata(metric, ip.Attributes()) var value float64 switch ip.Type() { case pdata.MetricValueTypeInt: @@ -107,7 +107,7 @@ func (c *collector) convertGauge(metric pdata.Metric) (prometheus.Metric, error) case pdata.MetricValueTypeDouble: value = ip.DoubleVal() } - m, err := prometheus.NewConstMetric(desc, prometheus.GaugeValue, value, labels...) + m, err := prometheus.NewConstMetric(desc, prometheus.GaugeValue, value, attributes...) if err != nil { return nil, err } @@ -126,7 +126,7 @@ func (c *collector) convertSum(metric pdata.Metric) (prometheus.Metric, error) { metricType = prometheus.CounterValue } - desc, labels := c.getMetricMetadata(metric, ip.LabelsMap()) + desc, attributes := c.getMetricMetadata(metric, ip.Attributes()) var value float64 switch ip.Type() { case pdata.MetricValueTypeInt: @@ -134,7 +134,7 @@ func (c *collector) convertSum(metric pdata.Metric) (prometheus.Metric, error) { case pdata.MetricValueTypeDouble: value = ip.DoubleVal() } - m, err := prometheus.NewConstMetric(desc, metricType, value, labels...) + m, err := prometheus.NewConstMetric(desc, metricType, value, attributes...) if err != nil { return nil, err } @@ -158,8 +158,8 @@ func (c *collector) convertSummary(metric pdata.Metric) (prometheus.Metric, erro quantiles[qvj.Quantile()] = qvj.Value() } - desc, labelValues := c.getMetricMetadata(metric, point.LabelsMap()) - m, err := prometheus.NewConstSummary(desc, point.Count(), point.Sum(), quantiles, labelValues...) + desc, attributes := c.getMetricMetadata(metric, point.Attributes()) + m, err := prometheus.NewConstSummary(desc, point.Count(), point.Sum(), quantiles, attributes...) 
if err != nil { return nil, err } @@ -171,7 +171,7 @@ func (c *collector) convertSummary(metric pdata.Metric) (prometheus.Metric, erro func (c *collector) convertDoubleHistogram(metric pdata.Metric) (prometheus.Metric, error) { ip := metric.Histogram().DataPoints().At(0) - desc, labels := c.getMetricMetadata(metric, ip.LabelsMap()) + desc, attributes := c.getMetricMetadata(metric, ip.Attributes()) indicesMap := make(map[float64]int) buckets := make([]float64, 0, len(ip.BucketCounts())) @@ -196,7 +196,7 @@ func (c *collector) convertDoubleHistogram(metric pdata.Metric) (prometheus.Metr points[bucket] = cumCount } - m, err := prometheus.NewConstHistogram(desc, ip.Count(), ip.Sum(), points, labels...) + m, err := prometheus.NewConstHistogram(desc, ip.Count(), ip.Sum(), points, attributes...) if err != nil { return nil, err } diff --git a/exporter/prometheusexporter/collector_test.go b/exporter/prometheusexporter/collector_test.go index 09859517e03..38c719e9397 100644 --- a/exporter/prometheusexporter/collector_test.go +++ b/exporter/prometheusexporter/collector_test.go @@ -116,8 +116,8 @@ func TestCollectMetricsLabelSanitize(t *testing.T) { metric.SetDescription("test description") dp := metric.Gauge().DataPoints().AppendEmpty() dp.SetIntVal(42) - dp.LabelsMap().Insert("label.1", "1") - dp.LabelsMap().Insert("label/2", "2") + dp.Attributes().InsertString("label.1", "1") + dp.Attributes().InsertString("label/2", "2") dp.SetTimestamp(pdata.TimestampFromTime(time.Now())) loggerCore := errorCheckCore{} @@ -170,8 +170,8 @@ func TestCollectMetrics(t *testing.T) { metric.SetDescription("test description") dp := metric.Gauge().DataPoints().AppendEmpty() dp.SetIntVal(42) - dp.LabelsMap().Insert("label_1", "1") - dp.LabelsMap().Insert("label_2", "2") + dp.Attributes().InsertString("label_1", "1") + dp.Attributes().InsertString("label_2", "2") dp.SetTimestamp(pdata.TimestampFromTime(ts)) return @@ -188,8 +188,8 @@ func TestCollectMetrics(t *testing.T) { metric.SetDescription("test description") dp := metric.Gauge().DataPoints().AppendEmpty() dp.SetDoubleVal(42.42) - dp.LabelsMap().Insert("label_1", "1") - dp.LabelsMap().Insert("label_2", "2") + dp.Attributes().InsertString("label_1", "1") + dp.Attributes().InsertString("label_2", "2") dp.SetTimestamp(pdata.TimestampFromTime(ts)) return @@ -208,8 +208,8 @@ func TestCollectMetrics(t *testing.T) { metric.SetDescription("test description") dp := metric.Sum().DataPoints().AppendEmpty() dp.SetIntVal(42) - dp.LabelsMap().Insert("label_1", "1") - dp.LabelsMap().Insert("label_2", "2") + dp.Attributes().InsertString("label_1", "1") + dp.Attributes().InsertString("label_2", "2") dp.SetTimestamp(pdata.TimestampFromTime(ts)) return @@ -228,8 +228,8 @@ func TestCollectMetrics(t *testing.T) { metric.SetDescription("test description") dp := metric.Sum().DataPoints().AppendEmpty() dp.SetDoubleVal(42.42) - dp.LabelsMap().Insert("label_1", "1") - dp.LabelsMap().Insert("label_2", "2") + dp.Attributes().InsertString("label_1", "1") + dp.Attributes().InsertString("label_2", "2") dp.SetTimestamp(pdata.TimestampFromTime(ts)) return @@ -248,8 +248,8 @@ func TestCollectMetrics(t *testing.T) { metric.SetDescription("test description") dp := metric.Sum().DataPoints().AppendEmpty() dp.SetIntVal(42) - dp.LabelsMap().Insert("label_1", "1") - dp.LabelsMap().Insert("label_2", "2") + dp.Attributes().InsertString("label_1", "1") + dp.Attributes().InsertString("label_2", "2") dp.SetTimestamp(pdata.TimestampFromTime(ts)) return @@ -268,8 +268,8 @@ func TestCollectMetrics(t *testing.T) { 
metric.SetDescription("test description") dp := metric.Sum().DataPoints().AppendEmpty() dp.SetDoubleVal(42.42) - dp.LabelsMap().Insert("label_1", "1") - dp.LabelsMap().Insert("label_2", "2") + dp.Attributes().InsertString("label_1", "1") + dp.Attributes().InsertString("label_2", "2") dp.SetTimestamp(pdata.TimestampFromTime(ts)) return @@ -368,8 +368,8 @@ func TestAccumulateHistograms(t *testing.T) { dp.SetCount(7) dp.SetExplicitBounds([]float64{3.5, 10.0}) dp.SetSum(42.42) - dp.LabelsMap().Insert("label_1", "1") - dp.LabelsMap().Insert("label_2", "2") + dp.Attributes().InsertString("label_1", "1") + dp.Attributes().InsertString("label_2", "2") dp.SetTimestamp(pdata.TimestampFromTime(ts)) return }, @@ -466,8 +466,8 @@ func TestAccumulateSummary(t *testing.T) { sp.SetCount(10) sp.SetSum(0.012) sp.SetCount(10) - sp.LabelsMap().Insert("label_1", "1") - sp.LabelsMap().Insert("label_2", "2") + sp.Attributes().InsertString("label_1", "1") + sp.Attributes().InsertString("label_2", "2") sp.SetTimestamp(pdata.TimestampFromTime(ts)) fillQuantileValue(0.50, 190, sp.QuantileValues().AppendEmpty()) diff --git a/exporter/prometheusexporter/prometheus_test.go b/exporter/prometheusexporter/prometheus_test.go index 27fe3e504c9..e68081a074d 100644 --- a/exporter/prometheusexporter/prometheus_test.go +++ b/exporter/prometheusexporter/prometheus_test.go @@ -322,8 +322,8 @@ func metricBuilder(delta int64, prefix string) pdata.Metrics { dp1 := d1.DataPoints().AppendEmpty() dp1.SetStartTimestamp(pdata.TimestampFromTime(time.Unix(1543160298+delta, 100000090))) dp1.SetTimestamp(pdata.TimestampFromTime(time.Unix(1543160298+delta, 100000997))) - dp1.LabelsMap().Upsert("os", "windows") - dp1.LabelsMap().Upsert("arch", "x86") + dp1.Attributes().UpsertString("os", "windows") + dp1.Attributes().UpsertString("arch", "x86") dp1.SetIntVal(99 + delta) m2 := ms.AppendEmpty() @@ -337,8 +337,8 @@ func metricBuilder(delta int64, prefix string) pdata.Metrics { dp2 := d2.DataPoints().AppendEmpty() dp2.SetStartTimestamp(pdata.TimestampFromTime(time.Unix(1543160298, 100000090))) dp2.SetTimestamp(pdata.TimestampFromTime(time.Unix(1543160298, 100000997))) - dp2.LabelsMap().Upsert("os", "linux") - dp2.LabelsMap().Upsert("arch", "x86") + dp2.Attributes().UpsertString("os", "linux") + dp2.Attributes().UpsertString("arch", "x86") dp2.SetIntVal(100 + delta) return md diff --git a/exporter/prometheusremotewriteexporter/helper.go b/exporter/prometheusremotewriteexporter/helper.go index 8a28c46f848..cf3a05a5136 100644 --- a/exporter/prometheusremotewriteexporter/helper.go +++ b/exporter/prometheusremotewriteexporter/helper.go @@ -106,10 +106,10 @@ func timeSeriesSignature(metric pdata.Metric, labels *[]prompb.Label) string { return b.String() } -// createLabelSet creates a slice of Cortex Label with OTLP labels and paris of string values. +// createAttributes creates a slice of Cortex Label with OTLP attributes and pairs of string values. // Unpaired string value is ignored. String pairs overwrites OTLP labels if collision happens, and the overwrite is // logged. Resultant label names are sanitized. 
-func createLabelSet(resource pdata.Resource, labels pdata.StringMap, externalLabels map[string]string, extras ...string) []prompb.Label { +func createAttributes(resource pdata.Resource, attributes pdata.AttributeMap, externalLabels map[string]string, extras ...string) []prompb.Label { // map ensures no duplicate label name l := map[string]prompb.Label{} @@ -132,10 +132,10 @@ func createLabelSet(resource pdata.Resource, labels pdata.StringMap, externalLab return true }) - labels.Range(func(key string, value string) bool { + attributes.Range(func(key string, value pdata.AttributeValue) bool { l[key] = prompb.Label{ Name: sanitize(key), - Value: value, + Value: value.StringVal(), } return true @@ -266,7 +266,7 @@ func addSingleNumberDataPoint(pt pdata.NumberDataPoint, resource pdata.Resource, tsMap map[string]*prompb.TimeSeries, externalLabels map[string]string) { // create parameters for addSample name := getPromMetricName(metric, namespace) - labels := createLabelSet(resource, pt.LabelsMap(), externalLabels, nameStr, name) + labels := createAttributes(resource, pt.Attributes(), externalLabels, nameStr, name) sample := &prompb.Sample{ // convert ns to ms Timestamp: convertTimeStamp(pt.Timestamp()), @@ -293,7 +293,7 @@ func addSingleHistogramDataPoint(pt pdata.HistogramDataPoint, resource pdata.Res Timestamp: time, } - sumlabels := createLabelSet(resource, pt.LabelsMap(), externalLabels, nameStr, baseName+sumStr) + sumlabels := createAttributes(resource, pt.Attributes(), externalLabels, nameStr, baseName+sumStr) addSample(tsMap, sum, sumlabels, metric) // treat count as a sample in an individual TimeSeries @@ -301,7 +301,7 @@ func addSingleHistogramDataPoint(pt pdata.HistogramDataPoint, resource pdata.Res Value: float64(pt.Count()), Timestamp: time, } - countlabels := createLabelSet(resource, pt.LabelsMap(), externalLabels, nameStr, baseName+countStr) + countlabels := createAttributes(resource, pt.Attributes(), externalLabels, nameStr, baseName+countStr) addSample(tsMap, count, countlabels, metric) // cumulative count for conversion to cumulative histogram @@ -318,7 +318,7 @@ func addSingleHistogramDataPoint(pt pdata.HistogramDataPoint, resource pdata.Res Timestamp: time, } boundStr := strconv.FormatFloat(bound, 'f', -1, 64) - labels := createLabelSet(resource, pt.LabelsMap(), externalLabels, nameStr, baseName+bucketStr, leStr, boundStr) + labels := createAttributes(resource, pt.Attributes(), externalLabels, nameStr, baseName+bucketStr, leStr, boundStr) addSample(tsMap, bucket, labels, metric) } // add le=+Inf bucket @@ -327,7 +327,7 @@ func addSingleHistogramDataPoint(pt pdata.HistogramDataPoint, resource pdata.Res Value: float64(cumulativeCount), Timestamp: time, } - infLabels := createLabelSet(resource, pt.LabelsMap(), externalLabels, nameStr, baseName+bucketStr, leStr, pInfStr) + infLabels := createAttributes(resource, pt.Attributes(), externalLabels, nameStr, baseName+bucketStr, leStr, pInfStr) addSample(tsMap, infBucket, infLabels, metric) } @@ -343,7 +343,7 @@ func addSingleSummaryDataPoint(pt pdata.SummaryDataPoint, resource pdata.Resourc Timestamp: time, } - sumlabels := createLabelSet(resource, pt.LabelsMap(), externalLabels, nameStr, baseName+sumStr) + sumlabels := createAttributes(resource, pt.Attributes(), externalLabels, nameStr, baseName+sumStr) addSample(tsMap, sum, sumlabels, metric) // treat count as a sample in an individual TimeSeries @@ -351,7 +351,7 @@ func addSingleSummaryDataPoint(pt pdata.SummaryDataPoint, resource pdata.Resourc Value: float64(pt.Count()), Timestamp: 
time, } - countlabels := createLabelSet(resource, pt.LabelsMap(), externalLabels, nameStr, baseName+countStr) + countlabels := createAttributes(resource, pt.Attributes(), externalLabels, nameStr, baseName+countStr) addSample(tsMap, count, countlabels, metric) // process each percentile/quantile @@ -362,7 +362,7 @@ func addSingleSummaryDataPoint(pt pdata.SummaryDataPoint, resource pdata.Resourc Timestamp: time, } percentileStr := strconv.FormatFloat(qt.Quantile(), 'f', -1, 64) - qtlabels := createLabelSet(resource, pt.LabelsMap(), externalLabels, nameStr, baseName, quantileStr, percentileStr) + qtlabels := createAttributes(resource, pt.Attributes(), externalLabels, nameStr, baseName, quantileStr, percentileStr) addSample(tsMap, quantile, qtlabels, metric) } } diff --git a/exporter/prometheusremotewriteexporter/helper_test.go b/exporter/prometheusremotewriteexporter/helper_test.go index 273e871d4f1..0bbb46b6288 100644 --- a/exporter/prometheusremotewriteexporter/helper_test.go +++ b/exporter/prometheusremotewriteexporter/helper_test.go @@ -180,7 +180,7 @@ func Test_createLabelSet(t *testing.T) { tests := []struct { name string resource pdata.Resource - orig pdata.StringMap + orig pdata.AttributeMap externalLabels map[string]string extras []string want []prompb.Label @@ -220,7 +220,7 @@ func Test_createLabelSet(t *testing.T) { { "no_original_case", getResource(), - pdata.NewStringMap(), + pdata.NewAttributeMap(), nil, []string{label31, value31, label32, value32}, getPromLabels(label31, value31, label32, value32), @@ -261,7 +261,7 @@ func Test_createLabelSet(t *testing.T) { // run tests for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - assert.ElementsMatch(t, tt.want, createLabelSet(tt.resource, tt.orig, tt.externalLabels, tt.extras...)) + assert.ElementsMatch(t, tt.want, createAttributes(tt.resource, tt.orig, tt.externalLabels, tt.extras...)) }) } } diff --git a/exporter/prometheusremotewriteexporter/testutil_test.go b/exporter/prometheusremotewriteexporter/testutil_test.go index 7570df4b0f2..e60df7a82f9 100644 --- a/exporter/prometheusremotewriteexporter/testutil_test.go +++ b/exporter/prometheusremotewriteexporter/testutil_test.go @@ -54,9 +54,9 @@ var ( floatVal2 = 2.0 floatVal3 = 3.0 - lbs1 = getLabels(label11, value11, label12, value12) - lbs2 = getLabels(label21, value21, label22, value22) - lbs1Dirty = getLabels(label11+dirty1, value11, dirty2+label12, value12) + lbs1 = getAttributes(label11, value11, label12, value12) + lbs2 = getAttributes(label21, value21, label22, value22) + lbs1Dirty = getAttributes(label11+dirty1, value11, dirty2+label12, value12) exlbs1 = map[string]string{label41: value41} exlbs2 = map[string]string{label11: value41} @@ -116,7 +116,7 @@ var ( validHistogram: getHistogramMetric(validHistogram, lbs2, time2, floatVal2, uint64(intVal2), bounds, buckets), validSummary: getSummaryMetric(validSummary, lbs2, time2, floatVal2, uint64(intVal2), quantiles), validIntGaugeDirty: getIntGaugeMetric(validIntGaugeDirty, lbs1, intVal1, time1), - unmatchedBoundBucketHist: getHistogramMetric(unmatchedBoundBucketHist, pdata.NewStringMap(), 0, 0, 0, []float64{0.1, 0.2, 0.3}, []uint64{1, 2}), + unmatchedBoundBucketHist: getHistogramMetric(unmatchedBoundBucketHist, pdata.NewAttributeMap(), 0, 0, 0, []float64{0.1, 0.2, 0.3}, []uint64{1, 2}), } empty = "empty" @@ -144,13 +144,13 @@ var ( ) // OTLP metrics -// labels must come in pairs -func getLabels(labels ...string) pdata.StringMap { - stringMap := pdata.NewStringMap() +// attributes must come in pairs +func 
getAttributes(labels ...string) pdata.AttributeMap { + attributeMap := pdata.NewAttributeMap() for i := 0; i < len(labels); i += 2 { - stringMap.Upsert(labels[i], labels[i+1]) + attributeMap.UpsertString(labels[i], labels[i+1]) } - return stringMap + return attributeMap } // Prometheus TimeSeries @@ -213,26 +213,26 @@ func getEmptyGaugeMetric(name string) pdata.Metric { return metric } -func getIntGaugeMetric(name string, labels pdata.StringMap, value int64, ts uint64) pdata.Metric { +func getIntGaugeMetric(name string, attributes pdata.AttributeMap, value int64, ts uint64) pdata.Metric { metric := pdata.NewMetric() metric.SetName(name) metric.SetDataType(pdata.MetricDataTypeGauge) dp := metric.Gauge().DataPoints().AppendEmpty() dp.SetIntVal(value) - labels.CopyTo(dp.LabelsMap()) + attributes.CopyTo(dp.Attributes()) dp.SetStartTimestamp(pdata.Timestamp(0)) dp.SetTimestamp(pdata.Timestamp(ts)) return metric } -func getDoubleGaugeMetric(name string, labels pdata.StringMap, value float64, ts uint64) pdata.Metric { +func getDoubleGaugeMetric(name string, attributes pdata.AttributeMap, value float64, ts uint64) pdata.Metric { metric := pdata.NewMetric() metric.SetName(name) metric.SetDataType(pdata.MetricDataTypeGauge) dp := metric.Gauge().DataPoints().AppendEmpty() dp.SetDoubleVal(value) - labels.CopyTo(dp.LabelsMap()) + attributes.CopyTo(dp.Attributes()) dp.SetStartTimestamp(pdata.Timestamp(0)) dp.SetTimestamp(pdata.Timestamp(ts)) @@ -246,14 +246,14 @@ func getEmptySumMetric(name string) pdata.Metric { return metric } -func getIntSumMetric(name string, labels pdata.StringMap, value int64, ts uint64) pdata.Metric { +func getIntSumMetric(name string, attributes pdata.AttributeMap, value int64, ts uint64) pdata.Metric { metric := pdata.NewMetric() metric.SetName(name) metric.SetDataType(pdata.MetricDataTypeSum) metric.Sum().SetAggregationTemporality(pdata.AggregationTemporalityCumulative) dp := metric.Sum().DataPoints().AppendEmpty() dp.SetIntVal(value) - labels.CopyTo(dp.LabelsMap()) + attributes.CopyTo(dp.Attributes()) dp.SetStartTimestamp(pdata.Timestamp(0)) dp.SetTimestamp(pdata.Timestamp(ts)) @@ -268,14 +268,14 @@ func getEmptyCumulativeSumMetric(name string) pdata.Metric { return metric } -func getSumMetric(name string, labels pdata.StringMap, value float64, ts uint64) pdata.Metric { +func getSumMetric(name string, attributes pdata.AttributeMap, value float64, ts uint64) pdata.Metric { metric := pdata.NewMetric() metric.SetName(name) metric.SetDataType(pdata.MetricDataTypeSum) metric.Sum().SetAggregationTemporality(pdata.AggregationTemporalityCumulative) dp := metric.Sum().DataPoints().AppendEmpty() dp.SetDoubleVal(value) - labels.CopyTo(dp.LabelsMap()) + attributes.CopyTo(dp.Attributes()) dp.SetStartTimestamp(pdata.Timestamp(0)) dp.SetTimestamp(pdata.Timestamp(ts)) @@ -297,7 +297,7 @@ func getEmptyCumulativeHistogramMetric(name string) pdata.Metric { return metric } -func getHistogramMetric(name string, labels pdata.StringMap, ts uint64, sum float64, count uint64, bounds []float64, buckets []uint64) pdata.Metric { +func getHistogramMetric(name string, attributes pdata.AttributeMap, ts uint64, sum float64, count uint64, bounds []float64, buckets []uint64) pdata.Metric { metric := pdata.NewMetric() metric.SetName(name) metric.SetDataType(pdata.MetricDataTypeHistogram) @@ -307,7 +307,7 @@ func getHistogramMetric(name string, labels pdata.StringMap, ts uint64, sum floa dp.SetSum(sum) dp.SetBucketCounts(buckets) dp.SetExplicitBounds(bounds) - labels.CopyTo(dp.LabelsMap()) + 
attributes.CopyTo(dp.Attributes()) dp.SetTimestamp(pdata.Timestamp(ts)) return metric @@ -320,7 +320,7 @@ func getEmptySummaryMetric(name string) pdata.Metric { return metric } -func getSummaryMetric(name string, labels pdata.StringMap, ts uint64, sum float64, count uint64, quantiles pdata.ValueAtQuantileSlice) pdata.Metric { +func getSummaryMetric(name string, attributes pdata.AttributeMap, ts uint64, sum float64, count uint64, quantiles pdata.ValueAtQuantileSlice) pdata.Metric { metric := pdata.NewMetric() metric.SetName(name) metric.SetDataType(pdata.MetricDataTypeSummary) @@ -328,8 +328,8 @@ func getSummaryMetric(name string, labels pdata.StringMap, ts uint64, sum float6 dp.SetCount(count) dp.SetSum(sum) - labels.Range(func(k string, v string) bool { - dp.LabelsMap().Upsert(k, v) + attributes.Range(func(k string, v pdata.AttributeValue) bool { + dp.Attributes().Upsert(k, v) return true }) diff --git a/internal/testdata/common.go b/internal/testdata/common.go index ced24a4eb13..6bc1972281b 100644 --- a/internal/testdata/common.go +++ b/internal/testdata/common.go @@ -57,22 +57,22 @@ func initSpanLinkAttributes(dest pdata.AttributeMap) { dest.InitFromMap(spanLinkAttributes) } -func initMetricLabels1(dest pdata.StringMap) { - dest.InitFromMap(map[string]string{TestLabelKey1: TestLabelValue1}) +func initMetricAttachment(dest pdata.StringMap) { + dest.InitFromMap(map[string]string{TestAttachmentKey: TestAttachmentValue}) } -func initMetricLabels12(dest pdata.StringMap) { - dest.InitFromMap(map[string]string{TestLabelKey1: TestLabelValue1, TestLabelKey2: TestLabelValue2}).Sort() +func initMetricAttributes1(dest pdata.AttributeMap) { + dest.InitFromMap(map[string]pdata.AttributeValue{TestLabelKey1: pdata.NewAttributeValueString(TestLabelValue1)}) } -func initMetricLabels13(dest pdata.StringMap) { - dest.InitFromMap(map[string]string{TestLabelKey1: TestLabelValue1, TestLabelKey3: TestLabelValue3}).Sort() +func initMetricAttributes12(dest pdata.AttributeMap) { + dest.InitFromMap(map[string]pdata.AttributeValue{TestLabelKey1: pdata.NewAttributeValueString(TestLabelValue1), TestLabelKey2: pdata.NewAttributeValueString(TestLabelValue2)}).Sort() } -func initMetricLabels2(dest pdata.StringMap) { - dest.InitFromMap(map[string]string{TestLabelKey2: TestLabelValue2}) +func initMetricAttributes13(dest pdata.AttributeMap) { + dest.InitFromMap(map[string]pdata.AttributeValue{TestLabelKey1: pdata.NewAttributeValueString(TestLabelValue1), TestLabelKey3: pdata.NewAttributeValueString(TestLabelValue3)}).Sort() } -func initMetricAttachment(dest pdata.StringMap) { - dest.InitFromMap(map[string]string{TestAttachmentKey: TestAttachmentValue}) +func initMetricAttributes2(dest pdata.AttributeMap) { + dest.InitFromMap(map[string]pdata.AttributeValue{TestLabelKey2: pdata.NewAttributeValueString(TestLabelValue2)}) } diff --git a/internal/testdata/metric.go b/internal/testdata/metric.go index 38d9ee79a03..3ba7fba01a9 100644 --- a/internal/testdata/metric.go +++ b/internal/testdata/metric.go @@ -90,11 +90,11 @@ func GenerateMetricsOneCounterOneSummaryMetrics() pdata.Metrics { return md } -func GenerateMetricsOneMetricNoLabels() pdata.Metrics { +func GenerateMetricsOneMetricNoAttributes() pdata.Metrics { md := GenerateMetricsOneMetric() dps := md.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics().At(0).Sum().DataPoints() - dps.At(0).LabelsMap().InitFromMap(map[string]string{}) - dps.At(1).LabelsMap().InitFromMap(map[string]string{}) + 
dps.At(0).Attributes().InitFromMap(map[string]pdata.AttributeValue{}) + dps.At(1).Attributes().InitFromMap(map[string]pdata.AttributeValue{}) return md } @@ -164,12 +164,12 @@ func initGaugeIntMetric(im pdata.Metric) { idps := im.Gauge().DataPoints() idp0 := idps.AppendEmpty() - initMetricLabels1(idp0.LabelsMap()) + initMetricAttributes1(idp0.Attributes()) idp0.SetStartTimestamp(TestMetricStartTimestamp) idp0.SetTimestamp(TestMetricTimestamp) idp0.SetIntVal(123) idp1 := idps.AppendEmpty() - initMetricLabels2(idp1.LabelsMap()) + initMetricAttributes2(idp1.Attributes()) idp1.SetStartTimestamp(TestMetricStartTimestamp) idp1.SetTimestamp(TestMetricTimestamp) idp1.SetIntVal(456) @@ -180,12 +180,12 @@ func initGaugeDoubleMetric(im pdata.Metric) { idps := im.Gauge().DataPoints() idp0 := idps.AppendEmpty() - initMetricLabels12(idp0.LabelsMap()) + initMetricAttributes12(idp0.Attributes()) idp0.SetStartTimestamp(TestMetricStartTimestamp) idp0.SetTimestamp(TestMetricTimestamp) idp0.SetDoubleVal(1.23) idp1 := idps.AppendEmpty() - initMetricLabels13(idp1.LabelsMap()) + initMetricAttributes13(idp1.Attributes()) idp1.SetStartTimestamp(TestMetricStartTimestamp) idp1.SetTimestamp(TestMetricTimestamp) idp1.SetDoubleVal(4.56) @@ -196,12 +196,12 @@ func initSumIntMetric(im pdata.Metric) { idps := im.Sum().DataPoints() idp0 := idps.AppendEmpty() - initMetricLabels1(idp0.LabelsMap()) + initMetricAttributes1(idp0.Attributes()) idp0.SetStartTimestamp(TestMetricStartTimestamp) idp0.SetTimestamp(TestMetricTimestamp) idp0.SetIntVal(123) idp1 := idps.AppendEmpty() - initMetricLabels2(idp1.LabelsMap()) + initMetricAttributes2(idp1.Attributes()) idp1.SetStartTimestamp(TestMetricStartTimestamp) idp1.SetTimestamp(TestMetricTimestamp) idp1.SetIntVal(456) @@ -212,13 +212,13 @@ func initSumDoubleMetric(dm pdata.Metric) { ddps := dm.Sum().DataPoints() ddp0 := ddps.AppendEmpty() - initMetricLabels12(ddp0.LabelsMap()) + initMetricAttributes12(ddp0.Attributes()) ddp0.SetStartTimestamp(TestMetricStartTimestamp) ddp0.SetTimestamp(TestMetricTimestamp) ddp0.SetDoubleVal(1.23) ddp1 := ddps.AppendEmpty() - initMetricLabels13(ddp1.LabelsMap()) + initMetricAttributes13(ddp1.Attributes()) ddp1.SetStartTimestamp(TestMetricStartTimestamp) ddp1.SetTimestamp(TestMetricTimestamp) ddp1.SetDoubleVal(4.56) @@ -229,13 +229,13 @@ func initDoubleHistogramMetric(hm pdata.Metric) { hdps := hm.Histogram().DataPoints() hdp0 := hdps.AppendEmpty() - initMetricLabels13(hdp0.LabelsMap()) + initMetricAttributes13(hdp0.Attributes()) hdp0.SetStartTimestamp(TestMetricStartTimestamp) hdp0.SetTimestamp(TestMetricTimestamp) hdp0.SetCount(1) hdp0.SetSum(15) hdp1 := hdps.AppendEmpty() - initMetricLabels2(hdp1.LabelsMap()) + initMetricAttributes2(hdp1.Attributes()) hdp1.SetStartTimestamp(TestMetricStartTimestamp) hdp1.SetTimestamp(TestMetricTimestamp) hdp1.SetCount(1) @@ -253,13 +253,13 @@ func initDoubleSummaryMetric(sm pdata.Metric) { sdps := sm.Summary().DataPoints() sdp0 := sdps.AppendEmpty() - initMetricLabels13(sdp0.LabelsMap()) + initMetricAttributes13(sdp0.Attributes()) sdp0.SetStartTimestamp(TestMetricStartTimestamp) sdp0.SetTimestamp(TestMetricTimestamp) sdp0.SetCount(1) sdp0.SetSum(15) sdp1 := sdps.AppendEmpty() - initMetricLabels2(sdp1.LabelsMap()) + initMetricAttributes2(sdp1.Attributes()) sdp1.SetStartTimestamp(TestMetricStartTimestamp) sdp1.SetTimestamp(TestMetricTimestamp) sdp1.SetCount(1) diff --git a/translator/internaldata/metrics_to_oc.go b/translator/internaldata/metrics_to_oc.go index fbc362d4160..53ac2ace616 100644 --- 
a/translator/internaldata/metrics_to_oc.go +++ b/translator/internaldata/metrics_to_oc.go @@ -140,7 +140,7 @@ func collectLabelKeysAndValueType(metric pdata.Metric) *labelKeysAndType { func collectLabelKeysNumberDataPoints(dps pdata.NumberDataPointSlice, keySet map[string]struct{}) bool { allInt := true for i := 0; i < dps.Len(); i++ { - addLabelKeys(keySet, dps.At(i).LabelsMap()) + addLabelKeys(keySet, dps.At(i).Attributes()) if dps.At(i).Type() != pdata.MetricValueTypeInt { allInt = false } @@ -150,18 +150,18 @@ func collectLabelKeysNumberDataPoints(dps pdata.NumberDataPointSlice, keySet map func collectLabelKeysHistogramDataPoints(dhdp pdata.HistogramDataPointSlice, keySet map[string]struct{}) { for i := 0; i < dhdp.Len(); i++ { - addLabelKeys(keySet, dhdp.At(i).LabelsMap()) + addLabelKeys(keySet, dhdp.At(i).Attributes()) } } func collectLabelKeysSummaryDataPoints(dhdp pdata.SummaryDataPointSlice, keySet map[string]struct{}) { for i := 0; i < dhdp.Len(); i++ { - addLabelKeys(keySet, dhdp.At(i).LabelsMap()) + addLabelKeys(keySet, dhdp.At(i).Attributes()) } } -func addLabelKeys(keySet map[string]struct{}, labels pdata.StringMap) { - labels.Range(func(k string, v string) bool { +func addLabelKeys(keySet map[string]struct{}, attributes pdata.AttributeMap) { + attributes.Range(func(k string, v pdata.AttributeValue) bool { keySet[k] = struct{}{} return true }) @@ -240,7 +240,7 @@ func numberDataPointsToOC(dps pdata.NumberDataPointSlice, labelKeys *labelKeysAn } ts := &ocmetrics.TimeSeries{ StartTimestamp: timestampAsTimestampPb(dp.StartTimestamp()), - LabelValues: labelValuesToOC(dp.LabelsMap(), labelKeys), + LabelValues: attributeValuesToOC(dp.Attributes(), labelKeys), Points: []*ocmetrics.Point{point}, } timeseries = append(timeseries, ts) @@ -260,7 +260,7 @@ func doubleHistogramPointToOC(dps pdata.HistogramDataPointSlice, labelKeys *labe ts := &ocmetrics.TimeSeries{ StartTimestamp: timestampAsTimestampPb(dp.StartTimestamp()), - LabelValues: labelValuesToOC(dp.LabelsMap(), labelKeys), + LabelValues: attributeValuesToOC(dp.Attributes(), labelKeys), Points: []*ocmetrics.Point{ { Timestamp: timestampAsTimestampPb(dp.Timestamp()), @@ -320,7 +320,7 @@ func doubleSummaryPointToOC(dps pdata.SummaryDataPointSlice, labelKeys *labelKey ts := &ocmetrics.TimeSeries{ StartTimestamp: timestampAsTimestampPb(dp.StartTimestamp()), - LabelValues: labelValuesToOC(dp.LabelsMap(), labelKeys), + LabelValues: attributeValuesToOC(dp.Attributes(), labelKeys), Points: []*ocmetrics.Point{ { Timestamp: timestampAsTimestampPb(dp.Timestamp()), @@ -399,7 +399,7 @@ func exemplarToOC(filteredLabels pdata.StringMap, value float64, timestamp pdata } } -func labelValuesToOC(labels pdata.StringMap, labelKeys *labelKeysAndType) []*ocmetrics.LabelValue { +func attributeValuesToOC(labels pdata.AttributeMap, labelKeys *labelKeysAndType) []*ocmetrics.LabelValue { if len(labelKeys.keys) == 0 { return nil } @@ -413,13 +413,13 @@ func labelValuesToOC(labels pdata.StringMap, labelKeys *labelKeysAndType) []*ocm } // Visit all defined labels in the point and override defaults with actual values - labels.Range(func(k string, v string) bool { + labels.Range(func(k string, v pdata.AttributeValue) bool { // Find the appropriate label value that we need to update keyIndex := labelKeys.keyIndices[k] labelValue := labelValues[keyIndex] // Update label value - labelValue.Value = v + labelValue.Value = v.StringVal() labelValue.HasValue = true return true }) diff --git a/translator/internaldata/metrics_to_oc_test.go 
b/translator/internaldata/metrics_to_oc_test.go index 63c50883d36..c91b3df49a0 100644 --- a/translator/internaldata/metrics_to_oc_test.go +++ b/translator/internaldata/metrics_to_oc_test.go @@ -82,7 +82,7 @@ func TestMetricsToOC(t *testing.T) { { name: "one-metric-no-labels", - internal: testdata.GenerateMetricsOneMetricNoLabels(), + internal: testdata.GenerateMetricsOneMetricNoAttributes(), oc: generateOCTestDataNoLabels(), }, diff --git a/translator/internaldata/oc_to_metrics.go b/translator/internaldata/oc_to_metrics.go index 0710cb766ee..42898ec5f26 100644 --- a/translator/internaldata/oc_to_metrics.go +++ b/translator/internaldata/oc_to_metrics.go @@ -203,7 +203,7 @@ func setDataPoints(ocMetric *ocmetrics.Metric, metric pdata.Metric, valType pdat } } -func fillLabelsMap(ocLabelsKeys []*ocmetrics.LabelKey, ocLabelValues []*ocmetrics.LabelValue, labelsMap pdata.StringMap) { +func fillAttributesMap(ocLabelsKeys []*ocmetrics.LabelKey, ocLabelValues []*ocmetrics.LabelValue, attributesMap pdata.AttributeMap) { if len(ocLabelsKeys) == 0 || len(ocLabelValues) == 0 { return } @@ -215,13 +215,13 @@ func fillLabelsMap(ocLabelsKeys []*ocmetrics.LabelKey, ocLabelValues []*ocmetric lablesCount = len(ocLabelValues) } - labelsMap.Clear() - labelsMap.EnsureCapacity(lablesCount) + attributesMap.Clear() + attributesMap.EnsureCapacity(lablesCount) for i := 0; i < lablesCount; i++ { if !ocLabelValues[i].GetHasValue() { continue } - labelsMap.Insert(ocLabelsKeys[i].Key, ocLabelValues[i].Value) + attributesMap.InsertString(ocLabelsKeys[i].Key, ocLabelValues[i].Value) } } @@ -243,7 +243,7 @@ func fillNumberDataPoint(ocMetric *ocmetrics.Metric, dps pdata.NumberDataPointSl dp := dps.AppendEmpty() dp.SetStartTimestamp(startTimestamp) dp.SetTimestamp(pdata.TimestampFromTime(point.GetTimestamp().AsTime())) - fillLabelsMap(ocLabelsKeys, timeseries.LabelValues, dp.LabelsMap()) + fillAttributesMap(ocLabelsKeys, timeseries.LabelValues, dp.Attributes()) switch valType { case pdata.MetricValueTypeInt: dp.SetIntVal(point.GetInt64Value()) @@ -272,7 +272,7 @@ func fillDoubleHistogramDataPoint(ocMetric *ocmetrics.Metric, dps pdata.Histogra dp := dps.AppendEmpty() dp.SetStartTimestamp(startTimestamp) dp.SetTimestamp(pdata.TimestampFromTime(point.GetTimestamp().AsTime())) - fillLabelsMap(ocLabelsKeys, timeseries.LabelValues, dp.LabelsMap()) + fillAttributesMap(ocLabelsKeys, timeseries.LabelValues, dp.Attributes()) distributionValue := point.GetDistributionValue() dp.SetSum(distributionValue.GetSum()) dp.SetCount(uint64(distributionValue.GetCount())) @@ -300,7 +300,7 @@ func fillDoubleSummaryDataPoint(ocMetric *ocmetrics.Metric, dps pdata.SummaryDat dp := dps.AppendEmpty() dp.SetStartTimestamp(startTimestamp) dp.SetTimestamp(pdata.TimestampFromTime(point.GetTimestamp().AsTime())) - fillLabelsMap(ocLabelsKeys, timeseries.LabelValues, dp.LabelsMap()) + fillAttributesMap(ocLabelsKeys, timeseries.LabelValues, dp.Attributes()) summaryValue := point.GetSummaryValue() dp.SetSum(summaryValue.GetSum().GetValue()) dp.SetCount(uint64(summaryValue.GetCount().GetValue())) diff --git a/translator/internaldata/oc_to_metrics_test.go b/translator/internaldata/oc_to_metrics_test.go index 90d6cfbccda..2d6b2ac2a11 100644 --- a/translator/internaldata/oc_to_metrics_test.go +++ b/translator/internaldata/oc_to_metrics_test.go @@ -57,7 +57,7 @@ func TestOCToMetrics(t *testing.T) { { name: "one-metric-no-labels", oc: generateOCTestDataNoLabels(), - internal: testdata.GenerateMetricsOneMetricNoLabels(), + internal: 
testdata.GenerateMetricsOneMetricNoAttributes(), }, {
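Illustration (not part of the diff above): a minimal sketch of the label-to-attribute API shift this patch applies throughout the exporters, assuming the pdata package from go.opentelemetry.io/collector/model at this commit; the metric name and label values are made up for the example.

package main

import (
	"fmt"

	"go.opentelemetry.io/collector/model/pdata"
)

func main() {
	// Build a data point the way the updated tests in this patch do.
	metric := pdata.NewMetric()
	metric.SetName("example.counter") // illustrative name, not from the diff
	metric.SetDataType(pdata.MetricDataTypeSum)
	dp := metric.Sum().DataPoints().AppendEmpty()
	dp.SetIntVal(42)

	// Old API removed by this patch: string-only labels, e.g.
	//   dp.LabelsMap().Insert("label_1", "1")
	// New API: typed attributes on the data point.
	dp.Attributes().InsertString("label_1", "1")
	dp.Attributes().UpsertString("label_2", "2")

	// Iteration now yields pdata.AttributeValue instead of string,
	// which is why the exporters above add v.StringVal() calls.
	dp.Attributes().Range(func(k string, v pdata.AttributeValue) bool {
		fmt.Println(k, "=", v.StringVal())
		return true
	})
}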