Revert "[pkg/translator/prometheus] Switch NormalizeName FG back to…
Browse files Browse the repository at this point in the history
… alpha (open-telemetry#23229)"

This reverts commit 47b208f.
hdkshingala committed Jul 4, 2023
1 parent ab1dbd4 commit 1ec4080
Showing 12 changed files with 134 additions and 158 deletions.
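
The net effect of this revert is that the pkg.translator.prometheus.NormalizeName feature gate moves back to the beta stage and is therefore enabled by default again: on the export side counter names regain the _total suffix, and on the receive side redundant type and unit suffixes are trimmed (for example scrape_duration_seconds becomes scrape_duration), as the updated tests below show. As a hedged illustration that is not part of this change, an embedder of the collector can also flip the gate programmatically through the global featuregate registry, which is equivalent to the --feature-gates flag shown in the README diff further down:

```go
package main

import (
	"log"

	"go.opentelemetry.io/collector/featuregate"
)

func main() {
	// Turn the gate off at startup, equivalent to passing
	// --feature-gates=-pkg.translator.prometheus.NormalizeName on the
	// collector command line. After this revert the gate is at the beta
	// stage, so it stays enabled unless explicitly disabled like this.
	if err := featuregate.GlobalRegistry().Set("pkg.translator.prometheus.NormalizeName", false); err != nil {
		log.Fatal(err)
	}
}
```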
3 changes: 2 additions & 1 deletion exporter/prometheusexporter/collector_test.go
@@ -448,7 +448,6 @@ func TestCollectMetrics(t *testing.T) {
continue
}

require.Contains(t, m.Desc().String(), "fqName: \"test_space_test_metric\"")
require.Regexp(t, `variableLabels: \[.*label_1.+label_2.+job.+instance.*\]`, m.Desc().String())

pbMetric := io_prometheus_client.Metric{}
@@ -467,11 +466,13 @@ func TestCollectMetrics(t *testing.T) {

switch tt.metricType {
case prometheus.CounterValue:
require.Contains(t, m.Desc().String(), "fqName: \"test_space_test_metric_total\"")
require.Equal(t, tt.value, *pbMetric.Counter.Value)
require.Nil(t, pbMetric.Gauge)
require.Nil(t, pbMetric.Histogram)
require.Nil(t, pbMetric.Summary)
case prometheus.GaugeValue:
require.Contains(t, m.Desc().String(), "fqName: \"test_space_test_metric\"")
require.Equal(t, tt.value, *pbMetric.Gauge.Value)
require.Nil(t, pbMetric.Counter)
require.Nil(t, pbMetric.Histogram)
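
With normalization enabled, only monotonic sums (counters) gain the _total suffix while gauges keep the plain name, which is why the single shared fqName assertion is replaced by per-type assertions above. For reference, a minimal sketch of the string those assertions match against, built directly with the client_golang API (metric and label names are illustrative):

```go
package main

import (
	"fmt"

	"github.com/prometheus/client_golang/prometheus"
)

func main() {
	// Desc.String() includes the fully-qualified metric name and the
	// variable labels, which is what the test assertions search for.
	d := prometheus.NewDesc(
		"test_space_test_metric_total", // fqName
		"help text",
		[]string{"label_1", "label_2", "job", "instance"}, // variable labels
		nil, // no constant labels
	)
	fmt.Println(d.String())
	// Prints something like:
	// Desc{fqName: "test_space_test_metric_total", help: "help text", constLabels: {}, variableLabels: [...]}
}
```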
72 changes: 36 additions & 36 deletions exporter/prometheusexporter/prometheus_test.go
@@ -160,10 +160,10 @@ func TestPrometheusExporter_WithTLS(t *testing.T) {
_ = rsp.Body.Close()

want := []string{
`# HELP test_counter_int`,
`# TYPE test_counter_int counter`,
`test_counter_int{code2="one2",foo2="bar2",label_1="label-value-1",resource_attr="resource-attr-val-1"} 123 1581452773000`,
`test_counter_int{code2="one2",foo2="bar2",label_2="label-value-2",resource_attr="resource-attr-val-1"} 456 1581452773000`,
`# HELP test_counter_int_total`,
`# TYPE test_counter_int_total counter`,
`test_counter_int_total{code2="one2",foo2="bar2",label_1="label-value-1",resource_attr="resource-attr-val-1"} 123 1581452773000`,
`test_counter_int_total{code2="one2",foo2="bar2",label_2="label-value-2",resource_attr="resource-attr-val-1"} 456 1581452773000`,
}

for _, w := range want {
@@ -220,18 +220,18 @@ func TestPrometheusExporter_endToEndMultipleTargets(t *testing.T) {
blob, _ := io.ReadAll(res.Body)
_ = res.Body.Close()
want := []string{
`# HELP test_metric_1_this_one_there_where Extra ones`,
`# TYPE test_metric_1_this_one_there_where counter`,
fmt.Sprintf(`test_metric_1_this_one_there_where{arch="x86",code1="one1",foo1="bar1",instance="localhost:8080",job="cpu-exporter",os="windows"} %v`, 99+128),
fmt.Sprintf(`test_metric_1_this_one_there_where{arch="x86",code1="one1",foo1="bar1",instance="localhost:8080",job="cpu-exporter",os="linux"} %v`, 100+128),
fmt.Sprintf(`test_metric_1_this_one_there_where{arch="x86",code1="one1",foo1="bar1",instance="localhost:8081",job="cpu-exporter",os="windows"} %v`, 99+128),
fmt.Sprintf(`test_metric_1_this_one_there_where{arch="x86",code1="one1",foo1="bar1",instance="localhost:8081",job="cpu-exporter",os="linux"} %v`, 100+128),
`# HELP test_metric_2_this_one_there_where Extra ones`,
`# TYPE test_metric_2_this_one_there_where counter`,
fmt.Sprintf(`test_metric_2_this_one_there_where{arch="x86",code1="one1",foo1="bar1",instance="localhost:8080",job="cpu-exporter",os="windows"} %v`, 99+delta),
fmt.Sprintf(`test_metric_2_this_one_there_where{arch="x86",code1="one1",foo1="bar1",instance="localhost:8080",job="cpu-exporter",os="linux"} %v`, 100+delta),
fmt.Sprintf(`test_metric_2_this_one_there_where{arch="x86",code1="one1",foo1="bar1",instance="localhost:8081",job="cpu-exporter",os="windows"} %v`, 99+delta),
fmt.Sprintf(`test_metric_2_this_one_there_where{arch="x86",code1="one1",foo1="bar1",instance="localhost:8081",job="cpu-exporter",os="linux"} %v`, 100+delta),
`# HELP test_metric_1_this_one_there_where_total Extra ones`,
`# TYPE test_metric_1_this_one_there_where_total counter`,
fmt.Sprintf(`test_metric_1_this_one_there_where_total{arch="x86",code1="one1",foo1="bar1",instance="localhost:8080",job="cpu-exporter",os="windows"} %v`, 99+128),
fmt.Sprintf(`test_metric_1_this_one_there_where_total{arch="x86",code1="one1",foo1="bar1",instance="localhost:8080",job="cpu-exporter",os="linux"} %v`, 100+128),
fmt.Sprintf(`test_metric_1_this_one_there_where_total{arch="x86",code1="one1",foo1="bar1",instance="localhost:8081",job="cpu-exporter",os="windows"} %v`, 99+128),
fmt.Sprintf(`test_metric_1_this_one_there_where_total{arch="x86",code1="one1",foo1="bar1",instance="localhost:8081",job="cpu-exporter",os="linux"} %v`, 100+128),
`# HELP test_metric_2_this_one_there_where_total Extra ones`,
`# TYPE test_metric_2_this_one_there_where_total counter`,
fmt.Sprintf(`test_metric_2_this_one_there_where_total{arch="x86",code1="one1",foo1="bar1",instance="localhost:8080",job="cpu-exporter",os="windows"} %v`, 99+delta),
fmt.Sprintf(`test_metric_2_this_one_there_where_total{arch="x86",code1="one1",foo1="bar1",instance="localhost:8080",job="cpu-exporter",os="linux"} %v`, 100+delta),
fmt.Sprintf(`test_metric_2_this_one_there_where_total{arch="x86",code1="one1",foo1="bar1",instance="localhost:8081",job="cpu-exporter",os="windows"} %v`, 99+delta),
fmt.Sprintf(`test_metric_2_this_one_there_where_total{arch="x86",code1="one1",foo1="bar1",instance="localhost:8081",job="cpu-exporter",os="linux"} %v`, 100+delta),
}

for _, w := range want {
@@ -300,14 +300,14 @@ func TestPrometheusExporter_endToEnd(t *testing.T) {
blob, _ := io.ReadAll(res.Body)
_ = res.Body.Close()
want := []string{
`# HELP test_metric_1_this_one_there_where Extra ones`,
`# TYPE test_metric_1_this_one_there_where counter`,
fmt.Sprintf(`test_metric_1_this_one_there_where{arch="x86",code1="one1",foo1="bar1",instance="localhost:8080",job="cpu-exporter",os="windows"} %v`, 99+128),
fmt.Sprintf(`test_metric_1_this_one_there_where{arch="x86",code1="one1",foo1="bar1",instance="localhost:8080",job="cpu-exporter",os="linux"} %v`, 100+128),
`# HELP test_metric_2_this_one_there_where Extra ones`,
`# TYPE test_metric_2_this_one_there_where counter`,
fmt.Sprintf(`test_metric_2_this_one_there_where{arch="x86",code1="one1",foo1="bar1",instance="localhost:8080",job="cpu-exporter",os="windows"} %v`, 99+delta),
fmt.Sprintf(`test_metric_2_this_one_there_where{arch="x86",code1="one1",foo1="bar1",instance="localhost:8080",job="cpu-exporter",os="linux"} %v`, 100+delta),
`# HELP test_metric_1_this_one_there_where_total Extra ones`,
`# TYPE test_metric_1_this_one_there_where_total counter`,
fmt.Sprintf(`test_metric_1_this_one_there_where_total{arch="x86",code1="one1",foo1="bar1",instance="localhost:8080",job="cpu-exporter",os="windows"} %v`, 99+128),
fmt.Sprintf(`test_metric_1_this_one_there_where_total{arch="x86",code1="one1",foo1="bar1",instance="localhost:8080",job="cpu-exporter",os="linux"} %v`, 100+128),
`# HELP test_metric_2_this_one_there_where_total Extra ones`,
`# TYPE test_metric_2_this_one_there_where_total counter`,
fmt.Sprintf(`test_metric_2_this_one_there_where_total{arch="x86",code1="one1",foo1="bar1",instance="localhost:8080",job="cpu-exporter",os="windows"} %v`, 99+delta),
fmt.Sprintf(`test_metric_2_this_one_there_where_total{arch="x86",code1="one1",foo1="bar1",instance="localhost:8080",job="cpu-exporter",os="linux"} %v`, 100+delta),
}

for _, w := range want {
@@ -377,14 +377,14 @@ func TestPrometheusExporter_endToEndWithTimestamps(t *testing.T) {
blob, _ := io.ReadAll(res.Body)
_ = res.Body.Close()
want := []string{
`# HELP test_metric_1_this_one_there_where Extra ones`,
`# TYPE test_metric_1_this_one_there_where counter`,
fmt.Sprintf(`test_metric_1_this_one_there_where{arch="x86",code2="one2",foo2="bar2",instance="localhost:8080",job="node-exporter",os="windows"} %v %v`, 99+128, 1543160298100+128000),
fmt.Sprintf(`test_metric_1_this_one_there_where{arch="x86",code2="one2",foo2="bar2",instance="localhost:8080",job="node-exporter",os="linux"} %v %v`, 100+128, 1543160298100),
`# HELP test_metric_2_this_one_there_where Extra ones`,
`# TYPE test_metric_2_this_one_there_where counter`,
fmt.Sprintf(`test_metric_2_this_one_there_where{arch="x86",code2="one2",foo2="bar2",instance="localhost:8080",job="node-exporter",os="windows"} %v %v`, 99+delta, 1543160298100+delta*1000),
fmt.Sprintf(`test_metric_2_this_one_there_where{arch="x86",code2="one2",foo2="bar2",instance="localhost:8080",job="node-exporter",os="linux"} %v %v`, 100+delta, 1543160298100),
`# HELP test_metric_1_this_one_there_where_total Extra ones`,
`# TYPE test_metric_1_this_one_there_where_total counter`,
fmt.Sprintf(`test_metric_1_this_one_there_where_total{arch="x86",code2="one2",foo2="bar2",instance="localhost:8080",job="node-exporter",os="windows"} %v %v`, 99+128, 1543160298100+128000),
fmt.Sprintf(`test_metric_1_this_one_there_where_total{arch="x86",code2="one2",foo2="bar2",instance="localhost:8080",job="node-exporter",os="linux"} %v %v`, 100+128, 1543160298100),
`# HELP test_metric_2_this_one_there_where_total Extra ones`,
`# TYPE test_metric_2_this_one_there_where_total counter`,
fmt.Sprintf(`test_metric_2_this_one_there_where_total{arch="x86",code2="one2",foo2="bar2",instance="localhost:8080",job="node-exporter",os="windows"} %v %v`, 99+delta, 1543160298100+delta*1000),
fmt.Sprintf(`test_metric_2_this_one_there_where_total{arch="x86",code2="one2",foo2="bar2",instance="localhost:8080",job="node-exporter",os="linux"} %v %v`, 100+delta, 1543160298100),
}

for _, w := range want {
@@ -457,10 +457,10 @@ func TestPrometheusExporter_endToEndWithResource(t *testing.T) {
_ = rsp.Body.Close()

want := []string{
`# HELP test_counter_int`,
`# TYPE test_counter_int counter`,
`test_counter_int{code2="one2",foo2="bar2",label_1="label-value-1",resource_attr="resource-attr-val-1"} 123 1581452773000`,
`test_counter_int{code2="one2",foo2="bar2",label_2="label-value-2",resource_attr="resource-attr-val-1"} 456 1581452773000`,
`# HELP test_counter_int_total`,
`# TYPE test_counter_int_total counter`,
`test_counter_int_total{code2="one2",foo2="bar2",label_1="label-value-1",resource_attr="resource-attr-val-1"} 123 1581452773000`,
`test_counter_int_total{code2="one2",foo2="bar2",label_2="label-value-2",resource_attr="resource-attr-val-1"} 456 1581452773000`,
}

for _, w := range want {
4 changes: 2 additions & 2 deletions pkg/translator/prometheus/README.md
@@ -12,10 +12,10 @@

> **Warning**
>
> This feature can be enabled with [feature gate](https://github.com/open-telemetry/opentelemetry-collector/tree/main/featuregate) `pkg.translator.prometheus.NormalizeName`. It is disabled by default (alpha stage).
> This feature can be disabled with [feature gate](https://github.com/open-telemetry/opentelemetry-collector/tree/main/featuregate) `pkg.translator.prometheus.NormalizeName`. It is enabled by default (beta stage).
>
> ```shell-session
> $ otelcol --config=config.yaml --feature-gates=pkg.translator.prometheus.NormalizeName
> $ otelcol --config=config.yaml --feature-gates=-pkg.translator.prometheus.NormalizeName
> ```
#### List of transformations to convert OpenTelemetry metrics to Prometheus metrics
2 changes: 1 addition & 1 deletion pkg/translator/prometheus/normalize_name.go
@@ -73,7 +73,7 @@ var perUnitMap = map[string]string{

var normalizeNameGate = featuregate.GlobalRegistry().MustRegister(
"pkg.translator.prometheus.NormalizeName",
featuregate.StageAlpha,
featuregate.StageBeta,
featuregate.WithRegisterDescription("Controls whether metrics names are automatically normalized to follow Prometheus naming convention"),
featuregate.WithRegisterReferenceURL("https://github.com/open-telemetry/opentelemetry-collector-contrib/issues/8950"),
)
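
The stage change above is what flips the default: alpha gates are disabled unless explicitly enabled, beta gates are enabled unless explicitly disabled. A minimal sketch of how code in this package can consult the gate (the helper name is illustrative; normalizeNameGate is the variable registered above):

```go
package prometheus

// Sketch only: not part of this change.
func normalizationEnabled() bool {
	// IsEnabled reflects the registered stage default (beta => enabled)
	// unless the operator overrides it with --feature-gates.
	return normalizeNameGate.IsEnabled()
}
```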
10 changes: 5 additions & 5 deletions pkg/translator/prometheus/normalize_name_test.go
@@ -139,10 +139,7 @@ func TestOtelReceivers(t *testing.T) {
}

func TestTrimPromSuffixes(t *testing.T) {
registry := featuregate.NewRegistry()
_, err := registry.Register(normalizeNameGate.ID(), featuregate.StageBeta)
require.NoError(t, err)
normalizer := NewNormalizer(registry)
normalizer := NewNormalizer(featuregate.NewRegistry())

assert.Equal(t, "active_directory_ds_replication_network_io", normalizer.TrimPromSuffixes("active_directory_ds_replication_network_io_bytes_total", pmetric.MetricTypeSum, "bytes"))
assert.Equal(t, "active_directory_ds_name_cache_hit_rate", normalizer.TrimPromSuffixes("active_directory_ds_name_cache_hit_rate_percent", pmetric.MetricTypeGauge, "percent"))
@@ -175,7 +172,10 @@ }
}

func TestTrimPromSuffixesWithFeatureGateDisabled(t *testing.T) {
normalizer := NewNormalizer(featuregate.NewRegistry())
registry := featuregate.NewRegistry()
_, err := registry.Register(normalizeNameGate.ID(), featuregate.StageAlpha)
require.NoError(t, err)
normalizer := NewNormalizer(registry)

assert.Equal(t, "apache_current_connections", normalizer.TrimPromSuffixes("apache_current_connections", pmetric.MetricTypeGauge, "connections"))
assert.Equal(t, "apache_requests_total", normalizer.TrimPromSuffixes("apache_requests_total", pmetric.MetricTypeSum, "1"))
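
Taken together, the two tests above cover both sides of the gate: a fresh registry leaves the gate at its beta (enabled) default, while re-registering the gate ID at the alpha stage in a private registry models the disabled path. A hedged sketch that combines both setups, with the expected values taken from the existing assertions (the test name is illustrative):

```go
func TestTrimPromSuffixesBothStages(t *testing.T) {
	// Gate at its beta default: type and unit suffixes are trimmed.
	enabled := NewNormalizer(featuregate.NewRegistry())
	assert.Equal(t, "active_directory_ds_replication_network_io",
		enabled.TrimPromSuffixes("active_directory_ds_replication_network_io_bytes_total", pmetric.MetricTypeSum, "bytes"))

	// Gate re-registered at alpha in a private registry: names pass through.
	registry := featuregate.NewRegistry()
	_, err := registry.Register(normalizeNameGate.ID(), featuregate.StageAlpha)
	require.NoError(t, err)
	disabled := NewNormalizer(registry)
	assert.Equal(t, "apache_requests_total",
		disabled.TrimPromSuffixes("apache_requests_total", pmetric.MetricTypeSum, "1"))
}
```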
@@ -147,10 +147,10 @@ func TestAddSingleSumNumberDataPoint(t *testing.T) {
},
want: func() map[string]*prompb.TimeSeries {
labels := []prompb.Label{
{Name: model.MetricNameLabel, Value: "test_sum"},
{Name: model.MetricNameLabel, Value: "test_sum_total"},
}
createdLabels := []prompb.Label{
{Name: model.MetricNameLabel, Value: "test_sum" + createdSuffix},
{Name: model.MetricNameLabel, Value: "test_sum_total" + createdSuffix},
}
return map[string]*prompb.TimeSeries{
timeSeriesSignature(pmetric.MetricTypeSum.String(), &labels): {
@@ -183,7 +183,7 @@
},
want: func() map[string]*prompb.TimeSeries {
labels := []prompb.Label{
{Name: model.MetricNameLabel, Value: "test_sum"},
{Name: model.MetricNameLabel, Value: "test_sum_total"},
}
return map[string]*prompb.TimeSeries{
timeSeriesSignature(pmetric.MetricTypeSum.String(), &labels): {
39 changes: 14 additions & 25 deletions receiver/prometheusreceiver/metrics_receiver_helper_test.go
@@ -108,7 +108,6 @@ type testData struct {
pages []mockPrometheusResponse
attributes pcommon.Map
validateScrapes bool
normalizedName bool
validateFunc func(t *testing.T, td *testData, result []pmetric.ResourceMetrics)
}

@@ -223,13 +222,13 @@ func metricsCount(resourceMetric pmetric.ResourceMetrics) int {
return metricsCount
}

func getValidScrapes(t *testing.T, rms []pmetric.ResourceMetrics, normalizedNames bool) []pmetric.ResourceMetrics {
func getValidScrapes(t *testing.T, rms []pmetric.ResourceMetrics) []pmetric.ResourceMetrics {
var out []pmetric.ResourceMetrics
// rms will include failed scrapes and scrapes that received no metrics but have internal scrape metrics, filter those out
for i := 0; i < len(rms); i++ {
allMetrics := getMetrics(rms[i])
if expectedScrapeMetricCount < len(allMetrics) && countScrapeMetrics(allMetrics, normalizedNames) == expectedScrapeMetricCount {
if isFirstFailedScrape(allMetrics, normalizedNames) {
if expectedScrapeMetricCount < len(allMetrics) && countScrapeMetrics(allMetrics) == expectedScrapeMetricCount {
if isFirstFailedScrape(allMetrics) {
continue
}
assertUp(t, 1, allMetrics)
@@ -241,7 +240,7 @@ func getValidScrapes(t *testing.T, rms []pmetric.ResourceMetrics, normalizedName
return out
}

func isFirstFailedScrape(metrics []pmetric.Metric, normalizedNames bool) bool {
func isFirstFailedScrape(metrics []pmetric.Metric) bool {
for _, m := range metrics {
if m.Name() == "up" {
if m.Gauge().DataPoints().At(0).DoubleValue() == 1 { // assumed up will not have multiple datapoints
@@ -251,7 +250,7 @@ func isFirstFailedScrape(metrics []pmetric.Metric, normalizedNames bool) bool {
}

for _, m := range metrics {
if isDefaultMetrics(m, normalizedNames) {
if isDefaultMetrics(m) {
continue
}

@@ -295,43 +294,37 @@ func assertUp(t *testing.T, expected float64, metrics []pmetric.Metric) {
t.Error("No 'up' metric found")
}

func countScrapeMetricsRM(got pmetric.ResourceMetrics, normalizedNames bool) int {
func countScrapeMetricsRM(got pmetric.ResourceMetrics) int {
n := 0
ilms := got.ScopeMetrics()
for j := 0; j < ilms.Len(); j++ {
ilm := ilms.At(j)
for i := 0; i < ilm.Metrics().Len(); i++ {
if isDefaultMetrics(ilm.Metrics().At(i), normalizedNames) {
if isDefaultMetrics(ilm.Metrics().At(i)) {
n++
}
}
}
return n
}

func countScrapeMetrics(metrics []pmetric.Metric, normalizedNames bool) int {
func countScrapeMetrics(metrics []pmetric.Metric) int {
n := 0
for _, m := range metrics {
if isDefaultMetrics(m, normalizedNames) {
if isDefaultMetrics(m) {
n++
}
}
return n
}

func isDefaultMetrics(m pmetric.Metric, normalizedNames bool) bool {
func isDefaultMetrics(m pmetric.Metric) bool {
switch m.Name() {
case "up", "scrape_samples_scraped", "scrape_samples_post_metric_relabeling", "scrape_series_added":
case "up", "scrape_samples_scraped", "scrape_samples_post_metric_relabeling", "scrape_series_added", "scrape_duration":
return true

// if normalizedNames is true, we expect unit `_seconds` to be trimmed.
case "scrape_duration_seconds":
return !normalizedNames
case "scrape_duration":
return normalizedNames
default:
return false
}
return false
}

type metricTypeComparator func(*testing.T, pmetric.Metric)
@@ -348,12 +341,8 @@ type dataPointExpectation struct {
type testExpectation func(*testing.T, pmetric.ResourceMetrics)

func doCompare(t *testing.T, name string, want pcommon.Map, got pmetric.ResourceMetrics, expectations []testExpectation) {
doCompareNormalized(t, name, want, got, expectations, false)
}

func doCompareNormalized(t *testing.T, name string, want pcommon.Map, got pmetric.ResourceMetrics, expectations []testExpectation, normalizedNames bool) {
t.Run(name, func(t *testing.T) {
assert.Equal(t, expectedScrapeMetricCount, countScrapeMetricsRM(got, normalizedNames))
assert.Equal(t, expectedScrapeMetricCount, countScrapeMetricsRM(got))
assert.Equal(t, want.Len(), got.Resource().Attributes().Len())
for k, v := range want.AsRaw() {
val, ok := got.Resource().Attributes().Get(k)
@@ -628,7 +617,7 @@ func testComponent(t *testing.T, targets []*testData, useStartTimeMetric bool, s
}
scrapes := pResults[name]
if !target.validateScrapes {
scrapes = getValidScrapes(t, pResults[name], target.normalizedName)
scrapes = getValidScrapes(t, pResults[name])
}
target.validateFunc(t, target, scrapes)
})