Commit
[exporter/awsemf] Enforce TTL on metric calculator maps (open-telemetry#25066)

**Description:** This PR implements a ticker that automatically enforces the TimeToLive value on `MapWithExpiry` objects inside the `internal/aws/metrics` package. This map is used directly by the Container Insights receiver and by the `MetricCalculator` object in the same package; metric calculators are in turn used by the EMF exporter.
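
For orientation, here is a minimal, self-contained sketch of the ticker-plus-shutdown pattern described above. It is not the actual `MapWithExpiry` implementation from `internal/aws/metrics`; every name in it (`mapWithExpiry`, `sweep`, `done`) is a hypothetical stand-in used only to illustrate how a background ticker can enforce a TTL and how a `Shutdown` call stops it.

```go
package sketch

import (
	"sync"
	"time"
)

// entry pairs a stored value with the time it was last written.
type entry struct {
	value     interface{}
	timestamp time.Time
}

// mapWithExpiry is a TTL-bounded map: a background goroutine periodically
// drops entries older than ttl, and Shutdown stops that goroutine.
type mapWithExpiry struct {
	mu      sync.Mutex
	ttl     time.Duration
	entries map[string]entry
	done    chan struct{}
}

func newMapWithExpiry(ttl time.Duration) *mapWithExpiry {
	m := &mapWithExpiry{
		ttl:     ttl,
		entries: make(map[string]entry),
		done:    make(chan struct{}),
	}
	go m.sweep() // enforce the TTL in the background
	return m
}

// sweep runs on a ticker and evicts expired entries until Shutdown is called.
func (m *mapWithExpiry) sweep() {
	ticker := time.NewTicker(m.ttl)
	defer ticker.Stop()
	for {
		select {
		case now := <-ticker.C:
			m.mu.Lock()
			for k, e := range m.entries {
				if now.Sub(e.timestamp) > m.ttl {
					delete(m.entries, k)
				}
			}
			m.mu.Unlock()
		case <-m.done:
			return
		}
	}
}

// Set stores a value and refreshes its timestamp.
func (m *mapWithExpiry) Set(key string, value interface{}) {
	m.mu.Lock()
	defer m.mu.Unlock()
	m.entries[key] = entry{value: value, timestamp: time.Now()}
}

// Shutdown stops the background sweeper; the owning component calls this on close.
func (m *mapWithExpiry) Shutdown() {
	close(m.done)
}
```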

Changes to the Container Insights receiver interfaces were required so that shutdown could be enforced on all calculators.
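
As a rough sketch of what such an interface change looks like (the interface and method names below are assumptions for illustration, not the receiver's actual API), the calculator-facing interface gains a `Shutdown` method that owners must call:

```go
package sketch

import "time"

// deltaCalculator is a hypothetical calculator-facing interface in the
// Container Insights receiver; adding Shutdown lets the receiver stop each
// calculator's TTL sweeper when the component is closed.
type deltaCalculator interface {
	Calculate(key, value interface{}, timestamp time.Time) (interface{}, bool)
	Shutdown() error // newly added so owners can release the expiring map
}
```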

In the EMF exporter, the `metricTranslator` object now owns an `emfCalculators` object, which is passed down to the data point interface. The metric translator also gains a `Shutdown` method that the EMF exporter calls on close.
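
The sketch below shows that ownership and shutdown flow in simplified form. The `emfCalculators` field names match the struct added in `datapoint.go`, but the `calculator` interface and the `metricTranslator` fields here are simplified stand-ins, not the exporter's exact types:

```go
package sketch

import "go.uber.org/multierr"

// calculator stands in for aws.MetricCalculator; only Shutdown matters here.
type calculator interface {
	Shutdown() error
}

// emfCalculators mirrors the struct added in datapoint.go: one delta and one
// summary calculator owned per exporter instead of shared package globals.
type emfCalculators struct {
	delta   calculator
	summary calculator
}

// metricTranslator owns the calculators and hands them to the data point
// interface while translating metrics.
type metricTranslator struct {
	calculators *emfCalculators
}

// Shutdown is invoked from the exporter's own shutdown path
// (`return emf.metricTranslator.Shutdown()` in emf_exporter.go), so both
// TTL sweepers stop when the exporter closes.
func (mt *metricTranslator) Shutdown() error {
	var errs error
	errs = multierr.Append(errs, mt.calculators.delta.Shutdown())
	return multierr.Append(errs, mt.calculators.summary.Shutdown())
}
```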

**Link to tracking Issue:** Closes open-telemetry#25058
bryan-aguilar committed Aug 8, 2023
1 parent ffd4eeb commit a24294f
Showing 32 changed files with 387 additions and 70 deletions.
32 changes: 32 additions & 0 deletions .chloggen/awsemf_mapCleanup.yaml
@@ -0,0 +1,32 @@
# Use this changelog template to create an entry for release notes.

# One of 'breaking', 'deprecation', 'new_component', 'enhancement', 'bug_fix'
change_type: bug_fix

# The name of the component, or a single word describing the area of concern, (e.g. filelogreceiver)
component: awsemfexporter

# A brief description of the change. Surround your text with quotes ("") if it needs to start with a backtick (`).
note: Enforce time to live on metric data that is stored for the purpose of cumulative to delta conversions within EMF Exporter

# Mandatory: One or more tracking issues related to the change. You can use the PR number here if no issue exists.
issues: [25058]

# (Optional) One or more lines of additional information to render under the primary note.
# These lines will be padded with 2 spaces and then inserted directly into the document.
# Use pipe (|) for multiline entries.
subtext: |
This change fixes a bug where the cache used to store metric information for cumulative-to-delta
conversions was not enforcing its time to live. This could cause excessive memory growth in certain scenarios, which could
lead to OOM failures for the Collector. To properly fix this issue, package-global metric caches were removed and replaced
with caches that are unique per EMF exporter. A byproduct of this change is that no two EMF exporters within a
Collector will share a cache, leading to more accurate cumulative-to-delta conversions.
# If your change doesn't affect end users or the exported elements of any package,
# you should instead start your pull request title with [chore] or use the "Skip Changelog" label.
# Optional: The change log or logs in which this entry should be included.
# e.g. '[user]' or '[user, api]'
# Include 'user' if the change is relevant to end users.
# Include 'api' if there is a change to a library API.
# Default: '[user]'
change_logs: [user]
22 changes: 11 additions & 11 deletions exporter/awsemfexporter/datapoint.go
@@ -22,10 +22,10 @@ const (
summarySumSuffix = "_sum"
)

var (
deltaMetricCalculator = aws.NewFloat64DeltaCalculator()
summaryMetricCalculator = aws.NewMetricCalculator(calculateSummaryDelta)
)
type emfCalculators struct {
delta aws.MetricCalculator
summary aws.MetricCalculator
}

func calculateSummaryDelta(prev *aws.MetricValue, val interface{}, _ time.Time) (interface{}, bool) {
metricEntry := val.(summaryMetricEntry)
@@ -60,7 +60,7 @@ type dataPoints interface {
// dataPoint: the adjusted data point
// retained: indicates whether the data point is valid for further process
// NOTE: It is an expensive call as it calculates the metric value.
CalculateDeltaDatapoints(i int, instrumentationScopeName string, detailedMetrics bool) (dataPoint []dataPoint, retained bool)
CalculateDeltaDatapoints(i int, instrumentationScopeName string, detailedMetrics bool, calculators *emfCalculators) (dataPoint []dataPoint, retained bool)
}

// deltaMetricMetadata contains the metadata required to perform rate/delta calculation
@@ -106,7 +106,7 @@ type summaryMetricEntry struct {
}

// CalculateDeltaDatapoints retrieves the NumberDataPoint at the given index and performs rate/delta calculation if necessary.
func (dps numberDataPointSlice) CalculateDeltaDatapoints(i int, instrumentationScopeName string, _ bool) ([]dataPoint, bool) {
func (dps numberDataPointSlice) CalculateDeltaDatapoints(i int, instrumentationScopeName string, _ bool, calculators *emfCalculators) ([]dataPoint, bool) {
metric := dps.NumberDataPointSlice.At(i)
labels := createLabels(metric.Attributes(), instrumentationScopeName)
timestampMs := unixNanoToMilliseconds(metric.Timestamp())
@@ -124,7 +124,7 @@ func (dps numberDataPointSlice) CalculateDeltaDatapoints(i int, instrumentationS
if dps.adjustToDelta {
var deltaVal interface{}
mKey := aws.NewKey(dps.deltaMetricMetadata, labels)
deltaVal, retained = deltaMetricCalculator.Calculate(mKey, metricVal, metric.Timestamp().AsTime())
deltaVal, retained = calculators.delta.Calculate(mKey, metricVal, metric.Timestamp().AsTime())

// If a delta to the previous data point could not be computed use the current metric value instead
if !retained && dps.retainInitialValueForDelta {
@@ -146,7 +146,7 @@ func (dps numberDataPointSlice) CalculateDeltaDatapoints(i int, instrumentationS
}

// CalculateDeltaDatapoints retrieves the HistogramDataPoint at the given index.
func (dps histogramDataPointSlice) CalculateDeltaDatapoints(i int, instrumentationScopeName string, _ bool) ([]dataPoint, bool) {
func (dps histogramDataPointSlice) CalculateDeltaDatapoints(i int, instrumentationScopeName string, _ bool, _ *emfCalculators) ([]dataPoint, bool) {
metric := dps.HistogramDataPointSlice.At(i)
labels := createLabels(metric.Attributes(), instrumentationScopeName)
timestamp := unixNanoToMilliseconds(metric.Timestamp())
@@ -165,7 +165,7 @@ func (dps histogramDataPointSlice) CalculateDeltaDatapoints(i int, instrumentati
}

// CalculateDeltaDatapoints retrieves the ExponentialHistogramDataPoint at the given index.
func (dps exponentialHistogramDataPointSlice) CalculateDeltaDatapoints(idx int, instrumentationScopeName string, _ bool) ([]dataPoint, bool) {
func (dps exponentialHistogramDataPointSlice) CalculateDeltaDatapoints(idx int, instrumentationScopeName string, _ bool, _ *emfCalculators) ([]dataPoint, bool) {
metric := dps.ExponentialHistogramDataPointSlice.At(idx)

scale := metric.Scale()
@@ -247,7 +247,7 @@ func (dps exponentialHistogramDataPointSlice) CalculateDeltaDatapoints(idx int,
}

// CalculateDeltaDatapoints retrieves the SummaryDataPoint at the given index and perform calculation with sum and count while retain the quantile value.
func (dps summaryDataPointSlice) CalculateDeltaDatapoints(i int, instrumentationScopeName string, detailedMetrics bool) ([]dataPoint, bool) {
func (dps summaryDataPointSlice) CalculateDeltaDatapoints(i int, instrumentationScopeName string, detailedMetrics bool, calculators *emfCalculators) ([]dataPoint, bool) {
metric := dps.SummaryDataPointSlice.At(i)
labels := createLabels(metric.Attributes(), instrumentationScopeName)
timestampMs := unixNanoToMilliseconds(metric.Timestamp())
@@ -261,7 +261,7 @@ func (dps summaryDataPointSlice) CalculateDeltaDatapoints(i int, instrumentation
if dps.adjustToDelta {
var delta interface{}
mKey := aws.NewKey(dps.deltaMetricMetadata, labels)
delta, retained = summaryMetricCalculator.Calculate(mKey, summaryMetricEntry{sum, count}, metric.Timestamp().AsTime())
delta, retained = calculators.summary.Calculate(mKey, summaryMetricEntry{sum, count}, metric.Timestamp().AsTime())

// If a delta to the previous data point could not be computed use the current metric value instead
if !retained && dps.retainInitialValueForDelta {
44 changes: 31 additions & 13 deletions exporter/awsemfexporter/datapoint_test.go
@@ -10,8 +10,10 @@ import (
"time"

"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"go.opentelemetry.io/collector/pdata/pcommon"
"go.opentelemetry.io/collector/pdata/pmetric"
"go.uber.org/multierr"
"go.uber.org/zap"
"go.uber.org/zap/zapcore"
"go.uber.org/zap/zaptest/observer"
@@ -169,14 +171,24 @@ func generateDeltaMetricMetadata(adjustToDelta bool, metricName string, retainIn
}
}

func setupDataPointCache() {
deltaMetricCalculator = aws.NewFloat64DeltaCalculator()
summaryMetricCalculator = aws.NewMetricCalculator(calculateSummaryDelta)
func setupEmfCalculators() *emfCalculators {
return &emfCalculators{
summary: aws.NewMetricCalculator(calculateSummaryDelta),
delta: aws.NewFloat64DeltaCalculator(),
}
}

func shutdownEmfCalculators(c *emfCalculators) error {
var errs error
errs = multierr.Append(errs, c.delta.Shutdown())
return multierr.Append(errs, c.summary.Shutdown())

}

func TestCalculateDeltaDatapoints_NumberDataPointSlice(t *testing.T) {
emfCalcs := setupEmfCalculators()
defer require.NoError(t, shutdownEmfCalculators(emfCalcs))
for _, retainInitialValueOfDeltaMetric := range []bool{true, false} {
setupDataPointCache()

testCases := []struct {
name string
@@ -263,6 +275,7 @@ func TestCalculateDeltaDatapoints_NumberDataPointSlice(t *testing.T) {

for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {

// Given the number datapoint (including Sum and Gauge OTEL metric type) with data type as int or double
numberDPS := pmetric.NewNumberDataPointSlice()
numberDP := numberDPS.AppendEmpty()
@@ -280,7 +293,7 @@ func TestCalculateDeltaDatapoints_NumberDataPointSlice(t *testing.T) {
numberDatapointSlice := numberDataPointSlice{deltaMetricMetadata, numberDPS}

// When calculate the delta datapoints for number datapoint
dps, retained := numberDatapointSlice.CalculateDeltaDatapoints(0, instrLibName, false)
dps, retained := numberDatapointSlice.CalculateDeltaDatapoints(0, instrLibName, false, emfCalcs)

assert.Equal(t, 1, numberDatapointSlice.Len())
assert.Equal(t, tc.expectedRetained, retained)
@@ -362,14 +375,16 @@ func TestCalculateDeltaDatapoints_HistogramDataPointSlice(t *testing.T) {
t.Run(tc.name, func(_ *testing.T) {
// Given the histogram datapoints
histogramDatapointSlice := histogramDataPointSlice{deltaMetricMetadata, tc.histogramDPS}

emfCalcs := setupEmfCalculators()
defer require.NoError(t, shutdownEmfCalculators(emfCalcs))
// When calculate the delta datapoints for histograms
dps, retained := histogramDatapointSlice.CalculateDeltaDatapoints(0, instrLibName, false)
dps, retained := histogramDatapointSlice.CalculateDeltaDatapoints(0, instrLibName, false, emfCalcs)

// Then receiving the following datapoint with an expected length
assert.True(t, retained)
assert.Equal(t, 1, histogramDatapointSlice.Len())
assert.Equal(t, tc.expectedDatapoint, dps[0])

})
}

@@ -462,9 +477,10 @@ func TestCalculateDeltaDatapoints_ExponentialHistogramDataPointSlice(t *testing.
t.Run(tc.name, func(_ *testing.T) {
// Given the histogram datapoints
exponentialHistogramDatapointSlice := exponentialHistogramDataPointSlice{deltaMetricMetadata, tc.histogramDPS}

emfCalcs := setupEmfCalculators()
defer require.NoError(t, shutdownEmfCalculators(emfCalcs))
// When calculate the delta datapoints for histograms
dps, retained := exponentialHistogramDatapointSlice.CalculateDeltaDatapoints(0, instrLibName, false)
dps, retained := exponentialHistogramDatapointSlice.CalculateDeltaDatapoints(0, instrLibName, false, emfCalcs)

// Then receiving the following datapoint with an expected length
assert.True(t, retained)
@@ -476,6 +492,8 @@ func TestCalculateDeltaDatapoints_ExponentialHistogramDataPointSlice(t *testing.
}

func TestCalculateDeltaDatapoints_SummaryDataPointSlice(t *testing.T) {
emfCalcs := setupEmfCalculators()
defer require.NoError(t, shutdownEmfCalculators(emfCalcs))
for _, retainInitialValueOfDeltaMetric := range []bool{true, false} {
deltaMetricMetadata := generateDeltaMetricMetadata(true, "foo", retainInitialValueOfDeltaMetric)

@@ -540,7 +558,7 @@ func TestCalculateDeltaDatapoints_SummaryDataPointSlice(t *testing.T) {
summaryDatapointSlice := summaryDataPointSlice{deltaMetricMetadata, summaryDPS}

// When calculate the delta datapoints for sum and count in summary
dps, retained := summaryDatapointSlice.CalculateDeltaDatapoints(0, "", true)
dps, retained := summaryDatapointSlice.CalculateDeltaDatapoints(0, "", true, emfCalcs)

// Then receiving the following datapoint with an expected length
assert.Equal(t, tc.expectedRetained, retained)
Expand Down Expand Up @@ -644,7 +662,6 @@ func TestGetDataPoints(t *testing.T) {
metadata := generateTestMetricMetadata("namespace", time.Now().UnixNano()/int64(time.Millisecond), "log-group", "log-stream", "cloudwatch-otel", metric.Type())

t.Run(tc.name, func(t *testing.T) {
setupDataPointCache()

if tc.isPrometheusMetrics {
metadata.receiver = prometheusReceiver
@@ -740,15 +757,16 @@ func BenchmarkGetAndCalculateDeltaDataPoints(b *testing.B) {
finalOtelMetrics := generateOtelTestMetrics(generateMetrics...)
rms := finalOtelMetrics.ResourceMetrics()
metrics := rms.At(0).ScopeMetrics().At(0).Metrics()

emfCalcs := setupEmfCalculators()
defer require.NoError(b, shutdownEmfCalculators(emfCalcs))
b.ResetTimer()
for n := 0; n < b.N; n++ {
for i := 0; i < metrics.Len(); i++ {
metadata := generateTestMetricMetadata("namespace", time.Now().UnixNano()/int64(time.Millisecond), "log-group", "log-stream", "cloudwatch-otel", metrics.At(i).Type())
dps := getDataPoints(metrics.At(i), metadata, zap.NewNop())

for i := 0; i < dps.Len(); i++ {
dps.CalculateDeltaDatapoints(i, "", false)
dps.CalculateDeltaDatapoints(i, "", false, emfCalcs)
}
}
}
2 changes: 1 addition & 1 deletion exporter/awsemfexporter/emf_exporter.go
@@ -181,7 +181,7 @@ func (emf *emfExporter) shutdown(_ context.Context) error {
}
}

return nil
return emf.metricTranslator.Shutdown()
}

func wrapErrorIfBadRequest(err error) error {
2 changes: 1 addition & 1 deletion exporter/awsemfexporter/go.mod
@@ -18,6 +18,7 @@ require (
go.opentelemetry.io/collector/featuregate v1.0.0-rcv0014
go.opentelemetry.io/collector/pdata v1.0.0-rcv0014
go.opentelemetry.io/collector/semconv v0.82.0
go.uber.org/multierr v1.11.0
go.uber.org/zap v1.25.0
golang.org/x/exp v0.0.0-20230510235704-dd950f8aeaea
)
@@ -47,7 +48,6 @@ require (
go.opentelemetry.io/otel v1.16.0 // indirect
go.opentelemetry.io/otel/metric v1.16.0 // indirect
go.opentelemetry.io/otel/trace v1.16.0 // indirect
go.uber.org/multierr v1.11.0 // indirect
golang.org/x/net v0.12.0 // indirect
golang.org/x/sys v0.10.0 // indirect
golang.org/x/text v0.11.0 // indirect
4 changes: 2 additions & 2 deletions exporter/awsemfexporter/grouped_metric.go
@@ -27,15 +27,15 @@ type metricInfo struct {
}

// addToGroupedMetric processes OT metrics and adds them into GroupedMetric buckets
func addToGroupedMetric(pmd pmetric.Metric, groupedMetrics map[interface{}]*groupedMetric, metadata cWMetricMetadata, patternReplaceSucceeded bool, logger *zap.Logger, descriptor map[string]MetricDescriptor, config *Config) error {
func addToGroupedMetric(pmd pmetric.Metric, groupedMetrics map[interface{}]*groupedMetric, metadata cWMetricMetadata, patternReplaceSucceeded bool, logger *zap.Logger, descriptor map[string]MetricDescriptor, config *Config, calculators *emfCalculators) error {

dps := getDataPoints(pmd, metadata, logger)
if dps == nil || dps.Len() == 0 {
return nil
}

for i := 0; i < dps.Len(); i++ {
dps, retained := dps.CalculateDeltaDatapoints(i, metadata.instrumentationScopeName, config.DetailedMetrics)
dps, retained := dps.CalculateDeltaDatapoints(i, metadata.instrumentationScopeName, config.DetailedMetrics, calculators)
if !retained {
continue
}