Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[exporter/sumologic] Make the exporter to not mutate logs data #13923

Merged
merged 1 commit into from
Sep 7, 2022
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
[exporter/sumologic] Make exporter not mutating data
The exporter unnecessarily mutates log data just to temporarily copy resource attributes into log attributes. This change removes that redundant step, which makes the exporter non-mutating.
  • Loading branch information
dmitryax committed Sep 7, 2022
commit dc959d203e7e5f4e2fa842cd3802f848a19f7e26
14 changes: 2 additions & 12 deletions exporter/sumologicexporter/exporter.go
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,6 @@ import (
"net/http"

"go.opentelemetry.io/collector/component"
"go.opentelemetry.io/collector/consumer"
"go.opentelemetry.io/collector/consumer/consumererror"
"go.opentelemetry.io/collector/exporter/exporterhelper"
"go.opentelemetry.io/collector/pdata/pcommon"
Expand Down Expand Up @@ -84,7 +83,6 @@ func newLogsExporter(
// Disable exporterhelper Timeout, since we are using a custom mechanism
// within exporter itself
exporterhelper.WithTimeout(exporterhelper.TimeoutSettings{Timeout: 0}),
exporterhelper.WithCapabilities(consumer.Capabilities{MutatesData: true}),
exporterhelper.WithRetry(cfg.RetrySettings),
exporterhelper.WithQueue(cfg.QueueSettings),
exporterhelper.WithStart(se.start),
Expand All @@ -108,7 +106,6 @@ func newMetricsExporter(
// Disable exporterhelper Timeout, since we are using a custom mechanism
// within exporter itself
exporterhelper.WithTimeout(exporterhelper.TimeoutSettings{Timeout: 0}),
exporterhelper.WithCapabilities(consumer.Capabilities{MutatesData: true}),
exporterhelper.WithRetry(cfg.RetrySettings),
exporterhelper.WithQueue(cfg.QueueSettings),
exporterhelper.WithStart(se.start),
Expand Down Expand Up @@ -168,14 +165,7 @@ func (se *sumologicexporter) pushLogsData(ctx context.Context, ld plog.Logs) err
for k := 0; k < logs.Len(); k++ {
log := logs.At(k)

// copy resource attributes into logs attributes
// log attributes have precedence over resource attributes
rl.Resource().Attributes().Range(func(k string, v pcommon.Value) bool {
log.Attributes().Insert(k, v)
return true
})

currentMetadata = sdr.filter.filterIn(log.Attributes())
currentMetadata = sdr.filter.mergeAndFilterIn(rl.Resource().Attributes(), log.Attributes())

// If metadata differs from currently buffered, flush the buffer
if currentMetadata.string() != previousMetadata.string() && previousMetadata.string() != "" {
Expand Down Expand Up @@ -274,7 +264,7 @@ func (se *sumologicexporter) pushMetricsData(ctx context.Context, md pmetric.Met
attributes: attributes,
}

currentMetadata = sdr.filter.filterIn(attributes)
currentMetadata = sdr.filter.mergeAndFilterIn(attributes)

// If metadata differs from currently buffered, flush the buffer
if currentMetadata.string() != previousMetadata.string() && previousMetadata.string() != "" {
Expand Down
23 changes: 13 additions & 10 deletions exporter/sumologicexporter/filter.go
Original file line number Diff line number Diff line change
Expand Up @@ -41,19 +41,22 @@ func newFilter(flds []string) (filter, error) {
}, nil
}

// filterIn returns fields which match at least one of the filter regexes
func (f *filter) filterIn(attributes pcommon.Map) fields {
// mergeAndFilterIn merges provided attribute maps and returns fields which match at least one of the filter regexes.
// Later attribute maps take precedence over former ones.
func (f *filter) mergeAndFilterIn(attrMaps ...pcommon.Map) fields {
returnValue := pcommon.NewMap()
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

EnsureCap?

Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Same, this can be done in a separate PR.


attributes.Range(func(k string, v pcommon.Value) bool {
for _, regex := range f.regexes {
if regex.MatchString(k) {
v.CopyTo(returnValue.UpsertEmpty(k))
return true
for _, attributes := range attrMaps {
attributes.Range(func(k string, v pcommon.Value) bool {
for _, regex := range f.regexes {
if regex.MatchString(k) {
v.CopyTo(returnValue.UpsertEmpty(k))
return true
}
}
}
return true
})
return true
})
}
returnValue.Sort()
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Don't need to sort?

Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Should be done in a separate PR, owners should look into this.

Copy link
Member Author

@dmitryax dmitryax Sep 7, 2022

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Yes, sort seems redundant. didn't want to do unrelated refactoring in this PR

return newFields(returnValue)
}
Expand Down
17 changes: 10 additions & 7 deletions exporter/sumologicexporter/filter_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -23,18 +23,21 @@ import (
)

func TestGetMetadata(t *testing.T) {
attributes := pcommon.NewMap()
attributes.UpsertString("key3", "value3")
attributes.UpsertString("key1", "value1")
attributes.UpsertString("key2", "value2")
attributes.UpsertString("additional_key2", "value2")
attributes.UpsertString("additional_key3", "value3")
attributes1 := pcommon.NewMap()
attributes1.UpsertString("key3", "to-be-overridden")
attributes1.UpsertString("key1", "value1")
attributes1.UpsertString("key2", "value2")
attributes1.UpsertString("additional_key2", "value2")
attributes1.UpsertString("additional_key3", "value3")
attributes2 := pcommon.NewMap()
attributes2.UpsertString("additional_key1", "value1")
attributes2.UpsertString("key3", "value3")

regexes := []string{"^key[12]", "^key3"}
f, err := newFilter(regexes)
require.NoError(t, err)

metadata := f.filterIn(attributes)
metadata := f.mergeAndFilterIn(attributes1, attributes2)
expected := fieldsFromMap(map[string]string{
"key1": "value1",
"key2": "value2",
Expand Down