From 9396cd83e08de3a1d368d799e4b37aa6104fddbf Mon Sep 17 00:00:00 2001 From: Tyler Helmuth <12352919+TylerHelmuth@users.noreply.github.com> Date: Mon, 10 Jun 2024 14:06:45 -0600 Subject: [PATCH] [pkg/ottl] Add debug logging (#33274) **Description:** Adds debug logging to `StatementSequence.Execute` to allow printing the state of the `TransformContext` after each statement. Example logs: ``` 2024-05-28T16:19:29.198-0600 debug ottl@v0.101.0/parser.go:265 initial TransformContext {"kind": "processor", "name": "transform", "pipeline": "logs", "tCtx": {"resource": {"attributes": {}, "dropped_attribute_count": 0}, "instrumentation_scope": {"attributes": {}, "dropped_attribute_count": 0, "name": "", "version": ""}, "log_record": {"attributes": {"log.file.name": "test.log"}, "body": "test", "dropped_attribute_count": 0, "flags": 0, "observed_time_unix_nano": "2024-05-28 22:19:29.098833 +0000 UTC", "severity_number": 0, "severity_text": "", "span_id": "", "time_unix_nano": "1970-01-01 00:00:00 +0000 UTC", "trace_id": ""}, "cache": {}}} 2024-05-28T16:19:29.199-0600 debug ottl@v0.101.0/parser.go:268 after transformation {"kind": "processor", "name": "transform", "pipeline": "logs", "statement": "set(attributes[\"test\"], \"pass\")", "tCtx": {"resource": {"attributes": {}, "dropped_attribute_count": 0}, "instrumentation_scope": {"attributes": {}, "dropped_attribute_count": 0, "name": "", "version": ""}, "log_record": {"attributes": {"log.file.name": "test.log", "test": "pass"}, "body": "test", "dropped_attribute_count": 0, "flags": 0, "observed_time_unix_nano": "2024-05-28 22:19:29.098833 +0000 UTC", "severity_number": 0, "severity_text": "", "span_id": "", "time_unix_nano": "1970-01-01 00:00:00 +0000 UTC", "trace_id": ""}, "cache": {}}} ``` Formatted json ```json { "kind": "processor", "name": "transform", "pipeline": "logs", "statement": "set(attributes[\"test\"], \"pass\")", "tCtx": { "resource": { "attributes": {}, "dropped_attribute_count": 0 }, "instrumentation_scope": { "attributes": {}, "dropped_attribute_count": 0, "name": "", "version": "" }, "log_record": { "attributes": { "log.file.name": "test.log", "test": "pass" }, "body": "test", "dropped_attribute_count": 0, "flags": 0, "observed_time_unix_nano": "2024-05-28 22:19:29.098833 +0000 UTC", "severity_number": 0, "severity_text": "", "span_id": "", "time_unix_nano": "1970-01-01 00:00:00 +0000 UTC", "trace_id": "" }, "cache": {} } } ``` **Link to tracking Issue:** Related to https://github.com/open-telemetry/opentelemetry-collector-contrib/issues/30932 --- .chloggen/ottl-debug-statements.yaml | 27 ++ pkg/ottl/README.md | 21 + pkg/ottl/contexts/internal/logging/logging.go | 382 ++++++++++++++++++ .../contexts/internal/logging/logging_test.go | 4 + pkg/ottl/contexts/ottldatapoint/datapoint.go | 28 +- pkg/ottl/contexts/ottllog/log.go | 33 +- pkg/ottl/contexts/ottlmetric/metrics.go | 17 +- pkg/ottl/contexts/ottlresource/resource.go | 12 +- pkg/ottl/contexts/ottlscope/scope.go | 15 +- pkg/ottl/contexts/ottlspan/span.go | 16 +- .../contexts/ottlspanevent/span_events.go | 18 +- pkg/ottl/parser.go | 5 +- processor/filterprocessor/README.md | 42 ++ processor/transformprocessor/README.md | 48 +++ 14 files changed, 652 insertions(+), 16 deletions(-) create mode 100644 .chloggen/ottl-debug-statements.yaml create mode 100644 pkg/ottl/contexts/internal/logging/logging.go create mode 100644 pkg/ottl/contexts/internal/logging/logging_test.go diff --git a/.chloggen/ottl-debug-statements.yaml b/.chloggen/ottl-debug-statements.yaml new file mode 100644 index 
0000000000000..e17a8b9e8bad1 --- /dev/null +++ b/.chloggen/ottl-debug-statements.yaml @@ -0,0 +1,27 @@ +# Use this changelog template to create an entry for release notes. + +# One of 'breaking', 'deprecation', 'new_component', 'enhancement', 'bug_fix' +change_type: enhancement + +# The name of the component, or a single word describing the area of concern, (e.g. filelogreceiver) +component: pkg/ottl + +# A brief description of the change. Surround your text with quotes ("") if it needs to start with a backtick (`). +note: Add debug logs to help troubleshoot OTTL statements/conditions + +# Mandatory: One or more tracking issues related to the change. You can use the PR number here if no issue exists. +issues: [33274] + +# (Optional) One or more lines of additional information to render under the primary note. +# These lines will be padded with 2 spaces and then inserted directly into the document. +# Use pipe (|) for multiline entries. +subtext: + +# If your change doesn't affect end users or the exported elements of any package, +# you should instead start your pull request title with [chore] or use the "Skip Changelog" label. +# Optional: The change log or logs in which this entry should be included. +# e.g. '[user]' or '[user, api]' +# Include 'user' if the change is relevant to end users. +# Include 'api' if there is a change to a library API. +# Default: '[user]' +change_logs: [] diff --git a/pkg/ottl/README.md b/pkg/ottl/README.md index b57b2de5cd4b6..57a53ac9e0535 100644 --- a/pkg/ottl/README.md +++ b/pkg/ottl/README.md @@ -130,3 +130,24 @@ metrics: traces: set(attributes["test-passed"], true) where attributes["target-attribute"] != nil ``` + +## Troubleshooting + +When using OTTL you can enable debug logging in the collector to print out useful information, +such as the current Statement/Condition and the current TransformContext, to help you troubleshoot +why a statement is not behaving as you expect. This feature is very verbose, but it provides an accurate +view of how OTTL sees the underlying data. 
+ +```yaml +service: + telemetry: + logs: + level: debug +``` + +``` +2024-05-29T16:38:09.600-0600 debug ottl@v0.101.0/parser.go:265 initial TransformContext {"kind": "processor", "name": "transform", "pipeline": "logs", "TransformContext": {"resource": {"attributes": {}, "dropped_attribute_count": 0}, "scope": {"attributes": {}, "dropped_attribute_count": 0, "name": "", "version": ""}, "log_record": {"attributes": {"log.file.name": "test.log"}, "body": "test", "dropped_attribute_count": 0, "flags": 0, "observed_time_unix_nano": 1717022289500721000, "severity_number": 0, "severity_text": "", "span_id": "", "time_unix_nano": 0, "trace_id": ""}, "cache": {}}} +2024-05-29T16:38:09.600-0600 debug ottl@v0.101.0/parser.go:268 TransformContext after statement execution {"kind": "processor", "name": "transform", "pipeline": "logs", "statement": "set(resource.attributes[\"test\"], \"pass\")", "condition matched": true, "TransformContext": {"resource": {"attributes": {"test": "pass"}, "dropped_attribute_count": 0}, "scope": {"attributes": {}, "dropped_attribute_count": 0, "name": "", "version": ""}, "log_record": {"attributes": {"log.file.name": "test.log"}, "body": "test", "dropped_attribute_count": 0, "flags": 0, "observed_time_unix_nano": 1717022289500721000, "severity_number": 0, "severity_text": "", "span_id": "", "time_unix_nano": 0, "trace_id": ""}, "cache": {}}} +2024-05-29T16:38:09.600-0600 debug ottl@v0.101.0/parser.go:268 TransformContext after statement execution {"kind": "processor", "name": "transform", "pipeline": "logs", "statement": "set(instrumentation_scope.attributes[\"test\"], [\"pass\"])", "condition matched": true, "TransformContext": {"resource": {"attributes": {"test": "pass"}, "dropped_attribute_count": 0}, "scope": {"attributes": {"test": ["pass"]}, "dropped_attribute_count": 0, "name": "", "version": ""}, "log_record": {"attributes": {"log.file.name": "test.log"}, "body": "test", "dropped_attribute_count": 0, "flags": 0, "observed_time_unix_nano": 1717022289500721000, "severity_number": 0, "severity_text": "", "span_id": "", "time_unix_nano": 0, "trace_id": ""}, "cache": {}}} +2024-05-29T16:38:09.601-0600 debug ottl@v0.101.0/parser.go:268 TransformContext after statement execution {"kind": "processor", "name": "transform", "pipeline": "logs", "statement": "set(attributes[\"test\"], true)", "condition matched": true, "TransformContext": {"resource": {"attributes": {"test": "pass"}, "dropped_attribute_count": 0}, "scope": {"attributes": {"test": ["pass"]}, "dropped_attribute_count": 0, "name": "", "version": ""}, "log_record": {"attributes": {"log.file.name": "test.log", "test": true}, "body": "test", "dropped_attribute_count": 0, "flags": 0, "observed_time_unix_nano": 1717022289500721000, "severity_number": 0, "severity_text": "", "span_id": "", "time_unix_nano": 0, "trace_id": ""}, "cache": {}}} +``` diff --git a/pkg/ottl/contexts/internal/logging/logging.go b/pkg/ottl/contexts/internal/logging/logging.go new file mode 100644 index 0000000000000..9097ccbd79f26 --- /dev/null +++ b/pkg/ottl/contexts/internal/logging/logging.go @@ -0,0 +1,382 @@ +// Copyright The OpenTelemetry Authors +// SPDX-License-Identifier: Apache-2.0 + +package logging // import "github.com/open-telemetry/opentelemetry-collector-contrib/pkg/ottl/contexts/internal/logging" + +import ( + "errors" + + "go.opentelemetry.io/collector/pdata/pcommon" + "go.opentelemetry.io/collector/pdata/pmetric" + "go.opentelemetry.io/collector/pdata/ptrace" + "go.uber.org/zap/zapcore" +) + +type Slice pcommon.Slice + 
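+// The wrapper types below alias pdata types (pcommon, pmetric, ptrace) so they
+// can implement zapcore.ObjectMarshaler / zapcore.ArrayMarshaler. That lets zap
+// render a TransformContext as structured fields, and the encoding work only
+// happens when a debug-level entry is actually emitted.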
+func (s Slice) MarshalLogArray(encoder zapcore.ArrayEncoder) error { + ss := pcommon.Slice(s) + var err error + for i := 0; i < ss.Len(); i++ { + v := ss.At(i) + switch v.Type() { + case pcommon.ValueTypeStr: + encoder.AppendString(v.Str()) + case pcommon.ValueTypeBool: + encoder.AppendBool(v.Bool()) + case pcommon.ValueTypeInt: + encoder.AppendInt64(v.Int()) + case pcommon.ValueTypeDouble: + encoder.AppendFloat64(v.Double()) + case pcommon.ValueTypeMap: + err = errors.Join(err, encoder.AppendObject(Map(v.Map()))) + case pcommon.ValueTypeSlice: + err = errors.Join(err, encoder.AppendArray(Slice(v.Slice()))) + case pcommon.ValueTypeBytes: + encoder.AppendByteString(v.Bytes().AsRaw()) + } + } + return err +} + +type Map pcommon.Map + +func (m Map) MarshalLogObject(encoder zapcore.ObjectEncoder) error { + mm := pcommon.Map(m) + var err error + mm.Range(func(k string, v pcommon.Value) bool { + switch v.Type() { + case pcommon.ValueTypeStr: + encoder.AddString(k, v.Str()) + case pcommon.ValueTypeBool: + encoder.AddBool(k, v.Bool()) + case pcommon.ValueTypeInt: + encoder.AddInt64(k, v.Int()) + case pcommon.ValueTypeDouble: + encoder.AddFloat64(k, v.Double()) + case pcommon.ValueTypeMap: + err = errors.Join(err, encoder.AddObject(k, Map(v.Map()))) + case pcommon.ValueTypeSlice: + err = errors.Join(err, encoder.AddArray(k, Slice(v.Slice()))) + case pcommon.ValueTypeBytes: + encoder.AddByteString(k, v.Bytes().AsRaw()) + } + return true + }) + return nil +} + +type Resource pcommon.Resource + +func (r Resource) MarshalLogObject(encoder zapcore.ObjectEncoder) error { + rr := pcommon.Resource(r) + err := encoder.AddObject("attributes", Map(rr.Attributes())) + encoder.AddUint32("dropped_attribute_count", rr.DroppedAttributesCount()) + return err +} + +type InstrumentationScope pcommon.InstrumentationScope + +func (i InstrumentationScope) MarshalLogObject(encoder zapcore.ObjectEncoder) error { + is := pcommon.InstrumentationScope(i) + err := encoder.AddObject("attributes", Map(is.Attributes())) + encoder.AddUint32("dropped_attribute_count", is.DroppedAttributesCount()) + encoder.AddString("name", is.Name()) + encoder.AddString("version", is.Version()) + return err +} + +type Span ptrace.Span + +func (s Span) MarshalLogObject(encoder zapcore.ObjectEncoder) error { + ss := ptrace.Span(s) + err := encoder.AddObject("attributes", Map(ss.Attributes())) + encoder.AddUint32("dropped_attribute_count", ss.DroppedAttributesCount()) + encoder.AddUint32("dropped_events_count", ss.DroppedEventsCount()) + encoder.AddUint32("dropped_links_count", ss.DroppedLinksCount()) + encoder.AddUint64("end_time_unix_nano", uint64(ss.EndTimestamp())) + err = errors.Join(err, encoder.AddArray("events", SpanEventSlice(ss.Events()))) + encoder.AddString("kind", ss.Kind().String()) + err = errors.Join(err, encoder.AddArray("links", SpanLinkSlice(ss.Links()))) + encoder.AddString("name", ss.Name()) + encoder.AddString("parent_span_id", ss.ParentSpanID().String()) + encoder.AddString("span_id", ss.SpanID().String()) + encoder.AddUint64("start_time_unix_nano", uint64(ss.StartTimestamp())) + encoder.AddString("status.code", ss.Status().Code().String()) + encoder.AddString("status.message", ss.Status().Message()) + encoder.AddString("trace_id", ss.TraceID().String()) + encoder.AddString("trace_state", ss.TraceState().AsRaw()) + return err +} + +type SpanEventSlice ptrace.SpanEventSlice + +func (s SpanEventSlice) MarshalLogArray(encoder zapcore.ArrayEncoder) error { + ses := ptrace.SpanEventSlice(s) + var err error + for i := 0; i < 
ses.Len(); i++ { + err = errors.Join(err, encoder.AppendObject(SpanEvent(ses.At(i)))) + } + return err +} + +type SpanEvent ptrace.SpanEvent + +func (s SpanEvent) MarshalLogObject(encoder zapcore.ObjectEncoder) error { + se := ptrace.SpanEvent(s) + err := encoder.AddObject("attributes", Map(se.Attributes())) + encoder.AddUint32("dropped_attribute_count", se.DroppedAttributesCount()) + encoder.AddString("name", se.Name()) + encoder.AddUint64("time_unix_nano", uint64(se.Timestamp())) + return err +} + +type SpanLinkSlice ptrace.SpanLinkSlice + +func (s SpanLinkSlice) MarshalLogArray(encoder zapcore.ArrayEncoder) error { + sls := ptrace.SpanLinkSlice(s) + var err error + for i := 0; i < sls.Len(); i++ { + err = errors.Join(err, encoder.AppendObject(SpanLink(sls.At(i)))) + } + return err +} + +type SpanLink ptrace.SpanLink + +func (s SpanLink) MarshalLogObject(encoder zapcore.ObjectEncoder) error { + sl := ptrace.SpanLink(s) + err := encoder.AddObject("attributes", Map(sl.Attributes())) + encoder.AddUint32("dropped_attribute_count", sl.DroppedAttributesCount()) + encoder.AddUint32("flags", sl.Flags()) + encoder.AddString("span_id", sl.SpanID().String()) + encoder.AddString("trace_id", sl.TraceID().String()) + encoder.AddString("trace_state", sl.TraceState().AsRaw()) + return err +} + +type Metric pmetric.Metric + +func (m Metric) MarshalLogObject(encoder zapcore.ObjectEncoder) error { + mm := pmetric.Metric(m) + encoder.AddString("description", mm.Description()) + encoder.AddString("name", mm.Name()) + encoder.AddString("unit", mm.Unit()) + encoder.AddString("type", mm.Type().String()) + + var err error + switch mm.Type() { + case pmetric.MetricTypeSum: + encoder.AddString("aggregation_temporality", mm.Sum().AggregationTemporality().String()) + encoder.AddBool("is_monotonic", mm.Sum().IsMonotonic()) + err = encoder.AddArray("datapoints", NumberDataPointSlice(mm.Sum().DataPoints())) + case pmetric.MetricTypeGauge: + err = encoder.AddArray("datapoints", NumberDataPointSlice(mm.Gauge().DataPoints())) + case pmetric.MetricTypeHistogram: + encoder.AddString("aggregation_temporality", mm.Histogram().AggregationTemporality().String()) + err = encoder.AddArray("datapoints", HistogramDataPointSlice(mm.Histogram().DataPoints())) + case pmetric.MetricTypeExponentialHistogram: + encoder.AddString("aggregation_temporality", mm.ExponentialHistogram().AggregationTemporality().String()) + err = encoder.AddArray("datapoints", ExponentialHistogramDataPointSlice(mm.ExponentialHistogram().DataPoints())) + case pmetric.MetricTypeSummary: + err = encoder.AddArray("datapoints", SummaryDataPointSlice(mm.Summary().DataPoints())) + } + + return err +} + +type NumberDataPointSlice pmetric.NumberDataPointSlice + +func (n NumberDataPointSlice) MarshalLogArray(encoder zapcore.ArrayEncoder) error { + ndps := pmetric.NumberDataPointSlice(n) + var err error + for i := 0; i < ndps.Len(); i++ { + err = errors.Join(err, encoder.AppendObject(NumberDataPoint(ndps.At(i)))) + } + return err +} + +type NumberDataPoint pmetric.NumberDataPoint + +func (n NumberDataPoint) MarshalLogObject(encoder zapcore.ObjectEncoder) error { + ndp := pmetric.NumberDataPoint(n) + err := encoder.AddObject("attributes", Map(ndp.Attributes())) + err = errors.Join(err, encoder.AddArray("exemplars", ExemplarSlice(ndp.Exemplars()))) + encoder.AddUint32("flags", uint32(ndp.Flags())) + encoder.AddUint64("start_time_unix_nano", uint64(ndp.StartTimestamp())) + encoder.AddUint64("time_unix_nano", uint64(ndp.Timestamp())) + if ndp.ValueType() == 
pmetric.NumberDataPointValueTypeInt { + encoder.AddInt64("value_int", ndp.IntValue()) + } + if ndp.ValueType() == pmetric.NumberDataPointValueTypeDouble { + encoder.AddFloat64("value_double", ndp.DoubleValue()) + } + + return err +} + +type HistogramDataPointSlice pmetric.HistogramDataPointSlice + +func (h HistogramDataPointSlice) MarshalLogArray(encoder zapcore.ArrayEncoder) error { + hdps := pmetric.HistogramDataPointSlice(h) + var err error + for i := 0; i < hdps.Len(); i++ { + err = errors.Join(err, encoder.AppendObject(HistogramDataPoint(hdps.At(i)))) + } + return err +} + +type HistogramDataPoint pmetric.HistogramDataPoint + +func (h HistogramDataPoint) MarshalLogObject(encoder zapcore.ObjectEncoder) error { + hdp := pmetric.HistogramDataPoint(h) + err := encoder.AddObject("attributes", Map(hdp.Attributes())) + err = errors.Join(err, encoder.AddArray("bucket_counts", UInt64Slice(hdp.BucketCounts()))) + encoder.AddUint64("count", hdp.Count()) + err = errors.Join(err, encoder.AddArray("exemplars", ExemplarSlice(hdp.Exemplars()))) + err = errors.Join(err, encoder.AddArray("explicit_bounds", Float64Slice(hdp.ExplicitBounds()))) + encoder.AddUint32("flags", uint32(hdp.Flags())) + encoder.AddFloat64("max", hdp.Max()) + encoder.AddFloat64("min", hdp.Min()) + encoder.AddUint64("start_time_unix_nano", uint64(hdp.StartTimestamp())) + encoder.AddFloat64("sum", hdp.Sum()) + encoder.AddUint64("time_unix_nano", uint64(hdp.Timestamp())) + + return err +} + +type ExponentialHistogramDataPointSlice pmetric.ExponentialHistogramDataPointSlice + +func (e ExponentialHistogramDataPointSlice) MarshalLogArray(encoder zapcore.ArrayEncoder) error { + ehdps := pmetric.ExponentialHistogramDataPointSlice(e) + var err error + for i := 0; i < ehdps.Len(); i++ { + err = errors.Join(err, encoder.AppendObject(ExponentialHistogramDataPoint(ehdps.At(i)))) + } + return err +} + +type ExponentialHistogramDataPoint pmetric.ExponentialHistogramDataPoint + +func (e ExponentialHistogramDataPoint) MarshalLogObject(encoder zapcore.ObjectEncoder) error { + ehdp := pmetric.ExponentialHistogramDataPoint(e) + err := encoder.AddObject("attributes", Map(ehdp.Attributes())) + encoder.AddUint64("count", ehdp.Count()) + err = errors.Join(err, encoder.AddArray("exemplars", ExemplarSlice(ehdp.Exemplars()))) + encoder.AddUint32("flags", uint32(ehdp.Flags())) + encoder.AddFloat64("max", ehdp.Max()) + encoder.AddFloat64("min", ehdp.Min()) + err = errors.Join(err, encoder.AddObject("negative", ExponentialHistogramDataPointBuckets(ehdp.Negative()))) + err = errors.Join(err, encoder.AddObject("positive", ExponentialHistogramDataPointBuckets(ehdp.Positive()))) + encoder.AddInt32("scale", ehdp.Scale()) + encoder.AddUint64("start_time_unix_nano", uint64(ehdp.StartTimestamp())) + encoder.AddFloat64("sum", ehdp.Sum()) + encoder.AddUint64("time_unix_nano", uint64(ehdp.Timestamp())) + encoder.AddUint64("zero_count", ehdp.ZeroCount()) + encoder.AddFloat64("zero_threshold", ehdp.ZeroThreshold()) + return err +} + +type ExponentialHistogramDataPointBuckets pmetric.ExponentialHistogramDataPointBuckets + +func (e ExponentialHistogramDataPointBuckets) MarshalLogObject(encoder zapcore.ObjectEncoder) error { + b := pmetric.ExponentialHistogramDataPointBuckets(e) + err := encoder.AddArray("bucket_counts", UInt64Slice(b.BucketCounts())) + encoder.AddInt32("offset", b.Offset()) + return err +} + +type SummaryDataPointSlice pmetric.SummaryDataPointSlice + +func (s SummaryDataPointSlice) MarshalLogArray(encoder zapcore.ArrayEncoder) error { + sdps := 
pmetric.SummaryDataPointSlice(s) + var err error + for i := 0; i < sdps.Len(); i++ { + err = errors.Join(err, encoder.AppendObject(SummaryDataPoint(sdps.At(i)))) + } + return err +} + +type SummaryDataPoint pmetric.SummaryDataPoint + +func (s SummaryDataPoint) MarshalLogObject(encoder zapcore.ObjectEncoder) error { + sdp := pmetric.SummaryDataPoint(s) + err := encoder.AddObject("attributes", Map(sdp.Attributes())) + encoder.AddUint64("count", sdp.Count()) + encoder.AddUint32("flags", uint32(sdp.Flags())) + encoder.AddUint64("start_time_unix_nano", uint64(sdp.StartTimestamp())) + encoder.AddFloat64("sum", sdp.Sum()) + encoder.AddUint64("time_unix_nano", uint64(sdp.Timestamp())) + err = errors.Join(err, encoder.AddArray("quantile_values", SummaryDataPointValueAtQuantileSlice(sdp.QuantileValues()))) + + return err +} + +type SummaryDataPointValueAtQuantileSlice pmetric.SummaryDataPointValueAtQuantileSlice + +func (s SummaryDataPointValueAtQuantileSlice) MarshalLogArray(encoder zapcore.ArrayEncoder) error { + qs := pmetric.SummaryDataPointValueAtQuantileSlice(s) + var err error + for i := 0; i < qs.Len(); i++ { + err = errors.Join(err, encoder.AppendObject(SummaryDataPointValueAtQuantile(qs.At(i)))) + } + return nil +} + +type SummaryDataPointValueAtQuantile pmetric.SummaryDataPointValueAtQuantile + +func (s SummaryDataPointValueAtQuantile) MarshalLogObject(encoder zapcore.ObjectEncoder) error { + q := pmetric.SummaryDataPointValueAtQuantile(s) + encoder.AddFloat64("value", q.Value()) + encoder.AddFloat64("quantile", q.Quantile()) + return nil +} + +type UInt64Slice pcommon.UInt64Slice + +func (u UInt64Slice) MarshalLogArray(encoder zapcore.ArrayEncoder) error { + uis := pcommon.UInt64Slice(u) + for i := 0; i < uis.Len(); i++ { + encoder.AppendUint64(uis.At(i)) + } + return nil +} + +type Float64Slice pcommon.Float64Slice + +func (f Float64Slice) MarshalLogArray(encoder zapcore.ArrayEncoder) error { + fs := pcommon.Float64Slice(f) + for i := 0; i < fs.Len(); i++ { + encoder.AppendFloat64(fs.At(i)) + } + return nil +} + +type ExemplarSlice pmetric.ExemplarSlice + +func (e ExemplarSlice) MarshalLogArray(encoder zapcore.ArrayEncoder) error { + es := pmetric.ExemplarSlice(e) + var err error + for i := 0; i < es.Len(); i++ { + ee := es.At(i) + err = errors.Join(err, encoder.AppendObject(Exemplar(ee))) + } + return err +} + +type Exemplar pmetric.Exemplar + +func (e Exemplar) MarshalLogObject(encoder zapcore.ObjectEncoder) error { + ee := pmetric.Exemplar(e) + err := encoder.AddObject("filtered_attributes", Map(ee.FilteredAttributes())) + encoder.AddString("span_id", ee.SpanID().String()) + encoder.AddUint64("time_unix_nano", uint64(ee.Timestamp())) + encoder.AddString("trace_id", ee.TraceID().String()) + if ee.ValueType() == pmetric.ExemplarValueTypeInt { + encoder.AddInt64("value_int", ee.IntValue()) + } + if ee.ValueType() == pmetric.ExemplarValueTypeDouble { + encoder.AddFloat64("value_double", ee.DoubleValue()) + } + return err +} diff --git a/pkg/ottl/contexts/internal/logging/logging_test.go b/pkg/ottl/contexts/internal/logging/logging_test.go new file mode 100644 index 0000000000000..34a3e71f77189 --- /dev/null +++ b/pkg/ottl/contexts/internal/logging/logging_test.go @@ -0,0 +1,4 @@ +// Copyright The OpenTelemetry Authors +// SPDX-License-Identifier: Apache-2.0 + +package logging diff --git a/pkg/ottl/contexts/ottldatapoint/datapoint.go b/pkg/ottl/contexts/ottldatapoint/datapoint.go index 6d55fa91ad65c..3d389e39aab31 100644 --- a/pkg/ottl/contexts/ottldatapoint/datapoint.go +++ 
b/pkg/ottl/contexts/ottldatapoint/datapoint.go @@ -5,23 +5,27 @@ package ottldatapoint // import "github.com/open-telemetry/opentelemetry-collect import ( "context" + "errors" "fmt" "time" "go.opentelemetry.io/collector/component" "go.opentelemetry.io/collector/pdata/pcommon" "go.opentelemetry.io/collector/pdata/pmetric" + "go.uber.org/zap/zapcore" "github.com/open-telemetry/opentelemetry-collector-contrib/pkg/ottl" "github.com/open-telemetry/opentelemetry-collector-contrib/pkg/ottl/contexts/internal" + "github.com/open-telemetry/opentelemetry-collector-contrib/pkg/ottl/contexts/internal/logging" ) const ( contextName = "DataPoint" ) -var _ internal.ResourceContext = TransformContext{} -var _ internal.InstrumentationScopeContext = TransformContext{} +var _ internal.ResourceContext = (*TransformContext)(nil) +var _ internal.InstrumentationScopeContext = (*TransformContext)(nil) +var _ zapcore.ObjectMarshaler = (*TransformContext)(nil) type TransformContext struct { dataPoint any @@ -32,6 +36,26 @@ type TransformContext struct { cache pcommon.Map } +func (tCtx TransformContext) MarshalLogObject(encoder zapcore.ObjectEncoder) error { + err := encoder.AddObject("resource", logging.Resource(tCtx.resource)) + err = errors.Join(err, encoder.AddObject("scope", logging.InstrumentationScope(tCtx.instrumentationScope))) + err = errors.Join(err, encoder.AddObject("metric", logging.Metric(tCtx.metric))) + + switch dp := tCtx.dataPoint.(type) { + case pmetric.NumberDataPoint: + err = encoder.AddObject("datapoint", logging.NumberDataPoint(dp)) + case pmetric.HistogramDataPoint: + err = encoder.AddObject("datapoint", logging.HistogramDataPoint(dp)) + case pmetric.ExponentialHistogramDataPoint: + err = encoder.AddObject("datapoint", logging.ExponentialHistogramDataPoint(dp)) + case pmetric.SummaryDataPoint: + err = encoder.AddObject("datapoint", logging.SummaryDataPoint(dp)) + } + + err = errors.Join(err, encoder.AddObject("cache", logging.Map(tCtx.cache))) + return err +} + type Option func(*ottl.Parser[TransformContext]) func NewTransformContext(dataPoint any, metric pmetric.Metric, metrics pmetric.MetricSlice, instrumentationScope pcommon.InstrumentationScope, resource pcommon.Resource) TransformContext { diff --git a/pkg/ottl/contexts/ottllog/log.go b/pkg/ottl/contexts/ottllog/log.go index 3111268ecb068..085e463b73f1b 100644 --- a/pkg/ottl/contexts/ottllog/log.go +++ b/pkg/ottl/contexts/ottllog/log.go @@ -6,15 +6,18 @@ package ottllog // import "github.com/open-telemetry/opentelemetry-collector-con import ( "context" "encoding/hex" + "errors" "fmt" "time" "go.opentelemetry.io/collector/component" "go.opentelemetry.io/collector/pdata/pcommon" "go.opentelemetry.io/collector/pdata/plog" + "go.uber.org/zap/zapcore" "github.com/open-telemetry/opentelemetry-collector-contrib/pkg/ottl" "github.com/open-telemetry/opentelemetry-collector-contrib/pkg/ottl/contexts/internal" + "github.com/open-telemetry/opentelemetry-collector-contrib/pkg/ottl/contexts/internal/logging" "github.com/open-telemetry/opentelemetry-collector-contrib/pkg/ottl/internal/ottlcommon" ) @@ -22,8 +25,9 @@ const ( contextName = "Log" ) -var _ internal.ResourceContext = TransformContext{} -var _ internal.InstrumentationScopeContext = TransformContext{} +var _ internal.ResourceContext = (*TransformContext)(nil) +var _ internal.InstrumentationScopeContext = (*TransformContext)(nil) +var _ zapcore.ObjectMarshaler = (*TransformContext)(nil) type TransformContext struct { logRecord plog.LogRecord @@ -32,6 +36,31 @@ type TransformContext struct { 
cache pcommon.Map } +type logRecord plog.LogRecord + +func (l logRecord) MarshalLogObject(encoder zapcore.ObjectEncoder) error { + lr := plog.LogRecord(l) + err := encoder.AddObject("attributes", logging.Map(lr.Attributes())) + encoder.AddString("body", lr.Body().AsString()) + encoder.AddUint32("dropped_attribute_count", lr.DroppedAttributesCount()) + encoder.AddUint32("flags", uint32(lr.Flags())) + encoder.AddUint64("observed_time_unix_nano", uint64(lr.ObservedTimestamp())) + encoder.AddInt32("severity_number", int32(lr.SeverityNumber())) + encoder.AddString("severity_text", lr.SeverityText()) + encoder.AddString("span_id", lr.SpanID().String()) + encoder.AddUint64("time_unix_nano", uint64(lr.Timestamp())) + encoder.AddString("trace_id", lr.TraceID().String()) + return err +} + +func (tCtx TransformContext) MarshalLogObject(encoder zapcore.ObjectEncoder) error { + err := encoder.AddObject("resource", logging.Resource(tCtx.resource)) + err = errors.Join(err, encoder.AddObject("scope", logging.InstrumentationScope(tCtx.instrumentationScope))) + err = errors.Join(err, encoder.AddObject("log_record", logRecord(tCtx.logRecord))) + err = errors.Join(err, encoder.AddObject("cache", logging.Map(tCtx.cache))) + return err +} + type Option func(*ottl.Parser[TransformContext]) func NewTransformContext(logRecord plog.LogRecord, instrumentationScope pcommon.InstrumentationScope, resource pcommon.Resource) TransformContext { diff --git a/pkg/ottl/contexts/ottlmetric/metrics.go b/pkg/ottl/contexts/ottlmetric/metrics.go index 964210d46564b..3c13e0b7b062e 100644 --- a/pkg/ottl/contexts/ottlmetric/metrics.go +++ b/pkg/ottl/contexts/ottlmetric/metrics.go @@ -5,19 +5,22 @@ package ottlmetric // import "github.com/open-telemetry/opentelemetry-collector- import ( "context" + "errors" "fmt" "go.opentelemetry.io/collector/component" "go.opentelemetry.io/collector/pdata/pcommon" "go.opentelemetry.io/collector/pdata/pmetric" + "go.uber.org/zap/zapcore" "github.com/open-telemetry/opentelemetry-collector-contrib/pkg/ottl" "github.com/open-telemetry/opentelemetry-collector-contrib/pkg/ottl/contexts/internal" + "github.com/open-telemetry/opentelemetry-collector-contrib/pkg/ottl/contexts/internal/logging" ) -var _ internal.ResourceContext = TransformContext{} -var _ internal.InstrumentationScopeContext = TransformContext{} -var _ internal.MetricContext = TransformContext{} +var _ internal.ResourceContext = (*TransformContext)(nil) +var _ internal.InstrumentationScopeContext = (*TransformContext)(nil) +var _ zapcore.ObjectMarshaler = (*TransformContext)(nil) type TransformContext struct { metric pmetric.Metric @@ -27,6 +30,14 @@ type TransformContext struct { cache pcommon.Map } +func (tCtx TransformContext) MarshalLogObject(encoder zapcore.ObjectEncoder) error { + err := encoder.AddObject("resource", logging.Resource(tCtx.resource)) + err = errors.Join(err, encoder.AddObject("scope", logging.InstrumentationScope(tCtx.instrumentationScope))) + err = errors.Join(err, encoder.AddObject("metric", logging.Metric(tCtx.metric))) + err = errors.Join(err, encoder.AddObject("cache", logging.Map(tCtx.cache))) + return err +} + type Option func(*ottl.Parser[TransformContext]) func NewTransformContext(metric pmetric.Metric, metrics pmetric.MetricSlice, instrumentationScope pcommon.InstrumentationScope, resource pcommon.Resource) TransformContext { diff --git a/pkg/ottl/contexts/ottlresource/resource.go b/pkg/ottl/contexts/ottlresource/resource.go index e37440d9dbc2b..27b0c77dde3ac 100644 --- 
a/pkg/ottl/contexts/ottlresource/resource.go +++ b/pkg/ottl/contexts/ottlresource/resource.go @@ -5,22 +5,32 @@ package ottlresource // import "github.com/open-telemetry/opentelemetry-collecto import ( "context" + "errors" "fmt" "go.opentelemetry.io/collector/component" "go.opentelemetry.io/collector/pdata/pcommon" + "go.uber.org/zap/zapcore" "github.com/open-telemetry/opentelemetry-collector-contrib/pkg/ottl" "github.com/open-telemetry/opentelemetry-collector-contrib/pkg/ottl/contexts/internal" + "github.com/open-telemetry/opentelemetry-collector-contrib/pkg/ottl/contexts/internal/logging" ) -var _ internal.ResourceContext = TransformContext{} +var _ internal.ResourceContext = (*TransformContext)(nil) +var _ zapcore.ObjectMarshaler = (*TransformContext)(nil) type TransformContext struct { resource pcommon.Resource cache pcommon.Map } +func (tCtx TransformContext) MarshalLogObject(encoder zapcore.ObjectEncoder) error { + err := encoder.AddObject("resource", logging.Resource(tCtx.resource)) + err = errors.Join(err, encoder.AddObject("cache", logging.Map(tCtx.cache))) + return err +} + type Option func(*ottl.Parser[TransformContext]) func NewTransformContext(resource pcommon.Resource) TransformContext { diff --git a/pkg/ottl/contexts/ottlscope/scope.go b/pkg/ottl/contexts/ottlscope/scope.go index f7a9d92ee8d4a..218fb4a417dc4 100644 --- a/pkg/ottl/contexts/ottlscope/scope.go +++ b/pkg/ottl/contexts/ottlscope/scope.go @@ -5,17 +5,21 @@ package ottlscope // import "github.com/open-telemetry/opentelemetry-collector-c import ( "context" + "errors" "fmt" "go.opentelemetry.io/collector/component" "go.opentelemetry.io/collector/pdata/pcommon" + "go.uber.org/zap/zapcore" "github.com/open-telemetry/opentelemetry-collector-contrib/pkg/ottl" "github.com/open-telemetry/opentelemetry-collector-contrib/pkg/ottl/contexts/internal" + "github.com/open-telemetry/opentelemetry-collector-contrib/pkg/ottl/contexts/internal/logging" ) -var _ internal.ResourceContext = TransformContext{} -var _ internal.InstrumentationScopeContext = TransformContext{} +var _ internal.ResourceContext = (*TransformContext)(nil) +var _ internal.InstrumentationScopeContext = (*TransformContext)(nil) +var _ zapcore.ObjectMarshaler = (*TransformContext)(nil) type TransformContext struct { instrumentationScope pcommon.InstrumentationScope @@ -23,6 +27,13 @@ type TransformContext struct { cache pcommon.Map } +func (tCtx TransformContext) MarshalLogObject(encoder zapcore.ObjectEncoder) error { + err := encoder.AddObject("resource", logging.Resource(tCtx.resource)) + err = errors.Join(err, encoder.AddObject("scope", logging.InstrumentationScope(tCtx.instrumentationScope))) + err = errors.Join(err, encoder.AddObject("cache", logging.Map(tCtx.cache))) + return err +} + type Option func(*ottl.Parser[TransformContext]) func NewTransformContext(instrumentationScope pcommon.InstrumentationScope, resource pcommon.Resource) TransformContext { diff --git a/pkg/ottl/contexts/ottlspan/span.go b/pkg/ottl/contexts/ottlspan/span.go index a680fd68a96a1..83e32daa637c0 100644 --- a/pkg/ottl/contexts/ottlspan/span.go +++ b/pkg/ottl/contexts/ottlspan/span.go @@ -5,18 +5,22 @@ package ottlspan // import "github.com/open-telemetry/opentelemetry-collector-co import ( "context" + "errors" "fmt" "go.opentelemetry.io/collector/component" "go.opentelemetry.io/collector/pdata/pcommon" "go.opentelemetry.io/collector/pdata/ptrace" + "go.uber.org/zap/zapcore" "github.com/open-telemetry/opentelemetry-collector-contrib/pkg/ottl" 
"github.com/open-telemetry/opentelemetry-collector-contrib/pkg/ottl/contexts/internal" + "github.com/open-telemetry/opentelemetry-collector-contrib/pkg/ottl/contexts/internal/logging" ) -var _ internal.ResourceContext = TransformContext{} -var _ internal.InstrumentationScopeContext = TransformContext{} +var _ internal.ResourceContext = (*TransformContext)(nil) +var _ internal.InstrumentationScopeContext = (*TransformContext)(nil) +var _ zapcore.ObjectMarshaler = (*TransformContext)(nil) type TransformContext struct { span ptrace.Span @@ -25,6 +29,14 @@ type TransformContext struct { cache pcommon.Map } +func (tCtx TransformContext) MarshalLogObject(encoder zapcore.ObjectEncoder) error { + err := encoder.AddObject("resource", logging.Resource(tCtx.resource)) + err = errors.Join(err, encoder.AddObject("scope", logging.InstrumentationScope(tCtx.instrumentationScope))) + err = errors.Join(err, encoder.AddObject("span", logging.Span(tCtx.span))) + err = errors.Join(err, encoder.AddObject("cache", logging.Map(tCtx.cache))) + return err +} + type Option func(*ottl.Parser[TransformContext]) func NewTransformContext(span ptrace.Span, instrumentationScope pcommon.InstrumentationScope, resource pcommon.Resource) TransformContext { diff --git a/pkg/ottl/contexts/ottlspanevent/span_events.go b/pkg/ottl/contexts/ottlspanevent/span_events.go index baf2aadcb4754..971ba411e6da3 100644 --- a/pkg/ottl/contexts/ottlspanevent/span_events.go +++ b/pkg/ottl/contexts/ottlspanevent/span_events.go @@ -5,20 +5,23 @@ package ottlspanevent // import "github.com/open-telemetry/opentelemetry-collect import ( "context" + "errors" "fmt" "time" "go.opentelemetry.io/collector/component" "go.opentelemetry.io/collector/pdata/pcommon" "go.opentelemetry.io/collector/pdata/ptrace" + "go.uber.org/zap/zapcore" "github.com/open-telemetry/opentelemetry-collector-contrib/pkg/ottl" "github.com/open-telemetry/opentelemetry-collector-contrib/pkg/ottl/contexts/internal" + "github.com/open-telemetry/opentelemetry-collector-contrib/pkg/ottl/contexts/internal/logging" ) -var _ internal.ResourceContext = TransformContext{} -var _ internal.InstrumentationScopeContext = TransformContext{} -var _ internal.SpanContext = TransformContext{} +var _ internal.ResourceContext = (*TransformContext)(nil) +var _ internal.InstrumentationScopeContext = (*TransformContext)(nil) +var _ zapcore.ObjectMarshaler = (*TransformContext)(nil) type TransformContext struct { spanEvent ptrace.SpanEvent @@ -28,6 +31,15 @@ type TransformContext struct { cache pcommon.Map } +func (tCtx TransformContext) MarshalLogObject(encoder zapcore.ObjectEncoder) error { + err := encoder.AddObject("resource", logging.Resource(tCtx.resource)) + err = errors.Join(err, encoder.AddObject("scope", logging.InstrumentationScope(tCtx.instrumentationScope))) + err = errors.Join(err, encoder.AddObject("span", logging.Span(tCtx.span))) + err = errors.Join(err, encoder.AddObject("spanevent", logging.SpanEvent(tCtx.spanEvent))) + err = errors.Join(err, encoder.AddObject("cache", logging.Map(tCtx.cache))) + return err +} + type Option func(*ottl.Parser[TransformContext]) func NewTransformContext(spanEvent ptrace.SpanEvent, span ptrace.Span, instrumentationScope pcommon.InstrumentationScope, resource pcommon.Resource) TransformContext { diff --git a/pkg/ottl/parser.go b/pkg/ottl/parser.go index f5b0bf632bed1..4a0b3cabe7be3 100644 --- a/pkg/ottl/parser.go +++ b/pkg/ottl/parser.go @@ -262,8 +262,10 @@ func NewStatementSequence[K any](statements []*Statement[K], telemetrySettings c // When the 
ErrorMode of the StatementSequence is `ignore`, errors are logged and execution continues to the next statement. // When the ErrorMode of the StatementSequence is `silent`, errors are not logged and execution continues to the next statement. func (s *StatementSequence[K]) Execute(ctx context.Context, tCtx K) error { + s.telemetrySettings.Logger.Debug("initial TransformContext", zap.Any("TransformContext", tCtx)) for _, statement := range s.statements { - _, _, err := statement.Execute(ctx, tCtx) + _, condition, err := statement.Execute(ctx, tCtx) + s.telemetrySettings.Logger.Debug("TransformContext after statement execution", zap.String("statement", statement.origText), zap.Bool("condition matched", condition), zap.Any("TransformContext", tCtx)) if err != nil { if s.errorMode == PropagateError { err = fmt.Errorf("failed to execute statement: %v, %w", statement.origText, err) @@ -333,6 +335,7 @@ func (c *ConditionSequence[K]) Eval(ctx context.Context, tCtx K) (bool, error) { var atLeastOneMatch bool for _, condition := range c.conditions { match, err := condition.Eval(ctx, tCtx) + c.telemetrySettings.Logger.Debug("condition evaluation result", zap.String("condition", condition.origText), zap.Bool("match", match), zap.Any("TransformContext", tCtx)) if err != nil { if c.errorMode == PropagateError { err = fmt.Errorf("failed to eval condition: %v, %w", condition.origText, err) diff --git a/processor/filterprocessor/README.md b/processor/filterprocessor/README.md index 092747e610837..6448b3c79897d 100644 --- a/processor/filterprocessor/README.md +++ b/processor/filterprocessor/README.md @@ -178,6 +178,48 @@ filter/keep_good_metrics: - 'HasAttrOnDatapoint("bad.metric", "true")' ``` +## Troubleshooting + +When using OTTL you can enable debug logging in the collector to print out useful information, +such as if the condition matched and the TransformContext used in the condition, to help you troubleshoot +why a condition is not behaving as you expect. This feature is very verbose, but provides you an accurate +view into how OTTL views the underlying data. + +```yaml +receivers: + filelog: + start_at: beginning + include: [ /Users/tylerhelmuth/projects/opentelemetry-collector-contrib/local/test.log ] + + +processors: + filter: + error_mode: ignore + logs: + log_record: + - body == "test" + +exporters: + debug: + +service: + telemetry: + logs: + level: debug + pipelines: + logs: + receivers: + - filelog + processors: + - filter + exporters: + - debug +``` + +``` +2024-05-29T16:47:04.362-0600 debug ottl@v0.101.0/parser.go:338 condition evaluation result {"kind": "processor", "name": "filter", "pipeline": "logs", "condition": "body == \"test\"", "match": true, "TransformContext": {"resource": {"attributes": {}, "dropped_attribute_count": 0}, "scope": {"attributes": {}, "dropped_attribute_count": 0, "name": "", "version": ""}, "log_record": {"attributes": {"log.file.name": "test.log"}, "body": "test", "dropped_attribute_count": 0, "flags": 0, "observed_time_unix_nano": 1717022824262063000, "severity_number": 0, "severity_text": "", "span_id": "", "time_unix_nano": 0, "trace_id": ""}, "cache": {}}} +``` + ## Warnings In general, understand your data before using the filter processor. 
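An aside on how the `TransformContext` JSON in these debug entries is produced: zap renders it through the `zapcore.ObjectMarshaler` implementations added in `pkg/ottl/contexts/internal/logging`. The sketch below reproduces that pattern in isolation, using only `zap` and `pcommon`; the `attrs` wrapper, the stringified value rendering, and the field names are illustrative and not part of this patch.

```go
package main

import (
	"go.opentelemetry.io/collector/pdata/pcommon"
	"go.uber.org/zap"
	"go.uber.org/zap/zapcore"
)

// attrs wraps pcommon.Map so zap can encode it as a structured object,
// mirroring the logging.Map wrapper added in this PR.
type attrs pcommon.Map

func (a attrs) MarshalLogObject(enc zapcore.ObjectEncoder) error {
	pcommon.Map(a).Range(func(k string, v pcommon.Value) bool {
		enc.AddString(k, v.AsString()) // simplified: render every value as a string
		return true
	})
	return nil
}

func main() {
	logger, _ := zap.NewDevelopment() // development config enables debug level
	defer logger.Sync()

	m := pcommon.NewMap()
	m.PutStr("log.file.name", "test.log")
	m.PutBool("test", true)

	// zap calls MarshalLogObject only when the entry is written, so the
	// verbose encoding happens only when debug logging is enabled.
	logger.Debug("TransformContext after statement execution",
		zap.Object("attributes", attrs(m)))
}
```

Because zap invokes the marshaler only when an entry is actually written, the expensive encoding of the whole context is paid only when the collector runs with `level: debug`.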
diff --git a/processor/transformprocessor/README.md b/processor/transformprocessor/README.md index acb4ebd451f8c..02ccd3ddfc427 100644 --- a/processor/transformprocessor/README.md +++ b/processor/transformprocessor/README.md @@ -22,6 +22,7 @@ Each condition and statement can access and transform telemetry using functions - [Contexts](#contexts) - [Supported functions](#supported-functions) - [Examples](#examples) +- [Troubleshooting](#troubleshooting) - [Contributing](#contributing) ## Config @@ -468,6 +469,53 @@ transform: - set(severity_number, SEVERITY_NUMBER_ERROR) where IsString(body) and IsMatch(body, "\\sERROR\\s") ``` +## Troubleshooting + +When using OTTL you can enable debug logging in the collector to print out useful information, +such as the current Statement and the current TransformContext, to help you troubleshoot +why a statement is not behaving as you expect. This feature is very verbose, but provides you an accurate +view into how OTTL views the underlying data. + +```yaml +receivers: + filelog: + start_at: beginning + include: [ test.log ] + +processors: + transform: + error_mode: ignore + log_statements: + - context: log + statements: + - set(resource.attributes["test"], "pass") + - set(instrumentation_scope.attributes["test"], ["pass"]) + - set(attributes["test"], true) + +exporters: + debug: + +service: + telemetry: + logs: + level: debug + pipelines: + logs: + receivers: + - filelog + processors: + - transform + exporters: + - debug +``` + +``` +2024-05-29T16:38:09.600-0600 debug ottl@v0.101.0/parser.go:265 initial TransformContext {"kind": "processor", "name": "transform", "pipeline": "logs", "TransformContext": {"resource": {"attributes": {}, "dropped_attribute_count": 0}, "scope": {"attributes": {}, "dropped_attribute_count": 0, "name": "", "version": ""}, "log_record": {"attributes": {"log.file.name": "test.log"}, "body": "test", "dropped_attribute_count": 0, "flags": 0, "observed_time_unix_nano": 1717022289500721000, "severity_number": 0, "severity_text": "", "span_id": "", "time_unix_nano": 0, "trace_id": ""}, "cache": {}}} +2024-05-29T16:38:09.600-0600 debug ottl@v0.101.0/parser.go:268 TransformContext after statement execution {"kind": "processor", "name": "transform", "pipeline": "logs", "statement": "set(resource.attributes[\"test\"], \"pass\")", "condition matched": true, "TransformContext": {"resource": {"attributes": {"test": "pass"}, "dropped_attribute_count": 0}, "scope": {"attributes": {}, "dropped_attribute_count": 0, "name": "", "version": ""}, "log_record": {"attributes": {"log.file.name": "test.log"}, "body": "test", "dropped_attribute_count": 0, "flags": 0, "observed_time_unix_nano": 1717022289500721000, "severity_number": 0, "severity_text": "", "span_id": "", "time_unix_nano": 0, "trace_id": ""}, "cache": {}}} +2024-05-29T16:38:09.600-0600 debug ottl@v0.101.0/parser.go:268 TransformContext after statement execution {"kind": "processor", "name": "transform", "pipeline": "logs", "statement": "set(instrumentation_scope.attributes[\"test\"], [\"pass\"])", "condition matched": true, "TransformContext": {"resource": {"attributes": {"test": "pass"}, "dropped_attribute_count": 0}, "scope": {"attributes": {"test": ["pass"]}, "dropped_attribute_count": 0, "name": "", "version": ""}, "log_record": {"attributes": {"log.file.name": "test.log"}, "body": "test", "dropped_attribute_count": 0, "flags": 0, "observed_time_unix_nano": 1717022289500721000, "severity_number": 0, "severity_text": "", "span_id": "", "time_unix_nano": 0, "trace_id": ""}, "cache": {}}} 
+2024-05-29T16:38:09.601-0600 debug ottl@v0.101.0/parser.go:268 TransformContext after statement execution {"kind": "processor", "name": "transform", "pipeline": "logs", "statement": "set(attributes[\"test\"], true)", "condition matched": true, "TransformContext": {"resource": {"attributes": {"test": "pass"}, "dropped_attribute_count": 0}, "scope": {"attributes": {"test": ["pass"]}, "dropped_attribute_count": 0, "name": "", "version": ""}, "log_record": {"attributes": {"log.file.name": "test.log", "test": true}, "body": "test", "dropped_attribute_count": 0, "flags": 0, "observed_time_unix_nano": 1717022289500721000, "severity_number": 0, "severity_text": "", "span_id": "", "time_unix_nano": 0, "trace_id": ""}, "cache": {}}} +``` + ## Contributing See [CONTRIBUTING.md](https://github.com/open-telemetry/opentelemetry-collector-contrib/blob/main/processor/transformprocessor/CONTRIBUTING.md).
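Finally, for anyone consuming `pkg/ottl` as a library rather than through a processor, here is a rough sketch of driving `StatementSequence.Execute` with a debug-level logger so the new log entries appear. It assumes the v0.101-era API surface visible in this diff (`ottllog.NewParser`, the three-argument `ottllog.NewTransformContext`, `ottl.NewStatementSequence`) together with `ottlfuncs.StandardFuncs` for the function map; signatures move between releases, so treat this as illustrative rather than authoritative.

```go
package main

import (
	"context"

	"go.opentelemetry.io/collector/component"
	"go.opentelemetry.io/collector/pdata/pcommon"
	"go.opentelemetry.io/collector/pdata/plog"
	"go.uber.org/zap"

	"github.com/open-telemetry/opentelemetry-collector-contrib/pkg/ottl"
	"github.com/open-telemetry/opentelemetry-collector-contrib/pkg/ottl/contexts/ottllog"
	"github.com/open-telemetry/opentelemetry-collector-contrib/pkg/ottl/ottlfuncs"
)

func main() {
	// zap.NewDevelopment enables debug-level logging, which is what makes the
	// new Debug calls in StatementSequence.Execute visible.
	logger, _ := zap.NewDevelopment()
	settings := component.TelemetrySettings{Logger: logger}

	parser, err := ottllog.NewParser(ottlfuncs.StandardFuncs[ottllog.TransformContext](), settings)
	if err != nil {
		logger.Fatal("creating parser", zap.Error(err))
	}

	statements, err := parser.ParseStatements([]string{`set(attributes["test"], "pass")`})
	if err != nil {
		logger.Fatal("parsing statements", zap.Error(err))
	}
	seq := ottl.NewStatementSequence(statements, settings)

	// A minimal log-record TransformContext, mirroring the examples above.
	lr := plog.NewLogRecord()
	lr.Body().SetStr("test")
	tCtx := ottllog.NewTransformContext(lr, pcommon.NewInstrumentationScope(), pcommon.NewResource())

	// Expect an "initial TransformContext" entry plus one "TransformContext
	// after statement execution" entry per statement.
	if err := seq.Execute(context.Background(), tCtx); err != nil {
		logger.Error("executing statements", zap.Error(err))
	}
}
```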