diff --git a/.chloggen/countconnectorpdata-upgrade.yaml b/.chloggen/countconnectorpdata-upgrade.yaml new file mode 100644 index 0000000000000..8a7f4d49ddfc2 --- /dev/null +++ b/.chloggen/countconnectorpdata-upgrade.yaml @@ -0,0 +1,27 @@ +# Use this changelog template to create an entry for release notes. + +# One of 'breaking', 'deprecation', 'new_component', 'enhancement', 'bug_fix' +change_type: breaking + +# The name of the component, or a single word describing the area of concern, (e.g. filelogreceiver) +component: countconnector + +# A brief description of the change. Surround your text with quotes ("") if it needs to start with a backtick (`). +note: Upgrade profiles proto to 1.7.0 + +# Mandatory: One or more tracking issues related to the change. You can use the PR number here if no issue exists. +issues: [40285] + +# (Optional) One or more lines of additional information to render under the primary note. +# These lines will be padded with 2 spaces and then inserted directly into the document. +# Use pipe (|) for multiline entries. +subtext: + +# If your change doesn't affect end users or the exported elements of any package, +# you should instead start your pull request title with [chore] or use the "Skip Changelog" label. +# Optional: The change log or logs in which this entry should be included. +# e.g. '[user]' or '[user, api]' +# Include 'user' if the change is relevant to end users. +# Include 'api' if there is a change to a library API. +# Default: '[user]' +change_logs: [user] diff --git a/.chloggen/elasticsearchexporter-pdata-upgrade.yaml b/.chloggen/elasticsearchexporter-pdata-upgrade.yaml new file mode 100644 index 0000000000000..0d830b16810ec --- /dev/null +++ b/.chloggen/elasticsearchexporter-pdata-upgrade.yaml @@ -0,0 +1,27 @@ +# Use this changelog template to create an entry for release notes. + +# One of 'breaking', 'deprecation', 'new_component', 'enhancement', 'bug_fix' +change_type: breaking + +# The name of the component, or a single word describing the area of concern, (e.g. filelogreceiver) +component: elasticsearchexporter + +# A brief description of the change. Surround your text with quotes ("") if it needs to start with a backtick (`). +note: Upgrade profiles proto to 1.7.0 + +# Mandatory: One or more tracking issues related to the change. You can use the PR number here if no issue exists. +issues: [40285] + +# (Optional) One or more lines of additional information to render under the primary note. +# These lines will be padded with 2 spaces and then inserted directly into the document. +# Use pipe (|) for multiline entries. +subtext: + +# If your change doesn't affect end users or the exported elements of any package, +# you should instead start your pull request title with [chore] or use the "Skip Changelog" label. +# Optional: The change log or logs in which this entry should be included. +# e.g. '[user]' or '[user, api]' +# Include 'user' if the change is relevant to end users. +# Include 'api' if there is a change to a library API. +# Default: '[user]' +change_logs: [user] diff --git a/.chloggen/golden-pdata-upgrade.yaml b/.chloggen/golden-pdata-upgrade.yaml new file mode 100644 index 0000000000000..bf85d1f5419b9 --- /dev/null +++ b/.chloggen/golden-pdata-upgrade.yaml @@ -0,0 +1,27 @@ +# Use this changelog template to create an entry for release notes. + +# One of 'breaking', 'deprecation', 'new_component', 'enhancement', 'bug_fix' +change_type: breaking + +# The name of the component, or a single word describing the area of concern, (e.g. 
filelogreceiver) +component: golden + +# A brief description of the change. Surround your text with quotes ("") if it needs to start with a backtick (`). +note: Upgrade profiles proto to 1.7.0 + +# Mandatory: One or more tracking issues related to the change. You can use the PR number here if no issue exists. +issues: [40285] + +# (Optional) One or more lines of additional information to render under the primary note. +# These lines will be padded with 2 spaces and then inserted directly into the document. +# Use pipe (|) for multiline entries. +subtext: + +# If your change doesn't affect end users or the exported elements of any package, +# you should instead start your pull request title with [chore] or use the "Skip Changelog" label. +# Optional: The change log or logs in which this entry should be included. +# e.g. '[user]' or '[user, api]' +# Include 'user' if the change is relevant to end users. +# Include 'api' if there is a change to a library API. +# Default: '[user]' +change_logs: [user] diff --git a/.chloggen/ottl-pdata-upgrade.yaml b/.chloggen/ottl-pdata-upgrade.yaml new file mode 100644 index 0000000000000..631b91ae27129 --- /dev/null +++ b/.chloggen/ottl-pdata-upgrade.yaml @@ -0,0 +1,27 @@ +# Use this changelog template to create an entry for release notes. + +# One of 'breaking', 'deprecation', 'new_component', 'enhancement', 'bug_fix' +change_type: breaking + +# The name of the component, or a single word describing the area of concern, (e.g. filelogreceiver) +component: ottl + +# A brief description of the change. Surround your text with quotes ("") if it needs to start with a backtick (`). +note: Upgrade profiles proto to 1.7.0 + +# Mandatory: One or more tracking issues related to the change. You can use the PR number here if no issue exists. +issues: [40285] + +# (Optional) One or more lines of additional information to render under the primary note. +# These lines will be padded with 2 spaces and then inserted directly into the document. +# Use pipe (|) for multiline entries. +subtext: + +# If your change doesn't affect end users or the exported elements of any package, +# you should instead start your pull request title with [chore] or use the "Skip Changelog" label. +# Optional: The change log or logs in which this entry should be included. +# e.g. '[user]' or '[user, api]' +# Include 'user' if the change is relevant to end users. +# Include 'api' if there is a change to a library API. +# Default: '[user]' +change_logs: [user] diff --git a/.chloggen/signaltometricsconnector-pdata-upgrade.yaml b/.chloggen/signaltometricsconnector-pdata-upgrade.yaml new file mode 100644 index 0000000000000..370e8324ff705 --- /dev/null +++ b/.chloggen/signaltometricsconnector-pdata-upgrade.yaml @@ -0,0 +1,27 @@ +# Use this changelog template to create an entry for release notes. + +# One of 'breaking', 'deprecation', 'new_component', 'enhancement', 'bug_fix' +change_type: breaking + +# The name of the component, or a single word describing the area of concern, (e.g. filelogreceiver) +component: signaltometricsconnector + +# A brief description of the change. Surround your text with quotes ("") if it needs to start with a backtick (`). +note: Upgrade profiles proto to 1.7.0 + +# Mandatory: One or more tracking issues related to the change. You can use the PR number here if no issue exists. +issues: [40285] + +# (Optional) One or more lines of additional information to render under the primary note. +# These lines will be padded with 2 spaces and then inserted directly into the document. 
+# Use pipe (|) for multiline entries. +subtext: + +# If your change doesn't affect end users or the exported elements of any package, +# you should instead start your pull request title with [chore] or use the "Skip Changelog" label. +# Optional: The change log or logs in which this entry should be included. +# e.g. '[user]' or '[user, api]' +# Include 'user' if the change is relevant to end users. +# Include 'api' if there is a change to a library API. +# Default: '[user]' +change_logs: [user] diff --git a/connector/countconnector/connector.go b/connector/countconnector/connector.go index 49bdf4da1b8cc..722cd21d1fef1 100644 --- a/connector/countconnector/connector.go +++ b/connector/countconnector/connector.go @@ -216,8 +216,8 @@ func (c *count) ConsumeProfiles(ctx context.Context, ld pprofile.Profiles) error for k := 0; k < scopeProfile.Profiles().Len(); k++ { profile := scopeProfile.Profiles().At(k) - pCtx := ottlprofile.NewTransformContext(profile, scopeProfile.Scope(), resourceProfile.Resource(), scopeProfile, resourceProfile) - attributes := pprofile.FromAttributeIndices(profile.AttributeTable(), profile) + pCtx := ottlprofile.NewTransformContext(profile, ld.ProfilesDictionary(), scopeProfile.Scope(), resourceProfile.Resource(), scopeProfile, resourceProfile) + attributes := pprofile.FromAttributeIndices(ld.ProfilesDictionary().AttributeTable(), profile) multiError = errors.Join(multiError, counter.update(ctx, attributes, pCtx)) } } diff --git a/connector/countconnector/testdata/profiles/input.yaml b/connector/countconnector/testdata/profiles/input.yaml index aea34534ba63f..68729e2f7fd1f 100644 --- a/connector/countconnector/testdata/profiles/input.yaml +++ b/connector/countconnector/testdata/profiles/input.yaml @@ -10,58 +10,31 @@ resourceProfiles: scopeProfiles: - profiles: - attributeIndices: [0, 1] - attributeTable: - - key: profile.required - value: - stringValue: foo - - key: profile.optional - value: - stringValue: bar sample: - locationsLength: 0 timestampsUnixNano: ["0"] sampleType: - unitStrindex: 0 - stringTable: - - count duration: 10000 - - attributeIndices: [0, 1] - attributeTable: - - key: profile.required - value: - stringValue: foo - - key: profile.optional - value: - stringValue: notbar + - attributeIndices: [0, 2] sample: - locationsLength: 0 timestampsUnixNano: ["0"] sampleType: - unitStrindex: 0 - stringTable: - - count duration: 10000 - - attributeIndices: [0] - attributeTable: - - key: profile.required - value: - stringValue: notfoo + - attributeIndices: [3] sample: - locationsLength: 0 timestampsUnixNano: ["0"] sampleType: - unitStrindex: 0 - stringTable: - - count - attributeIndices: [] - attributeTable: [] sample: - locationsLength: 0 timestampsUnixNano: ["0"] sampleType: - unitStrindex: 0 - stringTable: - - count duration: 100 scope: {} @@ -76,58 +49,31 @@ resourceProfiles: scopeProfiles: - profiles: - attributeIndices: [0, 1] - attributeTable: - - key: profile.required - value: - stringValue: foo - - key: profile.optional - value: - stringValue: bar sample: - locationsLength: 0 timestampsUnixNano: ["0"] sampleType: - unitStrindex: 0 - stringTable: - - count duration: 10000 - - attributeIndices: [0, 1] - attributeTable: - - key: profile.required - value: - stringValue: foo - - key: profile.optional - value: - stringValue: notbar + - attributeIndices: [0, 2] sample: - locationsLength: 0 timestampsUnixNano: ["0"] sampleType: - unitStrindex: 0 - stringTable: - - count duration: 10000 - - attributeIndices: [0] - attributeTable: - - key: profile.required - 
value: - stringValue: notfoo + - attributeIndices: [3] sample: - locationsLength: 0 timestampsUnixNano: ["0"] sampleType: - unitStrindex: 0 - stringTable: - - count - attributeIndices: [] - attributeTable: [] sample: - locationsLength: 0 timestampsUnixNano: ["0"] sampleType: - unitStrindex: 0 - stringTable: - - count duration: 100 scope: {} @@ -139,58 +85,31 @@ resourceProfiles: scopeProfiles: - profiles: - attributeIndices: [0, 1] - attributeTable: - - key: profile.required - value: - stringValue: foo - - key: profile.optional - value: - stringValue: bar sample: - locationsLength: 0 timestampsUnixNano: ["0"] sampleType: - unitStrindex: 0 - stringTable: - - count duration: 10000 - - attributeIndices: [0, 1] - attributeTable: - - key: profile.required - value: - stringValue: foo - - key: profile.optional - value: - stringValue: notbar + - attributeIndices: [0, 2] sample: - locationsLength: 0 timestampsUnixNano: ["0"] sampleType: - unitStrindex: 0 - stringTable: - - count duration: 10000 - - attributeIndices: [0] - attributeTable: - - key: profile.required - value: - stringValue: notfoo + - attributeIndices: [3] sample: - locationsLength: 0 timestampsUnixNano: ["0"] sampleType: - unitStrindex: 0 - stringTable: - - count - attributeIndices: [] - attributeTable: [] sample: - locationsLength: 0 timestampsUnixNano: ["0"] sampleType: - unitStrindex: 0 - stringTable: - - count duration: 100 scope: {} @@ -198,57 +117,46 @@ resourceProfiles: scopeProfiles: - profiles: - attributeIndices: [0, 1] - attributeTable: - - key: profile.required - value: - stringValue: foo - - key: profile.optional - value: - stringValue: bar sample: - locationsLength: 0 timestampsUnixNano: ["0"] sampleType: - unitStrindex: 0 - stringTable: - - count duration: 10000 - - attributeIndices: [0, 1] - attributeTable: - - key: profile.required - value: - stringValue: foo - - key: profile.optional - value: - stringValue: notbar + - attributeIndices: [0, 2] sample: - locationsLength: 0 timestampsUnixNano: ["0"] sampleType: - unitStrindex: 0 - stringTable: - - count duration: 10000 - - attributeIndices: [0] - attributeTable: - - key: profile.required - value: - stringValue: notfoo + - attributeIndices: [3] sample: - locationsLength: 0 timestampsUnixNano: ["0"] sampleType: - unitStrindex: 0 - stringTable: - - count - attributeIndices: [] - attributeTable: [] sample: - locationsLength: 0 timestampsUnixNano: ["0"] sampleType: - unitStrindex: 0 - stringTable: - - count duration: 100 scope: {} +dictionary: + attributeTable: + - key: profile.required + value: + stringValue: foo + - key: profile.optional + value: + stringValue: bar + - key: profile.optional + value: + stringValue: notbar + - key: profile.required + value: + stringValue: notfoo + stringTable: + - count diff --git a/connector/signaltometricsconnector/connector.go b/connector/signaltometricsconnector/connector.go index 15aba044c17f4..c5d69561e03e7 100644 --- a/connector/signaltometricsconnector/connector.go +++ b/connector/signaltometricsconnector/connector.go @@ -264,7 +264,7 @@ func (sm *signalToMetrics) ConsumeProfiles(ctx context.Context, profiles pprofil for k := 0; k < scopeProfile.Profiles().Len(); k++ { profile := scopeProfile.Profiles().At(k) - profileAttrs := pprofile.FromAttributeIndices(profile.AttributeTable(), profile) + profileAttrs := pprofile.FromAttributeIndices(profiles.ProfilesDictionary().AttributeTable(), profile) for _, md := range sm.profileMetricDefs { filteredProfileAttrs, ok := md.FilterAttributes(profileAttrs) @@ -274,7 +274,7 @@ func (sm 
*signalToMetrics) ConsumeProfiles(ctx context.Context, profiles pprofil // The transform context is created from original attributes so that the // OTTL expressions are also applied on the original attributes. - tCtx := ottlprofile.NewTransformContext(profile, scopeProfile.Scope(), resourceProfile.Resource(), scopeProfile, resourceProfile) + tCtx := ottlprofile.NewTransformContext(profile, profiles.ProfilesDictionary(), scopeProfile.Scope(), resourceProfile.Resource(), scopeProfile, resourceProfile) if md.Conditions != nil { match, err := md.Conditions.Eval(ctx, tCtx) if err != nil { diff --git a/connector/signaltometricsconnector/testdata/profiles/profiles.yaml b/connector/signaltometricsconnector/testdata/profiles/profiles.yaml index 2133d33ba8cbc..814e35f1eef2c 100644 --- a/connector/signaltometricsconnector/testdata/profiles/profiles.yaml +++ b/connector/signaltometricsconnector/testdata/profiles/profiles.yaml @@ -10,29 +10,13 @@ resourceProfiles: scopeProfiles: - profiles: - attributeIndices: [0, 1] - attributeTable: - - key: profile.foo - value: - stringValue: foo - - key: profile.bar - value: - stringValue: bar sample: - locationsLength: 0 timestampsUnixNano: ["0"] sampleType: - unitStrindex: 0 - stringTable: - - count duration: 10000 - - attributeIndices: [0, 1] - attributeTable: - - key: profile.required - value: - stringValue: foo - - key: profile.optional - value: - stringValue: notbar + - attributeIndices: [2, 3] sample: - locationsLength: 0 timestampsUnixNano: ["0"] @@ -41,16 +25,30 @@ resourceProfiles: stringTable: - count duration: 10000 - - attributeIndices: [0] - attributeTable: - - key: profile.required - value: - stringValue: notfoo + - attributeIndices: [5] sample: - locationsLength: 0 timestampsUnixNano: ["0"] sampleType: - unitStrindex: 0 - stringTable: - - count scope: {} +dictionary: + attributeTable: + - key: profile.foo + value: + stringValue: foo + - key: profile.bar + value: + stringValue: bar + - key: profile.required + value: + stringValue: foo + - key: profile.optional + value: + stringValue: notbar + - key: profile.optional + value: + stringValue: notbar + - key: profile.required + value: + stringValue: notfoo diff --git a/exporter/elasticsearchexporter/exporter.go b/exporter/elasticsearchexporter/exporter.go index daebc91a5c9e5..58d7591dffce8 100644 --- a/exporter/elasticsearchexporter/exporter.go +++ b/exporter/elasticsearchexporter/exporter.go @@ -505,6 +505,7 @@ func (e *elasticsearchExporter) pushProfilesData(ctx context.Context, pd pprofil // the specified mapping mode. 
scopeMappingModeSessions := mappingModeSessions{indexers: &e.bulkIndexers.modes} defer scopeMappingModeSessions.End() + dic := pd.ProfilesDictionary() var errs []error for _, rp := range pd.ResourceProfiles().All() { @@ -526,7 +527,7 @@ func (e *elasticsearchExporter) pushProfilesData(ctx context.Context, pd pprofil scopeSchemaURL: sp.SchemaUrl(), } if err := e.pushProfileRecord( - ctx, encoder, ec, profile, defaultSession, eventsSession, + ctx, encoder, ec, dic, profile, defaultSession, eventsSession, stackTracesSession, stackFramesSession, executablesSession, ); err != nil { if cerr := ctx.Err(); cerr != nil { @@ -557,10 +558,11 @@ func (e *elasticsearchExporter) pushProfileRecord( ctx context.Context, encoder documentEncoder, ec encodingContext, + dic pprofile.ProfilesDictionary, profile pprofile.Profile, defaultSession, eventsSession, stackTracesSession, stackFramesSession, executablesSession bulkIndexerSession, ) error { - return encoder.encodeProfile(ec, profile, func(buf *bytes.Buffer, docID, index string) error { + return encoder.encodeProfile(ec, dic, profile, func(buf *bytes.Buffer, docID, index string) error { switch index { case otelserializer.StackTraceIndex: return stackTracesSession.Add(ctx, index, docID, "", buf, nil, docappender.ActionCreate) diff --git a/exporter/elasticsearchexporter/internal/serializer/otelserializer/profile.go b/exporter/elasticsearchexporter/internal/serializer/otelserializer/profile.go index f8fe90ab74b8d..2fd4ca92031bf 100644 --- a/exporter/elasticsearchexporter/internal/serializer/otelserializer/profile.go +++ b/exporter/elasticsearchexporter/internal/serializer/otelserializer/profile.go @@ -26,7 +26,7 @@ const ( ) // SerializeProfile serializes a profile and calls the `pushData` callback for each generated document. 
-func (s *Serializer) SerializeProfile(resource pcommon.Resource, scope pcommon.InstrumentationScope, profile pprofile.Profile, pushData func(*bytes.Buffer, string, string) error) error { +func (s *Serializer) SerializeProfile(dic pprofile.ProfilesDictionary, resource pcommon.Resource, scope pcommon.InstrumentationScope, profile pprofile.Profile, pushData func(*bytes.Buffer, string, string) error) error { err := s.createLRUs() if err != nil { return err @@ -40,7 +40,7 @@ func (s *Serializer) SerializeProfile(resource pcommon.Resource, scope pcommon.I return pushData(c, id, index) } - data, err := serializeprofiles.Transform(resource, scope, profile) + data, err := serializeprofiles.Transform(dic, resource, scope, profile) if err != nil { return err } diff --git a/exporter/elasticsearchexporter/internal/serializer/otelserializer/profile_test.go b/exporter/elasticsearchexporter/internal/serializer/otelserializer/profile_test.go index 7dae011f024c4..7be3b8bc1c115 100644 --- a/exporter/elasticsearchexporter/internal/serializer/otelserializer/profile_test.go +++ b/exporter/elasticsearchexporter/internal/serializer/otelserializer/profile_test.go @@ -66,44 +66,50 @@ func basicProfiles() pprofiletest.Profiles { func TestSerializeProfile(t *testing.T) { tests := []struct { name string + buildDictionary func() pprofile.ProfilesDictionary profileCustomizer func(resource pcommon.Resource, scope pcommon.InstrumentationScope, record pprofile.Profile) wantErr bool expected []map[string]any }{ { name: "with a simple sample", - profileCustomizer: func(_ pcommon.Resource, _ pcommon.InstrumentationScope, profile pprofile.Profile) { - profile.StringTable().Append("samples", "count", "cpu", "nanoseconds") - st := profile.SampleType().AppendEmpty() - st.SetTypeStrindex(0) - st.SetUnitStrindex(1) - pt := profile.PeriodType() - pt.SetTypeStrindex(2) - pt.SetUnitStrindex(3) + buildDictionary: func() pprofile.ProfilesDictionary { + dic := pprofile.NewProfilesDictionary() + dic.StringTable().Append("samples", "count", "cpu", "nanoseconds") - a := profile.AttributeTable().AppendEmpty() + a := dic.AttributeTable().AppendEmpty() a.SetKey("process.executable.build_id.htlhash") a.Value().SetStr("600DCAFE4A110000F2BF38C493F5FB92") - a = profile.AttributeTable().AppendEmpty() + a = dic.AttributeTable().AppendEmpty() a.SetKey("profile.frame.type") a.Value().SetStr("native") - a = profile.AttributeTable().AppendEmpty() + a = dic.AttributeTable().AppendEmpty() a.SetKey("host.id") a.Value().SetStr("localhost") - profile.AttributeIndices().Append(2) - - sample := profile.Sample().AppendEmpty() - sample.TimestampsUnixNano().Append(0) - sample.SetLocationsLength(1) - - m := profile.MappingTable().AppendEmpty() + m := dic.MappingTable().AppendEmpty() m.AttributeIndices().Append(0) - l := profile.LocationTable().AppendEmpty() + l := dic.LocationTable().AppendEmpty() l.SetMappingIndex(0) l.SetAddress(111) l.AttributeIndices().Append(1) + + return dic + }, + profileCustomizer: func(_ pcommon.Resource, _ pcommon.InstrumentationScope, profile pprofile.Profile) { + st := profile.SampleType().AppendEmpty() + st.SetTypeStrindex(0) + st.SetUnitStrindex(1) + pt := profile.PeriodType() + pt.SetTypeStrindex(2) + pt.SetUnitStrindex(3) + + profile.AttributeIndices().Append(2) + + sample := profile.Sample().AppendEmpty() + sample.TimestampsUnixNano().Append(0) + sample.SetLocationsLength(1) }, wantErr: false, expected: []map[string]any{ @@ -152,6 +158,7 @@ func TestSerializeProfile(t *testing.T) { } for _, tt := range tests { t.Run(tt.name, 
func(t *testing.T) { + dic := tt.buildDictionary() profiles := pprofile.NewProfiles() resource := profiles.ResourceProfiles().AppendEmpty() scope := resource.ScopeProfiles().AppendEmpty() @@ -162,7 +169,7 @@ func TestSerializeProfile(t *testing.T) { buf := []*bytes.Buffer{} ser, err := New() require.NoError(t, err) - err = ser.SerializeProfile(resource.Resource(), scope.Scope(), profile, func(b *bytes.Buffer, _ string, _ string) error { + err = ser.SerializeProfile(dic, resource.Resource(), scope.Scope(), profile, func(b *bytes.Buffer, _ string, _ string) error { buf = append(buf, b) return nil }) @@ -215,6 +222,6 @@ func BenchmarkSerializeProfile(b *testing.B) { b.ResetTimer() for i := 0; i < b.N; i++ { - _ = ser.SerializeProfile(resource.Resource(), scope.Scope(), profile, pushData) + _ = ser.SerializeProfile(profiles.ProfilesDictionary(), resource.Resource(), scope.Scope(), profile, pushData) } } diff --git a/exporter/elasticsearchexporter/internal/serializer/otelserializer/serializeprofiles/benchmark_test.go b/exporter/elasticsearchexporter/internal/serializer/otelserializer/serializeprofiles/benchmark_test.go index 506c1cca57828..cb6823148f570 100644 --- a/exporter/elasticsearchexporter/internal/serializer/otelserializer/serializeprofiles/benchmark_test.go +++ b/exporter/elasticsearchexporter/internal/serializer/otelserializer/serializeprofiles/benchmark_test.go @@ -12,50 +12,56 @@ import ( func BenchmarkTransform(b *testing.B) { for _, bb := range []struct { name string + buildDictionary func() pprofile.ProfilesDictionary buildResourceProfiles func() pprofile.ResourceProfiles }{ { name: "with a basic recorded sample", - buildResourceProfiles: func() pprofile.ResourceProfiles { - rp := pprofile.NewResourceProfiles() - - sp := rp.ScopeProfiles().AppendEmpty() - p := sp.Profiles().AppendEmpty() - - a := p.AttributeTable().AppendEmpty() + buildDictionary: func() pprofile.ProfilesDictionary { + dic := pprofile.NewProfilesDictionary() + a := dic.AttributeTable().AppendEmpty() a.SetKey("profile.frame.type") a.Value().SetStr("native") - a = p.AttributeTable().AppendEmpty() + a = dic.AttributeTable().AppendEmpty() a.SetKey("process.executable.build_id.htlhash") a.Value().SetStr(buildIDEncoded) - a = p.AttributeTable().AppendEmpty() + a = dic.AttributeTable().AppendEmpty() a.SetKey("process.executable.build_id.htlhash") a.Value().SetStr(buildID2Encoded) - p.StringTable().Append("firefox", "libc.so", "samples", "count", "cpu", "nanoseconds") - st := p.SampleType().AppendEmpty() - st.SetTypeStrindex(2) - st.SetUnitStrindex(3) - pt := p.PeriodType() - pt.SetTypeStrindex(4) - pt.SetUnitStrindex(5) + dic.StringTable().Append("firefox", "libc.so", "samples", "count", "cpu", "nanoseconds") - m := p.MappingTable().AppendEmpty() + m := dic.MappingTable().AppendEmpty() m.AttributeIndices().Append(1) m.SetFilenameStrindex(0) - m = p.MappingTable().AppendEmpty() + m = dic.MappingTable().AppendEmpty() m.AttributeIndices().Append(2) m.SetFilenameStrindex(1) - l := p.LocationTable().AppendEmpty() + l := dic.LocationTable().AppendEmpty() l.SetAddress(address) l.AttributeIndices().Append(0) l.SetMappingIndex(0) - l = p.LocationTable().AppendEmpty() + l = dic.LocationTable().AppendEmpty() l.SetAddress(address2) l.AttributeIndices().Append(0) l.SetMappingIndex(1) + return dic + }, + buildResourceProfiles: func() pprofile.ResourceProfiles { + rp := pprofile.NewResourceProfiles() + + sp := rp.ScopeProfiles().AppendEmpty() + p := sp.Profiles().AppendEmpty() + + st := p.SampleType().AppendEmpty() + 
st.SetTypeStrindex(2) + st.SetUnitStrindex(3) + pt := p.PeriodType() + pt.SetTypeStrindex(4) + pt.SetUnitStrindex(5) + s := p.Sample().AppendEmpty() s.TimestampsUnixNano().Append(42) s.Value().Append(1) @@ -67,6 +73,7 @@ func BenchmarkTransform(b *testing.B) { }, } { b.Run(bb.name, func(b *testing.B) { + dic := bb.buildDictionary() rp := bb.buildResourceProfiles() sp := rp.ScopeProfiles().At(0) p := sp.Profiles().At(0) @@ -75,7 +82,7 @@ func BenchmarkTransform(b *testing.B) { b.ResetTimer() for i := 0; i < b.N; i++ { - _, _ = Transform(rp.Resource(), sp.Scope(), p) + _, _ = Transform(dic, rp.Resource(), sp.Scope(), p) } }) } diff --git a/exporter/elasticsearchexporter/internal/serializer/otelserializer/serializeprofiles/transform.go b/exporter/elasticsearchexporter/internal/serializer/otelserializer/serializeprofiles/transform.go index 701c834435c4e..bbf26909377f1 100644 --- a/exporter/elasticsearchexporter/internal/serializer/otelserializer/serializeprofiles/transform.go +++ b/exporter/elasticsearchexporter/internal/serializer/otelserializer/serializeprofiles/transform.go @@ -20,15 +20,15 @@ import ( // Transform transforms a [pprofile.Profile] into our own // representation, for ingestion into Elasticsearch -func Transform(resource pcommon.Resource, scope pcommon.InstrumentationScope, profile pprofile.Profile) ([]StackPayload, error) { +func Transform(dic pprofile.ProfilesDictionary, resource pcommon.Resource, scope pcommon.InstrumentationScope, profile pprofile.Profile) ([]StackPayload, error) { var data []StackPayload - if err := checkProfileType(profile); err != nil { + if err := checkProfileType(dic, profile); err != nil { return data, err } // profileContainer is checked for nil inside stackPayloads(). - payloads, err := stackPayloads(resource, scope, profile) + payloads, err := stackPayloads(dic, resource, scope, profile) if err != nil { return nil, err } @@ -40,14 +40,14 @@ func Transform(resource pcommon.Resource, scope pcommon.InstrumentationScope, pr // checkProfileType acts as safeguard to make sure only known profiles are // accepted. Different kinds of profiles are currently not supported // and mixing profiles will make profiling information unusable. -func checkProfileType(profile pprofile.Profile) error { +func checkProfileType(dic pprofile.ProfilesDictionary, profile pprofile.Profile) error { sampleType := profile.SampleType() if sampleType.Len() != 1 { return fmt.Errorf("expected 1 sample type but got %d", sampleType.Len()) } - sType := getString(profile, int(sampleType.At(0).TypeStrindex())) - sUnit := getString(profile, int(sampleType.At(0).UnitStrindex())) + sType := getString(dic, int(sampleType.At(0).TypeStrindex())) + sUnit := getString(dic, int(sampleType.At(0).UnitStrindex())) // Make sure only on-CPU profiling data is accepted at the moment. // This needs to match with @@ -59,8 +59,8 @@ func checkProfileType(profile pprofile.Profile) error { } periodType := profile.PeriodType() - pType := getString(profile, int(periodType.TypeStrindex())) - pUnit := getString(profile, int(periodType.UnitStrindex())) + pType := getString(dic, int(periodType.TypeStrindex())) + pUnit := getString(dic, int(periodType.UnitStrindex())) // Make sure only on-CPU profiling data is accepted at the moment. // This needs to match with @@ -76,17 +76,17 @@ func checkProfileType(profile pprofile.Profile) error { // stackPayloads creates a slice of StackPayloads from the given ResourceProfiles, // ScopeProfiles, and ProfileContainer. 
-func stackPayloads(resource pcommon.Resource, scope pcommon.InstrumentationScope, profile pprofile.Profile) ([]StackPayload, error) { +func stackPayloads(dic pprofile.ProfilesDictionary, resource pcommon.Resource, scope pcommon.InstrumentationScope, profile pprofile.Profile) ([]StackPayload, error) { unsymbolizedLeafFramesSet := make(map[libpf.FrameID]struct{}, profile.Sample().Len()) unsymbolizedExecutablesSet := make(map[libpf.FileID]struct{}) stackPayload := make([]StackPayload, 0, profile.Sample().Len()) - hostMetadata := newHostMetadata(resource, scope, profile) + hostMetadata := newHostMetadata(dic, resource, scope, profile) for i := 0; i < profile.Sample().Len(); i++ { sample := profile.Sample().At(i) - frames, frameTypes, leafFrame, err := stackFrames(profile, sample) + frames, frameTypes, leafFrame, err := stackFrames(dic, sample) if err != nil { return nil, fmt.Errorf("failed to create stackframes: %w", err) } @@ -99,7 +99,7 @@ func stackPayloads(resource pcommon.Resource, scope pcommon.InstrumentationScope return nil, fmt.Errorf("failed to create stacktrace ID: %w", err) } - event := stackTraceEvent(traceID, profile, sample, hostMetadata) + event := stackTraceEvent(dic, traceID, sample, hostMetadata) // Set the stacktrace and stackframes to the payload. // The docs only need to be written once. @@ -147,8 +147,8 @@ func stackPayloads(resource pcommon.Resource, scope pcommon.InstrumentationScope } if len(stackPayload) > 0 { - if profile.MappingTable().Len() > 0 { - exeMetadata, err := executables(profile, profile.MappingTable()) + if dic.MappingTable().Len() > 0 { + exeMetadata, err := executables(dic, dic.MappingTable()) if err != nil { return nil, err } @@ -209,7 +209,7 @@ func isFrameSymbolized(frame StackFrame) bool { return len(frame.FileName) > 0 || len(frame.FunctionName) > 0 } -func stackTraceEvent(traceID string, profile pprofile.Profile, sample pprofile.Sample, hostMetadata map[string]string) StackTraceEvent { +func stackTraceEvent(dic pprofile.ProfilesDictionary, traceID string, sample pprofile.Sample, hostMetadata map[string]string) StackTraceEvent { event := StackTraceEvent{ EcsVersion: EcsVersion{V: EcsVersionString}, HostID: hostMetadata[string(semconv.HostIDKey)], @@ -219,10 +219,10 @@ func stackTraceEvent(traceID string, profile pprofile.Profile, sample pprofile.S // Store event-specific attributes. 
for i := 0; i < sample.AttributeIndices().Len(); i++ { - if profile.AttributeTable().Len() < i { + if dic.AttributeTable().Len() < i { continue } - attr := profile.AttributeTable().At(i) + attr := dic.AttributeTable().At(i) switch attribute.Key(attr.Key()) { case semconv.HostIDKey: @@ -263,10 +263,10 @@ func stackTrace(stackTraceID string, frames []StackFrame, frameTypes []libpf.Fra } } -func stackFrames(profile pprofile.Profile, sample pprofile.Sample) ([]StackFrame, []libpf.FrameType, *libpf.FrameID, error) { +func stackFrames(dic pprofile.ProfilesDictionary, sample pprofile.Sample) ([]StackFrame, []libpf.FrameType, *libpf.FrameID, error) { frames := make([]StackFrame, 0, sample.LocationsLength()) - locations := getLocations(profile, sample) + locations := getLocations(dic, sample) totalFrames := 0 for _, location := range locations { totalFrames += location.Line().Len() @@ -276,11 +276,11 @@ func stackFrames(profile pprofile.Profile, sample pprofile.Sample) ([]StackFrame var leafFrameID *libpf.FrameID for locationIdx, location := range locations { - if location.MappingIndex() >= int32(profile.MappingTable().Len()) { + if location.MappingIndex() >= int32(dic.MappingTable().Len()) { continue } - frameTypeStr, err := getStringFromAttribute(profile, location, "profile.frame.type") + frameTypeStr, err := getStringFromAttribute(dic, location, "profile.frame.type") if err != nil { return nil, nil, nil, err } @@ -293,14 +293,14 @@ func stackFrames(profile pprofile.Profile, sample pprofile.Sample) ([]StackFrame for i := 0; i < location.Line().Len(); i++ { line := location.Line().At(i) - if line.FunctionIndex() < int32(profile.FunctionTable().Len()) { - functionNames = append(functionNames, getString(profile, int(profile.FunctionTable().At(int(line.FunctionIndex())).NameStrindex()))) - fileNames = append(fileNames, getString(profile, int(profile.FunctionTable().At(int(line.FunctionIndex())).FilenameStrindex()))) + if line.FunctionIndex() < int32(dic.FunctionTable().Len()) { + functionNames = append(functionNames, getString(dic, int(dic.FunctionTable().At(int(line.FunctionIndex())).NameStrindex()))) + fileNames = append(fileNames, getString(dic, int(dic.FunctionTable().At(int(line.FunctionIndex())).FilenameStrindex()))) } lineNumbers = append(lineNumbers, int32(line.Line())) } - frameID, err := getFrameID(profile, location) + frameID, err := getFrameID(dic, location) if err != nil { return nil, nil, nil, err } @@ -323,10 +323,10 @@ func stackFrames(profile pprofile.Profile, sample pprofile.Sample) ([]StackFrame return frames, frameTypes, leafFrameID, nil } -func getFrameID(profile pprofile.Profile, location pprofile.Location) (*libpf.FrameID, error) { +func getFrameID(dic pprofile.ProfilesDictionary, location pprofile.Location) (*libpf.FrameID, error) { // The MappingIndex is known to be valid. - mapping := profile.MappingTable().At(int(location.MappingIndex())) - buildID, err := getBuildID(profile, mapping) + mapping := dic.MappingTable().At(int(location.MappingIndex())) + buildID, err := getBuildID(dic, mapping) if err != nil { return nil, err } @@ -348,8 +348,8 @@ type attributable interface { // getStringFromAttribute returns a string from one of attrIndices from the attribute table // of the profile if the attribute key matches the expected attrKey. 
-func getStringFromAttribute(profile pprofile.Profile, record attributable, attrKey string) (string, error) { - lenAttrTable := profile.AttributeTable().Len() +func getStringFromAttribute(dic pprofile.ProfilesDictionary, record attributable, attrKey string) (string, error) { + lenAttrTable := dic.AttributeTable().Len() for i := 0; i < record.AttributeIndices().Len(); i++ { idx := int(record.AttributeIndices().At(i)) @@ -358,8 +358,8 @@ func getStringFromAttribute(profile pprofile.Profile, record attributable, attrK return "", fmt.Errorf("requested attribute index (%d) "+ "exceeds size of attribute table (%d)", idx, lenAttrTable) } - if profile.AttributeTable().At(idx).Key() == attrKey { - return profile.AttributeTable().At(idx).Value().AsString(), nil + if dic.AttributeTable().At(idx).Key() == attrKey { + return dic.AttributeTable().At(idx).Value().AsString(), nil } } @@ -368,29 +368,29 @@ func getStringFromAttribute(profile pprofile.Profile, record attributable, attrK // getBuildID returns the Build ID for the given mapping. It checks for both // old-style Build ID (stored with the mapping) and Build ID as attribute. -func getBuildID(profile pprofile.Profile, mapping pprofile.Mapping) (libpf.FileID, error) { +func getBuildID(dic pprofile.ProfilesDictionary, mapping pprofile.Mapping) (libpf.FileID, error) { // Fetch build ID from profiles.attribute_table. - buildIDStr, err := getStringFromAttribute(profile, mapping, "process.executable.build_id.htlhash") + buildIDStr, err := getStringFromAttribute(dic, mapping, "process.executable.build_id.htlhash") if err != nil { return libpf.FileID{}, err } return libpf.FileIDFromString(buildIDStr) } -func executables(profile pprofile.Profile, mappings pprofile.MappingSlice) ([]ExeMetadata, error) { +func executables(dic pprofile.ProfilesDictionary, mappings pprofile.MappingSlice) ([]ExeMetadata, error) { metadata := make([]ExeMetadata, 0, mappings.Len()) lastSeen := GetStartOfWeekFromTime(time.Now()) for i := 0; i < mappings.Len(); i++ { mapping := mappings.At(i) - filename := profile.StringTable().At(int(mapping.FilenameStrindex())) + filename := dic.StringTable().At(int(mapping.FilenameStrindex())) if filename == "" { // This is true for interpreted languages like Python. 
continue } - buildID, err := getBuildID(profile, mapping) + buildID, err := getBuildID(dic, mapping) if err != nil { return nil, err } @@ -438,18 +438,18 @@ func stackTraceID(frames []StackFrame) (string, error) { return traceHash.Base64(), nil } -func getLocations(profile pprofile.Profile, sample pprofile.Sample) []pprofile.Location { +func getLocations(dic pprofile.ProfilesDictionary, sample pprofile.Sample) []pprofile.Location { locations := make([]pprofile.Location, 0, sample.LocationsLength()) - lastIndex := min(int(sample.LocationsStartIndex()+sample.LocationsLength()), profile.LocationTable().Len()) + lastIndex := min(int(sample.LocationsStartIndex()+sample.LocationsLength()), dic.LocationTable().Len()) for i := int(sample.LocationsStartIndex()); i < lastIndex; i++ { - locations = append(locations, profile.LocationTable().At(i)) + locations = append(locations, dic.LocationTable().At(i)) } return locations } -func getString(profile pprofile.Profile, index int) string { - if index < profile.StringTable().Len() { - return profile.StringTable().At(index) +func getString(dic pprofile.ProfilesDictionary, index int) string { + if index < dic.StringTable().Len() { + return dic.StringTable().At(index) } return "" } @@ -458,12 +458,12 @@ func GetStartOfWeekFromTime(t time.Time) uint32 { return uint32(t.Truncate(time.Hour * 24 * 7).Unix()) } -func newHostMetadata(resource pcommon.Resource, scope pcommon.InstrumentationScope, profile pprofile.Profile) map[string]string { +func newHostMetadata(dic pprofile.ProfilesDictionary, resource pcommon.Resource, scope pcommon.InstrumentationScope, profile pprofile.Profile) map[string]string { attrs := make(map[string]string, 128) addEventHostData(attrs, resource.Attributes()) addEventHostData(attrs, scope.Attributes()) - addEventHostData(attrs, pprofile.FromAttributeIndices(profile.AttributeTable(), profile)) + addEventHostData(attrs, pprofile.FromAttributeIndices(dic.AttributeTable(), profile)) if len(attrs) == 0 { return nil diff --git a/exporter/elasticsearchexporter/internal/serializer/otelserializer/serializeprofiles/transform_test.go b/exporter/elasticsearchexporter/internal/serializer/otelserializer/serializeprofiles/transform_test.go index b4f6adc29d421..db70f299ba5dd 100644 --- a/exporter/elasticsearchexporter/internal/serializer/otelserializer/serializeprofiles/transform_test.go +++ b/exporter/elasticsearchexporter/internal/serializer/otelserializer/serializeprofiles/transform_test.go @@ -60,6 +60,7 @@ func TestTransform(t *testing.T) { }) for _, tt := range []struct { name string + buildDictionary func() pprofile.ProfilesDictionary buildResourceProfiles func() pprofile.ResourceProfiles wantPayload []StackPayload @@ -67,13 +68,18 @@ func TestTransform(t *testing.T) { }{ { name: "with an empty sample", + buildDictionary: func() pprofile.ProfilesDictionary { + dic := pprofile.NewProfilesDictionary() + dic.StringTable().Append("samples", "count", "cpu", "nanoseconds") + + return dic + }, buildResourceProfiles: func() pprofile.ResourceProfiles { rp := pprofile.NewResourceProfiles() sp := rp.ScopeProfiles().AppendEmpty() p := sp.Profiles().AppendEmpty() - p.StringTable().Append("samples", "count", "cpu", "nanoseconds") st := p.SampleType().AppendEmpty() st.SetTypeStrindex(0) st.SetUnitStrindex(1) @@ -91,13 +97,18 @@ func TestTransform(t *testing.T) { }, { name: "with an invalid profiling type", + buildDictionary: func() pprofile.ProfilesDictionary { + dic := pprofile.NewProfilesDictionary() + dic.StringTable().Append("off-CPU", "events") + + return 
dic + }, buildResourceProfiles: func() pprofile.ResourceProfiles { rp := pprofile.NewResourceProfiles() sp := rp.ScopeProfiles().AppendEmpty() p := sp.Profiles().AppendEmpty() - p.StringTable().Append("off-CPU", "events") st := p.SampleType().AppendEmpty() st.SetTypeStrindex(0) st.SetUnitStrindex(1) @@ -112,13 +123,20 @@ func TestTransform(t *testing.T) { }, { name: "with no sample value and no line number on location", + buildDictionary: func() pprofile.ProfilesDictionary { + dic := pprofile.NewProfilesDictionary() + dic.StringTable().Append("samples", "count", "cpu", "nanoseconds") + l := dic.LocationTable().AppendEmpty() + l.SetAddress(111) + + return dic + }, buildResourceProfiles: func() pprofile.ResourceProfiles { rp := pprofile.NewResourceProfiles() sp := rp.ScopeProfiles().AppendEmpty() p := sp.Profiles().AppendEmpty() - p.StringTable().Append("samples", "count", "cpu", "nanoseconds") st := p.SampleType().AppendEmpty() st.SetTypeStrindex(0) st.SetUnitStrindex(1) @@ -129,9 +147,6 @@ func TestTransform(t *testing.T) { s := p.Sample().AppendEmpty() s.TimestampsUnixNano().Append(42) - l := p.LocationTable().AppendEmpty() - l.SetAddress(111) - return rp }, @@ -140,46 +155,51 @@ func TestTransform(t *testing.T) { }, { name: "with a single indexed sample", - buildResourceProfiles: func() pprofile.ResourceProfiles { - rp := pprofile.NewResourceProfiles() - - sp := rp.ScopeProfiles().AppendEmpty() - p := sp.Profiles().AppendEmpty() - - a := p.AttributeTable().AppendEmpty() + buildDictionary: func() pprofile.ProfilesDictionary { + dic := pprofile.NewProfilesDictionary() + a := dic.AttributeTable().AppendEmpty() a.SetKey("profile.frame.type") a.Value().SetStr("native") - a = p.AttributeTable().AppendEmpty() + a = dic.AttributeTable().AppendEmpty() a.SetKey("process.executable.build_id.htlhash") a.Value().SetStr(buildIDEncoded) - a = p.AttributeTable().AppendEmpty() + a = dic.AttributeTable().AppendEmpty() a.SetKey("process.executable.build_id.htlhash") a.Value().SetStr(buildID2Encoded) - p.StringTable().Append("firefox", "libc.so", "samples", "count", "cpu", "nanoseconds") - st := p.SampleType().AppendEmpty() - st.SetTypeStrindex(2) - st.SetUnitStrindex(3) - pt := p.PeriodType() - pt.SetTypeStrindex(4) - pt.SetUnitStrindex(5) + dic.StringTable().Append("firefox", "libc.so", "samples", "count", "cpu", "nanoseconds") - m := p.MappingTable().AppendEmpty() + m := dic.MappingTable().AppendEmpty() m.AttributeIndices().Append(1) m.SetFilenameStrindex(0) - m = p.MappingTable().AppendEmpty() + m = dic.MappingTable().AppendEmpty() m.AttributeIndices().Append(2) m.SetFilenameStrindex(1) - l := p.LocationTable().AppendEmpty() + l := dic.LocationTable().AppendEmpty() l.SetAddress(address) l.AttributeIndices().Append(0) l.SetMappingIndex(0) - l = p.LocationTable().AppendEmpty() + l = dic.LocationTable().AppendEmpty() l.SetAddress(address2) l.AttributeIndices().Append(0) l.SetMappingIndex(1) + return dic + }, + buildResourceProfiles: func() pprofile.ResourceProfiles { + rp := pprofile.NewResourceProfiles() + + sp := rp.ScopeProfiles().AppendEmpty() + p := sp.Profiles().AppendEmpty() + + st := p.SampleType().AppendEmpty() + st.SetTypeStrindex(2) + st.SetUnitStrindex(3) + pt := p.PeriodType() + pt.SetTypeStrindex(4) + pt.SetUnitStrindex(5) + s := p.Sample().AppendEmpty() s.TimestampsUnixNano().Append(42) s.Value().Append(1) @@ -248,10 +268,11 @@ func TestTransform(t *testing.T) { }, } { t.Run(tt.name, func(t *testing.T) { + dic := tt.buildDictionary() rp := tt.buildResourceProfiles() sp := 
rp.ScopeProfiles().At(0) - payload, err := Transform(rp.Resource(), sp.Scope(), sp.Profiles().At(0)) + payload, err := Transform(dic, rp.Resource(), sp.Scope(), sp.Profiles().At(0)) require.NoError(t, checkAndResetTimes(payload)) sortPayloads(payload) sortPayloads(tt.wantPayload) @@ -268,6 +289,7 @@ func TestStackPayloads(t *testing.T) { }) for _, tt := range []struct { name string + buildDictionary func() pprofile.ProfilesDictionary buildResourceProfiles func() pprofile.ResourceProfiles wantPayload []StackPayload @@ -275,42 +297,47 @@ func TestStackPayloads(t *testing.T) { }{ { //nolint:dupl name: "with a single indexed sample", - buildResourceProfiles: func() pprofile.ResourceProfiles { - rp := pprofile.NewResourceProfiles() + buildDictionary: func() pprofile.ProfilesDictionary { + dic := pprofile.NewProfilesDictionary() + dic.StringTable().Append(stacktraceIDBase64, "firefox", "libc.so") - sp := rp.ScopeProfiles().AppendEmpty() - p := sp.Profiles().AppendEmpty() - p.StringTable().Append(stacktraceIDBase64, "firefox", "libc.so") - - a := p.AttributeTable().AppendEmpty() + a := dic.AttributeTable().AppendEmpty() a.SetKey("profile.frame.type") a.Value().SetStr("native") - a = p.AttributeTable().AppendEmpty() + a = dic.AttributeTable().AppendEmpty() a.SetKey("process.executable.build_id.htlhash") a.Value().SetStr(buildIDEncoded) - a = p.AttributeTable().AppendEmpty() + a = dic.AttributeTable().AppendEmpty() a.SetKey("process.executable.build_id.htlhash") a.Value().SetStr(buildID2Encoded) - a = p.AttributeTable().AppendEmpty() + a = dic.AttributeTable().AppendEmpty() a.SetKey("profile.frame.type") a.Value().SetStr("native") - l := p.LocationTable().AppendEmpty() + l := dic.LocationTable().AppendEmpty() l.SetMappingIndex(0) l.SetAddress(address) l.AttributeIndices().Append(3) - l = p.LocationTable().AppendEmpty() + l = dic.LocationTable().AppendEmpty() l.SetMappingIndex(1) l.SetAddress(address2) l.AttributeIndices().Append(3) - m := p.MappingTable().AppendEmpty() + m := dic.MappingTable().AppendEmpty() m.AttributeIndices().Append(1) m.SetFilenameStrindex(1) - m = p.MappingTable().AppendEmpty() + m = dic.MappingTable().AppendEmpty() m.AttributeIndices().Append(2) m.SetFilenameStrindex(2) + return dic + }, + buildResourceProfiles: func() pprofile.ResourceProfiles { + rp := pprofile.NewResourceProfiles() + + sp := rp.ScopeProfiles().AppendEmpty() + p := sp.Profiles().AppendEmpty() + s := p.Sample().AppendEmpty() s.TimestampsUnixNano().Append(1) s.Value().Append(1) @@ -378,40 +405,44 @@ func TestStackPayloads(t *testing.T) { }, { name: "with a duplicated sample", - buildResourceProfiles: func() pprofile.ResourceProfiles { - rp := pprofile.NewResourceProfiles() - - sp := rp.ScopeProfiles().AppendEmpty() - p := sp.Profiles().AppendEmpty() - - p.StringTable().Append(stacktraceIDBase64, "firefox", "libc.so") + buildDictionary: func() pprofile.ProfilesDictionary { + dic := pprofile.NewProfilesDictionary() + dic.StringTable().Append(stacktraceIDBase64, "firefox", "libc.so") - a := p.AttributeTable().AppendEmpty() + a := dic.AttributeTable().AppendEmpty() a.SetKey("process.executable.build_id.htlhash") a.Value().SetStr(buildIDEncoded) - a = p.AttributeTable().AppendEmpty() + a = dic.AttributeTable().AppendEmpty() a.SetKey("process.executable.build_id.htlhash") a.Value().SetStr(buildID2Encoded) - a = p.AttributeTable().AppendEmpty() + a = dic.AttributeTable().AppendEmpty() a.SetKey("profile.frame.type") a.Value().SetStr("native") - l := p.LocationTable().AppendEmpty() + l := 
dic.LocationTable().AppendEmpty() l.SetMappingIndex(0) l.SetAddress(address) l.AttributeIndices().Append(2) - l = p.LocationTable().AppendEmpty() + l = dic.LocationTable().AppendEmpty() l.SetMappingIndex(1) l.SetAddress(address2) l.AttributeIndices().Append(2) - m := p.MappingTable().AppendEmpty() + m := dic.MappingTable().AppendEmpty() m.AttributeIndices().Append(0) m.SetFilenameStrindex(1) - m = p.MappingTable().AppendEmpty() + m = dic.MappingTable().AppendEmpty() m.AttributeIndices().Append(1) m.SetFilenameStrindex(2) + return dic + }, + buildResourceProfiles: func() pprofile.ResourceProfiles { + rp := pprofile.NewResourceProfiles() + + sp := rp.ScopeProfiles().AppendEmpty() + p := sp.Profiles().AppendEmpty() + s := p.Sample().AppendEmpty() s.TimestampsUnixNano().Append(1) s.Value().Append(2) @@ -479,10 +510,11 @@ func TestStackPayloads(t *testing.T) { }, } { t.Run(tt.name, func(t *testing.T) { + dic := tt.buildDictionary() rp := tt.buildResourceProfiles() sp := rp.ScopeProfiles().At(0) - payloads, err := stackPayloads(rp.Resource(), sp.Scope(), sp.Profiles().At(0)) + payloads, err := stackPayloads(dic, rp.Resource(), sp.Scope(), sp.Profiles().At(0)) require.NoError(t, checkAndResetTimes(payloads)) sortPayloads(payloads) sortPayloads(tt.wantPayload) @@ -496,12 +528,19 @@ func TestStackTraceEvent(t *testing.T) { for _, tt := range []struct { name string timestamp uint64 + buildDictionary func() pprofile.ProfilesDictionary buildResourceProfiles func() pprofile.ResourceProfiles wantEvent StackTraceEvent }{ { name: "sets host specific data", + buildDictionary: func() pprofile.ProfilesDictionary { + dic := pprofile.NewProfilesDictionary() + dic.StringTable().Append(stacktraceIDBase64) + + return dic + }, buildResourceProfiles: func() pprofile.ResourceProfiles { rp := pprofile.NewResourceProfiles() _ = rp.Resource().Attributes().FromRaw(map[string]any{ @@ -510,7 +549,6 @@ func TestStackTraceEvent(t *testing.T) { sp := rp.ScopeProfiles().AppendEmpty() p := sp.Profiles().AppendEmpty() - p.StringTable().Append(stacktraceIDBase64) p.Sample().AppendEmpty() @@ -526,11 +564,16 @@ func TestStackTraceEvent(t *testing.T) { { name: "sets the timestamp", timestamp: 1000000000, + buildDictionary: func() pprofile.ProfilesDictionary { + dic := pprofile.NewProfilesDictionary() + dic.StringTable().Append(stacktraceIDBase64) + + return dic + }, buildResourceProfiles: func() pprofile.ResourceProfiles { rp := pprofile.NewResourceProfiles() sp := rp.ScopeProfiles().AppendEmpty() p := sp.Profiles().AppendEmpty() - p.StringTable().Append(stacktraceIDBase64) p.Sample().AppendEmpty() @@ -546,11 +589,16 @@ func TestStackTraceEvent(t *testing.T) { }, { name: "sets the stack trace ID", + buildDictionary: func() pprofile.ProfilesDictionary { + dic := pprofile.NewProfilesDictionary() + dic.StringTable().Append(stacktraceIDBase64) + + return dic + }, buildResourceProfiles: func() pprofile.ResourceProfiles { rp := pprofile.NewResourceProfiles() sp := rp.ScopeProfiles().AppendEmpty() p := sp.Profiles().AppendEmpty() - p.StringTable().Append(stacktraceIDBase64) p.Sample().AppendEmpty() @@ -565,25 +613,30 @@ func TestStackTraceEvent(t *testing.T) { }, { name: "sets event specific data", - buildResourceProfiles: func() pprofile.ResourceProfiles { - rp := pprofile.NewResourceProfiles() - sp := rp.ScopeProfiles().AppendEmpty() - p := sp.Profiles().AppendEmpty() - p.StringTable().Append(stacktraceIDBase64) + buildDictionary: func() pprofile.ProfilesDictionary { + dic := pprofile.NewProfilesDictionary() + 
dic.StringTable().Append(stacktraceIDBase64) - a := p.AttributeTable().AppendEmpty() + a := dic.AttributeTable().AppendEmpty() a.SetKey(string(semconv.K8SPodNameKey)) a.Value().SetStr("my_pod") - a = p.AttributeTable().AppendEmpty() + a = dic.AttributeTable().AppendEmpty() a.SetKey(string(semconv.ContainerNameKey)) a.Value().SetStr("my_container") - a = p.AttributeTable().AppendEmpty() + a = dic.AttributeTable().AppendEmpty() a.SetKey(string(semconv.ThreadNameKey)) a.Value().SetStr("my_thread") - a = p.AttributeTable().AppendEmpty() + a = dic.AttributeTable().AppendEmpty() a.SetKey(string(semconv.ServiceNameKey)) a.Value().SetStr("my_service") + return dic + }, + buildResourceProfiles: func() pprofile.ResourceProfiles { + rp := pprofile.NewResourceProfiles() + sp := rp.ScopeProfiles().AppendEmpty() + p := sp.Profiles().AppendEmpty() + s := p.Sample().AppendEmpty() s.AttributeIndices().Append(0, 1, 2, 3) @@ -601,11 +654,12 @@ func TestStackTraceEvent(t *testing.T) { }, } { t.Run(tt.name, func(t *testing.T) { + dic := tt.buildDictionary() rp := tt.buildResourceProfiles() p := rp.ScopeProfiles().At(0).Profiles().At(0) s := p.Sample().At(0) - event := stackTraceEvent(stacktraceIDBase64, p, s, map[string]string{}) + event := stackTraceEvent(dic, stacktraceIDBase64, s, map[string]string{}) event.TimeStamp = newUnixTime64(tt.timestamp) assert.Equal(t, tt.wantEvent, event) @@ -615,51 +669,51 @@ func TestStackTraceEvent(t *testing.T) { func TestStackTrace(t *testing.T) { for _, tt := range []struct { - name string - buildProfile func() pprofile.Profile + name string + buildDictionary func() pprofile.ProfilesDictionary + buildProfile func() pprofile.Profile wantTrace StackTrace }{ { name: "creates a stack trace", - buildProfile: func() pprofile.Profile { - p := pprofile.NewProfile() - - a := p.AttributeTable().AppendEmpty() + buildDictionary: func() pprofile.ProfilesDictionary { + dic := pprofile.NewProfilesDictionary() + a := dic.AttributeTable().AppendEmpty() a.SetKey("profile.frame.type") a.Value().SetStr("kernel") - a = p.AttributeTable().AppendEmpty() + a = dic.AttributeTable().AppendEmpty() a.SetKey("profile.frame.type") a.Value().SetStr("dotnet") - a = p.AttributeTable().AppendEmpty() + a = dic.AttributeTable().AppendEmpty() a.SetKey("profile.frame.type") a.Value().SetStr("native") - a = p.AttributeTable().AppendEmpty() + a = dic.AttributeTable().AppendEmpty() a.SetKey("process.executable.build_id.htlhash") a.Value().SetStr(buildIDEncoded) - a = p.AttributeTable().AppendEmpty() + a = dic.AttributeTable().AppendEmpty() a.SetKey("process.executable.build_id.htlhash") a.Value().SetStr(buildID2Encoded) - a = p.AttributeTable().AppendEmpty() + a = dic.AttributeTable().AppendEmpty() a.SetKey("process.executable.build_id.htlhash") a.Value().SetStr(buildID3Encoded) - p.StringTable().Append( + dic.StringTable().Append( stacktraceIDBase64, "kernel", "native", "dotnet", ) - l := p.LocationTable().AppendEmpty() + l := dic.LocationTable().AppendEmpty() l.SetMappingIndex(0) l.SetAddress(address) l.AttributeIndices().Append(0) - l = p.LocationTable().AppendEmpty() + l = dic.LocationTable().AppendEmpty() l.SetMappingIndex(1) l.SetAddress(address2) l.AttributeIndices().Append(1) - l = p.LocationTable().AppendEmpty() + l = dic.LocationTable().AppendEmpty() l.SetMappingIndex(2) l.SetAddress(address3) l.AttributeIndices().Append(2) @@ -669,13 +723,18 @@ func TestStackTrace(t *testing.T) { li = l.Line().AppendEmpty() li.SetLine(3) - m := p.MappingTable().AppendEmpty() + m := dic.MappingTable().AppendEmpty() 
m.AttributeIndices().Append(3) - m = p.MappingTable().AppendEmpty() + m = dic.MappingTable().AppendEmpty() m.AttributeIndices().Append(4) - m = p.MappingTable().AppendEmpty() + m = dic.MappingTable().AppendEmpty() m.AttributeIndices().Append(5) + return dic + }, + buildProfile: func() pprofile.Profile { + p := pprofile.NewProfile() + s := p.Sample().AppendEmpty() s.SetLocationsLength(3) @@ -694,10 +753,11 @@ func TestStackTrace(t *testing.T) { }, } { t.Run(tt.name, func(t *testing.T) { + dic := tt.buildDictionary() p := tt.buildProfile() s := p.Sample().At(0) - frames, frameTypes, _, err := stackFrames(p, s) + frames, frameTypes, _, err := stackFrames(dic, s) require.NoError(t, err) stacktrace := stackTrace("", frames, frameTypes) @@ -720,31 +780,32 @@ func frameTypesToString(frameTypes []libpf.FrameType) string { } func mkStackTraceID(t *testing.T, frameIDs []libpf.FrameID) string { + dic := pprofile.NewProfilesDictionary() p := pprofile.NewProfile() s := p.Sample().AppendEmpty() s.SetLocationsLength(int32(len(frameIDs))) - a := p.AttributeTable().AppendEmpty() + a := dic.AttributeTable().AppendEmpty() a.SetKey("profile.frame.type") a.Value().SetStr("native") for i, frameID := range frameIDs { - p.StringTable().Append(frameID.FileID().StringNoQuotes()) + dic.StringTable().Append(frameID.FileID().StringNoQuotes()) - a := p.AttributeTable().AppendEmpty() + a := dic.AttributeTable().AppendEmpty() a.SetKey("process.executable.build_id.htlhash") a.Value().SetStr(frameID.FileID().StringNoQuotes()) - m := p.MappingTable().AppendEmpty() + m := dic.MappingTable().AppendEmpty() m.AttributeIndices().Append(int32(i + 1)) - l := p.LocationTable().AppendEmpty() + l := dic.LocationTable().AppendEmpty() l.SetMappingIndex(int32(i)) l.SetAddress(uint64(frameID.AddressOrLine())) l.AttributeIndices().Append(0) } - frames, _, _, err := stackFrames(p, s) + frames, _, _, err := stackFrames(dic, s) require.NoError(t, err) traceID, err := stackTraceID(frames) diff --git a/exporter/elasticsearchexporter/model.go b/exporter/elasticsearchexporter/model.go index 940954904439a..a6904ab2a818a 100644 --- a/exporter/elasticsearchexporter/model.go +++ b/exporter/elasticsearchexporter/model.go @@ -78,7 +78,7 @@ type documentEncoder interface { encodeSpan(encodingContext, ptrace.Span, elasticsearch.Index, *bytes.Buffer) error encodeSpanEvent(encodingContext, ptrace.Span, ptrace.SpanEvent, elasticsearch.Index, *bytes.Buffer) error encodeMetrics(_ encodingContext, _ []datapoints.DataPoint, validationErrors *[]error, _ elasticsearch.Index, _ *bytes.Buffer) (map[string]string, error) - encodeProfile(_ encodingContext, _ pprofile.Profile, _ func(*bytes.Buffer, string, string) error) error + encodeProfile(_ encodingContext, _ pprofile.ProfilesDictionary, _ pprofile.Profile, _ func(*bytes.Buffer, string, string) error) error } type encodingContext struct { @@ -291,10 +291,11 @@ func (e otelModeEncoder) encodeMetrics( func (e otelModeEncoder) encodeProfile( ec encodingContext, + dic pprofile.ProfilesDictionary, profile pprofile.Profile, pushData func(*bytes.Buffer, string, string) error, ) error { - return e.serializer.SerializeProfile(ec.resource, ec.scope, profile, pushData) + return e.serializer.SerializeProfile(dic, ec.resource, ec.scope, profile, pushData) } func (e bodymapModeEncoder) encodeLog( @@ -339,7 +340,7 @@ type profilesUnsupportedEncoder struct { } func (e profilesUnsupportedEncoder) encodeProfile( - _ encodingContext, _ pprofile.Profile, _ func(*bytes.Buffer, string, string) error, + _ encodingContext, _ 
pprofile.ProfilesDictionary, _ pprofile.Profile, _ func(*bytes.Buffer, string, string) error, ) error { return fmt.Errorf("mapping mode %q (%d) does not support profiles", e.mode, int(e.mode)) } diff --git a/internal/coreinternal/testdata/profile.go b/internal/coreinternal/testdata/profile.go index 4775543984f77..0e76ccb409b86 100644 --- a/internal/coreinternal/testdata/profile.go +++ b/internal/coreinternal/testdata/profile.go @@ -36,42 +36,42 @@ func GenerateProfilesOneEmptyProfile() pprofile.Profiles { func GenerateProfilesOneProfile() pprofile.Profiles { pd := GenerateProfilesOneEmptyProfile() - fillProfileOne(pd.ResourceProfiles().At(0).ScopeProfiles().At(0).Profiles().At(0)) + fillProfileOne(pd.ProfilesDictionary(), pd.ResourceProfiles().At(0).ScopeProfiles().At(0).Profiles().At(0)) return pd } func GenerateProfilesTwoProfilesSameResource() pprofile.Profiles { pd := GenerateProfilesOneEmptyProfile() profiles := pd.ResourceProfiles().At(0).ScopeProfiles().At(0).Profiles() - fillProfileOne(profiles.At(0)) - fillProfileTwo(profiles.AppendEmpty()) + fillProfileOne(pd.ProfilesDictionary(), profiles.At(0)) + fillProfileTwo(pd.ProfilesDictionary(), profiles.AppendEmpty()) return pd } -func fillProfileOne(profile pprofile.Profile) { +func fillProfileOne(dic pprofile.ProfilesDictionary, profile pprofile.Profile) { profile.SetStartTime(TestProfileStartTimestamp) profile.SetProfileID([16]byte{0x01, 0x02, 0x03, 0x04}) profile.AttributeIndices().Append(0) - a := profile.AttributeTable().AppendEmpty() + a := dic.AttributeTable().AppendEmpty() a.SetKey("app") a.Value().SetStr("server") profile.AttributeIndices().Append(0) - a = profile.AttributeTable().AppendEmpty() + a = dic.AttributeTable().AppendEmpty() a.SetKey("instance_num") a.Value().SetInt(1) } -func fillProfileTwo(profile pprofile.Profile) { +func fillProfileTwo(dic pprofile.ProfilesDictionary, profile pprofile.Profile) { profile.SetStartTime(TestProfileStartTimestamp) profile.SetProfileID([16]byte{0x05, 0x06, 0x07, 0x08}) profile.AttributeIndices().Append(0) - a := profile.AttributeTable().AppendEmpty() + a := dic.AttributeTable().AppendEmpty() a.SetKey("customer") a.Value().SetStr("acme") profile.AttributeIndices().Append(0) - a = profile.AttributeTable().AppendEmpty() + a = dic.AttributeTable().AppendEmpty() a.SetKey("env") a.Value().SetStr("dev") } diff --git a/pkg/golden/golden_test.go b/pkg/golden/golden_test.go index 854288422177d..78f149068430b 100644 --- a/pkg/golden/golden_test.go +++ b/pkg/golden/golden_test.go @@ -368,11 +368,12 @@ func TestProfilesRoundTrip(t *testing.T) { func CreateTestProfiles() pprofile.Profiles { profiles := pprofile.NewProfiles() + dic := profiles.ProfilesDictionary() resource := profiles.ResourceProfiles().AppendEmpty() scope := resource.ScopeProfiles().AppendEmpty() profile := scope.Profiles().AppendEmpty() - profile.StringTable().Append("samples", "count", "cpu", "nanoseconds") + dic.StringTable().Append("samples", "count", "cpu", "nanoseconds") st := profile.SampleType().AppendEmpty() st.SetTypeStrindex(0) st.SetUnitStrindex(1) @@ -380,13 +381,13 @@ func CreateTestProfiles() pprofile.Profiles { pt.SetTypeStrindex(2) pt.SetUnitStrindex(3) - a := profile.AttributeTable().AppendEmpty() + a := dic.AttributeTable().AppendEmpty() a.SetKey("process.executable.build_id.htlhash") a.Value().SetStr("600DCAFE4A110000F2BF38C493F5FB92") - a = profile.AttributeTable().AppendEmpty() + a = dic.AttributeTable().AppendEmpty() a.SetKey("profile.frame.type") a.Value().SetStr("native") - a = 
profile.AttributeTable().AppendEmpty() + a = dic.AttributeTable().AppendEmpty() a.SetKey("host.id") a.Value().SetStr("localhost") @@ -396,10 +397,10 @@ func CreateTestProfiles() pprofile.Profiles { sample.TimestampsUnixNano().Append(0) sample.SetLocationsLength(1) - m := profile.MappingTable().AppendEmpty() + m := dic.MappingTable().AppendEmpty() m.AttributeIndices().Append(0) - l := profile.LocationTable().AppendEmpty() + l := dic.LocationTable().AppendEmpty() l.SetMappingIndex(0) l.SetAddress(111) l.AttributeIndices().Append(1) diff --git a/pkg/golden/testdata/profiles-roundtrip/expected.yaml b/pkg/golden/testdata/profiles-roundtrip/expected.yaml index f2dfcf5214f9b..40f074ee6b585 100644 --- a/pkg/golden/testdata/profiles-roundtrip/expected.yaml +++ b/pkg/golden/testdata/profiles-roundtrip/expected.yaml @@ -1,27 +1,33 @@ +dictionary: + attributeTable: + - key: process.executable.build_id.htlhash + value: + stringValue: 600DCAFE4A110000F2BF38C493F5FB92 + - key: profile.frame.type + value: + stringValue: native + - key: host.id + value: + stringValue: localhost + locationTable: + - address: "111" + attributeIndices: + - 1 + mappingIndex: 0 + mappingTable: + - attributeIndices: + - 0 + stringTable: + - samples + - count + - cpu + - nanoseconds resourceProfiles: - resource: {} scopeProfiles: - profiles: - attributeIndices: - 2 - attributeTable: - - key: process.executable.build_id.htlhash - value: - stringValue: 600DCAFE4A110000F2BF38C493F5FB92 - - key: profile.frame.type - value: - stringValue: native - - key: host.id - value: - stringValue: localhost - locationTable: - - address: "111" - attributeIndices: - - 1 - mappingIndex: 0 - mappingTable: - - attributeIndices: - - 0 periodType: typeStrindex: 2 unitStrindex: 3 @@ -32,9 +38,4 @@ resourceProfiles: - "0" sampleType: - unitStrindex: 1 - stringTable: - - samples - - count - - cpu - - nanoseconds scope: {} diff --git a/pkg/ottl/contexts/internal/ctxprofile/profile.go b/pkg/ottl/contexts/internal/ctxprofile/profile.go index ca812056e81c8..2262403f762e2 100644 --- a/pkg/ottl/contexts/internal/ctxprofile/profile.go +++ b/pkg/ottl/contexts/internal/ctxprofile/profile.go @@ -47,8 +47,8 @@ func PathGetSetter[K ProfileContext](path ottl.Path[K]) (ottl.GetSetter[K], erro return accessPeriod[K](), nil case "comment_string_indices": return accessCommentStringIndices[K](), nil - case "default_sample_type_string_index": - return accessDefaultSampleTypeStringIndex[K](), nil + case "default_sample_type_index": + return accessDefaultSampleTypeIndex[K](), nil case "profile_id": nextPath := path.Next() if nextPath != nil { @@ -205,14 +205,14 @@ func accessCommentStringIndices[K ProfileContext]() ottl.StandardGetSetter[K] { } } -func accessDefaultSampleTypeStringIndex[K ProfileContext]() ottl.StandardGetSetter[K] { +func accessDefaultSampleTypeIndex[K ProfileContext]() ottl.StandardGetSetter[K] { return ottl.StandardGetSetter[K]{ Getter: func(_ context.Context, tCtx K) (any, error) { - return int64(tCtx.GetProfile().DefaultSampleTypeStrindex()), nil + return int64(tCtx.GetProfile().DefaultSampleTypeIndex()), nil }, Setter: func(_ context.Context, tCtx K, val any) error { if i, ok := val.(int64); ok { - tCtx.GetProfile().SetDefaultSampleTypeStrindex(int32(i)) + tCtx.GetProfile().SetDefaultSampleTypeIndex(int32(i)) } return nil }, diff --git a/pkg/ottl/contexts/internal/ctxprofile/profile_test.go b/pkg/ottl/contexts/internal/ctxprofile/profile_test.go index 8f4a5dd5fb460..a6bf7c1d322f1 100644 --- a/pkg/ottl/contexts/internal/ctxprofile/profile_test.go +++ 
b/pkg/ottl/contexts/internal/ctxprofile/profile_test.go @@ -69,7 +69,7 @@ func TestPathGetSetter(t *testing.T) { val: []int64{345}, }, { - path: "default_sample_type_string_index", + path: "default_sample_type_index", val: int64(456), }, { diff --git a/pkg/ottl/contexts/internal/logprofile/logging.go b/pkg/ottl/contexts/internal/logprofile/logging.go index 855a9bdb5f55c..6187cd8bfc75a 100644 --- a/pkg/ottl/contexts/internal/logprofile/logging.go +++ b/pkg/ottl/contexts/internal/logprofile/logging.go @@ -13,44 +13,46 @@ import ( "go.uber.org/zap/zapcore" ) -type Profile pprofile.Profile +type Profile struct { + pprofile.Profile + Dictionary pprofile.ProfilesDictionary +} func (p Profile) MarshalLogObject(encoder zapcore.ObjectEncoder) error { - pp := pprofile.Profile(p) var joinedErr error - vts, err := newValueTypes(p, pp.SampleType()) + vts, err := newValueTypes(p, p.SampleType()) joinedErr = errors.Join(joinedErr, err) joinedErr = errors.Join(joinedErr, encoder.AddArray("sample_type", vts)) - ss, err := newSamples(p, pp.Sample()) + ss, err := newSamples(p, p.Sample()) joinedErr = errors.Join(joinedErr, err) joinedErr = errors.Join(joinedErr, encoder.AddArray("sample", ss)) - encoder.AddInt64("time_nanos", int64(pp.Time())) - encoder.AddInt64("duration_nanos", int64(pp.Duration())) + encoder.AddInt64("time_nanos", int64(p.Time())) + encoder.AddInt64("duration_nanos", int64(p.Duration())) - vt, err := newValueType(p, pp.PeriodType()) + vt, err := newValueType(p, p.PeriodType()) joinedErr = errors.Join(joinedErr, err) joinedErr = errors.Join(joinedErr, encoder.AddObject("period_type", vt)) - encoder.AddInt64("period", pp.Period()) + encoder.AddInt64("period", p.Period()) cs, err := p.getComments() joinedErr = errors.Join(joinedErr, err) joinedErr = errors.Join(joinedErr, encoder.AddArray("comments", cs)) - dst, err := p.getString(pp.DefaultSampleTypeStrindex()) + dst, err := p.getString(p.DefaultSampleTypeIndex()) joinedErr = errors.Join(joinedErr, err) encoder.AddString("default_sample_type", dst) - pid := pp.ProfileID() + pid := p.ProfileID() encoder.AddString("profile_id", hex.EncodeToString(pid[:])) - encoder.AddUint32("dropped_attributes_count", pp.DroppedAttributesCount()) - encoder.AddString("original_payload_format", pp.OriginalPayloadFormat()) - encoder.AddByteString("original_payload", pp.OriginalPayload().AsRaw()) + encoder.AddUint32("dropped_attributes_count", p.DroppedAttributesCount()) + encoder.AddString("original_payload_format", p.OriginalPayloadFormat()) + encoder.AddByteString("original_payload", p.OriginalPayload().AsRaw()) - ats, err := newAttributes(p, pp.AttributeIndices()) + ats, err := newAttributes(p, p.AttributeIndices()) joinedErr = errors.Join(joinedErr, err) joinedErr = errors.Join(joinedErr, encoder.AddArray("attributes", ats)) @@ -58,8 +60,7 @@ func (p Profile) MarshalLogObject(encoder zapcore.ObjectEncoder) error { } func (p Profile) getString(idx int32) (string, error) { - pp := pprofile.Profile(p) - strTable := pp.StringTable() + strTable := p.Dictionary.StringTable() if idx >= int32(strTable.Len()) { return "", fmt.Errorf("string index out of bounds: %d", idx) } @@ -67,8 +68,7 @@ func (p Profile) getString(idx int32) (string, error) { } func (p Profile) getFunction(idx int32) (function, error) { - pp := pprofile.Profile(p) - fnTable := pp.FunctionTable() + fnTable := p.Dictionary.FunctionTable() if idx >= int32(fnTable.Len()) { return function{}, fmt.Errorf("function index out of bounds: %d", idx) } @@ -76,8 +76,7 @@ func (p Profile) getFunction(idx 
int32) (function, error) { } func (p Profile) getMapping(idx int32) (mapping, error) { - pp := pprofile.Profile(p) - mTable := pp.MappingTable() + mTable := p.Dictionary.MappingTable() if idx >= int32(mTable.Len()) { return mapping{}, fmt.Errorf("mapping index out of bounds: %d", idx) } @@ -85,8 +84,7 @@ func (p Profile) getMapping(idx int32) (mapping, error) { } func (p Profile) getLink(idx int32) (link, error) { - pp := pprofile.Profile(p) - lTable := pp.LinkTable() + lTable := p.Dictionary.LinkTable() if idx >= int32(lTable.Len()) { return link{}, fmt.Errorf("link index out of bounds: %d", idx) } @@ -94,8 +92,7 @@ func (p Profile) getLink(idx int32) (link, error) { } func (p Profile) getLocations(start, length int32) (locations, error) { - pp := pprofile.Profile(p) - locTable := pp.LocationTable() + locTable := p.Dictionary.LocationTable() if start >= int32(locTable.Len()) { return locations{}, fmt.Errorf("location start index out of bounds: %d", start) } @@ -115,8 +112,7 @@ func (p Profile) getLocations(start, length int32) (locations, error) { } func (p Profile) getAttribute(idx int32) (attribute, error) { - pp := pprofile.Profile(p) - attrTable := pp.AttributeTable() + attrTable := p.Dictionary.AttributeTable() if idx >= int32(attrTable.Len()) { return attribute{}, fmt.Errorf("attribute index out of bounds: %d", idx) } @@ -504,11 +500,10 @@ type comments []string func (p Profile) getComments() (comments, error) { var joinedErr error - pp := pprofile.Profile(p) - l := pp.CommentStrindices().Len() + l := p.CommentStrindices().Len() cs := make(comments, 0, l) for i := range l { - c, err := p.getString(pp.CommentStrindices().At(i)) + c, err := p.getString(p.CommentStrindices().At(i)) if err != nil { joinedErr = errors.Join(joinedErr, err) } diff --git a/pkg/ottl/contexts/internal/logprofile/logging_test.go b/pkg/ottl/contexts/internal/logprofile/logging_test.go index 3c2531578368a..62932c87dc8dc 100644 --- a/pkg/ottl/contexts/internal/logprofile/logging_test.go +++ b/pkg/ottl/contexts/internal/logprofile/logging_test.go @@ -17,31 +17,35 @@ import ( func TestProfile_MarshalLogObject(t *testing.T) { tests := []struct { name string - profile pprofile.Profile + profile func() (pprofile.ProfilesDictionary, pprofile.Profile) contains []string notContains []string }{ { name: "valid", - profile: func() pprofile.Profile { + profile: func() (pprofile.ProfilesDictionary, pprofile.Profile) { + dic := pprofile.NewProfilesDictionary() p := &pprofiletest.Profile{ ProfileID: pprofile.ProfileID([]byte("profileid1111111")), Attributes: []pprofiletest.Attribute{{Key: "container-attr1", Value: "value1"}}, } - return p.Transform(pprofile.NewScopeProfiles()) - }(), + return dic, p.Transform(dic, pprofile.NewScopeProfiles()) + }, notContains: []string{"profileError"}, }, { - name: "invalid", - profile: pprofile.NewProfile(), // doesn't include the required empty string in stringTable + name: "invalid", + profile: func() (pprofile.ProfilesDictionary, pprofile.Profile) { + return pprofile.NewProfilesDictionary(), pprofile.NewProfile() + }, contains: []string{"profileError", "string index out of bounds: 0"}, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { + dic, prof := tt.profile() encoder := zapcore.NewJSONEncoder(zapcore.EncoderConfig{}) - buf, err := encoder.EncodeEntry(zapcore.Entry{}, []zapcore.Field{zap.Object("profile", Profile(tt.profile))}) + buf, err := encoder.EncodeEntry(zapcore.Entry{}, []zapcore.Field{zap.Object("profile", Profile{prof, dic})}) assert.NoError(t, err) for _, s := 
range tt.contains { diff --git a/pkg/ottl/contexts/ottlprofile/README.md b/pkg/ottl/contexts/ottlprofile/README.md index b5caa0f0beb77..05495adc9fd33 100644 --- a/pkg/ottl/contexts/ottlprofile/README.md +++ b/pkg/ottl/contexts/ottlprofile/README.md @@ -32,7 +32,7 @@ The following paths are supported. | profile.period_type | the period type of the profile being processed | pprofile.ValueType | | profile.period | the period of the profile being processed | int64 | | profile.comment_string_indices | the comment string indices of the profile being processed | []int64 | -| profile.default_sample_type_string_index | the default sample type string index of the profile being processed | int64 | +| profile.default_sample_type_index | the default sample type string index of the profile being processed | int64 | | profile.profile_id | the profile id of the profile being processed | pprofile.ProfileID | | profile.profile_id.string | a string representation of the profile id | string | | profile.attribute_indices | the attribute indices of the profile being processed | []int64 | diff --git a/pkg/ottl/contexts/ottlprofile/profile.go b/pkg/ottl/contexts/ottlprofile/profile.go index 0dec41a9b1532..32e54a15516b3 100644 --- a/pkg/ottl/contexts/ottlprofile/profile.go +++ b/pkg/ottl/contexts/ottlprofile/profile.go @@ -37,7 +37,7 @@ var ( func (tCtx TransformContext) MarshalLogObject(encoder zapcore.ObjectEncoder) error { err := encoder.AddObject("resource", logging.Resource(tCtx.resource)) err = errors.Join(err, encoder.AddObject("scope", logging.InstrumentationScope(tCtx.instrumentationScope))) - err = errors.Join(err, encoder.AddObject("profile", logprofile.Profile(tCtx.profile))) + err = errors.Join(err, encoder.AddObject("profile", logprofile.Profile{Profile: tCtx.profile, Dictionary: tCtx.dictionary})) err = errors.Join(err, encoder.AddObject("cache", logging.Map(tCtx.cache))) return err } @@ -45,6 +45,7 @@ func (tCtx TransformContext) MarshalLogObject(encoder zapcore.ObjectEncoder) err // TransformContext represents a profile and its associated hierarchy. type TransformContext struct { profile pprofile.Profile + dictionary pprofile.ProfilesDictionary instrumentationScope pcommon.InstrumentationScope resource pcommon.Resource cache pcommon.Map @@ -56,9 +57,10 @@ type TransformContext struct { type TransformContextOption func(*TransformContext) // NewTransformContext creates a new TransformContext with the provided parameters. 
-func NewTransformContext(profile pprofile.Profile, instrumentationScope pcommon.InstrumentationScope, resource pcommon.Resource, scopeProfiles pprofile.ScopeProfiles, resourceProfiles pprofile.ResourceProfiles, options ...TransformContextOption) TransformContext { +func NewTransformContext(profile pprofile.Profile, dictionary pprofile.ProfilesDictionary, instrumentationScope pcommon.InstrumentationScope, resource pcommon.Resource, scopeProfiles pprofile.ScopeProfiles, resourceProfiles pprofile.ResourceProfiles, options ...TransformContextOption) TransformContext { tc := TransformContext{ profile: profile, + dictionary: dictionary, instrumentationScope: instrumentationScope, resource: resource, cache: pcommon.NewMap(), diff --git a/pkg/ottl/contexts/ottlprofile/profile_test.go b/pkg/ottl/contexts/ottlprofile/profile_test.go index c7ed7998e0daf..58523a8daa460 100644 --- a/pkg/ottl/contexts/ottlprofile/profile_test.go +++ b/pkg/ottl/contexts/ottlprofile/profile_test.go @@ -120,12 +120,12 @@ func Test_newPathGetSetter(t *testing.T) { profile := createProfileTelemetry() - tCtx := NewTransformContext(profile, pcommon.NewInstrumentationScope(), pcommon.NewResource(), pprofile.NewScopeProfiles(), pprofile.NewResourceProfiles()) + tCtx := NewTransformContext(profile, pprofile.NewProfilesDictionary(), pcommon.NewInstrumentationScope(), pcommon.NewResource(), pprofile.NewScopeProfiles(), pprofile.NewResourceProfiles()) got, err := accessor.Get(context.Background(), tCtx) assert.NoError(t, err) assert.Equal(t, tt.orig, got) - tCtx = NewTransformContext(profile, pcommon.NewInstrumentationScope(), pcommon.NewResource(), pprofile.NewScopeProfiles(), pprofile.NewResourceProfiles()) + tCtx = NewTransformContext(profile, pprofile.NewProfilesDictionary(), pcommon.NewInstrumentationScope(), pcommon.NewResource(), pprofile.NewScopeProfiles(), pprofile.NewResourceProfiles()) err = accessor.Set(context.Background(), tCtx, tt.newVal) assert.NoError(t, err) @@ -146,7 +146,7 @@ func Test_newPathGetSetter_higherContextPath(t *testing.T) { instrumentationScope := pcommon.NewInstrumentationScope() instrumentationScope.SetName("instrumentation_scope") - ctx := NewTransformContext(pprofile.NewProfile(), instrumentationScope, resource, pprofile.NewScopeProfiles(), pprofile.NewResourceProfiles()) + ctx := NewTransformContext(pprofile.NewProfile(), pprofile.NewProfilesDictionary(), instrumentationScope, resource, pprofile.NewScopeProfiles(), pprofile.NewResourceProfiles()) tests := []struct { name string diff --git a/pkg/pdatatest/pprofiletest/options.go b/pkg/pdatatest/pprofiletest/options.go index 056fac9a3be0b..2567007e4fb50 100644 --- a/pkg/pdatatest/pprofiletest/options.go +++ b/pkg/pdatatest/pprofiletest/options.go @@ -98,20 +98,11 @@ func (opt ignoreProfileAttributeValue) applyOnProfiles(expected, actual pprofile } func (opt ignoreProfileAttributeValue) maskProfileAttributeValue(profiles pprofile.Profiles) { - rls := profiles.ResourceProfiles() - for i := 0; i < profiles.ResourceProfiles().Len(); i++ { - sls := rls.At(i).ScopeProfiles() - for j := 0; j < sls.Len(); j++ { - lrs := sls.At(j).Profiles() - for k := 0; k < lrs.Len(); k++ { - lr := lrs.At(k) - for l := 0; l < lr.AttributeTable().Len(); l++ { - a := lr.AttributeTable().At(l) - if a.Key() == opt.attributeName { - a.Value().SetEmptyBytes() - } - } - } + dic := profiles.ProfilesDictionary() + for l := 0; l < dic.AttributeTable().Len(); l++ { + a := dic.AttributeTable().At(l) + if a.Key() == opt.attributeName { + a.Value().SetEmptyBytes() } } } @@ -205,32 
+196,16 @@ func sortProfileSlices(ls pprofile.Profiles) { } as := a.ProfileID() bs := b.ProfileID() - if !bytes.Equal(as[:], bs[:]) { - return bytes.Compare(as[:], bs[:]) < 0 - } - am := pcommon.NewMap() - for _, i := range a.AttributeIndices().AsRaw() { - v := a.AttributeTable().At(int(i)) - am.PutStr(v.Key(), v.Value().AsString()) - } - bm := pcommon.NewMap() - for _, i := range b.AttributeIndices().AsRaw() { - v := b.AttributeTable().At(int(i)) - bm.PutStr(v.Key(), v.Value().AsString()) - } - - aAttrs := pdatautil.MapHash(am) - bAttrs := pdatautil.MapHash(bm) - return bytes.Compare(aAttrs[:], bAttrs[:]) < 0 + return bytes.Compare(as[:], bs[:]) < 0 }) } } } -func profileAttributesToMap(p pprofile.Profile) map[string]string { +func profileAttributesToMap(dic pprofile.ProfilesDictionary, p pprofile.Profile) map[string]string { d := map[string]string{} for _, i := range p.AttributeIndices().AsRaw() { - v := p.AttributeTable().At(int(i)) + v := dic.AttributeTable().At(int(i)) d[v.Key()] = v.Value().AsString() } diff --git a/pkg/pdatatest/pprofiletest/profiles.go b/pkg/pdatatest/pprofiletest/profiles.go index d67884aedf1e2..4884d7108dcfa 100644 --- a/pkg/pdatatest/pprofiletest/profiles.go +++ b/pkg/pdatatest/pprofiletest/profiles.go @@ -86,7 +86,7 @@ func CompareProfiles(expected, actual pprofile.Profiles, options ...CompareProfi for ar, er := range matchingResources { errPrefix := fmt.Sprintf(`resource "%v"`, er.Resource().Attributes().AsRaw()) - errs = multierr.Append(errs, internal.AddErrPrefix(errPrefix, CompareResourceProfiles(er, ar))) + errs = multierr.Append(errs, internal.AddErrPrefix(errPrefix, CompareResourceProfiles(exp.ProfilesDictionary(), act.ProfilesDictionary(), er, ar))) } return errs @@ -94,7 +94,7 @@ func CompareProfiles(expected, actual pprofile.Profiles, options ...CompareProfi // CompareResourceProfiles compares each part of two given ResourceProfiles and returns // an error if they don't match. The error describes what didn't match. -func CompareResourceProfiles(expected, actual pprofile.ResourceProfiles) error { +func CompareResourceProfiles(expectedDic, actualDic pprofile.ProfilesDictionary, expected, actual pprofile.ResourceProfiles) error { errs := multierr.Combine( internal.CompareResource(expected.Resource(), actual.Resource()), internal.CompareSchemaURL(expected.SchemaUrl(), actual.SchemaUrl()), @@ -154,7 +154,7 @@ func CompareResourceProfiles(expected, actual pprofile.ResourceProfiles) error { for i := 0; i < esls.Len(); i++ { errPrefix := fmt.Sprintf(`scope "%s"`, esls.At(i).Scope().Name()) - errs = multierr.Append(errs, internal.AddErrPrefix(errPrefix, CompareScopeProfiles(esls.At(i), asls.At(i)))) + errs = multierr.Append(errs, internal.AddErrPrefix(errPrefix, CompareScopeProfiles(expectedDic, actualDic, esls.At(i), asls.At(i)))) } return errs @@ -162,7 +162,7 @@ func CompareResourceProfiles(expected, actual pprofile.ResourceProfiles) error { // CompareScopeProfiles compares each part of two given ProfilesSlices and returns // an error if they don't match. The error describes what didn't match. 
-func CompareScopeProfiles(expected, actual pprofile.ScopeProfiles) error { +func CompareScopeProfiles(expectedDic, actualDic pprofile.ProfilesDictionary, expected, actual pprofile.ScopeProfiles) error { errs := multierr.Combine( internal.CompareInstrumentationScope(expected.Scope(), actual.Scope()), internal.CompareSchemaURL(expected.SchemaUrl(), actual.SchemaUrl()), @@ -182,17 +182,17 @@ func CompareScopeProfiles(expected, actual pprofile.ScopeProfiles) error { var outOfOrderErrs error for e := 0; e < numProfiles; e++ { elr := expected.Profiles().At(e) - errs = multierr.Append(errs, ValidateProfile(elr)) - em := profileAttributesToMap(elr) + errs = multierr.Append(errs, ValidateProfile(expectedDic, elr)) + em := profileAttributesToMap(expectedDic, elr) var foundMatch bool for a := 0; a < numProfiles; a++ { alr := actual.Profiles().At(a) - errs = multierr.Append(errs, ValidateProfile(alr)) + errs = multierr.Append(errs, ValidateProfile(actualDic, alr)) if _, ok := matchingProfiles[alr]; ok { continue } - am := profileAttributesToMap(alr) + am := profileAttributesToMap(actualDic, alr) if reflect.DeepEqual(em, am) { foundMatch = true @@ -213,7 +213,7 @@ func CompareScopeProfiles(expected, actual pprofile.ScopeProfiles) error { for i := 0; i < numProfiles; i++ { if _, ok := matchingProfiles[actual.Profiles().At(i)]; !ok { errs = multierr.Append(errs, fmt.Errorf("unexpected profile: %v", - profileAttributesToMap(actual.Profiles().At(i)))) + profileAttributesToMap(actualDic, actual.Profiles().At(i)))) } } @@ -225,15 +225,15 @@ func CompareScopeProfiles(expected, actual pprofile.ScopeProfiles) error { } for alr, elr := range matchingProfiles { - errPrefix := fmt.Sprintf(`profile "%v"`, profileAttributesToMap(elr)) - errs = multierr.Append(errs, internal.AddErrPrefix(errPrefix, CompareProfile(elr, alr))) + errPrefix := fmt.Sprintf(`profile "%v"`, profileAttributesToMap(expectedDic, elr)) + errs = multierr.Append(errs, internal.AddErrPrefix(errPrefix, CompareProfile(expectedDic, actualDic, elr, alr))) } return errs } -func compareAttributes(a, b pprofile.Profile) error { - aa := profileAttributesToMap(a) - ba := profileAttributesToMap(b) +func compareAttributes(expectedDic, actualDic pprofile.ProfilesDictionary, a, b pprofile.Profile) error { + aa := profileAttributesToMap(expectedDic, a) + ba := profileAttributesToMap(actualDic, b) if !reflect.DeepEqual(aa, ba) { return fmt.Errorf("attributes don't match expected: %v, actual: %v", aa, ba) @@ -242,9 +242,9 @@ func compareAttributes(a, b pprofile.Profile) error { return nil } -func CompareProfile(expected, actual pprofile.Profile) error { +func CompareProfile(expectedDic, actualDic pprofile.ProfilesDictionary, expected, actual pprofile.Profile) error { errs := multierr.Combine( - compareAttributes(expected, actual), + compareAttributes(expectedDic, actualDic, expected, actual), internal.CompareDroppedAttributesCount(expected.DroppedAttributesCount(), actual.DroppedAttributesCount()), ) @@ -268,10 +268,6 @@ func CompareProfile(expected, actual pprofile.Profile) error { errs = multierr.Append(errs, fmt.Errorf("time doesn't match expected: %d, actual: %d", expected.Time(), actual.Time())) } - if !reflect.DeepEqual(expected.StringTable(), actual.StringTable()) { - errs = multierr.Append(errs, fmt.Errorf("stringTable '%v' does not match expected '%v'", actual.StringTable().AsRaw(), expected.StringTable().AsRaw())) - } - if expected.OriginalPayloadFormat() != actual.OriginalPayloadFormat() { errs = multierr.Append(errs, fmt.Errorf("originalPayloadFormat 
does not match expected '%s', actual '%s'", expected.OriginalPayloadFormat(), actual.OriginalPayloadFormat())) } @@ -292,8 +288,8 @@ func CompareProfile(expected, actual pprofile.Profile) error { errs = multierr.Append(errs, fmt.Errorf("period does not match expected '%d', actual '%d'", expected.Period(), actual.Period())) } - if expected.DefaultSampleTypeStrindex() != actual.DefaultSampleTypeStrindex() { - errs = multierr.Append(errs, fmt.Errorf("defaultSampleType does not match expected '%d', actual '%d'", expected.DefaultSampleTypeStrindex(), actual.DefaultSampleTypeStrindex())) + if expected.DefaultSampleTypeIndex() != actual.DefaultSampleTypeIndex() { + errs = multierr.Append(errs, fmt.Errorf("defaultSampleType does not match expected '%d', actual '%d'", expected.DefaultSampleTypeIndex(), actual.DefaultSampleTypeIndex())) } if expected.PeriodType().TypeStrindex() != actual.PeriodType().TypeStrindex() || @@ -308,16 +304,6 @@ func CompareProfile(expected, actual pprofile.Profile) error { errs = multierr.Append(errs, internal.AddErrPrefix("sample", CompareProfileSampleSlice(expected.Sample(), actual.Sample()))) - errs = multierr.Append(errs, internal.AddErrPrefix("mapping", CompareProfileMappingSlice(expected.MappingTable(), actual.MappingTable()))) - - errs = multierr.Append(errs, internal.AddErrPrefix("location", CompareProfileLocationSlice(expected.LocationTable(), actual.LocationTable()))) - - errs = multierr.Append(errs, internal.AddErrPrefix("function", CompareProfileFunctionSlice(expected.FunctionTable(), actual.FunctionTable()))) - - errs = multierr.Append(errs, internal.AddErrPrefix("attributeUnits", CompareProfileAttributeUnitSlice(expected.AttributeUnits(), actual.AttributeUnits()))) - - errs = multierr.Append(errs, internal.AddErrPrefix("linkTable", CompareProfileLinkSlice(expected.LinkTable(), actual.LinkTable()))) - return errs } diff --git a/pkg/pdatatest/pprofiletest/profiles_test.go b/pkg/pdatatest/pprofiletest/profiles_test.go index 6cfafa3c5360a..81721f15cde89 100644 --- a/pkg/pdatatest/pprofiletest/profiles_test.go +++ b/pkg/pdatatest/pprofiletest/profiles_test.go @@ -391,67 +391,78 @@ func TestCompareProfiles(t *testing.T) { func TestCompareResourceProfiles(t *testing.T) { tests := []struct { name string - expected pprofile.ResourceProfiles - actual pprofile.ResourceProfiles + expected func() (pprofile.ProfilesDictionary, pprofile.ResourceProfiles) + actual func() (pprofile.ProfilesDictionary, pprofile.ResourceProfiles) err error }{ { name: "equal", - expected: func() pprofile.ResourceProfiles { - return basicProfiles().Transform().ResourceProfiles().At(0) - }(), - actual: func() pprofile.ResourceProfiles { - return basicProfiles().Transform().ResourceProfiles().At(0) - }(), + expected: func() (pprofile.ProfilesDictionary, pprofile.ResourceProfiles) { + prof := basicProfiles().Transform() + return prof.ProfilesDictionary(), prof.ResourceProfiles().At(0) + }, + actual: func() (pprofile.ProfilesDictionary, pprofile.ResourceProfiles) { + prof := basicProfiles().Transform() + return prof.ProfilesDictionary(), prof.ResourceProfiles().At(0) + }, }, { name: "resource-attributes-mismatch", - expected: func() pprofile.ResourceProfiles { + expected: func() (pprofile.ProfilesDictionary, pprofile.ResourceProfiles) { + dic := pprofile.NewProfilesDictionary() rl := pprofile.NewResourceProfiles() rl.Resource().Attributes().PutStr("key1", "value1") rl.Resource().Attributes().PutStr("key2", "value2") - return rl - }(), - actual: func() pprofile.ResourceProfiles { + return dic, rl + 
}, + actual: func() (pprofile.ProfilesDictionary, pprofile.ResourceProfiles) { + dic := pprofile.NewProfilesDictionary() rl := pprofile.NewResourceProfiles() rl.Resource().Attributes().PutStr("key1", "value1") - return rl - }(), + return dic, rl + }, err: errors.New("attributes don't match expected: map[key1:value1 key2:value2], actual: map[key1:value1]"), }, { name: "resource-schema-url-mismatch", - expected: func() pprofile.ResourceProfiles { + expected: func() (pprofile.ProfilesDictionary, pprofile.ResourceProfiles) { + dic := pprofile.NewProfilesDictionary() rl := pprofile.NewResourceProfiles() rl.SetSchemaUrl("schema-url") - return rl - }(), - actual: func() pprofile.ResourceProfiles { + return dic, rl + }, + actual: func() (pprofile.ProfilesDictionary, pprofile.ResourceProfiles) { + dic := pprofile.NewProfilesDictionary() rl := pprofile.NewResourceProfiles() rl.SetSchemaUrl("schema-url-2") - return rl - }(), + return dic, rl + }, err: errors.New("schema url doesn't match expected: schema-url, actual: schema-url-2"), }, { name: "scope-profiles-number-mismatch", - expected: func() pprofile.ResourceProfiles { + expected: func() (pprofile.ProfilesDictionary, pprofile.ResourceProfiles) { + dic := pprofile.NewProfilesDictionary() rl := pprofile.NewResourceProfiles() rl.ScopeProfiles().AppendEmpty() rl.ScopeProfiles().AppendEmpty() - return rl - }(), - actual: func() pprofile.ResourceProfiles { + return dic, rl + }, + actual: func() (pprofile.ProfilesDictionary, pprofile.ResourceProfiles) { + dic := pprofile.NewProfilesDictionary() rl := pprofile.NewResourceProfiles() rl.ScopeProfiles().AppendEmpty() - return rl - }(), + return dic, rl + }, err: errors.New("number of scopes doesn't match expected: 2, actual: 1"), }, } for _, test := range tests { t.Run(test.name, func(t *testing.T) { - require.Equal(t, test.err, CompareResourceProfiles(test.expected, test.actual)) + eDic, eRp := test.expected() + aDic, aRp := test.actual() + + require.Equal(t, test.err, CompareResourceProfiles(eDic, aDic, eRp, aRp)) }) } } @@ -459,65 +470,73 @@ func TestCompareResourceProfiles(t *testing.T) { func TestCompareScopeProfiles(t *testing.T) { tests := []struct { name string - expected pprofile.ScopeProfiles - actual pprofile.ScopeProfiles + expected func() (pprofile.ProfilesDictionary, pprofile.ScopeProfiles) + actual func() (pprofile.ProfilesDictionary, pprofile.ScopeProfiles) err error }{ { name: "equal", - expected: func() pprofile.ScopeProfiles { + expected: func() (pprofile.ProfilesDictionary, pprofile.ScopeProfiles) { p := basicProfiles() p.ResourceProfiles[0].ScopeProfiles[0].Scope.Name = "scope1" - return p.Transform().ResourceProfiles().At(0).ScopeProfiles().At(0) - }(), - actual: func() pprofile.ScopeProfiles { + tr := p.Transform() + return tr.ProfilesDictionary(), tr.ResourceProfiles().At(0).ScopeProfiles().At(0) + }, + actual: func() (pprofile.ProfilesDictionary, pprofile.ScopeProfiles) { p := basicProfiles() p.ResourceProfiles[0].ScopeProfiles[0].Scope.Name = "scope1" - return p.Transform().ResourceProfiles().At(0).ScopeProfiles().At(0) - }(), + tr := p.Transform() + return tr.ProfilesDictionary(), tr.ResourceProfiles().At(0).ScopeProfiles().At(0) + }, }, { name: "scope-name-mismatch", - expected: func() pprofile.ScopeProfiles { + expected: func() (pprofile.ProfilesDictionary, pprofile.ScopeProfiles) { + dic := pprofile.NewProfilesDictionary() sl := pprofile.NewScopeProfiles() sl.Scope().SetName("scope-name") - return sl - }(), - actual: func() pprofile.ScopeProfiles { + return dic, sl + }, + 
actual: func() (pprofile.ProfilesDictionary, pprofile.ScopeProfiles) { + dic := pprofile.NewProfilesDictionary() sl := pprofile.NewScopeProfiles() sl.Scope().SetName("scope-name-2") - return sl - }(), + return dic, sl + }, err: errors.New("name doesn't match expected: scope-name, actual: scope-name-2"), }, { name: "scope-version-mismatch", - expected: func() pprofile.ScopeProfiles { + expected: func() (pprofile.ProfilesDictionary, pprofile.ScopeProfiles) { + dic := pprofile.NewProfilesDictionary() sl := pprofile.NewScopeProfiles() sl.Scope().SetVersion("scope-version") - return sl - }(), - actual: func() pprofile.ScopeProfiles { + return dic, sl + }, + actual: func() (pprofile.ProfilesDictionary, pprofile.ScopeProfiles) { + dic := pprofile.NewProfilesDictionary() sl := pprofile.NewScopeProfiles() sl.Scope().SetVersion("scope-version-2") - return sl - }(), + return dic, sl + }, err: errors.New("version doesn't match expected: scope-version, actual: scope-version-2"), }, { name: "scope-attributes-mismatch", - expected: func() pprofile.ScopeProfiles { + expected: func() (pprofile.ProfilesDictionary, pprofile.ScopeProfiles) { + dic := pprofile.NewProfilesDictionary() sl := pprofile.NewScopeProfiles() sl.Scope().Attributes().PutStr("scope-attr1", "value1") sl.Scope().Attributes().PutStr("scope-attr2", "value2") - return sl - }(), - actual: func() pprofile.ScopeProfiles { + return dic, sl + }, + actual: func() (pprofile.ProfilesDictionary, pprofile.ScopeProfiles) { + dic := pprofile.NewProfilesDictionary() sl := pprofile.NewScopeProfiles() sl.Scope().Attributes().PutStr("scope-attr1", "value1") sl.Scope().SetDroppedAttributesCount(1) - return sl - }(), + return dic, sl + }, err: multierr.Combine( errors.New("attributes don't match expected: map[scope-attr1:value1 scope-attr2:value2], "+ "actual: map[scope-attr1:value1]"), @@ -526,36 +545,40 @@ func TestCompareScopeProfiles(t *testing.T) { }, { name: "scope-schema-url-mismatch", - expected: func() pprofile.ScopeProfiles { + expected: func() (pprofile.ProfilesDictionary, pprofile.ScopeProfiles) { + dic := pprofile.NewProfilesDictionary() rl := pprofile.NewScopeProfiles() rl.SetSchemaUrl("schema-url") - return rl - }(), - actual: func() pprofile.ScopeProfiles { + return dic, rl + }, + actual: func() (pprofile.ProfilesDictionary, pprofile.ScopeProfiles) { + dic := pprofile.NewProfilesDictionary() rl := pprofile.NewScopeProfiles() rl.SetSchemaUrl("schema-url-2") - return rl - }(), + return dic, rl + }, err: errors.New("schema url doesn't match expected: schema-url, actual: schema-url-2"), }, { name: "profiles-number-mismatch", - expected: func() pprofile.ScopeProfiles { + expected: func() (pprofile.ProfilesDictionary, pprofile.ScopeProfiles) { + dic := pprofile.NewProfilesDictionary() sl := pprofile.NewScopeProfiles() sl.Profiles().AppendEmpty() sl.Profiles().AppendEmpty() - return sl - }(), - actual: func() pprofile.ScopeProfiles { + return dic, sl + }, + actual: func() (pprofile.ProfilesDictionary, pprofile.ScopeProfiles) { + dic := pprofile.NewProfilesDictionary() sl := pprofile.NewScopeProfiles() sl.Profiles().AppendEmpty() - return sl - }(), + return dic, sl + }, err: errors.New("number of profiles doesn't match expected: 2, actual: 1"), }, { name: "profile-records-order-mismatch", - expected: func() pprofile.ScopeProfiles { + expected: func() (pprofile.ProfilesDictionary, pprofile.ScopeProfiles) { p := basicProfiles() p.ResourceProfiles[0].ScopeProfiles[0].Profile = []Profile{ { @@ -567,9 +590,10 @@ func TestCompareScopeProfiles(t *testing.T) { 
Attributes: []Attribute{{"scope-attr2", "value2"}}, }, } - return p.Transform().ResourceProfiles().At(0).ScopeProfiles().At(0) - }(), - actual: func() pprofile.ScopeProfiles { + tr := p.Transform() + return tr.ProfilesDictionary(), tr.ResourceProfiles().At(0).ScopeProfiles().At(0) + }, + actual: func() (pprofile.ProfilesDictionary, pprofile.ScopeProfiles) { p := basicProfiles() p.ResourceProfiles[0].ScopeProfiles[0].Profile = []Profile{ { @@ -581,8 +605,9 @@ func TestCompareScopeProfiles(t *testing.T) { Attributes: []Attribute{{"scope-attr1", "value1"}}, }, } - return p.Transform().ResourceProfiles().At(0).ScopeProfiles().At(0) - }(), + tr := p.Transform() + return tr.ProfilesDictionary(), tr.ResourceProfiles().At(0).ScopeProfiles().At(0) + }, err: multierr.Combine( errors.New(`profiles are out of order: profile "map[scope-attr1:value1]" expected at index 0, found at index 1`), errors.New(`profiles are out of order: profile "map[scope-attr2:value2]" expected at index 1, found at index 0`), @@ -591,7 +616,10 @@ func TestCompareScopeProfiles(t *testing.T) { } for _, test := range tests { t.Run(test.name, func(t *testing.T) { - require.Equal(t, test.err, CompareScopeProfiles(test.expected, test.actual)) + eDic, eSp := test.expected() + aDic, aSp := test.actual() + + require.Equal(t, test.err, CompareScopeProfiles(eDic, aDic, eSp, aSp)) }) } } @@ -599,24 +627,26 @@ func TestCompareScopeProfiles(t *testing.T) { func TestCompareProfile(t *testing.T) { tests := []struct { name string - expected pprofile.Profile - actual pprofile.Profile + expected func() (pprofile.ProfilesDictionary, pprofile.Profile) + actual func() (pprofile.ProfilesDictionary, pprofile.Profile) err error }{ { name: "empty", - expected: func() pprofile.Profile { + expected: func() (pprofile.ProfilesDictionary, pprofile.Profile) { + dic := pprofile.NewProfilesDictionary() l := pprofile.NewProfile() - return l - }(), - actual: func() pprofile.Profile { + return dic, l + }, + actual: func() (pprofile.ProfilesDictionary, pprofile.Profile) { + dic := pprofile.NewProfilesDictionary() l := pprofile.NewProfile() - return l - }(), + return dic, l + }, }, { name: "equal", - expected: func() pprofile.Profile { + expected: func() (pprofile.ProfilesDictionary, pprofile.Profile) { p := basicProfiles() p.ResourceProfiles[0].ScopeProfiles[0].Profile = []Profile{ { @@ -639,9 +669,10 @@ func TestCompareProfile(t *testing.T) { AttributeUnits: []AttributeUnit{{AttributeKey: "cpu", Unit: "nanoseconds"}}, }, } - return p.Transform().ResourceProfiles().At(0).ScopeProfiles().At(0).Profiles().At(0) - }(), - actual: func() pprofile.Profile { + tr := p.Transform() + return tr.ProfilesDictionary(), tr.ResourceProfiles().At(0).ScopeProfiles().At(0).Profiles().At(0) + }, + actual: func() (pprofile.ProfilesDictionary, pprofile.Profile) { p := basicProfiles() p.ResourceProfiles[0].ScopeProfiles[0].Profile = []Profile{ { @@ -664,12 +695,13 @@ func TestCompareProfile(t *testing.T) { AttributeUnits: []AttributeUnit{{AttributeKey: "cpu", Unit: "nanoseconds"}}, }, } - return p.Transform().ResourceProfiles().At(0).ScopeProfiles().At(0).Profiles().At(0) - }(), + tr := p.Transform() + return tr.ProfilesDictionary(), tr.ResourceProfiles().At(0).ScopeProfiles().At(0).Profiles().At(0) + }, }, { name: "not equal", - expected: func() pprofile.Profile { + expected: func() (pprofile.ProfilesDictionary, pprofile.Profile) { p := basicProfiles() p.ResourceProfiles[0].ScopeProfiles[0].Profile = []Profile{ { @@ -692,9 +724,10 @@ func TestCompareProfile(t *testing.T) { 
AttributeUnits: []AttributeUnit{{AttributeKey: "cpu", Unit: "nanoseconds"}}, }, } - return p.Transform().ResourceProfiles().At(0).ScopeProfiles().At(0).Profiles().At(0) - }(), - actual: func() pprofile.Profile { + tr := p.Transform() + return tr.ProfilesDictionary(), tr.ResourceProfiles().At(0).ScopeProfiles().At(0).Profiles().At(0) + }, + actual: func() (pprofile.ProfilesDictionary, pprofile.Profile) { p := basicProfiles() p.ResourceProfiles[0].ScopeProfiles[0].Profile = []Profile{ { @@ -717,22 +750,22 @@ func TestCompareProfile(t *testing.T) { AttributeUnits: []AttributeUnit{{AttributeKey: "cpu2", Unit: "nanoseconds2"}}, }, } - return p.Transform().ResourceProfiles().At(0).ScopeProfiles().At(0).Profiles().At(0) - }(), + tr := p.Transform() + return tr.ProfilesDictionary(), tr.ResourceProfiles().At(0).ScopeProfiles().At(0).Profiles().At(0) + }, err: multierr.Combine( errors.New(`attributes don't match expected: map[key:val], actual: map[key1:val1]`), - errors.New(`stringTable '[ cpu1 nanoseconds1 samples count samples1 count1 cpu2 nanoseconds2]' does not match expected '[ cpu nanoseconds samples count]'`), errors.New(`period does not match expected '1', actual '2'`), fmt.Errorf(`sampleType: %w`, errors.New(`missing expected valueType "unit: 4, type: 3, aggregationTemporality: 1"`)), fmt.Errorf(`sampleType: %w`, errors.New(`unexpected valueType "unit: 6, type: 5, aggregationTemporality: 1"`)), - fmt.Errorf(`attributeUnits: %w`, errors.New(`missing expected attributeUnit "attributeKey: 1"`)), - fmt.Errorf(`attributeUnits: %w`, errors.New(`unexpected profile attributeUnit "attributeKey: 7"`)), ), }, } for _, test := range tests { t.Run(test.name, func(t *testing.T) { - require.Equal(t, test.err, CompareProfile(test.expected, test.actual)) + eDic, eP := test.expected() + aDic, aP := test.actual() + require.Equal(t, test.err, CompareProfile(eDic, aDic, eP, aP)) }) } } diff --git a/pkg/pdatatest/pprofiletest/types.go b/pkg/pdatatest/pprofiletest/types.go index e64b9145580e0..0b2413eb3aadd 100644 --- a/pkg/pdatatest/pprofiletest/types.go +++ b/pkg/pdatatest/pprofiletest/types.go @@ -29,7 +29,7 @@ type ResourceProfile struct { func (rp ResourceProfile) Transform(pp pprofile.Profiles) pprofile.ResourceProfiles { prp := pp.ResourceProfiles().AppendEmpty() for _, sp := range rp.ScopeProfiles { - sp.Transform(prp) + sp.Transform(pp.ProfilesDictionary(), prp) } for _, a := range rp.Resource.Attributes { if prp.Resource().Attributes().PutEmpty(a.Key).FromRaw(a.Value) != nil { @@ -50,10 +50,10 @@ type ScopeProfile struct { SchemaURL string } -func (sp ScopeProfile) Transform(prp pprofile.ResourceProfiles) pprofile.ScopeProfiles { +func (sp ScopeProfile) Transform(dic pprofile.ProfilesDictionary, prp pprofile.ResourceProfiles) pprofile.ScopeProfiles { psp := prp.ScopeProfiles().AppendEmpty() for _, p := range sp.Profile { - p.Transform(psp) + p.Transform(dic, psp) } sp.Scope.Transform(psp) psp.SetSchemaUrl(sp.SchemaURL) @@ -100,11 +100,11 @@ type Profile struct { AttributeUnits []AttributeUnit } -func (p *Profile) Transform(psp pprofile.ScopeProfiles) pprofile.Profile { +func (p *Profile) Transform(dic pprofile.ProfilesDictionary, psp pprofile.ScopeProfiles) pprofile.Profile { pp := psp.Profiles().AppendEmpty() // Avoids that 0 (default) string indices point to nowhere. - addString(pp, "") + addString(dic, "") // If valueTypes are not set, set them to the default value. 
defaultValueType := ValueType{Typ: "samples", Unit: "count", AggregationTemporality: pprofile.AggregationTemporalityDelta} @@ -115,47 +115,47 @@ func (p *Profile) Transform(psp pprofile.ScopeProfiles) pprofile.Profile { p.DefaultSampleType = defaultValueType } - p.SampleType.Transform(pp) + p.SampleType.Transform(dic, pp) for _, sa := range p.Sample { - sa.Transform(pp) + sa.Transform(dic, pp) } pp.SetTime(p.TimeNanos) pp.SetDuration(p.DurationNanos) - p.PeriodType.CopyTo(pp, pp.PeriodType()) + p.PeriodType.CopyTo(dic, pp.PeriodType()) pp.SetPeriod(p.Period) for _, c := range p.Comment { - pp.CommentStrindices().Append(addString(pp, c)) + pp.CommentStrindices().Append(addString(dic, c)) } - p.DefaultSampleType.Transform(pp) + p.DefaultSampleType.Transform(dic, pp) pp.SetProfileID(p.ProfileID) pp.SetDroppedAttributesCount(p.DroppedAttributesCount) pp.SetOriginalPayloadFormat(p.OriginalPayloadFormat) pp.OriginalPayload().FromRaw(p.OriginalPayload) for _, at := range p.Attributes { - pp.AttributeIndices().Append(at.Transform(pp)) + pp.AttributeIndices().Append(at.Transform(dic)) } for _, au := range p.AttributeUnits { - au.Transform(pp) + au.Transform(dic) } return pp } -func addString(pp pprofile.Profile, s string) int32 { - for i := range pp.StringTable().Len() { - if pp.StringTable().At(i) == s { +func addString(dic pprofile.ProfilesDictionary, s string) int32 { + for i := range dic.StringTable().Len() { + if dic.StringTable().At(i) == s { return int32(i) } } - pp.StringTable().Append(s) - return int32(pp.StringTable().Len() - 1) + dic.StringTable().Append(s) + return int32(dic.StringTable().Len() - 1) } type ValueTypes []ValueType -func (vts *ValueTypes) Transform(pp pprofile.Profile) { +func (vts *ValueTypes) Transform(dic pprofile.ProfilesDictionary, pp pprofile.Profile) { for _, vt := range *vts { - vt.Transform(pp) + vt.Transform(dic, pp) } } @@ -165,11 +165,11 @@ type ValueType struct { AggregationTemporality pprofile.AggregationTemporality } -func (vt *ValueType) exists(pp pprofile.Profile) bool { +func (vt *ValueType) exists(dic pprofile.ProfilesDictionary, pp pprofile.Profile) bool { for i := range pp.SampleType().Len() { st := pp.SampleType().At(i) - if vt.Typ == pp.StringTable().At(int(st.TypeStrindex())) && - vt.Unit == pp.StringTable().At(int(st.UnitStrindex())) && + if vt.Typ == dic.StringTable().At(int(st.TypeStrindex())) && + vt.Unit == dic.StringTable().At(int(st.UnitStrindex())) && vt.AggregationTemporality == st.AggregationTemporality() { return true } @@ -177,15 +177,15 @@ func (vt *ValueType) exists(pp pprofile.Profile) bool { return false } -func (vt *ValueType) CopyTo(pp pprofile.Profile, pvt pprofile.ValueType) { - pvt.SetTypeStrindex(addString(pp, vt.Typ)) - pvt.SetUnitStrindex(addString(pp, vt.Unit)) +func (vt *ValueType) CopyTo(dic pprofile.ProfilesDictionary, pvt pprofile.ValueType) { + pvt.SetTypeStrindex(addString(dic, vt.Typ)) + pvt.SetUnitStrindex(addString(dic, vt.Unit)) pvt.SetAggregationTemporality(vt.AggregationTemporality) } -func (vt *ValueType) Transform(pp pprofile.Profile) { - if !vt.exists(pp) { - vt.CopyTo(pp, pp.SampleType().AppendEmpty()) +func (vt *ValueType) Transform(dic pprofile.ProfilesDictionary, pp pprofile.Profile) { + if !vt.exists(dic, pp) { + vt.CopyTo(dic, pp.SampleType().AppendEmpty()) } } @@ -197,7 +197,7 @@ type Sample struct { TimestampsUnixNano []uint64 } -func (sa *Sample) Transform(pp pprofile.Profile) { +func (sa *Sample) Transform(dic pprofile.ProfilesDictionary, pp pprofile.Profile) { if len(sa.Value) != 
pp.SampleType().Len() { panic("length of profile.sample_type must be equal to the length of sample.value") } @@ -205,9 +205,9 @@ func (sa *Sample) Transform(pp pprofile.Profile) { psa.SetLocationsStartIndex(int32(pp.LocationIndices().Len())) for _, loc := range sa.Locations { pp.LocationIndices().Append(int32(pp.LocationIndices().Len())) - ploc := pp.LocationTable().AppendEmpty() + ploc := dic.LocationTable().AppendEmpty() if loc.Mapping != nil { - loc.Mapping.Transform(pp) + loc.Mapping.Transform(dic) } ploc.SetAddress(loc.Address) ploc.SetIsFolded(loc.IsFolded) @@ -215,16 +215,16 @@ func (sa *Sample) Transform(pp pprofile.Profile) { pl := ploc.Line().AppendEmpty() pl.SetLine(l.Line) pl.SetColumn(l.Column) - pl.SetFunctionIndex(l.Function.Transform(pp)) + pl.SetFunctionIndex(l.Function.Transform(dic)) } for _, at := range loc.Attributes { - ploc.AttributeIndices().Append(at.Transform(pp)) + ploc.AttributeIndices().Append(at.Transform(dic)) } } psa.SetLocationsLength(int32(pp.LocationIndices().Len()) - psa.LocationsStartIndex()) psa.Value().FromRaw(sa.Value) for _, at := range sa.Attributes { - psa.AttributeIndices().Append(at.Transform(pp)) + psa.AttributeIndices().Append(at.Transform(dic)) } //nolint:revive,staticcheck if sa.Link != nil { @@ -246,11 +246,11 @@ type Link struct { SpanID pcommon.SpanID } -func (l *Link) Transform(pp pprofile.Profile) int32 { - pl := pp.LinkTable().AppendEmpty() +func (l *Link) Transform(dic pprofile.ProfilesDictionary) int32 { + pl := dic.LinkTable().AppendEmpty() pl.SetTraceID(l.TraceID) pl.SetSpanID(l.SpanID) - return int32(pp.LinkTable().Len() - 1) + return int32(dic.LinkTable().Len() - 1) } type Mapping struct { @@ -265,14 +265,14 @@ type Mapping struct { HasInlineFrames bool } -func (m *Mapping) Transform(pp pprofile.Profile) { - pm := pp.MappingTable().AppendEmpty() +func (m *Mapping) Transform(dic pprofile.ProfilesDictionary) { + pm := dic.MappingTable().AppendEmpty() pm.SetMemoryStart(m.MemoryStart) pm.SetMemoryLimit(m.MemoryLimit) pm.SetFileOffset(m.FileOffset) - pm.SetFilenameStrindex(addString(pp, m.Filename)) + pm.SetFilenameStrindex(addString(dic, m.Filename)) for _, at := range m.Attributes { - pm.AttributeIndices().Append(at.Transform(pp)) + pm.AttributeIndices().Append(at.Transform(dic)) } pm.SetHasFunctions(m.HasFunctions) pm.SetHasFilenames(m.HasFileNames) @@ -285,14 +285,14 @@ type Attribute struct { Value any } -func (a *Attribute) Transform(pp pprofile.Profile) int32 { - pa := pp.AttributeTable().AppendEmpty() +func (a *Attribute) Transform(dic pprofile.ProfilesDictionary) int32 { + pa := dic.AttributeTable().AppendEmpty() pa.SetKey(a.Key) if pa.Value().FromRaw(a.Value) != nil { panic(fmt.Sprintf("unsupported attribute value: {%s: %v (type %T)}", a.Key, a.Value, a.Value)) } - return int32(pp.AttributeTable().Len() - 1) + return int32(dic.AttributeTable().Len() - 1) } type AttributeUnit struct { @@ -300,11 +300,11 @@ type AttributeUnit struct { Unit string } -func (a *AttributeUnit) Transform(pp pprofile.Profile) int32 { - pa := pp.AttributeUnits().AppendEmpty() - pa.SetAttributeKeyStrindex(addString(pp, a.AttributeKey)) - pa.SetUnitStrindex(addString(pp, a.Unit)) - return int32(pp.AttributeTable().Len() - 1) +func (a *AttributeUnit) Transform(dic pprofile.ProfilesDictionary) int32 { + pa := dic.AttributeUnits().AppendEmpty() + pa.SetAttributeKeyStrindex(addString(dic, a.AttributeKey)) + pa.SetUnitStrindex(addString(dic, a.Unit)) + return int32(dic.AttributeTable().Len() - 1) } type Line struct { @@ -320,11 +320,11 @@ type Function 
struct { StartLine int64 } -func (f *Function) Transform(pp pprofile.Profile) int32 { - pf := pp.FunctionTable().AppendEmpty() - pf.SetNameStrindex(addString(pp, f.Name)) - pf.SetSystemNameStrindex(addString(pp, f.SystemName)) - pf.SetFilenameStrindex(addString(pp, f.Filename)) +func (f *Function) Transform(dic pprofile.ProfilesDictionary) int32 { + pf := dic.FunctionTable().AppendEmpty() + pf.SetNameStrindex(addString(dic, f.Name)) + pf.SetSystemNameStrindex(addString(dic, f.SystemName)) + pf.SetFilenameStrindex(addString(dic, f.Filename)) pf.SetStartLine(f.StartLine) - return int32(pp.FunctionTable().Len() - 1) + return int32(dic.FunctionTable().Len() - 1) } diff --git a/pkg/pdatatest/pprofiletest/validate.go b/pkg/pdatatest/pprofiletest/validate.go index 45b41cec22bf1..95c133f94bea0 100644 --- a/pkg/pdatatest/pprofiletest/validate.go +++ b/pkg/pdatatest/pprofiletest/validate.go @@ -10,16 +10,16 @@ import ( "go.opentelemetry.io/collector/pdata/pprofile" ) -func ValidateProfile(pp pprofile.Profile) error { +func ValidateProfile(dic pprofile.ProfilesDictionary, pp pprofile.Profile) error { var errs error - stLen := pp.StringTable().Len() + stLen := dic.StringTable().Len() if stLen < 1 { // Return here to avoid panicking when accessing the string table. return errors.New("empty string table, must at least contain the empty string") } - if pp.StringTable().At(0) != "" { + if dic.StringTable().At(0) != "" { errs = errors.Join(errs, errors.New("string table must start with the empty string")) } @@ -29,15 +29,15 @@ func ValidateProfile(pp pprofile.Profile) error { errs = errors.Join(errs, errors.New("missing sample type, need at least a default")) } - errs = errors.Join(errs, validateSampleType(pp)) + errs = errors.Join(errs, validateSampleType(dic, pp)) - errs = errors.Join(errs, validateSamples(pp)) + errs = errors.Join(errs, validateSamples(dic, pp)) if err := validateValueType(stLen, pp.PeriodType()); err != nil { errs = errors.Join(errs, fmt.Errorf("period_type: %w", err)) } - if err := validateIndex(stLen, pp.DefaultSampleTypeStrindex()); err != nil { + if err := validateIndex(stLen, pp.DefaultSampleTypeIndex()); err != nil { errs = errors.Join(errs, fmt.Errorf("default_sample_type_strindex: %w", err)) } @@ -45,11 +45,11 @@ func ValidateProfile(pp pprofile.Profile) error { errs = errors.Join(errs, fmt.Errorf("comment_strindices: %w", err)) } - if err := validateIndices(pp.AttributeTable().Len(), pp.AttributeIndices()); err != nil { + if err := validateIndices(dic.AttributeTable().Len(), pp.AttributeIndices()); err != nil { errs = errors.Join(errs, fmt.Errorf("attribute_indices: %w", err)) } - errs = errors.Join(errs, validateAttributeUnits(pp)) + errs = errors.Join(errs, validateAttributeUnits(dic)) return errs } @@ -74,10 +74,10 @@ func validateIndex(length int, idx int32) error { return nil } -func validateSampleType(pp pprofile.Profile) error { +func validateSampleType(dic pprofile.ProfilesDictionary, pp pprofile.Profile) error { var errs error - stLen := pp.StringTable().Len() + stLen := dic.StringTable().Len() for i := range pp.SampleType().Len() { if err := validateValueType(stLen, pp.SampleType().At(i)); err != nil { errs = errors.Join(errs, fmt.Errorf("sample_type[%d]: %w", i, err)) @@ -107,11 +107,11 @@ func validateValueType(stLen int, pvt pprofile.ValueType) error { return errs } -func validateSamples(pp pprofile.Profile) error { +func validateSamples(dic pprofile.ProfilesDictionary, pp pprofile.Profile) error { var errs error for i := range pp.Sample().Len() { - if err := 
validateSample(pp, pp.Sample().At(i)); err != nil { + if err := validateSample(dic, pp, pp.Sample().At(i)); err != nil { errs = errors.Join(errs, fmt.Errorf("sample[%d]: %w", i, err)) } } @@ -119,7 +119,7 @@ func validateSamples(pp pprofile.Profile) error { return errs } -func validateSample(pp pprofile.Profile, sample pprofile.Sample) error { +func validateSample(dic pprofile.ProfilesDictionary, pp pprofile.Profile, sample pprofile.Sample) error { var errs error length := sample.LocationsLength() @@ -145,11 +145,11 @@ func validateSample(pp pprofile.Profile, sample pprofile.Sample) error { for i := start; i < end; i++ { locIdx := pp.LocationIndices().At(int(i)) - if err := validateIndex(pp.LocationTable().Len(), locIdx); err != nil { + if err := validateIndex(dic.LocationTable().Len(), locIdx); err != nil { errs = errors.Join(errs, fmt.Errorf("location_indices[%d]: %w", i, err)) continue } - if err := validateLocation(pp, pp.LocationTable().At(int(locIdx))); err != nil { + if err := validateLocation(dic, dic.LocationTable().At(int(locIdx))); err != nil { errs = errors.Join(errs, fmt.Errorf("locations[%d]: %w", i, err)) } } @@ -161,7 +161,7 @@ func validateSample(pp pprofile.Profile, sample pprofile.Sample) error { sample.Value().Len(), numValues)) } - if err := validateIndices(pp.AttributeTable().Len(), sample.AttributeIndices()); err != nil { + if err := validateIndices(dic.AttributeTable().Len(), sample.AttributeIndices()); err != nil { errs = errors.Join(errs, fmt.Errorf("attribute_indices: %w", err)) } @@ -176,7 +176,7 @@ func validateSample(pp pprofile.Profile, sample pprofile.Sample) error { } if sample.HasLinkIndex() { - if err := validateIndex(pp.LinkTable().Len(), sample.LinkIndex()); err != nil { + if err := validateIndex(dic.LinkTable().Len(), sample.LinkIndex()); err != nil { errs = errors.Join(errs, fmt.Errorf("link_index: %w", err)) } } @@ -184,60 +184,60 @@ func validateSample(pp pprofile.Profile, sample pprofile.Sample) error { return errs } -func validateLocation(pp pprofile.Profile, loc pprofile.Location) error { +func validateLocation(dic pprofile.ProfilesDictionary, loc pprofile.Location) error { var errs error if loc.HasMappingIndex() { - if err := validateIndex(pp.MappingTable().Len(), loc.MappingIndex()); err != nil { + if err := validateIndex(dic.MappingTable().Len(), loc.MappingIndex()); err != nil { // Continuing would run into a panic. 
return fmt.Errorf("mapping_index: %w", err) } - if err := validateMapping(pp, pp.MappingTable().At(int(loc.MappingIndex()))); err != nil { + if err := validateMapping(dic, dic.MappingTable().At(int(loc.MappingIndex()))); err != nil { errs = errors.Join(errs, fmt.Errorf("mapping: %w", err)) } } for i := range loc.Line().Len() { - if err := validateLine(pp, loc.Line().At(i)); err != nil { + if err := validateLine(dic, loc.Line().At(i)); err != nil { errs = errors.Join(errs, fmt.Errorf("line[%d]: %w", i, err)) } } - if err := validateIndices(pp.AttributeTable().Len(), loc.AttributeIndices()); err != nil { + if err := validateIndices(dic.AttributeTable().Len(), loc.AttributeIndices()); err != nil { errs = errors.Join(errs, fmt.Errorf("attribute_indices: %w", err)) } return errs } -func validateLine(pp pprofile.Profile, line pprofile.Line) error { - if err := validateIndex(pp.FunctionTable().Len(), line.FunctionIndex()); err != nil { +func validateLine(dic pprofile.ProfilesDictionary, line pprofile.Line) error { + if err := validateIndex(dic.FunctionTable().Len(), line.FunctionIndex()); err != nil { return fmt.Errorf("function_index: %w", err) } return nil } -func validateMapping(pp pprofile.Profile, mapping pprofile.Mapping) error { +func validateMapping(dic pprofile.ProfilesDictionary, mapping pprofile.Mapping) error { var errs error - if err := validateIndex(pp.StringTable().Len(), mapping.FilenameStrindex()); err != nil { + if err := validateIndex(dic.StringTable().Len(), mapping.FilenameStrindex()); err != nil { errs = errors.Join(errs, fmt.Errorf("filename_strindex: %w", err)) } - if err := validateIndices(pp.AttributeTable().Len(), mapping.AttributeIndices()); err != nil { + if err := validateIndices(dic.AttributeTable().Len(), mapping.AttributeIndices()); err != nil { errs = errors.Join(errs, fmt.Errorf("attribute_indices: %w", err)) } return errs } -func validateAttributeUnits(pp pprofile.Profile) error { +func validateAttributeUnits(dic pprofile.ProfilesDictionary) error { var errs error - for i := range pp.AttributeUnits().Len() { - if err := validateAttributeUnit(pp, pp.AttributeUnits().At(i)); err != nil { + for i := range dic.AttributeUnits().Len() { + if err := validateAttributeUnit(dic, dic.AttributeUnits().At(i)); err != nil { errs = errors.Join(errs, fmt.Errorf("attribute_units[%d]: %w", i, err)) } } @@ -245,14 +245,14 @@ func validateAttributeUnits(pp pprofile.Profile) error { return errs } -func validateAttributeUnit(pp pprofile.Profile, au pprofile.AttributeUnit) error { +func validateAttributeUnit(dic pprofile.ProfilesDictionary, au pprofile.AttributeUnit) error { var errs error - if err := validateIndex(pp.StringTable().Len(), au.AttributeKeyStrindex()); err != nil { + if err := validateIndex(dic.StringTable().Len(), au.AttributeKeyStrindex()); err != nil { errs = errors.Join(errs, fmt.Errorf("attribute_key: %w", err)) } - if err := validateIndex(pp.StringTable().Len(), au.UnitStrindex()); err != nil { + if err := validateIndex(dic.StringTable().Len(), au.UnitStrindex()); err != nil { errs = errors.Join(errs, fmt.Errorf("unit: %w", err)) } diff --git a/pkg/pdatatest/pprofiletest/validate_test.go b/pkg/pdatatest/pprofiletest/validate_test.go index 0156b1b950699..65a8e4ef69a92 100644 --- a/pkg/pdatatest/pprofiletest/validate_test.go +++ b/pkg/pdatatest/pprofiletest/validate_test.go @@ -13,38 +13,46 @@ import ( func Test_validateProfile(t *testing.T) { tests := []struct { - name string - profile pprofile.Profile - wantErr assert.ErrorAssertionFunc + name string + dictionary 
pprofile.ProfilesDictionary + profile pprofile.Profile + wantErr assert.ErrorAssertionFunc }{ { - name: "empty string table", - profile: pprofile.NewProfile(), - wantErr: assert.Error, + name: "empty string table", + dictionary: pprofile.NewProfilesDictionary(), + profile: pprofile.NewProfile(), + wantErr: assert.Error, }, { name: "no empty string at index 0", - profile: func() pprofile.Profile { - pp := pprofile.NewProfile() - pp.StringTable().Append("x") - return pp + dictionary: func() pprofile.ProfilesDictionary { + dic := pprofile.NewProfilesDictionary() + dic.StringTable().Append("x") + return dic }(), + profile: pprofile.NewProfile(), wantErr: assert.Error, }, { name: "empty sample type", - profile: func() pprofile.Profile { - pp := pprofile.NewProfile() - pp.StringTable().Append("") - return pp + dictionary: func() pprofile.ProfilesDictionary { + dic := pprofile.NewProfilesDictionary() + dic.StringTable().Append("") + return dic }(), + profile: pprofile.NewProfile(), wantErr: assert.Error, }, { name: "invalid sample type", + dictionary: func() pprofile.ProfilesDictionary { + dic := pprofile.NewProfilesDictionary() + dic.StringTable().Append("") + return dic + }(), profile: func() pprofile.Profile { pp := pprofile.NewProfile() - pp.StringTable().Append("") pp.SampleType().AppendEmpty() return pp }(), @@ -52,9 +60,13 @@ func Test_validateProfile(t *testing.T) { }, { name: "invalid sample", + dictionary: func() pprofile.ProfilesDictionary { + dic := pprofile.NewProfilesDictionary() + dic.StringTable().Append("") + return dic + }(), profile: func() pprofile.Profile { pp := pprofile.NewProfile() - pp.StringTable().Append("") st := pp.SampleType().AppendEmpty() st.SetAggregationTemporality(pprofile.AggregationTemporalityDelta) pp.PeriodType().SetAggregationTemporality(pprofile.AggregationTemporalityDelta) @@ -65,30 +77,38 @@ func Test_validateProfile(t *testing.T) { }, { name: "invalid default sample type string index", + dictionary: func() pprofile.ProfilesDictionary { + dic := pprofile.NewProfilesDictionary() + dic.StringTable().Append("") + return dic + }(), profile: func() pprofile.Profile { pp := pprofile.NewProfile() - pp.StringTable().Append("") st := pp.SampleType().AppendEmpty() st.SetAggregationTemporality(pprofile.AggregationTemporalityDelta) pp.PeriodType().SetAggregationTemporality(pprofile.AggregationTemporalityDelta) s := pp.Sample().AppendEmpty() s.Value().Append(0) - pp.SetDefaultSampleTypeStrindex(1) + pp.SetDefaultSampleTypeIndex(1) return pp }(), wantErr: assert.Error, }, { name: "invalid comment string index", + dictionary: func() pprofile.ProfilesDictionary { + dic := pprofile.NewProfilesDictionary() + dic.StringTable().Append("") + return dic + }(), profile: func() pprofile.Profile { pp := pprofile.NewProfile() - pp.StringTable().Append("") st := pp.SampleType().AppendEmpty() st.SetAggregationTemporality(pprofile.AggregationTemporalityDelta) pp.PeriodType().SetAggregationTemporality(pprofile.AggregationTemporalityDelta) s := pp.Sample().AppendEmpty() s.Value().Append(0) - pp.SetDefaultSampleTypeStrindex(0) + pp.SetDefaultSampleTypeIndex(0) pp.CommentStrindices().Append(1) return pp }(), @@ -96,15 +116,19 @@ func Test_validateProfile(t *testing.T) { }, { name: "invalid attribute index", + dictionary: func() pprofile.ProfilesDictionary { + dic := pprofile.NewProfilesDictionary() + dic.StringTable().Append("") + return dic + }(), profile: func() pprofile.Profile { pp := pprofile.NewProfile() - pp.StringTable().Append("") st := pp.SampleType().AppendEmpty() 
st.SetAggregationTemporality(pprofile.AggregationTemporalityDelta) pp.PeriodType().SetAggregationTemporality(pprofile.AggregationTemporalityDelta) s := pp.Sample().AppendEmpty() s.Value().Append(0) - pp.SetDefaultSampleTypeStrindex(0) + pp.SetDefaultSampleTypeIndex(0) pp.CommentStrindices().Append(0) pp.AttributeIndices().Append(1) return pp @@ -113,38 +137,46 @@ func Test_validateProfile(t *testing.T) { }, { name: "invalid attribute unit index", + dictionary: func() pprofile.ProfilesDictionary { + dic := pprofile.NewProfilesDictionary() + dic.StringTable().Append("") + au := dic.AttributeUnits().AppendEmpty() + au.SetAttributeKeyStrindex(1) + return dic + }(), profile: func() pprofile.Profile { pp := pprofile.NewProfile() - pp.StringTable().Append("") st := pp.SampleType().AppendEmpty() st.SetAggregationTemporality(pprofile.AggregationTemporalityDelta) pp.PeriodType().SetAggregationTemporality(pprofile.AggregationTemporalityDelta) s := pp.Sample().AppendEmpty() s.Value().Append(0) - pp.SetDefaultSampleTypeStrindex(0) + pp.SetDefaultSampleTypeIndex(0) pp.CommentStrindices().Append(0) pp.AttributeIndices().Append(0) - au := pp.AttributeUnits().AppendEmpty() - au.SetAttributeKeyStrindex(1) return pp }(), wantErr: assert.Error, }, { name: "valid", + dictionary: func() pprofile.ProfilesDictionary { + dic := pprofile.NewProfilesDictionary() + dic.StringTable().Append("") + au := dic.AttributeUnits().AppendEmpty() + au.SetAttributeKeyStrindex(0) + return dic + }(), profile: func() pprofile.Profile { pp := pprofile.NewProfile() - pp.StringTable().Append("") st := pp.SampleType().AppendEmpty() st.SetAggregationTemporality(pprofile.AggregationTemporalityDelta) pp.PeriodType().SetAggregationTemporality(pprofile.AggregationTemporalityDelta) s := pp.Sample().AppendEmpty() s.Value().Append(0) - pp.SetDefaultSampleTypeStrindex(0) + pp.SetDefaultSampleTypeIndex(0) pp.CommentStrindices().Append(0) pp.AttributeIndices().Append(0) - au := pp.AttributeUnits().AppendEmpty() - au.SetAttributeKeyStrindex(0) return pp }(), wantErr: assert.Error, @@ -152,7 +184,7 @@ func Test_validateProfile(t *testing.T) { } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - tt.wantErr(t, ValidateProfile(tt.profile)) + tt.wantErr(t, ValidateProfile(tt.dictionary, tt.profile)) }) } } @@ -175,20 +207,26 @@ func Test_validateIndex(t *testing.T) { func Test_validateSampleTypes(t *testing.T) { tests := []struct { - name string - profile pprofile.Profile - wantErr assert.ErrorAssertionFunc + name string + dictionary pprofile.ProfilesDictionary + profile pprofile.Profile + wantErr assert.ErrorAssertionFunc }{ { - name: "empty", - profile: pprofile.NewProfile(), - wantErr: assert.NoError, + name: "empty", + dictionary: pprofile.NewProfilesDictionary(), + profile: pprofile.NewProfile(), + wantErr: assert.NoError, }, { name: "valid", + dictionary: func() pprofile.ProfilesDictionary { + dic := pprofile.NewProfilesDictionary() + dic.StringTable().Append("") + return dic + }(), profile: func() pprofile.Profile { pp := pprofile.NewProfile() - pp.StringTable().Append("") s := pp.SampleType().AppendEmpty() s.SetAggregationTemporality(pprofile.AggregationTemporalityDelta) s = pp.SampleType().AppendEmpty() @@ -199,9 +237,13 @@ func Test_validateSampleTypes(t *testing.T) { }, { name: "invalid", + dictionary: func() pprofile.ProfilesDictionary { + dic := pprofile.NewProfilesDictionary() + dic.StringTable().Append("") + return dic + }(), profile: func() pprofile.Profile { pp := pprofile.NewProfile() - pp.StringTable().Append("") s := 
pp.SampleType().AppendEmpty() s.SetAggregationTemporality(pprofile.AggregationTemporalityDelta) s = pp.SampleType().AppendEmpty() @@ -213,40 +255,40 @@ func Test_validateSampleTypes(t *testing.T) { } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - tt.wantErr(t, validateSampleType(tt.profile)) + tt.wantErr(t, validateSampleType(tt.dictionary, tt.profile)) }) } } func Test_validateValueType(t *testing.T) { tests := []struct { - name string - profile pprofile.Profile - valueType pprofile.ValueType - wantErr assert.ErrorAssertionFunc + name string + dictionary pprofile.ProfilesDictionary + valueType pprofile.ValueType + wantErr assert.ErrorAssertionFunc }{ { - name: "type string index out of range", - profile: pprofile.NewProfile(), - valueType: pprofile.NewValueType(), - wantErr: assert.Error, + name: "type string index out of range", + dictionary: pprofile.NewProfilesDictionary(), + valueType: pprofile.NewValueType(), + wantErr: assert.Error, }, { name: "invalid aggregation temporality", - profile: func() pprofile.Profile { - pp := pprofile.NewProfile() - pp.StringTable().Append("") - return pp + dictionary: func() pprofile.ProfilesDictionary { + dic := pprofile.NewProfilesDictionary() + dic.StringTable().Append("") + return dic }(), valueType: pprofile.NewValueType(), wantErr: assert.Error, }, { name: "unit string index out of range", - profile: func() pprofile.Profile { - pp := pprofile.NewProfile() - pp.StringTable().Append("") - return pp + dictionary: func() pprofile.ProfilesDictionary { + dic := pprofile.NewProfilesDictionary() + dic.StringTable().Append("") + return dic }(), valueType: func() pprofile.ValueType { pp := pprofile.NewValueType() @@ -258,10 +300,10 @@ func Test_validateValueType(t *testing.T) { }, { name: "valid delta", - profile: func() pprofile.Profile { - pp := pprofile.NewProfile() - pp.StringTable().Append("") - return pp + dictionary: func() pprofile.ProfilesDictionary { + dic := pprofile.NewProfilesDictionary() + dic.StringTable().Append("") + return dic }(), valueType: func() pprofile.ValueType { pp := pprofile.NewValueType() @@ -272,10 +314,10 @@ func Test_validateValueType(t *testing.T) { }, { name: "valid cumulative", - profile: func() pprofile.Profile { - pp := pprofile.NewProfile() - pp.StringTable().Append("") - return pp + dictionary: func() pprofile.ProfilesDictionary { + dic := pprofile.NewProfilesDictionary() + dic.StringTable().Append("") + return dic }(), valueType: func() pprofile.ValueType { pp := pprofile.NewValueType() @@ -287,24 +329,27 @@ func Test_validateValueType(t *testing.T) { } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - tt.wantErr(t, validateValueType(tt.profile.StringTable().Len(), tt.valueType)) + tt.wantErr(t, validateValueType(tt.dictionary.StringTable().Len(), tt.valueType)) }) } } func Test_validateSamples(t *testing.T) { tests := []struct { - name string - profile pprofile.Profile - wantErr assert.ErrorAssertionFunc + name string + dictionary pprofile.ProfilesDictionary + profile pprofile.Profile + wantErr assert.ErrorAssertionFunc }{ { - name: "no samples", - profile: pprofile.NewProfile(), - wantErr: assert.NoError, + name: "no samples", + dictionary: pprofile.NewProfilesDictionary(), + profile: pprofile.NewProfile(), + wantErr: assert.NoError, }, { - name: "valid samples", + name: "valid samples", + dictionary: pprofile.NewProfilesDictionary(), profile: func() pprofile.Profile { pp := pprofile.NewProfile() pp.Sample().AppendEmpty() @@ -314,7 +359,8 @@ func Test_validateSamples(t *testing.T) { 
wantErr: assert.NoError, }, { - name: "invalid sample", + name: "invalid sample", + dictionary: pprofile.NewProfilesDictionary(), profile: func() pprofile.Profile { pp := pprofile.NewProfile() pp.Sample().AppendEmpty() @@ -327,27 +373,30 @@ func Test_validateSamples(t *testing.T) { } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - tt.wantErr(t, validateSamples(tt.profile)) + tt.wantErr(t, validateSamples(tt.dictionary, tt.profile)) }) } } func Test_validateSample(t *testing.T) { tests := []struct { - name string - profile pprofile.Profile - sample pprofile.Sample - wantErr assert.ErrorAssertionFunc + name string + dictionary pprofile.ProfilesDictionary + profile pprofile.Profile + sample pprofile.Sample + wantErr assert.ErrorAssertionFunc }{ { - name: "empty", - profile: pprofile.NewProfile(), - sample: pprofile.NewSample(), - wantErr: assert.NoError, + name: "empty", + dictionary: pprofile.NewProfilesDictionary(), + profile: pprofile.NewProfile(), + sample: pprofile.NewSample(), + wantErr: assert.NoError, }, { - name: "negative location length", - profile: pprofile.NewProfile(), + name: "negative location length", + dictionary: pprofile.NewProfilesDictionary(), + profile: pprofile.NewProfile(), sample: func() pprofile.Sample { s := pprofile.NewSample() s.SetLocationsLength(-1) @@ -356,8 +405,9 @@ func Test_validateSample(t *testing.T) { wantErr: assert.Error, }, { - name: "location length out of range", - profile: pprofile.NewProfile(), + name: "location length out of range", + dictionary: pprofile.NewProfilesDictionary(), + profile: pprofile.NewProfile(), sample: func() pprofile.Sample { s := pprofile.NewSample() s.SetLocationsStartIndex(0) @@ -368,9 +418,13 @@ func Test_validateSample(t *testing.T) { }, { name: "location start plus location length in range", + dictionary: func() pprofile.ProfilesDictionary { + dic := pprofile.NewProfilesDictionary() + dic.LocationTable().AppendEmpty() + return dic + }(), profile: func() pprofile.Profile { pp := pprofile.NewProfile() - pp.LocationTable().AppendEmpty() pp.LocationIndices().Append(0) return pp }(), @@ -384,9 +438,13 @@ func Test_validateSample(t *testing.T) { }, { name: "location start plus location length out of range", + dictionary: func() pprofile.ProfilesDictionary { + dic := pprofile.NewProfilesDictionary() + dic.LocationTable().AppendEmpty() + return dic + }(), profile: func() pprofile.Profile { pp := pprofile.NewProfile() - pp.LocationTable().AppendEmpty() pp.LocationIndices().Append(0) return pp }(), @@ -400,9 +458,13 @@ func Test_validateSample(t *testing.T) { }, { name: "location index out of range", + dictionary: func() pprofile.ProfilesDictionary { + dic := pprofile.NewProfilesDictionary() + dic.LocationTable().AppendEmpty() + return dic + }(), profile: func() pprofile.Profile { pp := pprofile.NewProfile() - pp.LocationTable().AppendEmpty() pp.LocationIndices().Append(1) return pp }(), @@ -415,8 +477,9 @@ func Test_validateSample(t *testing.T) { wantErr: assert.Error, }, { - name: "sample type length does not match", - profile: pprofile.NewProfile(), + name: "sample type length does not match", + dictionary: pprofile.NewProfilesDictionary(), + profile: pprofile.NewProfile(), sample: func() pprofile.Sample { s := pprofile.NewSample() s.Value().Append(123) @@ -426,9 +489,13 @@ func Test_validateSample(t *testing.T) { }, { name: "attribute in range", + dictionary: func() pprofile.ProfilesDictionary { + dic := pprofile.NewProfilesDictionary() + dic.AttributeTable().AppendEmpty() + return dic + }(), profile: func() 
pprofile.Profile { pp := pprofile.NewProfile() - pp.AttributeTable().AppendEmpty() return pp }(), sample: func() pprofile.Sample { @@ -440,9 +507,13 @@ func Test_validateSample(t *testing.T) { }, { name: "attribute out of range", + dictionary: func() pprofile.ProfilesDictionary { + dic := pprofile.NewProfilesDictionary() + dic.AttributeTable().AppendEmpty() + return dic + }(), profile: func() pprofile.Profile { pp := pprofile.NewProfile() - pp.AttributeTable().AppendEmpty() return pp }(), sample: func() pprofile.Sample { @@ -453,7 +524,8 @@ func Test_validateSample(t *testing.T) { wantErr: assert.Error, }, { - name: "timestamp in range", + name: "timestamp in range", + dictionary: pprofile.NewProfilesDictionary(), profile: func() pprofile.Profile { pp := pprofile.NewProfile() pp.SetTime(1) @@ -468,7 +540,8 @@ func Test_validateSample(t *testing.T) { wantErr: assert.NoError, }, { - name: "timestamp too small", + name: "timestamp too small", + dictionary: pprofile.NewProfilesDictionary(), profile: func() pprofile.Profile { pp := pprofile.NewProfile() pp.SetTime(1) @@ -483,7 +556,8 @@ func Test_validateSample(t *testing.T) { wantErr: assert.Error, }, { - name: "timestamp too high", + name: "timestamp too high", + dictionary: pprofile.NewProfilesDictionary(), profile: func() pprofile.Profile { pp := pprofile.NewProfile() pp.SetTime(1) @@ -499,9 +573,13 @@ func Test_validateSample(t *testing.T) { }, { name: "link in range", + dictionary: func() pprofile.ProfilesDictionary { + dic := pprofile.NewProfilesDictionary() + dic.LinkTable().AppendEmpty() + return dic + }(), profile: func() pprofile.Profile { pp := pprofile.NewProfile() - pp.LinkTable().AppendEmpty() return pp }(), sample: func() pprofile.Sample { @@ -513,9 +591,13 @@ func Test_validateSample(t *testing.T) { }, { name: "link out of range", + dictionary: func() pprofile.ProfilesDictionary { + dic := pprofile.NewProfilesDictionary() + dic.LinkTable().AppendEmpty() + return dic + }(), profile: func() pprofile.Profile { pp := pprofile.NewProfile() - pp.LinkTable().AppendEmpty() return pp }(), sample: func() pprofile.Sample { @@ -528,31 +610,32 @@ func Test_validateSample(t *testing.T) { } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - tt.wantErr(t, validateSample(tt.profile, tt.sample)) + tt.wantErr(t, validateSample(tt.dictionary, tt.profile, tt.sample)) }) } } func Test_validateLocation(t *testing.T) { tests := []struct { - name string - profile pprofile.Profile - location pprofile.Location - wantErr assert.ErrorAssertionFunc + name string + dictionary pprofile.ProfilesDictionary + profile pprofile.Profile + location pprofile.Location + wantErr assert.ErrorAssertionFunc }{ { - name: "empty", - profile: pprofile.NewProfile(), - location: pprofile.NewLocation(), - wantErr: assert.NoError, + name: "empty", + dictionary: pprofile.NewProfilesDictionary(), + location: pprofile.NewLocation(), + wantErr: assert.NoError, }, { name: "mapping index in range", - profile: func() pprofile.Profile { - pp := pprofile.NewProfile() - pp.StringTable().Append("") - pp.MappingTable().AppendEmpty() - return pp + dictionary: func() pprofile.ProfilesDictionary { + dic := pprofile.NewProfilesDictionary() + dic.StringTable().Append("") + dic.MappingTable().AppendEmpty() + return dic }(), location: func() pprofile.Location { l := pprofile.NewLocation() @@ -563,13 +646,13 @@ func Test_validateLocation(t *testing.T) { }, { name: "with line and attribute", - profile: func() pprofile.Profile { - pp := pprofile.NewProfile() - 
pp.StringTable().Append("") - pp.MappingTable().AppendEmpty() - pp.AttributeTable().AppendEmpty() - pp.FunctionTable().AppendEmpty() - return pp + dictionary: func() pprofile.ProfilesDictionary { + dic := pprofile.NewProfilesDictionary() + dic.StringTable().Append("") + dic.MappingTable().AppendEmpty() + dic.AttributeTable().AppendEmpty() + dic.FunctionTable().AppendEmpty() + return dic }(), location: func() pprofile.Location { l := pprofile.NewLocation() @@ -582,31 +665,31 @@ func Test_validateLocation(t *testing.T) { } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - tt.wantErr(t, validateLocation(tt.profile, tt.location)) + tt.wantErr(t, validateLocation(tt.dictionary, tt.location)) }) } } func Test_validateLine(t *testing.T) { tests := []struct { - name string - profile pprofile.Profile - line pprofile.Line - wantErr assert.ErrorAssertionFunc + name string + dictionary pprofile.ProfilesDictionary + line pprofile.Line + wantErr assert.ErrorAssertionFunc }{ { - name: "function index out of range", - profile: pprofile.NewProfile(), - line: pprofile.NewLine(), - wantErr: assert.Error, + name: "function index out of range", + dictionary: pprofile.NewProfilesDictionary(), + line: pprofile.NewLine(), + wantErr: assert.Error, }, { name: "function index in range", - profile: func() pprofile.Profile { - pp := pprofile.NewProfile() - pp.StringTable().Append("") - pp.FunctionTable().AppendEmpty() - return pp + dictionary: func() pprofile.ProfilesDictionary { + dic := pprofile.NewProfilesDictionary() + dic.StringTable().Append("") + dic.FunctionTable().AppendEmpty() + return dic }(), line: pprofile.NewLine(), wantErr: assert.NoError, @@ -614,40 +697,40 @@ func Test_validateLine(t *testing.T) { } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - tt.wantErr(t, validateLine(tt.profile, tt.line)) + tt.wantErr(t, validateLine(tt.dictionary, tt.line)) }) } } func Test_validateMapping(t *testing.T) { tests := []struct { - name string - profile pprofile.Profile - mapping pprofile.Mapping - wantErr assert.ErrorAssertionFunc + name string + dictionary pprofile.ProfilesDictionary + mapping pprofile.Mapping + wantErr assert.ErrorAssertionFunc }{ { - name: "filename index out of range", - profile: pprofile.NewProfile(), - mapping: pprofile.NewMapping(), - wantErr: assert.Error, + name: "filename index out of range", + dictionary: pprofile.NewProfilesDictionary(), + mapping: pprofile.NewMapping(), + wantErr: assert.Error, }, { name: "filename index in range", - profile: func() pprofile.Profile { - pp := pprofile.NewProfile() - pp.StringTable().Append("") - return pp + dictionary: func() pprofile.ProfilesDictionary { + dic := pprofile.NewProfilesDictionary() + dic.StringTable().Append("") + return dic }(), mapping: pprofile.NewMapping(), wantErr: assert.NoError, }, { name: "attribute out of range", - profile: func() pprofile.Profile { - pp := pprofile.NewProfile() - pp.StringTable().Append("") - return pp + dictionary: func() pprofile.ProfilesDictionary { + dic := pprofile.NewProfilesDictionary() + dic.StringTable().Append("") + return dic }(), mapping: func() pprofile.Mapping { m := pprofile.NewMapping() @@ -658,11 +741,11 @@ func Test_validateMapping(t *testing.T) { }, { name: "attribute in range", - profile: func() pprofile.Profile { - pp := pprofile.NewProfile() - pp.StringTable().Append("") - pp.AttributeTable().AppendEmpty() - return pp + dictionary: func() pprofile.ProfilesDictionary { + dic := pprofile.NewProfilesDictionary() + dic.StringTable().Append("") + 
dic.AttributeTable().AppendEmpty() + return dic }(), mapping: func() pprofile.Mapping { m := pprofile.NewMapping() @@ -674,81 +757,81 @@ func Test_validateMapping(t *testing.T) { } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - tt.wantErr(t, validateMapping(tt.profile, tt.mapping)) + tt.wantErr(t, validateMapping(tt.dictionary, tt.mapping)) }) } } func Test_validateAttributeUnits(t *testing.T) { tests := []struct { - name string - profile pprofile.Profile - wantErr assert.ErrorAssertionFunc + name string + dictionary pprofile.ProfilesDictionary + wantErr assert.ErrorAssertionFunc }{ { - name: "empty", - profile: pprofile.NewProfile(), - wantErr: assert.NoError, + name: "empty", + dictionary: pprofile.NewProfilesDictionary(), + wantErr: assert.NoError, }, { name: "in range", - profile: func() pprofile.Profile { - pp := pprofile.NewProfile() - pp.StringTable().Append("") - pp.AttributeUnits().AppendEmpty() - return pp + dictionary: func() pprofile.ProfilesDictionary { + dic := pprofile.NewProfilesDictionary() + dic.StringTable().Append("") + dic.AttributeUnits().AppendEmpty() + return dic }(), wantErr: assert.NoError, }, { name: "unit index out of range", - profile: func() pprofile.Profile { - pp := pprofile.NewProfile() - pp.StringTable().Append("") - au := pp.AttributeUnits().AppendEmpty() + dictionary: func() pprofile.ProfilesDictionary { + dic := pprofile.NewProfilesDictionary() + dic.StringTable().Append("") + au := dic.AttributeUnits().AppendEmpty() au.SetUnitStrindex(1) - return pp + return dic }(), wantErr: assert.Error, }, { name: "attribute key index out of range", - profile: func() pprofile.Profile { - pp := pprofile.NewProfile() - pp.StringTable().Append("") - au := pp.AttributeUnits().AppendEmpty() + dictionary: func() pprofile.ProfilesDictionary { + dic := pprofile.NewProfilesDictionary() + dic.StringTable().Append("") + au := dic.AttributeUnits().AppendEmpty() au.SetAttributeKeyStrindex(1) - return pp + return dic }(), wantErr: assert.Error, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - tt.wantErr(t, validateAttributeUnits(tt.profile)) + tt.wantErr(t, validateAttributeUnits(tt.dictionary)) }) } } func Test_validateAttributeUnitAt(t *testing.T) { tests := []struct { - name string - profile pprofile.Profile - attrUnit pprofile.AttributeUnit - wantErr assert.ErrorAssertionFunc + name string + dictionary pprofile.ProfilesDictionary + attrUnit pprofile.AttributeUnit + wantErr assert.ErrorAssertionFunc }{ { - name: "out of range", - profile: pprofile.NewProfile(), - attrUnit: pprofile.NewAttributeUnit(), - wantErr: assert.Error, + name: "out of range", + dictionary: pprofile.NewProfilesDictionary(), + attrUnit: pprofile.NewAttributeUnit(), + wantErr: assert.Error, }, { name: "in range", - profile: func() pprofile.Profile { - pp := pprofile.NewProfile() - pp.StringTable().Append("") - return pp + dictionary: func() pprofile.ProfilesDictionary { + dic := pprofile.NewProfilesDictionary() + dic.StringTable().Append("") + return dic }(), attrUnit: pprofile.NewAttributeUnit(), wantErr: assert.NoError, @@ -756,7 +839,7 @@ func Test_validateAttributeUnitAt(t *testing.T) { } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - tt.wantErr(t, validateAttributeUnit(tt.profile, tt.attrUnit)) + tt.wantErr(t, validateAttributeUnit(tt.dictionary, tt.attrUnit)) }) } }
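
For downstream users of pdatatest, the practical effect of this diff is that the lookup tables (string, attribute, function, location, mapping, link tables and attribute units) now live on `pprofile.ProfilesDictionary`, and `ValidateProfile` takes that dictionary as its first argument; all index checks run against the dictionary's tables rather than the profile's. A minimal usage sketch follows. It only uses calls that appear in the diff above, except for the import path `github.com/open-telemetry/opentelemetry-collector-contrib/pkg/pdatatest/pprofiletest`, which is assumed to be the module path for this package.

```go
package main

import (
	"fmt"

	"go.opentelemetry.io/collector/pdata/pprofile"

	"github.com/open-telemetry/opentelemetry-collector-contrib/pkg/pdatatest/pprofiletest"
)

func main() {
	// Tables are built on the dictionary, not on the profile.
	dic := pprofile.NewProfilesDictionary()
	dic.StringTable().Append("") // index 0 must be the empty string

	pp := pprofile.NewProfile()
	st := pp.SampleType().AppendEmpty()
	st.SetAggregationTemporality(pprofile.AggregationTemporalityDelta)
	pp.PeriodType().SetAggregationTemporality(pprofile.AggregationTemporalityDelta)

	s := pp.Sample().AppendEmpty()
	s.Value().Append(0) // one value per sample type

	// Renamed setter: SetDefaultSampleTypeStrindex -> SetDefaultSampleTypeIndex.
	pp.SetDefaultSampleTypeIndex(0)
	pp.CommentStrindices().Append(0)

	// The dictionary is now the first argument; err should be nil for this
	// minimal profile, since every index resolves against the dictionary.
	if err := pprofiletest.ValidateProfile(dic, pp); err != nil {
		fmt.Println("validation failed:", err)
	}
}
```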
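
The same migration pattern applies to the `Transform` helper at the top of this section: entries are appended to a dictionary table and the new entry's index is returned, so anything that previously wrote into `pprofile.Profile` tables now writes into the shared dictionary. A rough sketch of that pattern is below; `addString` is written out as a hypothetical stand-in for the package-internal helper referenced in the diff (the real helper may deduplicate strings rather than always appending), and `addFunction` is an illustrative name, not an exported API.

```go
package pprofiledictsketch

import "go.opentelemetry.io/collector/pdata/pprofile"

// addString appends s to the dictionary string table and returns its index.
// NOTE: hypothetical stand-in for the package-internal helper used by
// Transform; a real implementation would likely reuse an existing entry.
func addString(dic pprofile.ProfilesDictionary, s string) int32 {
	dic.StringTable().Append(s)
	return int32(dic.StringTable().Len() - 1)
}

// addFunction mirrors Function.Transform: populate a new function-table entry
// on the dictionary and hand back its index for use in Line.FunctionIndex.
func addFunction(dic pprofile.ProfilesDictionary, name, systemName, filename string, startLine int64) int32 {
	pf := dic.FunctionTable().AppendEmpty()
	pf.SetNameStrindex(addString(dic, name))
	pf.SetSystemNameStrindex(addString(dic, systemName))
	pf.SetFilenameStrindex(addString(dic, filename))
	pf.SetStartLine(startLine)
	return int32(dic.FunctionTable().Len() - 1)
}
```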