diff --git a/exporter/signalfxexporter/internal/translation/converter_test.go b/exporter/signalfxexporter/internal/translation/converter_test.go
index 55f91da8bd18..2b5e067e70f6 100644
--- a/exporter/signalfxexporter/internal/translation/converter_test.go
+++ b/exporter/signalfxexporter/internal/translation/converter_test.go
@@ -21,7 +21,6 @@ import (
 	"go.uber.org/zap/zaptest/observer"
 
 	"github.com/open-telemetry/opentelemetry-collector-contrib/exporter/signalfxexporter/internal/translation/dpfilters"
-	"github.com/open-telemetry/opentelemetry-collector-contrib/internal/common/maps"
 )
 
 const (
@@ -232,7 +231,7 @@ func Test_MetricDataToSignalFxV2(t *testing.T) {
 				doubleSFxDataPoint(
 					"gauge_double_with_dims",
 					&sfxMetricTypeGauge,
-					maps.MergeRawMaps(map[string]any{
+					mergeRawMaps(map[string]any{
 						"k_n0": "vn0",
 						"k_n1": "vn1",
 						"k_r0": "vr0",
@@ -241,7 +240,7 @@ func Test_MetricDataToSignalFxV2(t *testing.T) {
 				int64SFxDataPoint(
 					"gauge_int_with_dims",
 					&sfxMetricTypeGauge,
-					maps.MergeRawMaps(map[string]any{
+					mergeRawMaps(map[string]any{
 						"k_n0": "vn0",
 						"k_n1": "vn1",
 						"k_r0": "vr0",
@@ -295,7 +294,7 @@ func Test_MetricDataToSignalFxV2(t *testing.T) {
 				int64SFxDataPoint(
 					"gauge_int_with_dims",
 					&sfxMetricTypeGauge,
-					maps.MergeRawMaps(map[string]any{
+					mergeRawMaps(map[string]any{
 						"k_n0": "vn0",
 						"k_n1": "vn1",
 						"k_r0": "vr0",
@@ -304,7 +303,7 @@ func Test_MetricDataToSignalFxV2(t *testing.T) {
 				int64SFxDataPoint(
 					"gauge_int_with_dims",
 					&sfxMetricTypeGauge,
-					maps.MergeRawMaps(map[string]any{
+					mergeRawMaps(map[string]any{
 						"k_n0": "vn0",
 						"k_n1": "vn1",
 						"k_r0": "vr0",
@@ -338,7 +337,7 @@ func Test_MetricDataToSignalFxV2(t *testing.T) {
 				doubleSFxDataPoint(
 					"gauge_double_with_dims",
 					&sfxMetricTypeGauge,
-					maps.MergeRawMaps(map[string]any{
+					mergeRawMaps(map[string]any{
 						"k_n0": "vn0",
 						"k_n1": "vn1",
 						"k_r0": "vr0",
@@ -372,7 +371,7 @@ func Test_MetricDataToSignalFxV2(t *testing.T) {
 				doubleSFxDataPoint(
 					"gauge_double_with_dims",
 					&sfxMetricTypeGauge,
-					maps.MergeRawMaps(labelMap, map[string]any{
+					mergeRawMaps(labelMap, map[string]any{
 						"cloud_account_id": "efgh",
 						"cloud_provider": conventions.AttributeCloudProviderAWS,
 						"cloud_region": "us-east",
@@ -405,7 +404,7 @@ func Test_MetricDataToSignalFxV2(t *testing.T) {
 				doubleSFxDataPoint(
 					"gauge_double_with_dims",
 					&sfxMetricTypeGauge,
-					maps.MergeRawMaps(labelMap, map[string]any{
+					mergeRawMaps(labelMap, map[string]any{
 						"cloud_provider": conventions.AttributeCloudProviderAWS,
 						"cloud_account_id": "efgh",
 						"cloud_region": "us-east",
@@ -438,7 +437,7 @@ func Test_MetricDataToSignalFxV2(t *testing.T) {
 				doubleSFxDataPoint(
 					"gauge_double_with_dims",
 					&sfxMetricTypeGauge,
-					maps.MergeRawMaps(labelMap, map[string]any{
+					mergeRawMaps(labelMap, map[string]any{
 						"host_id": "abcd",
 						"cloud_provider": conventions.AttributeCloudProviderGCP,
 						"k_r0": "vr0",
@@ -469,7 +468,7 @@ func Test_MetricDataToSignalFxV2(t *testing.T) {
 				doubleSFxDataPoint(
 					"gauge_double_with_dims",
 					&sfxMetricTypeGauge,
-					maps.MergeRawMaps(labelMap, map[string]any{
+					mergeRawMaps(labelMap, map[string]any{
 						"gcp_id": "efgh_abcd",
 						"k_r0": "vr0",
 						"k_r1": "vr1",
@@ -1364,3 +1363,16 @@ func TestMetricsConverter_ConvertDimension(t *testing.T) {
 		})
 	}
 }
+
+// mergeRawMaps merges n maps with a later map's keys overriding earlier maps.
+func mergeRawMaps(maps ...map[string]any) map[string]any {
+	ret := map[string]any{}
+
+	for _, m := range maps {
+		for k, v := range m {
+			ret[k] = v
+		}
+	}
+
+	return ret
+}
diff --git a/internal/common/maps/maps.go b/internal/common/maps/maps.go
deleted file mode 100644
index 8f8a3bc9e136..000000000000
--- a/internal/common/maps/maps.go
+++ /dev/null
@@ -1,39 +0,0 @@
-// Copyright The OpenTelemetry Authors
-// SPDX-License-Identifier: Apache-2.0
-
-package maps // import "github.com/open-telemetry/opentelemetry-collector-contrib/internal/common/maps"
-
-// MergeRawMaps merges n maps with a later map's keys overriding earlier maps.
-func MergeRawMaps(maps ...map[string]any) map[string]any {
-	ret := map[string]any{}
-
-	for _, m := range maps {
-		for k, v := range m {
-			ret[k] = v
-		}
-	}
-
-	return ret
-}
-
-// MergeStringMaps merges n maps with a later map's keys overriding earlier maps.
-func MergeStringMaps(maps ...map[string]string) map[string]string {
-	ret := map[string]string{}
-
-	for _, m := range maps {
-		for k, v := range m {
-			ret[k] = v
-		}
-	}
-
-	return ret
-}
-
-// CloneStringMap makes a shallow copy of a map[string]string.
-func CloneStringMap(m map[string]string) map[string]string {
-	m2 := make(map[string]string, len(m))
-	for k, v := range m {
-		m2[k] = v
-	}
-	return m2
-}
diff --git a/internal/common/maps/maps_test.go b/internal/common/maps/maps_test.go
deleted file mode 100644
index ca3f40d66ec1..000000000000
--- a/internal/common/maps/maps_test.go
+++ /dev/null
@@ -1,41 +0,0 @@
-// Copyright The OpenTelemetry Authors
-// SPDX-License-Identifier: Apache-2.0
-
-package maps
-
-import (
-	"testing"
-
-	"github.com/stretchr/testify/require"
-)
-
-func TestMergeStringMaps(t *testing.T) {
-	m1 := map[string]string{
-		"key-1": "val-1",
-	}
-
-	m2 := map[string]string{
-		"key-2": "val-2",
-	}
-
-	actual := MergeStringMaps(m1, m2)
-	expected := map[string]string{
-		"key-1": "val-1",
-		"key-2": "val-2",
-	}
-
-	require.Equal(t, expected, actual)
-}
-
-func TestCloneStringMap(t *testing.T) {
-	m := map[string]string{
-		"key-1": "val-1",
-	}
-
-	actual := CloneStringMap(m)
-	expected := map[string]string{
-		"key-1": "val-1",
-	}
-
-	require.Equal(t, expected, actual)
-}
diff --git a/internal/common/maps/package_test.go b/internal/common/maps/package_test.go
deleted file mode 100644
index 5a6d3748e8b6..000000000000
--- a/internal/common/maps/package_test.go
+++ /dev/null
@@ -1,14 +0,0 @@
-// Copyright The OpenTelemetry Authors
-// SPDX-License-Identifier: Apache-2.0
-
-package maps
-
-import (
-	"testing"
-
-	"go.uber.org/goleak"
-)
-
-func TestMain(m *testing.M) {
-	goleak.VerifyTestMain(m)
-}
diff --git a/pkg/translator/signalfx/from_metrics_test.go b/pkg/translator/signalfx/from_metrics_test.go
index 70d091ac4080..66fa73000441 100644
--- a/pkg/translator/signalfx/from_metrics_test.go
+++ b/pkg/translator/signalfx/from_metrics_test.go
@@ -13,8 +13,6 @@ import (
 	"github.com/stretchr/testify/require"
 	"go.opentelemetry.io/collector/pdata/pcommon"
 	"go.opentelemetry.io/collector/pdata/pmetric"
-
-	"github.com/open-telemetry/opentelemetry-collector-contrib/internal/common/maps"
 )
 
 const (
@@ -215,7 +213,7 @@ func Test_FromMetrics(t *testing.T) {
 				doubleSFxDataPoint(
 					"gauge_double_with_dims",
 					&sfxMetricTypeGauge,
-					maps.MergeStringMaps(map[string]string{
+					mergeStringMaps(map[string]string{
 						"k_n0": "v_n0",
 						"k_n1": "v_n1",
 						"k_r0": "v_r0",
@@ -225,7 +223,7 @@ func Test_FromMetrics(t *testing.T) {
 				int64SFxDataPoint(
 					"gauge_int_with_dims",
 					&sfxMetricTypeGauge,
-					maps.MergeStringMaps(map[string]string{
+					mergeStringMaps(map[string]string{
 						"k_n0": "v_n0",
 						"k_n1": "v_n1",
 						"k_r0": "v_r0",
@@ -251,13 +249,13 @@ func Test_FromMetrics(t *testing.T) {
 				doubleSFxDataPoint("histogram_min", &sfxMetricTypeGauge, labelMap, 0.1),
 				doubleSFxDataPoint("histogram_max", &sfxMetricTypeGauge, labelMap, 11.11),
 				int64SFxDataPoint("histogram_bucket", &sfxMetricTypeCumulativeCounter,
-					maps.MergeStringMaps(map[string]string{bucketDimensionKey: "1"}, labelMap), 4),
+					mergeStringMaps(map[string]string{bucketDimensionKey: "1"}, labelMap), 4),
 				int64SFxDataPoint("histogram_bucket", &sfxMetricTypeCumulativeCounter,
-					maps.MergeStringMaps(map[string]string{bucketDimensionKey: "2"}, labelMap), 6),
+					mergeStringMaps(map[string]string{bucketDimensionKey: "2"}, labelMap), 6),
 				int64SFxDataPoint("histogram_bucket", &sfxMetricTypeCumulativeCounter,
-					maps.MergeStringMaps(map[string]string{bucketDimensionKey: "4"}, labelMap), 9),
+					mergeStringMaps(map[string]string{bucketDimensionKey: "4"}, labelMap), 9),
 				int64SFxDataPoint("histogram_bucket", &sfxMetricTypeCumulativeCounter,
-					maps.MergeStringMaps(map[string]string{bucketDimensionKey: "+Inf"}, labelMap), 16),
+					mergeStringMaps(map[string]string{bucketDimensionKey: "+Inf"}, labelMap), 16),
 			},
 		},
 		{
@@ -274,13 +272,13 @@ func Test_FromMetrics(t *testing.T) {
 			wantSfxDataPoints: []*sfxpb.DataPoint{
 				int64SFxDataPoint("histogram_count", &sfxMetricTypeCumulativeCounter, labelMap, 16),
 				int64SFxDataPoint("histogram_bucket", &sfxMetricTypeCumulativeCounter,
-					maps.MergeStringMaps(map[string]string{bucketDimensionKey: "1"}, labelMap), 4),
+					mergeStringMaps(map[string]string{bucketDimensionKey: "1"}, labelMap), 4),
 				int64SFxDataPoint("histogram_bucket", &sfxMetricTypeCumulativeCounter,
-					maps.MergeStringMaps(map[string]string{bucketDimensionKey: "2"}, labelMap), 6),
+					mergeStringMaps(map[string]string{bucketDimensionKey: "2"}, labelMap), 6),
 				int64SFxDataPoint("histogram_bucket", &sfxMetricTypeCumulativeCounter,
-					maps.MergeStringMaps(map[string]string{bucketDimensionKey: "4"}, labelMap), 9),
+					mergeStringMaps(map[string]string{bucketDimensionKey: "4"}, labelMap), 9),
 				int64SFxDataPoint("histogram_bucket", &sfxMetricTypeCumulativeCounter,
-					maps.MergeStringMaps(map[string]string{bucketDimensionKey: "+Inf"}, labelMap), 16),
+					mergeStringMaps(map[string]string{bucketDimensionKey: "+Inf"}, labelMap), 16),
 			},
 		},
 		{
@@ -300,13 +298,13 @@ func Test_FromMetrics(t *testing.T) {
 				doubleSFxDataPoint("delta_histogram_min", &sfxMetricTypeGauge, labelMap, 0.1),
 				doubleSFxDataPoint("delta_histogram_max", &sfxMetricTypeGauge, labelMap, 11.11),
 				int64SFxDataPoint("delta_histogram_bucket", &sfxMetricTypeCounter,
-					maps.MergeStringMaps(map[string]string{bucketDimensionKey: "1"}, labelMap), 4),
+					mergeStringMaps(map[string]string{bucketDimensionKey: "1"}, labelMap), 4),
 				int64SFxDataPoint("delta_histogram_bucket", &sfxMetricTypeCounter,
-					maps.MergeStringMaps(map[string]string{bucketDimensionKey: "2"}, labelMap), 6),
+					mergeStringMaps(map[string]string{bucketDimensionKey: "2"}, labelMap), 6),
 				int64SFxDataPoint("delta_histogram_bucket", &sfxMetricTypeCounter,
-					maps.MergeStringMaps(map[string]string{bucketDimensionKey: "4"}, labelMap), 9),
+					mergeStringMaps(map[string]string{bucketDimensionKey: "4"}, labelMap), 9),
 				int64SFxDataPoint("delta_histogram_bucket", &sfxMetricTypeCounter,
-					maps.MergeStringMaps(map[string]string{bucketDimensionKey: "+Inf"}, labelMap), 16),
+					mergeStringMaps(map[string]string{bucketDimensionKey: "+Inf"}, labelMap), 16),
 			},
 		},
 		{
@@ -352,13 +350,13 @@ func Test_FromMetrics(t *testing.T) {
 				int64SFxDataPoint("summary_count", &sfxMetricTypeCumulativeCounter, labelMap, 111),
 				doubleSFxDataPoint("summary_sum", &sfxMetricTypeCumulativeCounter, labelMap, 123.4),
 				doubleSFxDataPoint("summary_quantile", &sfxMetricTypeGauge,
-					maps.MergeStringMaps(map[string]string{quantileDimensionKey: "0.25"}, labelMap), 0),
+					mergeStringMaps(map[string]string{quantileDimensionKey: "0.25"}, labelMap), 0),
 				doubleSFxDataPoint("summary_quantile", &sfxMetricTypeGauge,
-					maps.MergeStringMaps(map[string]string{quantileDimensionKey: "0.5"}, labelMap), 1),
+					mergeStringMaps(map[string]string{quantileDimensionKey: "0.5"}, labelMap), 1),
 				doubleSFxDataPoint("summary_quantile", &sfxMetricTypeGauge,
-					maps.MergeStringMaps(map[string]string{quantileDimensionKey: "0.75"}, labelMap), 2),
+					mergeStringMaps(map[string]string{quantileDimensionKey: "0.75"}, labelMap), 2),
 				doubleSFxDataPoint("summary_quantile", &sfxMetricTypeGauge,
-					maps.MergeStringMaps(map[string]string{quantileDimensionKey: "1"}, labelMap), 3),
+					mergeStringMaps(map[string]string{quantileDimensionKey: "1"}, labelMap), 3),
 			},
 		},
 		{
@@ -570,3 +568,15 @@ func sfxDimensions(m map[string]string) []*sfxpb.Dimension {
 
 	return sfxDims
 }
+
+func mergeStringMaps(maps ...map[string]string) map[string]string {
+	ret := map[string]string{}
+
+	for _, m := range maps {
+		for k, v := range m {
+			ret[k] = v
+		}
+	}
+
+	return ret
+}
diff --git a/pkg/translator/signalfx/go.mod b/pkg/translator/signalfx/go.mod
index e04173446e68..0af547b4b013 100644
--- a/pkg/translator/signalfx/go.mod
+++ b/pkg/translator/signalfx/go.mod
@@ -3,7 +3,6 @@ module github.com/open-telemetry/opentelemetry-collector-contrib/pkg/translator/
 go 1.22.0
 
 require (
-	github.com/open-telemetry/opentelemetry-collector-contrib/internal/common v0.113.0
 	github.com/open-telemetry/opentelemetry-collector-contrib/pkg/pdatatest v0.113.0
 	github.com/signalfx/com_signalfx_metrics_protobuf v0.0.3
 	github.com/stretchr/testify v1.9.0
@@ -31,8 +30,6 @@ require (
 	gopkg.in/yaml.v3 v3.0.1 // indirect
 )
 
-replace github.com/open-telemetry/opentelemetry-collector-contrib/internal/common => ../../../internal/common
-
 replace github.com/open-telemetry/opentelemetry-collector-contrib/pkg/pdatatest => ../../../pkg/pdatatest
 
 replace github.com/open-telemetry/opentelemetry-collector-contrib/pkg/pdatautil => ../../../pkg/pdatautil
diff --git a/receiver/k8sclusterreceiver/internal/metadata/metadata.go b/receiver/k8sclusterreceiver/internal/metadata/metadata.go
index 5ea50a953f7b..e2ccdc07626f 100644
--- a/receiver/k8sclusterreceiver/internal/metadata/metadata.go
+++ b/receiver/k8sclusterreceiver/internal/metadata/metadata.go
@@ -5,12 +5,12 @@ package metadata // import "github.com/open-telemetry/opentelemetry-collector-co
 
 import (
 	"fmt"
+	"maps"
 	"strings"
 	"time"
 
 	v1 "k8s.io/apimachinery/pkg/apis/meta/v1"
 
-	"github.com/open-telemetry/opentelemetry-collector-contrib/internal/common/maps"
 	metadataPkg "github.com/open-telemetry/opentelemetry-collector-contrib/pkg/experimentalmetricmetadata"
 	"github.com/open-telemetry/opentelemetry-collector-contrib/receiver/k8sclusterreceiver/internal/constants"
 )
@@ -50,7 +50,7 @@ func TransformObjectMeta(om v1.ObjectMeta) v1.ObjectMeta {
 // live on v1.ObjectMeta.
 func GetGenericMetadata(om *v1.ObjectMeta, resourceType string) *KubernetesMetadata {
 	rType := strings.ToLower(resourceType)
-	metadata := maps.MergeStringMaps(map[string]string{}, om.Labels)
+	metadata := maps.Clone(om.Labels)
 
 	metadata[constants.K8sKeyWorkLoadKind] = resourceType
 	metadata[constants.K8sKeyWorkLoadName] = om.Name
diff --git a/receiver/k8sclusterreceiver/internal/node/nodes.go b/receiver/k8sclusterreceiver/internal/node/nodes.go
index ad94b9ef9008..052e053dd571 100644
--- a/receiver/k8sclusterreceiver/internal/node/nodes.go
+++ b/receiver/k8sclusterreceiver/internal/node/nodes.go
@@ -5,6 +5,7 @@ package node // import "github.com/open-telemetry/opentelemetry-collector-contri
 
 import (
 	"fmt"
+	"maps"
 	"strings"
 	"time"
 
@@ -16,10 +17,8 @@ import (
 	corev1 "k8s.io/api/core/v1"
 	"k8s.io/apimachinery/pkg/api/resource"
 
-	"github.com/open-telemetry/opentelemetry-collector-contrib/internal/common/maps"
 	"github.com/open-telemetry/opentelemetry-collector-contrib/pkg/experimentalmetricmetadata"
 	"github.com/open-telemetry/opentelemetry-collector-contrib/receiver/k8sclusterreceiver/internal/metadata"
-	imetadata "github.com/open-telemetry/opentelemetry-collector-contrib/receiver/k8sclusterreceiver/internal/metadata"
 )
 
 const (
@@ -51,7 +50,7 @@ func Transform(node *corev1.Node) *corev1.Node {
 	return newNode
 }
 
-func RecordMetrics(mb *imetadata.MetricsBuilder, node *corev1.Node, ts pcommon.Timestamp) {
+func RecordMetrics(mb *metadata.MetricsBuilder, node *corev1.Node, ts pcommon.Timestamp) {
 	for _, c := range node.Status.Conditions {
 		mb.RecordK8sNodeConditionDataPoint(ts, nodeConditionValues[c.Status], string(c.Type))
 	}
@@ -60,7 +59,7 @@ func RecordMetrics(mb *imetadata.MetricsBuilder, node *corev1.Node, ts pcommon.T
 	rb.SetK8sNodeName(node.Name)
 	rb.SetK8sKubeletVersion(node.Status.NodeInfo.KubeletVersion)
 
-	mb.EmitForResource(imetadata.WithResource(rb.Emit()))
+	mb.EmitForResource(metadata.WithResource(rb.Emit()))
 }
 
 func CustomMetrics(set receiver.Settings, rb *metadata.ResourceBuilder, node *corev1.Node, nodeConditionTypesToReport,
@@ -146,7 +145,7 @@ func nodeConditionValue(node *corev1.Node, condType corev1.NodeConditionType) in
 }
 
 func GetMetadata(node *corev1.Node) map[experimentalmetricmetadata.ResourceID]*metadata.KubernetesMetadata {
-	meta := maps.MergeStringMaps(map[string]string{}, node.Labels)
+	meta := maps.Clone(node.Labels)
 
 	meta[conventions.AttributeK8SNodeName] = node.Name
 	meta[nodeCreationTime] = node.GetCreationTimestamp().Format(time.RFC3339)
diff --git a/receiver/k8sclusterreceiver/internal/pod/pods.go b/receiver/k8sclusterreceiver/internal/pod/pods.go
index 81cf2a206789..4793b58edcc6 100644
--- a/receiver/k8sclusterreceiver/internal/pod/pods.go
+++ b/receiver/k8sclusterreceiver/internal/pod/pods.go
@@ -4,6 +4,7 @@
 package pod // import "github.com/open-telemetry/opentelemetry-collector-contrib/receiver/k8sclusterreceiver/internal/pod"
 
 import (
+	"maps"
 	"strings"
 	"time"
 
@@ -17,7 +18,6 @@ import (
 	"k8s.io/apimachinery/pkg/types"
 	"k8s.io/client-go/tools/cache"
 
-	"github.com/open-telemetry/opentelemetry-collector-contrib/internal/common/maps"
 	"github.com/open-telemetry/opentelemetry-collector-contrib/pkg/experimentalmetricmetadata"
 	"github.com/open-telemetry/opentelemetry-collector-contrib/receiver/k8sclusterreceiver/internal/constants"
 	"github.com/open-telemetry/opentelemetry-collector-contrib/receiver/k8sclusterreceiver/internal/container"
@@ -123,7 +123,7 @@ func phaseToInt(phase corev1.PodPhase) int32 {
 
 // GetMetadata returns all metadata associated with the pod.
 func GetMetadata(pod *corev1.Pod, mc *metadata.Store, logger *zap.Logger) map[experimentalmetricmetadata.ResourceID]*metadata.KubernetesMetadata {
-	meta := maps.MergeStringMaps(map[string]string{}, pod.Labels)
+	meta := maps.Clone(pod.Labels)
 
 	meta[podCreationTime] = pod.CreationTimestamp.Format(time.RFC3339)
 
@@ -141,15 +141,15 @@ func GetMetadata(pod *corev1.Pod, mc *metadata.Store, logger *zap.Logger) map[ex
 	}
 
 	if store := mc.Get(gvk.Service); store != nil {
-		meta = maps.MergeStringMaps(meta, service.GetPodServiceTags(pod, store))
+		service.GetPodServiceTags(meta, pod, store)
 	}
 
 	if store := mc.Get(gvk.Job); store != nil {
-		meta = maps.MergeStringMaps(meta, collectPodJobProperties(pod, store, logger))
+		collectPodJobProperties(meta, pod, store, logger)
 	}
 
 	if store := mc.Get(gvk.ReplicaSet); store != nil {
-		meta = maps.MergeStringMaps(meta, collectPodReplicaSetProperties(pod, store, logger))
+		collectPodReplicaSetProperties(meta, pod, store, logger)
 	}
 
 	podID := experimentalmetricmetadata.ResourceID(pod.UID)
@@ -165,48 +165,52 @@ func GetMetadata(pod *corev1.Pod, mc *metadata.Store, logger *zap.Logger) map[ex
 
 // collectPodJobProperties checks if pod owner of type Job is cached. Check owners reference
 // on Job to see if it was created by a CronJob. Sync metadata accordingly.
-func collectPodJobProperties(pod *corev1.Pod, jobStore cache.Store, logger *zap.Logger) map[string]string {
+func collectPodJobProperties(dest map[string]string, pod *corev1.Pod, jobStore cache.Store, logger *zap.Logger) {
 	jobRef := utils.FindOwnerWithKind(pod.OwnerReferences, constants.K8sKindJob)
-	if jobRef != nil {
-		job, exists, err := jobStore.GetByKey(utils.GetIDForCache(pod.Namespace, jobRef.Name))
-		if err != nil {
-			logError(err, jobRef, pod.UID, logger)
-			return nil
-		} else if !exists {
-			logDebug(jobRef, pod.UID, logger)
-			return nil
-		}
+	if jobRef == nil {
+		return
+	}
+	job, exists, err := jobStore.GetByKey(utils.GetIDForCache(pod.Namespace, jobRef.Name))
+	if err != nil {
+		logError(err, jobRef, pod.UID, logger)
+		return
+	}
+	if !exists {
+		logDebug(jobRef, pod.UID, logger)
+		return
+	}
 
-		jobObj := job.(*batchv1.Job)
-		if cronJobRef := utils.FindOwnerWithKind(jobObj.OwnerReferences, constants.K8sKindCronJob); cronJobRef != nil {
-			return getWorkloadProperties(cronJobRef, conventions.AttributeK8SCronJobName)
-		}
-		return getWorkloadProperties(jobRef, conventions.AttributeK8SJobName)
+	jobObj := job.(*batchv1.Job)
+	if cronJobRef := utils.FindOwnerWithKind(jobObj.OwnerReferences, constants.K8sKindCronJob); cronJobRef != nil {
+		getWorkloadProperties(dest, cronJobRef, conventions.AttributeK8SCronJobName)
+		return
 	}
-	return nil
+	getWorkloadProperties(dest, jobRef, conventions.AttributeK8SJobName)
 }
 
 // collectPodReplicaSetProperties checks if pod owner of type ReplicaSet is cached. Check owners reference
 // on ReplicaSet to see if it was created by a Deployment. Sync metadata accordingly.
-func collectPodReplicaSetProperties(pod *corev1.Pod, replicaSetstore cache.Store, logger *zap.Logger) map[string]string {
+func collectPodReplicaSetProperties(dest map[string]string, pod *corev1.Pod, replicaSetstore cache.Store, logger *zap.Logger) {
 	rsRef := utils.FindOwnerWithKind(pod.OwnerReferences, constants.K8sKindReplicaSet)
-	if rsRef != nil {
-		replicaSet, exists, err := replicaSetstore.GetByKey(utils.GetIDForCache(pod.Namespace, rsRef.Name))
-		if err != nil {
-			logError(err, rsRef, pod.UID, logger)
-			return nil
-		} else if !exists {
-			logDebug(rsRef, pod.UID, logger)
-			return nil
-		}
+	if rsRef == nil {
+		return
+	}
+	replicaSet, exists, err := replicaSetstore.GetByKey(utils.GetIDForCache(pod.Namespace, rsRef.Name))
+	if err != nil {
+		logError(err, rsRef, pod.UID, logger)
+		return
+	}
+	if !exists {
+		logDebug(rsRef, pod.UID, logger)
+		return
+	}
 
-		replicaSetObj := replicaSet.(*appsv1.ReplicaSet)
-		if deployRef := utils.FindOwnerWithKind(replicaSetObj.OwnerReferences, constants.K8sKindDeployment); deployRef != nil {
-			return getWorkloadProperties(deployRef, conventions.AttributeK8SDeploymentName)
-		}
-		return getWorkloadProperties(rsRef, conventions.AttributeK8SReplicaSetName)
+	replicaSetObj := replicaSet.(*appsv1.ReplicaSet)
+	if deployRef := utils.FindOwnerWithKind(replicaSetObj.OwnerReferences, constants.K8sKindDeployment); deployRef != nil {
+		getWorkloadProperties(dest, deployRef, conventions.AttributeK8SDeploymentName)
+		return
 	}
-	return nil
+	getWorkloadProperties(dest, rsRef, conventions.AttributeK8SReplicaSetName)
 }
 
 func logDebug(ref *v1.OwnerReference, podUID types.UID, logger *zap.Logger) {
@@ -227,14 +231,12 @@ func logError(err error, ref *v1.OwnerReference, podUID types.UID, logger *zap.L
 }
 
 // getWorkloadProperties returns workload metadata for provided owner reference.
-func getWorkloadProperties(ref *v1.OwnerReference, labelKey string) map[string]string {
+func getWorkloadProperties(dest map[string]string, ref *v1.OwnerReference, labelKey string) {
 	uidKey := metadata.GetOTelUIDFromKind(strings.ToLower(ref.Kind))
-	return map[string]string{
-		constants.K8sKeyWorkLoadKind: ref.Kind,
-		constants.K8sKeyWorkLoadName: ref.Name,
-		labelKey: ref.Name,
-		uidKey: string(ref.UID),
-	}
+	dest[constants.K8sKeyWorkLoadKind] = ref.Kind
+	dest[constants.K8sKeyWorkLoadName] = ref.Name
+	dest[labelKey] = ref.Name
+	dest[uidKey] = string(ref.UID)
 }
 
 func getPodContainerProperties(pod *corev1.Pod) map[experimentalmetricmetadata.ResourceID]*metadata.KubernetesMetadata {
diff --git a/receiver/k8sclusterreceiver/internal/service/service.go b/receiver/k8sclusterreceiver/internal/service/service.go
index 58792c71af5d..47ee61ba2fd4 100644
--- a/receiver/k8sclusterreceiver/internal/service/service.go
+++ b/receiver/k8sclusterreceiver/internal/service/service.go
@@ -25,16 +25,12 @@ func Transform(service *corev1.Service) *corev1.Service {
 }
 
 // GetPodServiceTags returns a set of services associated with the pod.
-func GetPodServiceTags(pod *corev1.Pod, services cache.Store) map[string]string {
-	properties := map[string]string{}
-
+func GetPodServiceTags(dest map[string]string, pod *corev1.Pod, services cache.Store) {
 	for _, ser := range services.List() {
 		serObj := ser.(*corev1.Service)
 		if serObj.Namespace == pod.Namespace &&
 			labels.Set(serObj.Spec.Selector).AsSelectorPreValidated().Matches(labels.Set(pod.Labels)) {
-			properties[fmt.Sprintf("%s%s", constants.K8sServicePrefix, serObj.Name)] = ""
+			dest[fmt.Sprintf("%s%s", constants.K8sServicePrefix, serObj.Name)] = ""
 		}
 	}
-
-	return properties
 }
diff --git a/receiver/k8sclusterreceiver/watcher_test.go b/receiver/k8sclusterreceiver/watcher_test.go
index c9b104e6d906..095bd857d9c9 100644
--- a/receiver/k8sclusterreceiver/watcher_test.go
+++ b/receiver/k8sclusterreceiver/watcher_test.go
@@ -22,7 +22,6 @@ import (
 	"k8s.io/client-go/informers"
 	"k8s.io/client-go/kubernetes/fake"
 
-	"github.com/open-telemetry/opentelemetry-collector-contrib/internal/common/maps"
 	"github.com/open-telemetry/opentelemetry-collector-contrib/pkg/experimentalmetricmetadata"
 	"github.com/open-telemetry/opentelemetry-collector-contrib/receiver/k8sclusterreceiver/internal/gvk"
 	"github.com/open-telemetry/opentelemetry-collector-contrib/receiver/k8sclusterreceiver/internal/metadata"
@@ -550,8 +549,10 @@ func TestObjMetadata(t *testing.T) {
 	}
 }
 
-var allPodMetadata = func(metadata map[string]string) map[string]string {
-	out := maps.MergeStringMaps(metadata, commonPodMetadata)
+var allPodMetadata = func(out map[string]string) map[string]string {
+	for k, v := range commonPodMetadata {
+		out[k] = v
+	}
 	return out
 }
 
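For context, the standalone Go sketch below is not part of the change set; it illustrates the two idioms this diff switches to: cloning a label map with the standard library maps package (which replaces MergeStringMaps(map[string]string{}, labels)), and merging extra properties into an existing destination map in place, as the reworked collectPod*Properties and GetPodServiceTags helpers now do. The mergeInto helper and the example keys are illustrative only.

package main

import (
	"fmt"
	"maps"
)

// mergeInto copies src into dest in place. It mirrors the dest-map style used
// by the pod helpers above; the standard library's maps.Copy(dest, src) does
// the same thing.
func mergeInto(dest, src map[string]string) {
	for k, v := range src {
		dest[k] = v
	}
}

func main() {
	labels := map[string]string{"app": "demo"}

	// maps.Clone (Go 1.21+) returns a shallow copy of its argument. Note that
	// it returns nil for a nil input, so code that writes into the clone right
	// away assumes the source label map is non-nil.
	meta := maps.Clone(labels)
	meta["k8s.workload.kind"] = "Deployment"

	mergeInto(meta, map[string]string{"demo-service": ""})
	fmt.Println(meta) // map[app:demo demo-service: k8s.workload.kind:Deployment]
}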