@@ -26,7 +26,7 @@ metadata:
2626 app.kubernetes.io/managed-by : Helm
2727 app.kubernetes.io/name : nr-k8s-otel-collector
2828 app.kubernetes.io/version : 1.2.0
29- helm.sh/chart : nr-k8s-otel-collector-0.10.0
29+ helm.sh/chart : nr-k8s-otel-collector-0.10.7
3030 annotations :
3131---
3232# Source: nr-k8s-otel-collector/templates/daemonset-configmap.yaml
@@ -40,7 +40,7 @@ metadata:
4040 app.kubernetes.io/managed-by : Helm
4141 app.kubernetes.io/name : nr-k8s-otel-collector
4242 app.kubernetes.io/version : 1.2.0
43- helm.sh/chart : nr-k8s-otel-collector-0.10.0
43+ helm.sh/chart : nr-k8s-otel-collector-0.10.7
4444data :
4545 daemonset-config.yaml : |
4646 receivers:
7373 metrics:
7474 system.filesystem.utilization:
7575 enabled: true
76+ # Exclude container storage overlay mount to avoid permission errors when running as non-root
77+ # This is only relevant for CRI-O container runtime (used by OKE, OpenShift, etc.)
78+ exclude_mount_points:
79+ mount_points:
80+ - ^/var/lib/containers/storage/overlay$
81+ match_type: regexp
7682 disk:
7783 metrics:
7884 system.disk.merged:
8591 metrics:
8692 system.network.connections:
8793 enabled: false
88- # Uncomment to enable process metrics, which can be noisy but valuable.
89- # processes:
90- # process:
91- # metrics:
92- # process.cpu.utilization:
93- # enabled: true
94- # process.cpu.time:
95- # enabled: false
96- # mute_process_name_error: true
97- # mute_process_exe_error: true
98- # mute_process_io_error: true
99- # mute_process_user_error: true
10094
10195 kubeletstats:
10296 collection_interval: 1m
@@ -430,6 +424,8 @@ data:
430424
431425 metricstransform/hostmetrics:
432426 transforms:
427+ # When ATP is disabled: Only include a limited subset of process metrics.
428+ # This matches the original behavior before ATP was introduced.
433429 - include: process\.(cpu\.utilization|disk\.io|memory\.(usage|virtual))
434430 action: update
435431 match_type: regexp
@@ -626,6 +622,8 @@ data:
626622 detectors: [gcp, eks, ec2, aks, azure, oraclecloud]
627623 timeout: 2s
628624 override: false
625+ eks:
626+ node_from_env_var: KUBE_NODE_NAME
629627
630628 resource/newrelic:
631629 attributes:
@@ -635,7 +633,7 @@ data:
635633 value: opentelemetry-demo
636634 - key: "newrelic.chart.version"
637635 action: upsert
638- value: 0.10.0
636+ value: 0.10.7
639637 - key: newrelic.entity.type
640638 action: upsert
641639 value: "k8s"
@@ -763,6 +761,12 @@ data:
763761 - context: metric
764762 condition: instrumentation_scope.name == "github.com/open-telemetry/opentelemetry-collector-contrib/receiver/hostmetricsreceiver/internal/scraper/pagingscraper"
765763 pipelines: [metrics/nr]
764+ - context: metric
765+ condition: instrumentation_scope.name == "github.com/open-telemetry/opentelemetry-collector-contrib/receiver/hostmetricsreceiver/internal/scraper/processscraper"
766+ pipelines: [metrics/nr]
767+ - context: metric
768+ condition: instrumentation_scope.name == "github.com/open-telemetry/opentelemetry-collector-contrib/receiver/hostmetricsreceiver/internal/scraper/processesscraper"
769+ pipelines: [metrics/nr]
766770 - context: metric
767771 condition: instrumentation_scope.name == "github.com/open-telemetry/opentelemetry-collector-contrib/receiver/prometheusreceiver"
768772 pipelines: [metrics/nr_prometheus_cadv_kubelet]
@@ -905,7 +909,7 @@ metadata:
905909 app.kubernetes.io/managed-by : Helm
906910 app.kubernetes.io/name : nr-k8s-otel-collector
907911 app.kubernetes.io/version : 1.2.0
908- helm.sh/chart : nr-k8s-otel-collector-0.10.0
912+ helm.sh/chart : nr-k8s-otel-collector-0.10.7
909913data :
910914 deployment-config.yaml : |
911915 receivers:
@@ -1040,7 +1044,26 @@ data:
10401044 replacement: scheduler
10411045
10421046 processors:
1043-
1047+ filter/python-system-metrics:
1048+ error_mode: ignore
1049+ metrics:
1050+ metric:
1051+ - instrumentation_scope.name == "opentelemetry.instrumentation.system_metrics"
1052+ resource/otel-demo:
1053+ attributes:
1054+ - action: insert
1055+ from_attribute: k8s.pod.uid
1056+ key: service.instance.id
1057+ resourcedetection/otel-demo:
1058+ detectors:
1059+ - env
1060+ transform/otel-demo:
1061+ error_mode: ignore
1062+ trace_statements:
1063+ - context: span
1064+ statements:
1065+ - replace_pattern(name, "\\?.*", "")
1066+ - replace_match(name, "GET /api/products/*", "GET /api/products/{productId}")
10441067
10451068
10461069 transform/promote_job_label:
@@ -1411,7 +1434,7 @@ data:
14111434 value: opentelemetry-demo
14121435 - key: "newrelic.chart.version"
14131436 action: upsert
1414- value: 0.10.0
1437+ value: 0.10.7
14151438 - key: newrelic.entity.type
14161439 action: upsert
14171440 value: "k8s"
@@ -1429,7 +1452,7 @@ data:
14291452 value: opentelemetry-demo
14301453 - key: "newrelic.chart.version"
14311454 action: upsert
1432- value: 0.10.0
1455+ value: 0.10.7
14331456
14341457 transform/events:
14351458 log_statements:
@@ -1583,7 +1606,7 @@ data:
15831606 send_batch_size : 800
15841607
15851608 exporters:
1586-
1609+ debug: {}
15871610
15881611 otlphttp/newrelic:
15891612 endpoint: "https://otlp.nr-data.net"
@@ -1636,7 +1659,42 @@ data:
16361659 metrics:
16371660 readers:
16381661 pipelines:
1639-
1662+ logs/otel-demo:
1663+ exporters:
1664+ - otlphttp/newrelic
1665+ processors:
1666+ - memory_limiter
1667+ - resourcedetection/otel-demo
1668+ - resource/otel-demo
1669+ - transform/otel-demo
1670+ - batch
1671+ receivers:
1672+ - otlp
1673+ metrics/otel-demo:
1674+ exporters:
1675+ - otlphttp/newrelic
1676+ - debug
1677+ processors:
1678+ - memory_limiter
1679+ - filter/python-system-metrics
1680+ - resourcedetection/otel-demo
1681+ - resource/otel-demo
1682+ - k8sattributes/ksm
1683+ - batch
1684+ receivers:
1685+ - otlp
1686+ traces/otel-demo:
1687+ exporters:
1688+ - otlphttp/newrelic
1689+ processors:
1690+ - memory_limiter
1691+ - resourcedetection/otel-demo
1692+ - resource/otel-demo
1693+ - transform/otel-demo
1694+ - k8sattributes/ksm
1695+ - batch
1696+ receivers:
1697+ - otlp
16401698 metrics/ingress:
16411699 receivers:
16421700 - prometheus/ksm
@@ -1902,7 +1960,7 @@ metadata:
19021960 app.kubernetes.io/managed-by : Helm
19031961 app.kubernetes.io/name : nr-k8s-otel-collector
19041962 app.kubernetes.io/version : 1.2.0
1905- helm.sh/chart : nr-k8s-otel-collector-0.10.0
1963+ helm.sh/chart : nr-k8s-otel-collector-0.10.7
19061964rules :
19071965 - apiGroups :
19081966   - ""
@@ -2007,7 +2065,7 @@ metadata:
20072065 app.kubernetes.io/managed-by : Helm
20082066 app.kubernetes.io/name : nr-k8s-otel-collector
20092067 app.kubernetes.io/version : 1.2.0
2010- helm.sh/chart : nr-k8s-otel-collector-0.10.0
2068+ helm.sh/chart : nr-k8s-otel-collector-0.10.7
20112069subjects :
20122070 - kind : ServiceAccount
20132071 name : nr-k8s-otel-collector
@@ -2055,7 +2113,7 @@ metadata:
20552113 app.kubernetes.io/managed-by : Helm
20562114 app.kubernetes.io/name : nr-k8s-otel-collector
20572115 app.kubernetes.io/version : 1.2.0
2058- helm.sh/chart : nr-k8s-otel-collector-0.10.0
2116+ helm.sh/chart : nr-k8s-otel-collector-0.10.7
20592117spec :
20602118 type : ClusterIP
20612119 ports :
@@ -2084,7 +2142,7 @@ metadata:
20842142 app.kubernetes.io/managed-by : Helm
20852143 app.kubernetes.io/name : nr-k8s-otel-collector
20862144 app.kubernetes.io/version : 1.2.0
2087- helm.sh/chart : nr-k8s-otel-collector-0.10.0
2145+ helm.sh/chart : nr-k8s-otel-collector-0.10.7
20882146spec :
20892147 selector :
20902148 matchLabels :
@@ -2098,7 +2156,7 @@ spec:
20982156 app.kubernetes.io/name : nr-k8s-otel-collector
20992157 component : daemonset
21002158 annotations :
2101- checksum/config : 2b30748172ebc1d25c34cde7dd6c3436120e74028d504248a682ba0b3fc0a9eb
2159+ checksum/config : 581a5a058bba11cac3cf31ef9ac8d967916a62715414d5ab3290d1649f80193a
21022160 spec :
21032161 serviceAccountName : nr-k8s-otel-collector
21042162 initContainers :
@@ -2125,7 +2183,13 @@ spec:
21252183 export NODE_CPU_ALLOCATABLE=$(awk "BEGIN {print ${NODE_CPU_ALLOCATABLE%?} / 1000}")
21262184 fi
21272185
2128- export NODE_MEMORY_ALLOCATABLE=$(kubectl get node $NODE_NAME -o jsonpath='{.status.allocatable.memory}' | numfmt --from=auto)
2186+ export NODE_MEMORY_ALLOCATABLE=$(kubectl get node $NODE_NAME -o jsonpath='{.status.allocatable.memory}' | awk '
2187+ /Ki$/ {printf "%.0f\n", $1 * 1024; next}
2188+ /Mi$/ {printf "%.0f\n", $1 * 1024^2; next}
2189+ /Gi$/ {printf "%.0f\n", $1 * 1024^3; next}
2190+ /m$/ {printf "%.0f\n", $1 / 1000; next}
2191+ {print $1}
2192+ ')
21292193
21302194           if [[ -z "$NODE_MEMORY_ALLOCATABLE" ]] || [[ "$NODE_MEMORY_ALLOCATABLE" == "0" ]]; then
21312195 echo "Could not retrieve Memory allocatable for node $NODE_NAME"
@@ -2168,7 +2232,7 @@ spec:
21682232 - --config
21692233 - /config/daemonset-config.yaml
21702234 - --feature-gates
2171- - metricsgeneration.MatchAttributes
2235+ - metricsgeneration.MatchAttributes,-processor.resourcedetection.propagateerrors
21722236 resources :
21732237 {}
21742238 env :
@@ -2317,7 +2381,7 @@ metadata:
23172381 app.kubernetes.io/managed-by : Helm
23182382 app.kubernetes.io/name : nr-k8s-otel-collector
23192383 app.kubernetes.io/version : 1.2.0
2320- helm.sh/chart : nr-k8s-otel-collector-0.10.0
2384+ helm.sh/chart : nr-k8s-otel-collector-0.10.7
23212385spec :
23222386 replicas : 1
23232387 minReadySeconds : 5
@@ -2333,7 +2397,7 @@ spec:
23332397 app.kubernetes.io/name : nr-k8s-otel-collector
23342398 component : deployment
23352399 annotations :
2336- checksum/config : a0d06691977232bc8023b721d7ca97feb36408f8447b2b57fac3eb13881631ef
2400+ checksum/config : f27ed1b7d5e8a7032908eed8272a4e5a2afecff25ab8aee138d597836fa15c2d
23372401 spec :
23382402 serviceAccountName : nr-k8s-otel-collector
23392403 containers :
0 commit comments