diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
index f950389e079..6b409dccfc2 100644
--- a/.github/CODEOWNERS
+++ b/.github/CODEOWNERS
@@ -111,7 +111,21 @@
/integration-tests/config-jest-multiproject.js @DataDog/ci-app-libraries
/integration-tests/config-jest.js @DataDog/ci-app-libraries
/integration-tests/cypress-config.json @DataDog/ci-app-libraries
+/integration-tests/cypress-custom-after-hooks.config.js @DataDog/ci-app-libraries
+/integration-tests/cypress-custom-after-hooks.config.mjs @DataDog/ci-app-libraries
+/integration-tests/cypress-auto-esm.config.mjs @DataDog/ci-app-libraries
+/integration-tests/cypress-double-run.js @DataDog/ci-app-libraries
+/integration-tests/cypress-double-run.mjs @DataDog/ci-app-libraries
/integration-tests/cypress-esm-config.mjs @DataDog/ci-app-libraries
+/integration-tests/cypress-legacy-plugin.config.js @DataDog/ci-app-libraries
+/integration-tests/cypress-legacy-plugin.config.mjs @DataDog/ci-app-libraries
+/integration-tests/cypress-plain-object-auto.config.js @DataDog/ci-app-libraries
+/integration-tests/cypress-plain-object-auto.config.mjs @DataDog/ci-app-libraries
+/integration-tests/cypress-plain-object-manual.config.js @DataDog/ci-app-libraries
+/integration-tests/cypress-plain-object-manual.config.mjs @DataDog/ci-app-libraries
+/integration-tests/cypress-return-config.config.js @DataDog/ci-app-libraries
+/integration-tests/cypress-return-config.config.mjs @DataDog/ci-app-libraries
+/integration-tests/cypress-typescript.config.ts @DataDog/ci-app-libraries
/integration-tests/cypress.config.js @DataDog/ci-app-libraries
/integration-tests/my-nyc.config.js @DataDog/ci-app-libraries
/integration-tests/playwright.config.js @DataDog/ci-app-libraries
@@ -143,6 +157,8 @@
/packages/**/*.dsm.spec.js @DataDog/data-streams-monitoring
# API SDK Capabilities
+/eslint-rules/ @DataDog/apm-sdk-capabilities-js
+
/integration-tests/log_injection.spec.js @DataDog/apm-sdk-capabilities-js
/integration-tests/opentelemetry/ @DataDog/apm-sdk-capabilities-js
/integration-tests/opentelemetry-logs.spec.js @DataDog/apm-sdk-capabilities-js
@@ -217,6 +233,7 @@
/integration-tests/bun/ @DataDog/lang-platform-js
/integration-tests/init.spec.js @DataDog/lang-platform-js
/integration-tests/package-guardrails.spec.js @DataDog/lang-platform-js
+/integration-tests/package-guardrails/flush.js @DataDog/lang-platform-js
/integration-tests/startup.spec.js @DataDog/lang-platform-js
/packages/datadog-core @DataDog/lang-platform-js
diff --git a/.github/chainguard/self.github.release.push-tags.sts.yaml b/.github/chainguard/self.github.release.push-tags.sts.yaml
new file mode 100644
index 00000000000..c69957e33c7
--- /dev/null
+++ b/.github/chainguard/self.github.release.push-tags.sts.yaml
@@ -0,0 +1,12 @@
+issuer: https://token.actions.githubusercontent.com
+
+subject: repo:DataDog/dd-trace-js:environment:npm
+
+claim_pattern:
+ event_name: (push|workflow_dispatch)
+ ref: refs/heads/(v[345]\.x|master)
+ repository: DataDog/dd-trace-js
+ job_workflow_ref: DataDog/dd-trace-js/\.github/workflows/release\.yml@refs/heads/(v[345]\.x|master)
+
+permissions:
+ contents: write
diff --git a/.github/workflows/flakiness.yml b/.github/workflows/flakiness.yml
index 9a069eecbba..02dec302075 100644
--- a/.github/workflows/flakiness.yml
+++ b/.github/workflows/flakiness.yml
@@ -36,6 +36,7 @@ jobs:
with:
method: chat.postMessage
token: ${{ secrets.SLACK_BOT_TOKEN }}
+ errors: true
payload: |
channel: ${{ secrets.SLACK_CHANNEL_ID }}
blocks:
diff --git a/.github/workflows/project.yml b/.github/workflows/project.yml
index a9801e5d064..6df7e4e985e 100644
--- a/.github/workflows/project.yml
+++ b/.github/workflows/project.yml
@@ -52,6 +52,14 @@ jobs:
- uses: ./.github/actions/install
- run: npm run verify-exercised-tests
+ generated-config-types:
+ runs-on: ubuntu-latest
+ name: Generated config types
+ steps:
+ - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
+ - uses: ./.github/actions/node/latest
+ - run: npm run verify:config:types
+
workflow-job-names:
runs-on: ubuntu-latest
name: Workflow job names (unique)
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index 36f3525a84b..6f11f755020 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -21,11 +21,14 @@ jobs:
url: https://npmjs.com/package/dd-trace
permissions:
id-token: write
- contents: write
- pull-requests: read
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
steps:
+ - uses: DataDog/dd-octo-sts-action@acaa02eee7e3bb0839e4272dacb37b8f3b58ba80 # v1.0.3
+ id: octo-sts
+ with:
+ scope: DataDog/dd-trace-js
+ policy: self.github.release.push-tags
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- uses: ./.github/actions/node
- run: npm publish --tag latest-node14
@@ -35,7 +38,7 @@ jobs:
echo "json=$content" >> $GITHUB_OUTPUT
- run: |
git tag v${{ fromJson(steps.pkg.outputs.json).version }}
- git push origin v${{ fromJson(steps.pkg.outputs.json).version }}
+ git push https://x-access-token:${{ steps.octo-sts.outputs.token }}@github.com/${{ github.repository }}.git v${{ fromJson(steps.pkg.outputs.json).version }}
- run: node scripts/release/notes
publish-v4:
@@ -46,11 +49,14 @@ jobs:
url: https://npmjs.com/package/dd-trace
permissions:
id-token: write
- contents: write
- pull-requests: read
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
steps:
+ - uses: DataDog/dd-octo-sts-action@acaa02eee7e3bb0839e4272dacb37b8f3b58ba80 # v1.0.3
+ id: octo-sts
+ with:
+ scope: DataDog/dd-trace-js
+ policy: self.github.release.push-tags
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
- uses: ./.github/actions/node
- run: npm publish --tag latest-node16
@@ -60,7 +66,7 @@ jobs:
echo "json=$content" >> $GITHUB_OUTPUT
- run: |
git tag v${{ fromJson(steps.pkg.outputs.json).version }}
- git push origin v${{ fromJson(steps.pkg.outputs.json).version }}
+ git push https://x-access-token:${{ steps.octo-sts.outputs.token }}@github.com/${{ github.repository }}.git v${{ fromJson(steps.pkg.outputs.json).version }}
- run: node scripts/release/notes
publish-latest:
@@ -71,12 +77,17 @@ jobs:
url: https://npmjs.com/package/dd-trace
permissions:
id-token: write
- contents: write
- pull-requests: read
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
steps:
+ - uses: DataDog/dd-octo-sts-action@acaa02eee7e3bb0839e4272dacb37b8f3b58ba80 # v1.0.3
+ id: octo-sts
+ with:
+ scope: DataDog/dd-trace-js
+ policy: self.github.release.push-tags
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
+ with:
+ persist-credentials: false
- uses: ./.github/actions/node
- run: npm publish
- id: pkg
@@ -85,7 +96,7 @@ jobs:
echo "json=$content" >> $GITHUB_OUTPUT
- run: |
git tag v${{ fromJson(steps.pkg.outputs.json).version }}
- git push origin v${{ fromJson(steps.pkg.outputs.json).version }}
+ git push https://x-access-token:${{ steps.octo-sts.outputs.token }}@github.com/${{ github.repository }}.git v${{ fromJson(steps.pkg.outputs.json).version }}
- run: node scripts/release/notes --latest
docs:
@@ -130,9 +141,15 @@ jobs:
url: https://npmjs.com/package/dd-trace
permissions:
id-token: write
- contents: write
steps:
+ - uses: DataDog/dd-octo-sts-action@acaa02eee7e3bb0839e4272dacb37b8f3b58ba80 # v1.0.3
+ id: octo-sts
+ with:
+ scope: DataDog/dd-trace-js
+ policy: self.github.release.push-tags
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
+ with:
+ persist-credentials: false
- uses: ./.github/actions/node
- uses: ./.github/actions/install
- id: pkg
@@ -143,5 +160,22 @@ jobs:
- run: npm publish --tag dev
- run: |
git tag --force dev
- git push origin :refs/tags/dev
- git push origin --tags
+ git push https://x-access-token:${{ steps.octo-sts.outputs.token }}@github.com/${{ github.repository }}.git :refs/tags/dev
+ git push https://x-access-token:${{ steps.octo-sts.outputs.token }}@github.com/${{ github.repository }}.git --tags
+
+ status:
+ needs: ["publish-v3", "publish-v4", "publish-latest"]
+ if: always() && contains(needs.*.result, 'success')
+ runs-on: ubuntu-latest
+ permissions:
+ id-token: write
+ contents: read
+ pull-requests: read
+ env:
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ steps:
+ - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
+ with:
+ sparse-checkout: scripts/release/status.js
+ - uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v6.2.0
+ - run: node scripts/release/status.js
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 182a99016f0..9c18fb3f318 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -466,3 +466,244 @@ $ yarn bench
```
[1]: https://docs.datadoghq.com/help
+
+## Working with Configurations
+
+`packages/dd-trace/src/config/supported-configurations.json` is the source of truth for tracer configuration metadata.
+
+When you add a new configuration here, the config system can usually derive:
+
+- default values
+- env var parsing
+- `tracer.init({...})` option mapping
+- generated config types
+- config telemetry
+
+## What A Developer Needs To Know
+
+Each entry defines:
+
+- the canonical env var name
+- the runtime type
+- the default value
+- the programmatic option path
+- optional aliases, validation, and transforms
+
+Minimal example:
+
+```json
+"DD_AGENT_HOST": [{
+ "implementation": "E",
+ "type": "string",
+ "configurationNames": ["hostname"],
+ "default": "127.0.0.1",
+ "aliases": ["DD_TRACE_AGENT_HOSTNAME"]
+}]
+```
+
+Important fields:
+
+- `type`: parser to use for environment variables. Common values are `string`, `boolean`, `int`, `decimal`, `array`, `map`, `json`.
+- `default`: parsed into the runtime type. `null` means `undefined` at runtime.
+- `configurationNames`: programmatic option names. The first entry becomes the main internal property path.
+- `internalPropertyName`: use this instead of `configurationNames` when the runtime property path should differ from the public option name.
+- `transform`: extra conversion after parsing. This applies to both env vars and programmatic options.
+- `allowed`: whitelist of accepted values.
+- `aliases`: old or alternate env var names.
+- `deprecated`: emits a deprecation warning when used.
+- `description`: developer-facing note in the JSON.
+- `implementation`: metadata only in the current flow.
+
+## Runtime Flow
+
+```mermaid
+flowchart LR
+ A["supported-configurations.json"] --> B["defaults.js
+ build defaults + lookup tables"]
+ A --> C["helper.js
+ aliases + deprecations"]
+ A --> D["generate-config-types.js
+ generated-config-types.d.ts"]
+ B --> E["config/index.js"]
+ C --> E
+ E --> F["Config singleton"]
+ E --> G["Config telemetry"]
+ H["remote_config.js"] --> E
+```
+
+Load order in `config/index.js`:
+
+1. defaults
+2. local stable config
+3. env vars
+4. fleet stable config
+5. `tracer.init({...})` options
+6. calculated values
+
+## Examples That Matter
+
+### Simple boolean
+
+```json
+"DD_RUNTIME_METRICS_ENABLED": [{
+ "type": "boolean",
+ "configurationNames": ["runtimeMetrics.enabled", "runtimeMetrics"],
+ "default": "false"
+}]
+```
+
+Both of these work:
+
+```js
+tracer.init({ runtimeMetrics: true })
+```
+
+```js
+tracer.init({
+ runtimeMetrics: {
+ enabled: true
+ }
+})
+```
+
+Result:
+
+```js
+config.runtimeMetrics.enabled === true
+```
+
+### Decimal with transform
+
+```json
+"DD_TRACE_SAMPLE_RATE": [{
+ "type": "decimal",
+ "configurationNames": ["sampleRate", "ingestion.sampleRate"],
+ "default": null,
+ "transform": "sampleRate"
+}]
+```
+
+The `sampleRate` transform validates and clamps the value to the supported `0..1` range.
+
+### Array with transform
+
+```json
+"DD_TRACE_HEADER_TAGS": [{
+ "type": "array",
+ "configurationNames": ["headerTags"],
+ "default": "",
+ "transform": "stripColonWhitespace"
+}]
+```
+
+This matters because the transform is reused for both input styles:
+
+```bash
+DD_TRACE_HEADER_TAGS="x-user-id : user.id, x-team : team"
+```
+
+```js
+tracer.init({
+ headerTags: ['x-user-id : user.id', 'x-team : team']
+})
+```
+
+Both become:
+
+```js
+config.headerTags
+// ['x-user-id:user.id', 'x-team:team']
+```
+
+### JSON with nested output
+
+```json
+"DD_TRACE_SAMPLING_RULES": [{
+ "type": "json",
+ "configurationNames": ["samplingRules"],
+ "default": "[]",
+ "transform": "toCamelCase"
+}]
+```
+
+```bash
+DD_TRACE_SAMPLING_RULES='[{"sample_rate":0.5,"service":"api"}]'
+```
+
+Result:
+
+```js
+config.samplingRules
+// [{ sampleRate: 0.5, service: 'api' }]
+```
+
+### Internal property path
+
+```json
+"DD_API_KEY": [{
+ "type": "string",
+ "default": null,
+ "internalPropertyName": "apiKey"
+}]
+```
+
+Result:
+
+```js
+config.apiKey
+```
+
+## Nested Properties
+
+Dot notation creates nested objects on the config singleton.
+
+```json
+"DD_API_SECURITY_ENABLED": [{
+ "type": "boolean",
+ "configurationNames": [
+ "appsec.apiSecurity.enabled",
+ "experimental.appsec.apiSecurity.enabled"
+ ],
+ "default": "true"
+}]
+```
+
+```js
+tracer.init({
+ appsec: {
+ apiSecurity: {
+ enabled: true
+ }
+ }
+})
+```
+
+Result:
+
+```js
+config.appsec.apiSecurity.enabled === true
+```
+
+## Telemetry And Remote Config
+
+Config telemetry is handled automatically by the standard config flow.
+
+If your config is defined in `supported-configurations.json` and goes through the normal parsing/application path, telemetry usually works without extra code. Telemetry records the canonical name, the normalized value, and the origin such as `default`, `env_var`, `code`, `remote_config`, or `calculated`.
+
+Remote config is not a separate system. `packages/dd-trace/src/config/remote_config.js` translates remote field names into local option names and then applies them through `config.setRemoteConfig(...)`. After that, the normal pipeline runs again: apply options, recompute calculated values, and update telemetry.
+
+## Adding A New Configuration
+
+Use this checklist:
+
+1. Add the new entry to `packages/dd-trace/src/config/supported-configurations.json`.
+2. Pick the correct `type` and `default`.
+3. Add `configurationNames` if the setting should be exposed via `tracer.init({...})`. Add the documentation to `index.d.ts`.
+4. Use `internalPropertyName` if the runtime property path should differ.
+5. Add `transform` or `allowed` only if the raw parsed value is not enough.
+6. Add `aliases` or `deprecated` only for compatibility.
+7. Regenerate types if needed.
+8. Add tests for env vars, programmatic options, and edge cases.
+
+## Mental Model
+
+Think of `supported-configurations.json` as the schema for one config singleton.
+
+You describe the input shape once, and the runtime uses that to build defaults, parse env vars, map programmatic options, generate types, and emit telemetry.
diff --git a/benchmark/sirun/exporting-pipeline/index.js b/benchmark/sirun/exporting-pipeline/index.js
index b8588c62973..f3395667f00 100644
--- a/benchmark/sirun/exporting-pipeline/index.js
+++ b/benchmark/sirun/exporting-pipeline/index.js
@@ -7,7 +7,7 @@ const SpanProcessor = require('../../../packages/dd-trace/src/span_processor')
const Exporter = require('../../../packages/dd-trace/src/exporters/agent/index')
const PrioritySampler = require('../../../packages/dd-trace/src/priority_sampler')
const id = require('../../../packages/dd-trace/src/id')
-const defaults = require('../../../packages/dd-trace/src/config/defaults')
+const { defaults } = require('../../../packages/dd-trace/src/config/defaults')
const config = {
url: `http://${defaults.hostname}:${defaults.port}`,
diff --git a/benchmark/sirun/statsd.js b/benchmark/sirun/statsd.js
index 462889874f1..dc71e6d71a3 100644
--- a/benchmark/sirun/statsd.js
+++ b/benchmark/sirun/statsd.js
@@ -1,7 +1,7 @@
'use strict'
const dgram = require('dgram')
-const defaults = require('../../packages/dd-trace/src/config/defaults')
+const { defaults } = require('../../packages/dd-trace/src/config/defaults')
const port = process.env.SIRUN_STATSD_PORT || defaults['dogstatsd.port']
class StatsD {
diff --git a/docs/test.ts b/docs/test.ts
index c6418d5780d..979c84abfaf 100644
--- a/docs/test.ts
+++ b/docs/test.ts
@@ -522,6 +522,14 @@ const res = {} as OutgoingMessage
resBlockRequest = tracer.appsec.blockRequest(req, res)
tracer.appsec.setUser(user)
+// Profiling custom labels
+tracer.profiling.setCustomLabelKeys(['customer', 'region'])
+tracer.profiling.setCustomLabelKeys(new Set(['customer', 'region']))
+const labelResult: number = tracer.profiling.runWithLabels({ customer: 'acme', region: 'us-east' }, () => 42)
+tracer.profiling.runWithLabels({ tier: 'premium' }, () => {
+ tracer.profiling.runWithLabels({ region: 'eu-west' }, () => {})
+})
+
// OTel TracerProvider registers and provides a tracer
const provider: opentelemetry.TracerProvider = new tracer.TracerProvider();
provider.register();
diff --git a/eslint-rules/eslint-config-names-sync.mjs b/eslint-rules/eslint-config-names-sync.mjs
new file mode 100644
index 00000000000..ed716f68893
--- /dev/null
+++ b/eslint-rules/eslint-config-names-sync.mjs
@@ -0,0 +1,578 @@
+import fs from 'node:fs'
+import path from 'node:path'
+
+import ts from 'typescript'
+
+const IGNORED_CONFIGURATION_NAMES = new Set([
+ 'tracePropagationStyle',
+ 'tracing',
+])
+const UNSUPPORTED_CONFIGURATION_ROOTS = new Set([
+ 'isCiVisibility',
+ 'logger',
+ 'lookup',
+ 'plugins',
+])
+
+/**
+ * @typedef {{
+ * node: import('typescript').InterfaceDeclaration | import('typescript').TypeAliasDeclaration
+ * namespaceKey: string
+ * key: string
+ * }} DeclarationEntry
+ */
+
+/**
+ * @typedef {{
+ * hasEnvDescendant: boolean
+ * hasBooleanBranch: boolean
+ * hasObjectBranch: boolean
+ * }} TypeInspectionResult
+ */
+
+/**
+ * @typedef {{
+ * names: Set<string>
+ * primaryEnvTargets: Map<string, Set<string>>
+ * knownAliasEnvNames: Set<string>
+ * }} SupportedConfigurationInfo
+ */
+
+/**
+ * @typedef {{
+ * declarations: Map<string, DeclarationEntry>
+ * primaryEnvTargets: Map<string, Set<string>>
+ * knownAliasEnvNames: Set<string>
+ * names: Set<string>
+ * visitedDeclarations: Set<string>
+ * envTagNamesCache: WeakMap<import('typescript').Node, Set<string>>
+ * interfacePropertiesCache: WeakMap<
+ * import('typescript').InterfaceDeclaration,
+ * Map<string, import('typescript').PropertySignature>
+ * >
+ * }} InspectionState
+ */
+
+/** @type {InspectionState} */
+let currentInspectionState
+
+/**
+ * @param {Partial} [overrides]
+ * @returns {TypeInspectionResult}
+ */
+function createInspectionResult (overrides) {
+ return {
+ hasEnvDescendant: false,
+ hasBooleanBranch: false,
+ hasObjectBranch: false,
+ ...overrides,
+ }
+}
+
+/**
+ * @param {string} filePath
+ * @returns {SupportedConfigurationInfo}
+ */
+function getSupportedConfigurationInfo (filePath) {
+ const parsed = JSON.parse(fs.readFileSync(filePath, 'utf8'))
+ const supportedConfigurations = parsed?.supportedConfigurations
+
+ const names = new Set()
+ const primaryEnvTargets = new Map()
+ const knownAliasEnvNames = new Set()
+
+ /**
+ * @param {string} envName
+ * @param {Set<string>} targets
+ */
+ function addPrimaryEnvTargets (envName, targets) {
+ let existingTargets = primaryEnvTargets.get(envName)
+ if (!existingTargets) {
+ existingTargets = new Set()
+ primaryEnvTargets.set(envName, existingTargets)
+ }
+
+ for (const target of targets) {
+ existingTargets.add(target)
+ }
+ }
+
+ for (const [envName, entries] of Object.entries(supportedConfigurations)) {
+ /** @type {Set<string>} */
+ const targets = new Set()
+
+ for (const entry of entries) {
+ if (typeof entry.internalPropertyName === 'string') {
+ targets.add(entry.internalPropertyName)
+ }
+
+ for (const alias of entry.aliases ?? []) {
+ if (typeof alias === 'string') {
+ knownAliasEnvNames.add(alias)
+ }
+ }
+
+ for (const name of entry.configurationNames ?? []) {
+ if (typeof name === 'string' && !IGNORED_CONFIGURATION_NAMES.has(name)) {
+ names.add(name)
+ targets.add(name)
+ }
+ }
+ }
+
+ addPrimaryEnvTargets(envName, targets)
+ }
+
+ return { names, primaryEnvTargets, knownAliasEnvNames }
+}
+
+/**
+ * @param {import('typescript').EntityName} entityName
+ * @returns {string}
+ */
+function getEntityNameText (entityName) {
+ if (ts.isIdentifier(entityName)) {
+ return entityName.text
+ }
+
+ return `${getEntityNameText(entityName.left)}.${entityName.right.text}`
+}
+
+/**
+ * @param {import('typescript').SourceFile} sourceFile
+ * @returns {Map<string, DeclarationEntry>}
+ */
+function getDeclarationRegistry (sourceFile) {
+ const declarations = new Map()
+
+ /**
+ * @param {readonly import('typescript').Statement[]} statements
+ * @param {string} namespaceKey
+ */
+ function visitStatements (statements, namespaceKey) {
+ for (const statement of statements) {
+ if (ts.isModuleDeclaration(statement)) {
+ visitModuleDeclaration(statement, namespaceKey)
+ continue
+ }
+
+ if (!ts.isInterfaceDeclaration(statement) && !ts.isTypeAliasDeclaration(statement)) continue
+
+ const key = namespaceKey ? `${namespaceKey}.${statement.name.text}` : statement.name.text
+ declarations.set(key, {
+ node: statement,
+ namespaceKey,
+ key,
+ })
+ }
+ }
+
+ /**
+ * @param {import('typescript').ModuleDeclaration} declaration
+ * @param {string} namespaceKey
+ */
+ function visitModuleDeclaration (declaration, namespaceKey) {
+ const nextNamespaceKey = namespaceKey ? `${namespaceKey}.${declaration.name.text}` : declaration.name.text
+
+ if (!declaration.body) return
+
+ if (ts.isModuleBlock(declaration.body)) {
+ visitStatements(declaration.body.statements, nextNamespaceKey)
+ return
+ }
+
+ visitModuleDeclaration(
+ /** @type {import('typescript').ModuleDeclaration} */ (declaration.body),
+ nextNamespaceKey
+ )
+ }
+
+ visitStatements(sourceFile.statements, '')
+
+ return declarations
+}
+
+/**
+ * @param {Map<string, DeclarationEntry>} declarations
+ * @param {import('typescript').EntityName} identifier
+ * @param {string} namespaceKey
+ * @returns {DeclarationEntry | undefined}
+ */
+function resolveDeclaration (declarations, identifier, namespaceKey) {
+ const typeName = getEntityNameText(identifier)
+ let currentNamespaceKey = namespaceKey
+
+ while (true) {
+ const key = currentNamespaceKey ? `${currentNamespaceKey}.${typeName}` : typeName
+ const declaration = declarations.get(key)
+ if (declaration) {
+ return declaration
+ }
+
+ if (!currentNamespaceKey) {
+ return undefined
+ }
+
+ const lastSeparatorIndex = currentNamespaceKey.lastIndexOf('.')
+ currentNamespaceKey = lastSeparatorIndex === -1
+ ? ''
+ : currentNamespaceKey.slice(0, lastSeparatorIndex)
+ }
+}
+
+/**
+ * @param {import('typescript').PropertyName} propertyName
+ * @returns {string | undefined}
+ */
+function getPropertyName (propertyName) {
+ if (ts.isIdentifier(propertyName) || ts.isStringLiteral(propertyName)) {
+ return propertyName.text
+ }
+}
+
+/**
+ * @param {import('typescript').Node} node
+ * @returns {Set<string>}
+ */
+function getEnvTagNames (node) {
+ const { envTagNamesCache } = currentInspectionState
+ const cachedNames = envTagNamesCache.get(node)
+ if (cachedNames) return cachedNames
+
+ const envTagNames = new Set()
+ for (const tag of ts.getJSDocTags(node)) {
+ if (tag.tagName.text !== 'env' || typeof tag.comment !== 'string') continue
+
+ for (const match of tag.comment.matchAll(/\b(?:DD|OTEL)_[A-Z0-9_]+\b/g)) {
+ envTagNames.add(match[0])
+ }
+ }
+
+ envTagNamesCache.set(node, envTagNames)
+ return envTagNames
+}
+
+/**
+ * @param {import('typescript').InterfaceDeclaration} declaration
+ * @param {string} propertyName
+ * @returns {import('typescript').PropertySignature | undefined}
+ */
+function getInterfaceProperty (declaration, propertyName) {
+ const { interfacePropertiesCache } = currentInspectionState
+ let properties = interfacePropertiesCache.get(declaration)
+
+ if (!properties) {
+ properties = new Map()
+
+ for (const member of declaration.members) {
+ if (!ts.isPropertySignature(member) || !member.type) continue
+
+ const memberName = getPropertyName(member.name)
+ if (memberName) {
+ properties.set(memberName, member)
+ }
+ }
+
+ interfacePropertiesCache.set(declaration, properties)
+ }
+
+ return properties.get(propertyName)
+}
+
+/**
+ * @param {string} fullPath
+ * @param {Set<string>} envTagNames
+ * @returns {boolean}
+ */
+function hasSupportedDirectEnvTag (fullPath, envTagNames) {
+ const { primaryEnvTargets, knownAliasEnvNames } = currentInspectionState
+
+ for (const envName of envTagNames) {
+ const targets = primaryEnvTargets.get(envName)
+ if (targets?.has(fullPath) || (!targets && !knownAliasEnvNames.has(envName))) {
+ return true
+ }
+ }
+
+ return false
+}
+
+/**
+ * @param {readonly import('typescript').TypeElement[]} members
+ * @param {string} namespaceKey
+ * @param {string} pathPrefix
+ * @returns {TypeInspectionResult}
+ */
+function inspectMembers (members, namespaceKey, pathPrefix) {
+ const result = createInspectionResult({ hasObjectBranch: true })
+
+ for (const member of members) {
+ if (!ts.isPropertySignature(member) || !member.type) continue
+
+ const propertyName = getPropertyName(member.name)
+ if (!propertyName) continue
+
+ const propertyResult = inspectProperty(
+ member,
+ namespaceKey,
+ pathPrefix ? `${pathPrefix}.${propertyName}` : propertyName
+ )
+ result.hasEnvDescendant ||= propertyResult.hasEnvDescendant
+ }
+
+ return result
+}
+
+/**
+ * @param {import('typescript').PropertySignature} property
+ * @param {string} namespaceKey
+ * @param {string} fullPath
+ * @returns {TypeInspectionResult}
+ */
+function inspectProperty (property, namespaceKey, fullPath) {
+ if (UNSUPPORTED_CONFIGURATION_ROOTS.has(fullPath.split('.', 1)[0])) {
+ return createInspectionResult()
+ }
+
+ const result = inspectTypeNode(property.type, namespaceKey, fullPath)
+ const envTagNames = getEnvTagNames(property)
+ const isLeafConfiguration = !result.hasObjectBranch
+ const isBooleanAlias =
+ result.hasBooleanBranch &&
+ result.hasObjectBranch &&
+ result.hasEnvDescendant
+ const hasSupportedOwnEnvTag = hasSupportedDirectEnvTag(fullPath, envTagNames)
+
+ if (hasSupportedOwnEnvTag || isLeafConfiguration || isBooleanAlias) {
+ currentInspectionState.names.add(fullPath)
+ }
+
+ result.hasEnvDescendant ||= hasSupportedOwnEnvTag
+
+ return result
+}
+
+/**
+ * @param {DeclarationEntry} declaration
+ * @param {string} fullPath
+ * @returns {TypeInspectionResult}
+ */
+function inspectDeclaration (declaration, fullPath) {
+ const state = currentInspectionState
+
+ if (state.visitedDeclarations.has(declaration.key)) {
+ return createInspectionResult({ hasObjectBranch: true })
+ }
+
+ state.visitedDeclarations.add(declaration.key)
+
+ try {
+ return ts.isInterfaceDeclaration(declaration.node)
+ ? inspectMembers(declaration.node.members, declaration.namespaceKey, fullPath)
+ : inspectTypeNode(declaration.node.type, declaration.namespaceKey, fullPath)
+ } finally {
+ state.visitedDeclarations.delete(declaration.key)
+ }
+}
+
+/**
+ * @param {import('typescript').TypeNode | undefined} typeNode
+ * @param {string} namespaceKey
+ * @param {string} fullPath
+ * @returns {TypeInspectionResult}
+ */
+function inspectTypeNode (typeNode, namespaceKey, fullPath) {
+ const { declarations } = currentInspectionState
+
+ if (!typeNode) {
+ return createInspectionResult()
+ }
+
+ if (ts.isParenthesizedTypeNode(typeNode)) {
+ return inspectTypeNode(typeNode.type, namespaceKey, fullPath)
+ }
+
+ if (typeNode.kind === ts.SyntaxKind.BooleanKeyword) {
+ return createInspectionResult({ hasBooleanBranch: true })
+ }
+
+ if (ts.isTypeLiteralNode(typeNode)) {
+ return inspectMembers(typeNode.members, namespaceKey, fullPath)
+ }
+
+ if (ts.isUnionTypeNode(typeNode) || ts.isIntersectionTypeNode(typeNode)) {
+ const result = createInspectionResult()
+
+ for (const part of typeNode.types) {
+ const partResult = inspectTypeNode(part, namespaceKey, fullPath)
+ result.hasEnvDescendant ||= partResult.hasEnvDescendant
+ result.hasBooleanBranch ||= partResult.hasBooleanBranch
+ result.hasObjectBranch ||= partResult.hasObjectBranch
+ }
+
+ return result
+ }
+
+ if (ts.isTypeReferenceNode(typeNode)) {
+ const declaration = resolveDeclaration(declarations, typeNode.typeName, namespaceKey)
+ return declaration ? inspectDeclaration(declaration, fullPath) : createInspectionResult()
+ }
+
+ if (
+ ts.isIndexedAccessTypeNode(typeNode) &&
+ ts.isLiteralTypeNode(typeNode.indexType) &&
+ ts.isStringLiteral(typeNode.indexType.literal) &&
+ ts.isTypeReferenceNode(typeNode.objectType)
+ ) {
+ const declaration = resolveDeclaration(declarations, typeNode.objectType.typeName, namespaceKey)
+
+ if (!declaration || !ts.isInterfaceDeclaration(declaration.node)) {
+ return createInspectionResult()
+ }
+
+ const property = getInterfaceProperty(declaration.node, typeNode.indexType.literal.text)
+ return property ? inspectProperty(property, declaration.namespaceKey, fullPath) : createInspectionResult()
+ }
+
+ return createInspectionResult()
+}
+
+/**
+ * @param {string} filePath
+ * @param {SupportedConfigurationInfo} supportedConfigurationInfo
+ * @returns {Set<string>}
+ */
+function getIndexDtsConfigurationNames (filePath, supportedConfigurationInfo) {
+ const sourceFile = ts.createSourceFile(
+ filePath,
+ fs.readFileSync(filePath, 'utf8'),
+ ts.ScriptTarget.Latest,
+ true,
+ ts.ScriptKind.TS
+ )
+ const declarations = getDeclarationRegistry(sourceFile)
+ const tracerOptions = declarations.get('tracer.TracerOptions')
+
+ if (!tracerOptions || !ts.isInterfaceDeclaration(tracerOptions.node)) {
+ throw new Error('Could not resolve tracer.TracerOptions.')
+ }
+
+ const names = new Set()
+ currentInspectionState = {
+ declarations,
+ primaryEnvTargets: supportedConfigurationInfo.primaryEnvTargets,
+ knownAliasEnvNames: supportedConfigurationInfo.knownAliasEnvNames,
+ names,
+ visitedDeclarations: new Set(),
+ envTagNamesCache: new WeakMap(),
+ interfacePropertiesCache: new WeakMap(),
+ }
+
+ inspectMembers(tracerOptions.node.members, tracerOptions.namespaceKey, '')
+
+ for (const ignoredConfigurationName of IGNORED_CONFIGURATION_NAMES) {
+ names.delete(ignoredConfigurationName)
+ }
+
+ return names
+}
+
+/**
+ * @param {import('eslint').Rule.RuleContext} context
+ * @param {import('estree').Program} node
+ * @param {Set<string>} sourceNames
+ * @param {Set<string>} targetNames
+ * @param {string} messageId
+ * @returns {void}
+ */
+function reportMissingConfigurations (context, node, sourceNames, targetNames, messageId) {
+ const missing = []
+
+ for (const name of sourceNames) {
+ if (!targetNames.has(name)) {
+ missing.push(name)
+ }
+ }
+
+ for (const configurationName of missing.sort()) {
+ context.report({
+ node,
+ messageId,
+ data: { configurationName },
+ })
+ }
+}
+
+/** @type {import('eslint').Rule.RuleModule} */
+export default {
+ meta: {
+ type: 'problem',
+ docs: {
+ description: 'Ensure supported configuration names stay in sync with index.d.ts',
+ },
+ schema: [{
+ type: 'object',
+ properties: {
+ indexDtsPath: {
+ type: 'string',
+ },
+ supportedConfigurationsPath: {
+ type: 'string',
+ },
+ },
+ additionalProperties: false,
+ }],
+ messages: {
+ configurationMissingInIndexDts:
+ "Configuration name '{{configurationName}}' exists in supported-configurations.json but not in index.d.ts.",
+ configurationMissingInSupportedConfigurations:
+ "Configuration name '{{configurationName}}' exists in index.d.ts but not in supported-configurations.json.",
+ readFailure:
+ 'Unable to compare supported configuration names: {{reason}}',
+ },
+ },
+ create (context) {
+ const options = context.options[0] || {}
+ const indexDtsPath = path.resolve(context.cwd, options.indexDtsPath || 'index.d.ts')
+ const supportedConfigurationsPath = path.resolve(
+ context.cwd,
+ options.supportedConfigurationsPath || 'packages/dd-trace/src/config/supported-configurations.json'
+ )
+
+ return {
+ Program (node) {
+ let indexDtsNames
+ let supportedConfigurationInfo
+
+ try {
+ supportedConfigurationInfo = getSupportedConfigurationInfo(supportedConfigurationsPath)
+ indexDtsNames = getIndexDtsConfigurationNames(indexDtsPath, supportedConfigurationInfo)
+ } catch (error) {
+ context.report({
+ node,
+ messageId: 'readFailure',
+ data: {
+ reason: error instanceof Error ? error.message : String(error),
+ },
+ })
+ return
+ }
+
+ reportMissingConfigurations(
+ context,
+ node,
+ supportedConfigurationInfo.names,
+ indexDtsNames,
+ 'configurationMissingInIndexDts'
+ )
+ reportMissingConfigurations(
+ context,
+ node,
+ indexDtsNames,
+ supportedConfigurationInfo.names,
+ 'configurationMissingInSupportedConfigurations'
+ )
+ },
+ }
+ },
+}
diff --git a/eslint-rules/eslint-config-names-sync.test.mjs b/eslint-rules/eslint-config-names-sync.test.mjs
new file mode 100644
index 00000000000..46d0913fa0d
--- /dev/null
+++ b/eslint-rules/eslint-config-names-sync.test.mjs
@@ -0,0 +1,93 @@
+import path from 'node:path'
+
+import { RuleTester } from 'eslint'
+
+import rule from './eslint-config-names-sync.mjs'
+
+const ruleTester = new RuleTester({
+ languageOptions: {
+ ecmaVersion: 2022,
+ sourceType: 'script',
+ },
+})
+
+const fixturesDirectory = path.join(process.cwd(), 'eslint-rules/fixtures/config-names-sync')
+
+/**
+ * @param {string} fixtureName
+ * @returns {{ indexDtsPath: string, supportedConfigurationsPath: string }}
+ */
+function getFixtureOptions (fixtureName) {
+ const fixtureDirectory = path.join(fixturesDirectory, fixtureName)
+
+ return {
+ indexDtsPath: path.relative(process.cwd(), path.join(fixtureDirectory, 'index.d.ts')),
+ supportedConfigurationsPath: path.relative(
+ process.cwd(),
+ path.join(fixtureDirectory, 'supported-configurations.json')
+ ),
+ }
+}
+
+ruleTester.run('eslint-config-names-sync', rule, {
+ valid: [
+ {
+ filename: path.join(fixturesDirectory, 'valid', 'lint-anchor.js'),
+ code: '',
+ options: [getFixtureOptions('valid')],
+ },
+ {
+ filename: path.join(fixturesDirectory, 'trace-propagation-style-exception', 'lint-anchor.js'),
+ code: '',
+ options: [getFixtureOptions('trace-propagation-style-exception')],
+ },
+ {
+ filename: path.join(fixturesDirectory, 'internal-env-and-ignored-names', 'lint-anchor.js'),
+ code: '',
+ options: [getFixtureOptions('internal-env-and-ignored-names')],
+ },
+ ],
+ invalid: [
+ {
+ filename: path.join(fixturesDirectory, 'missing-in-index-dts', 'lint-anchor.js'),
+ code: '',
+ options: [getFixtureOptions('missing-in-index-dts')],
+ errors: [
+ {
+ messageId: 'configurationMissingInIndexDts',
+ data: {
+ configurationName: 'missingFromTypes',
+ },
+ },
+ {
+ messageId: 'configurationMissingInIndexDts',
+ data: {
+ configurationName: 'telemetry',
+ },
+ },
+ ],
+ },
+ {
+ filename: path.join(fixturesDirectory, 'missing-in-supported-configurations', 'lint-anchor.js'),
+ code: '',
+ options: [getFixtureOptions('missing-in-supported-configurations')],
+ errors: [{
+ messageId: 'configurationMissingInSupportedConfigurations',
+ data: {
+ configurationName: 'missingFromJson',
+ },
+ }],
+ },
+ {
+ filename: path.join(fixturesDirectory, 'missing-nested-leaf-in-supported-configurations', 'lint-anchor.js'),
+ code: '',
+ options: [getFixtureOptions('missing-nested-leaf-in-supported-configurations')],
+ errors: [{
+ messageId: 'configurationMissingInSupportedConfigurations',
+ data: {
+ configurationName: 'llmobs.agentlessEnabledasd',
+ },
+ }],
+ },
+ ],
+})
diff --git a/eslint-rules/fixtures/config-names-sync/internal-env-and-ignored-names/index.d.ts b/eslint-rules/fixtures/config-names-sync/internal-env-and-ignored-names/index.d.ts
new file mode 100644
index 00000000000..e46ff0b872c
--- /dev/null
+++ b/eslint-rules/fixtures/config-names-sync/internal-env-and-ignored-names/index.d.ts
@@ -0,0 +1,14 @@
+declare namespace tracer {
+ export interface TracerOptions {
+ /**
+ * @env DD_LLMOBS_ENABLED
+ * The environment variable listed above takes precedence over programmatic configuration.
+ */
+ llmobs?: {
+ /**
+ * @env DD_LLMOBS_ML_APP
+ */
+ mlApp?: string
+ }
+ }
+}
diff --git a/eslint-rules/fixtures/config-names-sync/internal-env-and-ignored-names/supported-configurations.json b/eslint-rules/fixtures/config-names-sync/internal-env-and-ignored-names/supported-configurations.json
new file mode 100644
index 00000000000..5e989725d9c
--- /dev/null
+++ b/eslint-rules/fixtures/config-names-sync/internal-env-and-ignored-names/supported-configurations.json
@@ -0,0 +1,23 @@
+{
+ "supportedConfigurations": {
+ "DD_LLMOBS_ENABLED": [
+ {
+ "internalPropertyName": "llmobs.enabled"
+ }
+ ],
+ "DD_LLMOBS_ML_APP": [
+ {
+ "configurationNames": [
+ "llmobs.mlApp"
+ ]
+ }
+ ],
+ "DD_TRACE_ENABLED": [
+ {
+ "configurationNames": [
+ "tracing"
+ ]
+ }
+ ]
+ }
+}
diff --git a/eslint-rules/fixtures/config-names-sync/missing-in-index-dts/index.d.ts b/eslint-rules/fixtures/config-names-sync/missing-in-index-dts/index.d.ts
new file mode 100644
index 00000000000..e6db5eb3358
--- /dev/null
+++ b/eslint-rules/fixtures/config-names-sync/missing-in-index-dts/index.d.ts
@@ -0,0 +1,20 @@
+declare namespace tracer {
+ export interface TracerOptions {
+ /**
+ * @env DD_SIMPLE
+ */
+ simple?: string
+
+ /**
+ * @env DD_TRACE_TELEMETRY_ENABLED
+ */
+ telemetry?: {
+ exporter?: {
+ /**
+ * @env DD_TELEMETRY_EXPORTER_URL
+ */
+ url?: string
+ }
+ }
+ }
+}
diff --git a/eslint-rules/fixtures/config-names-sync/missing-in-index-dts/supported-configurations.json b/eslint-rules/fixtures/config-names-sync/missing-in-index-dts/supported-configurations.json
new file mode 100644
index 00000000000..df3d4c17dd0
--- /dev/null
+++ b/eslint-rules/fixtures/config-names-sync/missing-in-index-dts/supported-configurations.json
@@ -0,0 +1,35 @@
+{
+ "supportedConfigurations": {
+ "DD_SIMPLE": [
+ {
+ "configurationNames": [
+ "simple"
+ ]
+ }
+ ],
+ "DD_MISSING_FROM_TYPES": [
+ {
+ "configurationNames": [
+ "missingFromTypes"
+ ]
+ }
+ ],
+ "DD_INSTRUMENTATION_TELEMETRY_ENABLED": [
+ {
+ "configurationNames": [
+ "telemetry"
+ ],
+ "aliases": [
+ "DD_TRACE_TELEMETRY_ENABLED"
+ ]
+ }
+ ],
+ "DD_TELEMETRY_EXPORTER_URL": [
+ {
+ "configurationNames": [
+ "telemetry.exporter.url"
+ ]
+ }
+ ]
+ }
+}
diff --git a/eslint-rules/fixtures/config-names-sync/missing-in-supported-configurations/index.d.ts b/eslint-rules/fixtures/config-names-sync/missing-in-supported-configurations/index.d.ts
new file mode 100644
index 00000000000..491702f278b
--- /dev/null
+++ b/eslint-rules/fixtures/config-names-sync/missing-in-supported-configurations/index.d.ts
@@ -0,0 +1,13 @@
+declare namespace tracer {
+ export interface TracerOptions {
+ /**
+ * @env DD_SIMPLE
+ */
+ simple?: string
+
+ /**
+ * @env DD_MISSING_FROM_JSON
+ */
+ missingFromJson?: boolean
+ }
+}
diff --git a/eslint-rules/fixtures/config-names-sync/missing-in-supported-configurations/supported-configurations.json b/eslint-rules/fixtures/config-names-sync/missing-in-supported-configurations/supported-configurations.json
new file mode 100644
index 00000000000..90553c533af
--- /dev/null
+++ b/eslint-rules/fixtures/config-names-sync/missing-in-supported-configurations/supported-configurations.json
@@ -0,0 +1,11 @@
+{
+ "supportedConfigurations": {
+ "DD_SIMPLE": [
+ {
+ "configurationNames": [
+ "simple"
+ ]
+ }
+ ]
+ }
+}
diff --git a/eslint-rules/fixtures/config-names-sync/missing-nested-leaf-in-supported-configurations/index.d.ts b/eslint-rules/fixtures/config-names-sync/missing-nested-leaf-in-supported-configurations/index.d.ts
new file mode 100644
index 00000000000..597e4028452
--- /dev/null
+++ b/eslint-rules/fixtures/config-names-sync/missing-nested-leaf-in-supported-configurations/index.d.ts
@@ -0,0 +1,16 @@
+declare namespace tracer {
+ export interface TracerOptions {
+ /**
+ * @env DD_LLMOBS_ENABLED
+ * The environment variable listed above takes precedence over programmatic configuration.
+ */
+ llmobs?: {
+ /**
+ * @env DD_LLMOBS_ML_APP
+ */
+ mlApp?: string
+
+ agentlessEnabledasd?: string
+ }
+ }
+}
diff --git a/eslint-rules/fixtures/config-names-sync/missing-nested-leaf-in-supported-configurations/supported-configurations.json b/eslint-rules/fixtures/config-names-sync/missing-nested-leaf-in-supported-configurations/supported-configurations.json
new file mode 100644
index 00000000000..8cbeba651ac
--- /dev/null
+++ b/eslint-rules/fixtures/config-names-sync/missing-nested-leaf-in-supported-configurations/supported-configurations.json
@@ -0,0 +1,16 @@
+{
+ "supportedConfigurations": {
+ "DD_LLMOBS_ENABLED": [
+ {
+ "internalPropertyName": "llmobs.enabled"
+ }
+ ],
+ "DD_LLMOBS_ML_APP": [
+ {
+ "configurationNames": [
+ "llmobs.mlApp"
+ ]
+ }
+ ]
+ }
+}
diff --git a/eslint-rules/fixtures/config-names-sync/trace-propagation-style-exception/index.d.ts b/eslint-rules/fixtures/config-names-sync/trace-propagation-style-exception/index.d.ts
new file mode 100644
index 00000000000..87aa771ca5d
--- /dev/null
+++ b/eslint-rules/fixtures/config-names-sync/trace-propagation-style-exception/index.d.ts
@@ -0,0 +1,20 @@
+declare namespace tracer {
+ export interface PropagationStyle {
+ /**
+ * @env DD_TRACE_PROPAGATION_STYLE, DD_TRACE_PROPAGATION_STYLE_INJECT
+ */
+ inject: string[]
+
+ /**
+ * @env DD_TRACE_PROPAGATION_STYLE, DD_TRACE_PROPAGATION_STYLE_EXTRACT
+ */
+ extract: string[]
+ }
+
+ export interface TracerOptions {
+ /**
+ * @env DD_TRACE_PROPAGATION_STYLE, DD_TRACE_PROPAGATION_STYLE_INJECT, DD_TRACE_PROPAGATION_STYLE_EXTRACT
+ */
+ tracePropagationStyle?: string[] | PropagationStyle
+ }
+}
diff --git a/eslint-rules/fixtures/config-names-sync/trace-propagation-style-exception/supported-configurations.json b/eslint-rules/fixtures/config-names-sync/trace-propagation-style-exception/supported-configurations.json
new file mode 100644
index 00000000000..5816e540058
--- /dev/null
+++ b/eslint-rules/fixtures/config-names-sync/trace-propagation-style-exception/supported-configurations.json
@@ -0,0 +1,23 @@
+{
+ "supportedConfigurations": {
+ "DD_TRACE_PROPAGATION_STYLE": [
+ {
+ "type": "array"
+ }
+ ],
+ "DD_TRACE_PROPAGATION_STYLE_INJECT": [
+ {
+ "configurationNames": [
+ "tracePropagationStyle.inject"
+ ]
+ }
+ ],
+ "DD_TRACE_PROPAGATION_STYLE_EXTRACT": [
+ {
+ "configurationNames": [
+ "tracePropagationStyle.extract"
+ ]
+ }
+ ]
+ }
+}
diff --git a/eslint-rules/fixtures/config-names-sync/valid/index.d.ts b/eslint-rules/fixtures/config-names-sync/valid/index.d.ts
new file mode 100644
index 00000000000..1a3b57c3edf
--- /dev/null
+++ b/eslint-rules/fixtures/config-names-sync/valid/index.d.ts
@@ -0,0 +1,35 @@
+declare namespace tracer {
+ export interface TracerOptions {
+ /**
+ * @env DD_SIMPLE
+ */
+ simple?: string
+
+ objectOnly?: {
+ /**
+ * @env DD_OBJECT_ONLY_ENABLED
+ */
+ enabled?: boolean
+ }
+
+ appsec?: boolean | {
+ /**
+ * @env DD_APPSEC_ENABLED
+ */
+ enabled?: boolean
+ }
+
+ experimental?: {
+ appsec?: boolean | TracerOptions['appsec']
+
+ iast?: boolean | IastOptions
+ }
+ }
+
+ interface IastOptions {
+ /**
+ * @env DD_IAST_ENABLED
+ */
+ enabled?: boolean
+ }
+}
diff --git a/eslint-rules/fixtures/config-names-sync/valid/supported-configurations.json b/eslint-rules/fixtures/config-names-sync/valid/supported-configurations.json
new file mode 100644
index 00000000000..17238484371
--- /dev/null
+++ b/eslint-rules/fixtures/config-names-sync/valid/supported-configurations.json
@@ -0,0 +1,36 @@
+{
+ "supportedConfigurations": {
+ "DD_SIMPLE": [
+ {
+ "configurationNames": [
+ "simple"
+ ]
+ }
+ ],
+ "DD_OBJECT_ONLY_ENABLED": [
+ {
+ "configurationNames": [
+ "objectOnly.enabled"
+ ]
+ }
+ ],
+ "DD_APPSEC_ENABLED": [
+ {
+ "configurationNames": [
+ "appsec.enabled",
+ "appsec",
+ "experimental.appsec.enabled",
+ "experimental.appsec"
+ ]
+ }
+ ],
+ "DD_IAST_ENABLED": [
+ {
+ "configurationNames": [
+ "experimental.iast.enabled",
+ "experimental.iast"
+ ]
+ }
+ ]
+ }
+}
diff --git a/eslint.config.mjs b/eslint.config.mjs
index 3a39c81ebaa..bb79b34eafe 100644
--- a/eslint.config.mjs
+++ b/eslint.config.mjs
@@ -14,11 +14,12 @@ import eslintPluginPromise from 'eslint-plugin-promise'
import eslintPluginUnicorn from 'eslint-plugin-unicorn'
import globals from 'globals'
-import eslintProcessEnv from './eslint-rules/eslint-process-env.mjs'
+import eslintConfigNamesSync from './eslint-rules/eslint-config-names-sync.mjs'
import eslintEnvAliases from './eslint-rules/eslint-env-aliases.mjs'
-import eslintSafeTypeOfObject from './eslint-rules/eslint-safe-typeof-object.mjs'
import eslintLogPrintfStyle from './eslint-rules/eslint-log-printf-style.mjs'
+import eslintProcessEnv from './eslint-rules/eslint-process-env.mjs'
import eslintRequireExportExists from './eslint-rules/eslint-require-export-exists.mjs'
+import eslintSafeTypeOfObject from './eslint-rules/eslint-safe-typeof-object.mjs'
const { dependencies } = JSON.parse(readFileSync('./vendor/package.json', 'utf8'))
@@ -269,6 +270,7 @@ export default [
}],
'import/no-useless-path-segments': 'error',
'import/no-webpack-loader-syntax': 'error',
+ 'jsdoc/check-param-names': ['error', { disableMissingParamChecks: true }],
'jsdoc/check-tag-names': ['error', { definedTags: ['datadog'] }],
// TODO: Enable the rules that we want to use.
// no-defaults: This should be activated, since the defaults will not be picked up in a description.
@@ -375,6 +377,7 @@ export default [
rules: {
'eslint-process-env': eslintProcessEnv,
'eslint-env-aliases': eslintEnvAliases,
+ 'eslint-config-names-sync': eslintConfigNamesSync,
'eslint-safe-typeof-object': eslintSafeTypeOfObject,
'eslint-log-printf-style': eslintLogPrintfStyle,
'eslint-require-export-exists': eslintRequireExportExists,
@@ -514,6 +517,15 @@ export default [
'unicorn/switch-case-braces': 'off', // Questionable benefit
},
},
+ {
+ name: 'dd-trace/config-sync',
+ files: [
+ 'eslint.config.mjs',
+ ],
+ rules: {
+ 'eslint-rules/eslint-config-names-sync': 'error',
+ },
+ },
{
name: 'dd-trace/scripts',
files: [
diff --git a/index.d.ts b/index.d.ts
index 3d567238fd3..099a8afdddd 100644
--- a/index.d.ts
+++ b/index.d.ts
@@ -130,6 +130,11 @@ interface Tracer extends opentracing.Tracer {
appsec: tracer.Appsec;
+ /**
+ * Profiling API for attaching custom labels to profiler samples.
+ */
+ profiling: tracer.Profiling;
+
TracerProvider: tracer.opentelemetry.TracerProvider;
dogstatsd: tracer.DogStatsD;
@@ -1570,6 +1575,35 @@ declare namespace tracer {
trackUserLoginFailure(login: string, metadata?: any): void;
}
+ export interface Profiling {
+ /**
+ * Declares the set of custom label keys that will be used with
+ * {@link runWithLabels}. This is used for profile upload metadata
+ * (so the Datadog UI knows which keys to index for filtering) and
+ * for pprof serialization optimization.
+ *
+ * @param keys Custom label key names.
+ */
+ setCustomLabelKeys(keys: Iterable<string>): void;
+
+ /**
+ * Runs a function with custom profiling labels attached to all wall profiler
+ * samples taken during its execution. Labels are key-value pairs that appear
+ * in the pprof output and can be used to filter flame graphs in the Datadog UI.
+ *
+ * Requires AsyncContextFrame (ACF) to be enabled. Supports nesting: inner
+ * calls merge labels with outer calls, with inner values taking precedence.
+ *
+ * When profiling is not enabled or ACF is not active, the function is still
+ * called but labels are silently dropped.
+ *
+ * @param labels Custom labels to attach to profiler samples.
+ * @param fn Function to execute with the labels.
+ * @returns The return value of fn.
+ */
+ runWithLabels<T>(labels: Record<string, string>, fn: () => T): T;
+ }
+
export interface Appsec {
/**
* Links a successful login event to the current trace. Will link the passed user to the current trace with Appsec.setUser() internally.
diff --git a/integration-tests/aiguard/index.spec.js b/integration-tests/aiguard/index.spec.js
index a9e325f55a9..61e5d613dbc 100644
--- a/integration-tests/aiguard/index.spec.js
+++ b/integration-tests/aiguard/index.spec.js
@@ -39,8 +39,8 @@ describe('AIGuard SDK integration tests', () => {
env: {
DD_SERVICE: 'ai_guard_integration_test',
DD_ENV: 'test',
- DD_TRACING_ENABLED: 'true',
- DD_TRACE_AGENT_PORT: agent.port,
+ DD_TRACE_ENABLED: 'true',
+ DD_TRACE_AGENT_PORT: String(agent.port),
DD_AI_GUARD_ENABLED: 'true',
DD_AI_GUARD_BLOCK: 'true',
DD_AI_GUARD_ENDPOINT: `http://localhost:${api.address().port}`,
diff --git a/integration-tests/ci-visibility/dynamic-instrumentation/fake-timers-test-hit-breakpoint.js b/integration-tests/ci-visibility/dynamic-instrumentation/fake-timers-test-hit-breakpoint.js
new file mode 100644
index 00000000000..f924cea0f5f
--- /dev/null
+++ b/integration-tests/ci-visibility/dynamic-instrumentation/fake-timers-test-hit-breakpoint.js
@@ -0,0 +1,22 @@
+'use strict'
+
+const assert = require('assert')
+const sinon = require('sinon')
+
+const sum = require('./dependency')
+
+describe('dynamic-instrumentation-fake-timers', () => {
+ let clock
+
+ beforeEach(function () {
+ clock = sinon.useFakeTimers()
+ })
+
+ afterEach(function () {
+ clock.restore()
+ })
+
+ it('retries with DI and fake timers', function () {
+ assert.strictEqual(sum(11, 3), 14)
+ })
+})
diff --git a/integration-tests/ci-visibility/features-di-fake-timers/support/steps.js b/integration-tests/ci-visibility/features-di-fake-timers/support/steps.js
new file mode 100644
index 00000000000..6f9754b0a19
--- /dev/null
+++ b/integration-tests/ci-visibility/features-di-fake-timers/support/steps.js
@@ -0,0 +1,25 @@
+'use strict'
+
+const assert = require('assert')
+const { When, Then, BeforeAll, AfterAll } = require('@cucumber/cucumber')
+const sinon = require('sinon')
+const sum = require('../../features-di/support/sum')
+
+let clock
+
+BeforeAll(function () {
+ clock = sinon.useFakeTimers()
+})
+
+AfterAll(function () {
+ clock.restore()
+})
+
+When('the greeter says hello', function () {
+ this.whatIHeard = 'hello'
+})
+
+Then('I should have heard {string}', function (expectedResponse) {
+ sum(11, 3)
+ assert.equal(this.whatIHeard, expectedResponse)
+})
diff --git a/integration-tests/ci-visibility/features-di-fake-timers/test-hit-breakpoint.feature b/integration-tests/ci-visibility/features-di-fake-timers/test-hit-breakpoint.feature
new file mode 100644
index 00000000000..7ec60251966
--- /dev/null
+++ b/integration-tests/ci-visibility/features-di-fake-timers/test-hit-breakpoint.feature
@@ -0,0 +1,6 @@
+
+Feature: Greeting with fake timers
+
+ Scenario: Say hello with fake timers
+ When the greeter says hello
+ Then I should have heard "hello"
diff --git a/integration-tests/ci-visibility/jest-flaky/fake-timers-flaky-fails.js b/integration-tests/ci-visibility/jest-flaky/fake-timers-flaky-fails.js
new file mode 100644
index 00000000000..a860280abd6
--- /dev/null
+++ b/integration-tests/ci-visibility/jest-flaky/fake-timers-flaky-fails.js
@@ -0,0 +1,26 @@
+'use strict'
+
+const assert = require('assert')
+
+describe('test-fake-timers', () => {
+ beforeAll(() => {
+ jest.useFakeTimers()
+ })
+
+ afterEach(() => {
+ // This pattern (from @testing-library/react's enableFakeTimers helper)
+ // clears all pending timers after each test but BEFORE test_done fires.
+ // If dd-trace scheduled a setTimeout in test_done, clearAllTimers
+ // destroys it, orphaning the promise and deadlocking the process.
+ jest.runOnlyPendingTimers()
+ jest.clearAllTimers()
+ })
+
+ afterAll(() => {
+ jest.useRealTimers()
+ })
+
+ it('can retry failed tests with fake timers', () => {
+ assert.deepStrictEqual(1, 2)
+ })
+})
diff --git a/integration-tests/ci-visibility/subproject/cypress.config.js b/integration-tests/ci-visibility/subproject/cypress.config.js
index 7d9c2df8db4..3544598d6c0 100644
--- a/integration-tests/ci-visibility/subproject/cypress.config.js
+++ b/integration-tests/ci-visibility/subproject/cypress.config.js
@@ -1,13 +1,12 @@
'use strict'
-module.exports = {
+const { defineConfig } = require('cypress')
+
+module.exports = defineConfig({
defaultCommandTimeout: 1000,
e2e: {
- setupNodeEvents (on, config) {
- return require('dd-trace/ci/cypress/plugin')(on, config)
- },
specPattern: process.env.SPEC_PATTERN || 'cypress/e2e/**/*.cy.js',
},
video: false,
screenshotOnRunFailure: false,
-}
+})
diff --git a/integration-tests/ci-visibility/vitest-tests/fake-timers-di.mjs b/integration-tests/ci-visibility/vitest-tests/fake-timers-di.mjs
new file mode 100644
index 00000000000..c5319a4f846
--- /dev/null
+++ b/integration-tests/ci-visibility/vitest-tests/fake-timers-di.mjs
@@ -0,0 +1,18 @@
+import { describe, test, expect, beforeAll, afterAll, vi } from 'vitest'
+import { sum } from './bad-sum'
+
+describe('dynamic instrumentation fake timers', () => {
+ // Install fake timers in beforeAll — they persist through test finish hooks,
+ // which is the pattern that triggers the deadlock with DI's setTimeout.
+ beforeAll(() => {
+ vi.useFakeTimers()
+ })
+
+ afterAll(() => {
+ vi.useRealTimers()
+ })
+
+ test('can sum with fake timers', () => {
+ expect(sum(11, 2)).to.equal(13)
+ })
+})
diff --git a/integration-tests/cucumber/cucumber.spec.js b/integration-tests/cucumber/cucumber.spec.js
index 6058d512c99..5fbd5b6bc21 100644
--- a/integration-tests/cucumber/cucumber.spec.js
+++ b/integration-tests/cucumber/cucumber.spec.js
@@ -97,7 +97,7 @@ describe(`cucumber@${version} commonJS`, () => {
let cwd, receiver, childProcess, testOutput
- useSandbox([`@cucumber/cucumber@${version}`, 'assert', 'nyc'], true)
+ useSandbox([`@cucumber/cucumber@${version}`, 'assert', 'nyc', 'sinon'], true)
before(function () {
cwd = sandboxCwd()
@@ -2160,6 +2160,36 @@ describe(`cucumber@${version} commonJS`, () => {
})
})
+ onlyLatestIt('does not hang when tests use fake timers and Failed Test Replay is enabled', async () => {
+ receiver.setSettings({
+ flaky_test_retries_enabled: true,
+ di_enabled: true,
+ })
+
+ const eventsPromise = receiver
+ .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), (payloads) => {
+ const events = payloads.flatMap(({ payload }) => payload.events)
+ const tests = events.filter(event => event.type === 'test').map(event => event.content)
+ assert.strictEqual(tests.length, 2)
+ const retriedTests = tests.filter(
+ t => t.meta[TEST_RETRY_REASON] === TEST_RETRY_REASON_TYPES.atr
+ )
+ assert.strictEqual(retriedTests.length, 1)
+ })
+
+ const featurePath = 'ci-visibility/features-di-fake-timers/test-hit-breakpoint.feature'
+ childProcess = exec(
+ `./node_modules/.bin/cucumber-js ${featurePath} --retry 1`,
+ {
+ cwd,
+ env: envVars,
+ }
+ )
+
+ const [[exitCode]] = await Promise.all([once(childProcess, 'exit'), eventsPromise])
+ assert.strictEqual(exitCode, 1)
+ })
+
onlyLatestIt('does not crash if the retry does not hit the breakpoint', (done) => {
receiver.setSettings({
flaky_test_retries_enabled: true,
@@ -2596,6 +2626,54 @@ describe(`cucumber@${version} commonJS`, () => {
})
})
+ it('does not tag known attempt to fix tests as new', async () => {
+ receiver.setKnownTests({
+ cucumber: {
+ 'ci-visibility/features-test-management/attempt-to-fix.feature': [
+ 'Say attempt to fix',
+ ],
+ },
+ })
+ receiver.setSettings({
+ test_management: { enabled: true, attempt_to_fix_retries: 2 },
+ early_flake_detection: {
+ enabled: true,
+ slow_test_retries: { '5s': 2 },
+ faulty_session_threshold: 100,
+ },
+ known_tests_enabled: true,
+ })
+
+ const eventsPromise = receiver
+ .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), (payloads) => {
+ const events = payloads.flatMap(({ payload }) => payload.events)
+ const tests = events.filter(event => event.type === 'test').map(event => event.content)
+ const atfTests = tests.filter(
+ t => t.meta[TEST_MANAGEMENT_IS_ATTEMPT_TO_FIX] === 'true'
+ )
+ assert.ok(atfTests.length > 0)
+ for (const test of atfTests) {
+ assert.ok(
+ !(TEST_IS_NEW in test.meta),
+ 'ATF test that is in known tests should not be tagged as new'
+ )
+ }
+ })
+
+ childProcess = exec(
+ './node_modules/.bin/cucumber-js ci-visibility/features-test-management/attempt-to-fix.feature',
+ {
+ cwd,
+ env: getCiVisAgentlessConfig(receiver.port),
+ }
+ )
+
+ await Promise.all([
+ once(childProcess, 'exit'),
+ eventsPromise,
+ ])
+ })
+
it('does not fail retry if a test is quarantined', (done) => {
receiver.setSettings({ test_management: { enabled: true, attempt_to_fix_retries: 3 } })
receiver.setTestManagementTests({
diff --git a/integration-tests/cypress-auto-esm.config.mjs b/integration-tests/cypress-auto-esm.config.mjs
new file mode 100644
index 00000000000..ad0f92f07e8
--- /dev/null
+++ b/integration-tests/cypress-auto-esm.config.mjs
@@ -0,0 +1,11 @@
+import { defineConfig } from 'cypress'
+
+export default defineConfig({
+ defaultCommandTimeout: 1000,
+ e2e: {
+ testIsolation: process.env.CYPRESS_TEST_ISOLATION !== 'false',
+ specPattern: process.env.SPEC_PATTERN || 'cypress/e2e/**/*.cy.js',
+ },
+ video: false,
+ screenshotOnRunFailure: false,
+})
diff --git a/integration-tests/cypress-custom-after-hooks.config.js b/integration-tests/cypress-custom-after-hooks.config.js
new file mode 100644
index 00000000000..78d271b7b9f
--- /dev/null
+++ b/integration-tests/cypress-custom-after-hooks.config.js
@@ -0,0 +1,36 @@
+'use strict'
+
+const { defineConfig } = require('cypress')
+
+module.exports = defineConfig({
+ defaultCommandTimeout: 1000,
+ e2e: {
+ setupNodeEvents (on, config) {
+ on('after:spec', (spec, results) => {
+ // eslint-disable-next-line no-console
+ console.log('[custom:after:spec]', spec.relative, results.stats.passes)
+ return new Promise((resolve) => {
+ setTimeout(() => {
+ // eslint-disable-next-line no-console
+ console.log('[custom:after:spec:resolved]')
+ resolve()
+ }, 50)
+ })
+ })
+ on('after:run', (results) => {
+ // eslint-disable-next-line no-console
+ console.log('[custom:after:run]', results.totalPassed)
+ return new Promise((resolve) => {
+ setTimeout(() => {
+ // eslint-disable-next-line no-console
+ console.log('[custom:after:run:resolved]')
+ resolve()
+ }, 50)
+ })
+ })
+ },
+ specPattern: process.env.SPEC_PATTERN || 'cypress/e2e/**/*.cy.js',
+ },
+ video: false,
+ screenshotOnRunFailure: false,
+})
diff --git a/integration-tests/cypress-custom-after-hooks.config.mjs b/integration-tests/cypress-custom-after-hooks.config.mjs
new file mode 100644
index 00000000000..a4cb02b6e3c
--- /dev/null
+++ b/integration-tests/cypress-custom-after-hooks.config.mjs
@@ -0,0 +1,34 @@
+import { defineConfig } from 'cypress'
+
+export default defineConfig({
+ defaultCommandTimeout: 1000,
+ e2e: {
+ setupNodeEvents (on, config) {
+ on('after:spec', (spec, results) => {
+ // eslint-disable-next-line no-console
+ console.log('[custom:after:spec]', spec.relative, results.stats.passes)
+ return new Promise((resolve) => {
+ setTimeout(() => {
+ // eslint-disable-next-line no-console
+ console.log('[custom:after:spec:resolved]')
+ resolve()
+ }, 50)
+ })
+ })
+ on('after:run', (results) => {
+ // eslint-disable-next-line no-console
+ console.log('[custom:after:run]', results.totalPassed)
+ return new Promise((resolve) => {
+ setTimeout(() => {
+ // eslint-disable-next-line no-console
+ console.log('[custom:after:run:resolved]')
+ resolve()
+ }, 50)
+ })
+ })
+ },
+ specPattern: process.env.SPEC_PATTERN || 'cypress/e2e/**/*.cy.js',
+ },
+ video: false,
+ screenshotOnRunFailure: false,
+})
diff --git a/integration-tests/cypress-double-run.js b/integration-tests/cypress-double-run.js
new file mode 100644
index 00000000000..acd5ace5ea3
--- /dev/null
+++ b/integration-tests/cypress-double-run.js
@@ -0,0 +1,35 @@
+'use strict'
+
+// Tests that cypress.run() works twice in the same process (resetRunState).
+// Instrumentation works via the default cypress.config.js in the project
+// (which uses defineConfig), NOT via the inline config below — Cypress
+// does not call setupNodeEvents from inline config objects.
+const cypress = require('cypress')
+
+const runOptions = {
+ config: {
+ defaultCommandTimeout: 1000,
+ e2e: {
+ supportFile: 'cypress/support/e2e.js',
+ testIsolation: process.env.CYPRESS_TEST_ISOLATION !== 'false',
+ specPattern: process.env.SPEC_PATTERN || 'cypress/e2e/**/*.cy.js',
+ },
+ video: false,
+ screenshotOnRunFailure: false,
+ },
+}
+
+async function runCypressTwice () {
+ for (let runNumber = 0; runNumber < 2; runNumber++) {
+ const results = await cypress.run(runOptions)
+ if (results.totalFailed !== 0) {
+ process.exit(1)
+ }
+ }
+}
+
+runCypressTwice().catch((error) => {
+ // eslint-disable-next-line no-console
+ console.error(error)
+ process.exit(1)
+})
diff --git a/integration-tests/cypress-double-run.mjs b/integration-tests/cypress-double-run.mjs
new file mode 100644
index 00000000000..a4e6d2a87d3
--- /dev/null
+++ b/integration-tests/cypress-double-run.mjs
@@ -0,0 +1,25 @@
+// Tests that cypress.run() works twice in the same process (resetRunState).
+// Instrumentation works via the default cypress.config.js in the project
+// (which uses defineConfig), NOT via the inline config below — Cypress
+// does not call setupNodeEvents from inline config objects.
+import cypress from 'cypress'
+
+const runOptions = {
+ config: {
+ defaultCommandTimeout: 1000,
+ e2e: {
+ supportFile: 'cypress/support/e2e.js',
+ testIsolation: process.env.CYPRESS_TEST_ISOLATION !== 'false',
+ specPattern: process.env.SPEC_PATTERN || 'cypress/e2e/**/*.cy.js',
+ },
+ video: false,
+ screenshotOnRunFailure: false,
+ },
+}
+
+for (let runNumber = 0; runNumber < 2; runNumber++) {
+ const results = await cypress.run(runOptions)
+ if (results.totalFailed !== 0) {
+ process.exit(1)
+ }
+}
diff --git a/integration-tests/cypress-esm-config.mjs b/integration-tests/cypress-esm-config.mjs
index 4e36b444ae0..e835d1636a2 100644
--- a/integration-tests/cypress-esm-config.mjs
+++ b/integration-tests/cypress-esm-config.mjs
@@ -1,3 +1,7 @@
+// Programmatic ESM entry point for the 'esm' module type tests.
+// Instrumentation works via the default cypress.config.js in the project
+// (which uses defineConfig), NOT via the inline setupNodeEvents below —
+// Cypress does not call setupNodeEvents from inline config objects.
import cypress from 'cypress'
async function runCypress () {
@@ -8,31 +12,10 @@ async function runCypress () {
testIsolation: process.env.CYPRESS_TEST_ISOLATION !== 'false',
setupNodeEvents (on, config) {
if (process.env.CYPRESS_ENABLE_INCOMPATIBLE_PLUGIN) {
- import('cypress-fail-fast/plugin').then(module => {
+ return import('cypress-fail-fast/plugin').then(module => {
module.default(on, config)
})
}
- if (process.env.CYPRESS_ENABLE_AFTER_RUN_CUSTOM) {
- on('after:run', (...args) => {
- // do custom stuff
- // and call after-run at the end
- return import('dd-trace/ci/cypress/after-run').then(module => {
- module.default(...args)
- })
- })
- }
- if (process.env.CYPRESS_ENABLE_AFTER_SPEC_CUSTOM) {
- on('after:spec', (...args) => {
- // do custom stuff
- // and call after-spec at the end
- return import('dd-trace/ci/cypress/after-spec').then(module => {
- module.default(...args)
- })
- })
- }
- return import('dd-trace/ci/cypress/plugin').then(module => {
- return module.default(on, config)
- })
},
specPattern: process.env.SPEC_PATTERN || 'cypress/e2e/**/*.cy.js',
},
@@ -40,6 +23,7 @@ async function runCypress () {
screenshotOnRunFailure: false,
},
})
+
if (results.totalFailed !== 0) {
process.exit(1)
}
diff --git a/integration-tests/cypress-legacy-plugin.config.js b/integration-tests/cypress-legacy-plugin.config.js
new file mode 100644
index 00000000000..016ae6ee76c
--- /dev/null
+++ b/integration-tests/cypress-legacy-plugin.config.js
@@ -0,0 +1,20 @@
+'use strict'
+
+// Backwards compatibility config: uses defineConfig AND the old manual plugin.
+// When NODE_OPTIONS is set, the instrumentation wraps defineConfig and injects
+// setupNodeEvents. The manual plugin call sets cypressPlugin._isInit = true,
+// so the instrumentation skips its own registration to avoid double hooks.
+const { defineConfig } = require('cypress')
+const ddTracePlugin = require('dd-trace/ci/cypress/plugin')
+
+module.exports = defineConfig({
+ defaultCommandTimeout: 1000,
+ e2e: {
+ setupNodeEvents (on, config) {
+ return ddTracePlugin(on, config)
+ },
+ specPattern: process.env.SPEC_PATTERN || 'cypress/e2e/**/*.cy.js',
+ },
+ video: false,
+ screenshotOnRunFailure: false,
+})
diff --git a/integration-tests/cypress-legacy-plugin.config.mjs b/integration-tests/cypress-legacy-plugin.config.mjs
new file mode 100644
index 00000000000..23d8791a826
--- /dev/null
+++ b/integration-tests/cypress-legacy-plugin.config.mjs
@@ -0,0 +1,14 @@
+import { defineConfig } from 'cypress'
+import ddTracePlugin from 'dd-trace/ci/cypress/plugin.js'
+
+export default defineConfig({
+ defaultCommandTimeout: 1000,
+ e2e: {
+ setupNodeEvents (on, config) {
+ return ddTracePlugin(on, config)
+ },
+ specPattern: process.env.SPEC_PATTERN || 'cypress/e2e/**/*.cy.js',
+ },
+ video: false,
+ screenshotOnRunFailure: false,
+})
diff --git a/integration-tests/cypress-plain-object-auto.config.js b/integration-tests/cypress-plain-object-auto.config.js
new file mode 100644
index 00000000000..4c5bf96a93b
--- /dev/null
+++ b/integration-tests/cypress-plain-object-auto.config.js
@@ -0,0 +1,13 @@
+'use strict'
+
+// Plain object config without defineConfig and without manual plugin.
+// Relies solely on the CLI wrapper to inject setupNodeEvents.
+module.exports = {
+ defaultCommandTimeout: 1000,
+ e2e: {
+ specPattern: process.env.SPEC_PATTERN || 'cypress/e2e/**/*.cy.js',
+ supportFile: 'cypress/support/e2e.js',
+ },
+ video: false,
+ screenshotOnRunFailure: false,
+}
diff --git a/integration-tests/cypress-plain-object-auto.config.mjs b/integration-tests/cypress-plain-object-auto.config.mjs
new file mode 100644
index 00000000000..e43e70e2026
--- /dev/null
+++ b/integration-tests/cypress-plain-object-auto.config.mjs
@@ -0,0 +1,11 @@
+// Plain object config without defineConfig and without manual plugin.
+// Relies solely on the CLI wrapper to inject setupNodeEvents.
+export default {
+ defaultCommandTimeout: 1000,
+ e2e: {
+ specPattern: process.env.SPEC_PATTERN || 'cypress/e2e/**/*.cy.js',
+ supportFile: 'cypress/support/e2e.js',
+ },
+ video: false,
+ screenshotOnRunFailure: false,
+}
diff --git a/integration-tests/cypress-plain-object-manual.config.js b/integration-tests/cypress-plain-object-manual.config.js
new file mode 100644
index 00000000000..c8e76f42994
--- /dev/null
+++ b/integration-tests/cypress-plain-object-manual.config.js
@@ -0,0 +1,16 @@
+'use strict'
+
+const ddTracePlugin = require('dd-trace/ci/cypress/plugin')
+
+module.exports = {
+ defaultCommandTimeout: 1000,
+ e2e: {
+ setupNodeEvents (on, config) {
+ return ddTracePlugin(on, config)
+ },
+ specPattern: process.env.SPEC_PATTERN || 'cypress/e2e/**/*.cy.js',
+ supportFile: 'cypress/support/e2e.js',
+ },
+ video: false,
+ screenshotOnRunFailure: false,
+}
diff --git a/integration-tests/cypress-plain-object-manual.config.mjs b/integration-tests/cypress-plain-object-manual.config.mjs
new file mode 100644
index 00000000000..c4925f3c3ca
--- /dev/null
+++ b/integration-tests/cypress-plain-object-manual.config.mjs
@@ -0,0 +1,14 @@
+import ddTracePlugin from 'dd-trace/ci/cypress/plugin.js'
+
+export default {
+ defaultCommandTimeout: 1000,
+ e2e: {
+ setupNodeEvents (on, config) {
+ return ddTracePlugin(on, config)
+ },
+ specPattern: process.env.SPEC_PATTERN || 'cypress/e2e/**/*.cy.js',
+ supportFile: 'cypress/support/e2e.js',
+ },
+ video: false,
+ screenshotOnRunFailure: false,
+}
diff --git a/integration-tests/cypress-return-config.config.js b/integration-tests/cypress-return-config.config.js
new file mode 100644
index 00000000000..fde4398e7a0
--- /dev/null
+++ b/integration-tests/cypress-return-config.config.js
@@ -0,0 +1,21 @@
+'use strict'
+
+const { defineConfig } = require('cypress')
+
+module.exports = defineConfig({
+ defaultCommandTimeout: 1000,
+ e2e: {
+ async setupNodeEvents () {
+ await new Promise((resolve) => setTimeout(resolve, 50))
+ return {
+ env: {
+ RETURNED_CONFIG_FLAG: 'true',
+ },
+ specPattern: 'cypress/e2e/returned-config.cy.js',
+ }
+ },
+ specPattern: 'cypress/e2e/basic-fail.js',
+ },
+ video: false,
+ screenshotOnRunFailure: false,
+})
diff --git a/integration-tests/cypress-return-config.config.mjs b/integration-tests/cypress-return-config.config.mjs
new file mode 100644
index 00000000000..25ab995728a
--- /dev/null
+++ b/integration-tests/cypress-return-config.config.mjs
@@ -0,0 +1,19 @@
+import { defineConfig } from 'cypress'
+
+export default defineConfig({
+ defaultCommandTimeout: 1000,
+ e2e: {
+ async setupNodeEvents () {
+ await new Promise((resolve) => setTimeout(resolve, 50))
+ return {
+ env: {
+ RETURNED_CONFIG_FLAG: 'true',
+ },
+ specPattern: 'cypress/e2e/returned-config.cy.js',
+ }
+ },
+ specPattern: 'cypress/e2e/basic-fail.js',
+ },
+ video: false,
+ screenshotOnRunFailure: false,
+})
diff --git a/integration-tests/cypress-typescript.config.ts b/integration-tests/cypress-typescript.config.ts
new file mode 100644
index 00000000000..35111b34608
--- /dev/null
+++ b/integration-tests/cypress-typescript.config.ts
@@ -0,0 +1,11 @@
+import { defineConfig } from 'cypress'
+
+export default defineConfig({
+ defaultCommandTimeout: 1000,
+ e2e: {
+ specPattern: process.env.SPEC_PATTERN || 'cypress/e2e/**/*.cy.js',
+ supportFile: 'cypress/support/e2e.js',
+ },
+ video: false,
+ screenshotOnRunFailure: false,
+})
diff --git a/integration-tests/cypress.config.js b/integration-tests/cypress.config.js
index 091320304c9..3c665d78524 100644
--- a/integration-tests/cypress.config.js
+++ b/integration-tests/cypress.config.js
@@ -1,36 +1,33 @@
'use strict'
-const ddAfterRun = require('dd-trace/ci/cypress/after-run')
-const ddAfterSpec = require('dd-trace/ci/cypress/after-spec')
-const cypressFailFast = require('cypress-fail-fast/plugin')
-const ddTracePlugin = require('dd-trace/ci/cypress/plugin')
+const { defineConfig } = require('cypress')
-module.exports = {
+module.exports = defineConfig({
defaultCommandTimeout: 1000,
e2e: {
testIsolation: process.env.CYPRESS_TEST_ISOLATION !== 'false',
setupNodeEvents (on, config) {
if (process.env.CYPRESS_ENABLE_INCOMPATIBLE_PLUGIN) {
- cypressFailFast(on, config)
+ require('cypress-fail-fast/plugin')(on, config)
}
if (process.env.CYPRESS_ENABLE_AFTER_RUN_CUSTOM) {
+ const ddAfterRun = require('dd-trace/ci/cypress/after-run')
on('after:run', (...args) => {
- // do custom stuff
- // and call after-run at the end
return ddAfterRun(...args)
})
}
if (process.env.CYPRESS_ENABLE_AFTER_SPEC_CUSTOM) {
+ const ddAfterSpec = require('dd-trace/ci/cypress/after-spec')
on('after:spec', (...args) => {
- // do custom stuff
- // and call after-spec at the end
return ddAfterSpec(...args)
})
}
- return ddTracePlugin(on, config)
+ if (process.env.CYPRESS_ENABLE_MANUAL_PLUGIN) {
+ return require('dd-trace/ci/cypress/plugin')(on, config)
+ }
},
specPattern: process.env.SPEC_PATTERN || 'cypress/e2e/**/*.cy.js',
},
video: false,
screenshotOnRunFailure: false,
-}
+})
diff --git a/integration-tests/cypress/cypress.spec.js b/integration-tests/cypress/cypress.spec.js
index f6642405c4e..b78ad582d57 100644
--- a/integration-tests/cypress/cypress.spec.js
+++ b/integration-tests/cypress/cypress.spec.js
@@ -76,7 +76,6 @@ const version = process.env.CYPRESS_VERSION
const hookFile = 'dd-trace/loader-hook.mjs'
const NUM_RETRIES_EFD = 3
const CYPRESS_PRECOMPILED_SPEC_DIST_DIR = 'cypress/e2e/dist'
-
const over12It = (version === 'latest' || semver.gte(version, '12.0.0')) ? it : it.skip
const moduleTypes = [
@@ -151,7 +150,10 @@ moduleTypes.forEach(({
// cypress-fail-fast is required as an incompatible plugin.
// typescript is required to compile .cy.ts spec files in the pre-compiled JS tests.
- useSandbox([`cypress@${version}`, 'cypress-fail-fast@7.1.0', 'typescript'], true)
+ // typescript@5 is pinned because typescript@6 emits "use strict" on line 1 for
+ // non-module files, shifting compiled line numbers and breaking source map resolution.
+      // TODO: Update test files accordingly and test with different TS versions
+ useSandbox([`cypress@${version}`, 'cypress-fail-fast@7.1.0', 'typescript@5'], true)
before(async function () {
// Note: Cypress binary is already installed during useSandbox() via the postinstall script
@@ -288,26 +290,568 @@ moduleTypes.forEach(({
assert.ok(!('addTagsAfterFailure' in failedTestSpan.meta))
}, 60000)
- const {
- NODE_OPTIONS,
- ...restEnvVars
- } = getCiVisEvpProxyConfig(receiver.port)
+ const envVars = getCiVisEvpProxyConfig(receiver.port)
const specToRun = 'cypress/e2e/basic-*.js'
- // For Cypress 6.7.0, we need to override the --spec flag that's hardcoded in testCommand
- const command = version === '6.7.0'
- ? `./node_modules/.bin/cypress run --config-file cypress-config.json --spec "${specToRun}"`
- : testCommand
+ // For Cypress 6.7.0, we need to override the --spec flag that's hardcoded in testCommand
+ const command = version === '6.7.0'
+ ? `./node_modules/.bin/cypress run --config-file cypress-config.json --spec "${specToRun}"`
+ : testCommand
+
+ childProcess = exec(
+ command,
+ {
+ cwd,
+ env: {
+ ...envVars,
+ CYPRESS_BASE_URL: `http://localhost:${webAppPort}`,
+ SPEC_PATTERN: specToRun,
+ },
+ }
+ )
+
+ await Promise.all([
+ once(childProcess, 'exit'),
+ receiverPromise,
+ ])
+ })
+
+ if (version === '6.7.0') {
+ it('logs a warning if using a deprecated version of cypress', async () => {
+ let stdout = ''
+ const {
+ NODE_OPTIONS,
+ ...restEnvVars
+ } = getCiVisEvpProxyConfig(receiver.port)
+
+ childProcess = exec(
+ `${testCommand} --spec cypress/e2e/spec.cy.js`,
+ {
+ cwd,
+ env: {
+ ...restEnvVars,
+ CYPRESS_BASE_URL: `http://localhost:${webAppPort}`,
+ },
+ }
+ )
+
+ childProcess.stdout?.on('data', (chunk) => {
+ stdout += chunk.toString()
+ })
+
+ await Promise.all([
+ once(childProcess, 'exit'),
+ once(childProcess.stdout, 'end'),
+ ])
+ assert.match(
+ stdout,
+ /WARNING: dd-trace support for Cypress<10.2.0 is deprecated/
+ )
+ })
+ }
+
+ // These tests require Cypress >=10 features (defineConfig, setupNodeEvents)
+ const over10It = (version !== '6.7.0') ? it : it.skip
+ over10It('is backwards compatible with the old manual plugin approach', async () => {
+ receiver.setInfoResponse({ endpoints: [] })
+
+ const receiverPromise = receiver
+ .gatherPayloadsMaxTimeout(({ url }) => url === '/v0.4/traces', (payloads) => {
+ const testSpans = payloads.flatMap(({ payload }) => payload.flatMap(trace => trace))
+
+ const passedTestSpan = testSpans.find(span =>
+ span.resource === 'cypress/e2e/basic-pass.js.basic pass suite can pass'
+ )
+
+ assertObjectContains(passedTestSpan, {
+ name: 'cypress.test',
+ type: 'test',
+ meta: {
+ [TEST_STATUS]: 'pass',
+ [TEST_FRAMEWORK]: 'cypress',
+ },
+ })
+ }, 60000)
+
+ const envVars = getCiVisEvpProxyConfig(receiver.port)
+
+ const legacyConfigFile = type === 'esm'
+ ? 'cypress-legacy-plugin.config.mjs'
+ : 'cypress-legacy-plugin.config.js'
+
+ childProcess = exec(
+ `./node_modules/.bin/cypress run --config-file ${legacyConfigFile}`,
+ {
+ cwd,
+ env: {
+ ...envVars,
+ CYPRESS_BASE_URL: `http://localhost:${webAppPort}`,
+ SPEC_PATTERN: 'cypress/e2e/basic-pass.js',
+ },
+ }
+ )
+
+ await Promise.all([
+ once(childProcess, 'exit'),
+ receiverPromise,
+ ])
+ })
+
+ over10It('reports tests when using cypress.config.mjs with NODE_OPTIONS', async () => {
+ const receiverPromise = receiver
+ .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), (payloads) => {
+ const events = payloads
+ .flatMap(({ payload }) => payload.events)
+ .filter(event => event.type === 'test')
+ const passedTest = events.find(event =>
+ event.content.resource === 'cypress/e2e/basic-pass.js.basic pass suite can pass'
+ )
+
+ assertObjectContains(passedTest?.content, {
+ meta: {
+ [TEST_STATUS]: 'pass',
+ [TEST_FRAMEWORK]: 'cypress',
+ },
+ })
+ }, 20000)
+
+ let testOutput = ''
+ const envVars = getCiVisAgentlessConfig(receiver.port)
+
+ childProcess = exec(
+ './node_modules/.bin/cypress run --config-file cypress-auto-esm.config.mjs',
+ {
+ cwd,
+ env: {
+ ...envVars,
+ NODE_OPTIONS: '-r dd-trace/ci/init',
+ CYPRESS_BASE_URL: `http://localhost:${webAppPort}`,
+ SPEC_PATTERN: 'cypress/e2e/basic-pass.js',
+ },
+ }
+ )
+ childProcess.stdout?.on('data', (d) => { testOutput += d })
+ childProcess.stderr?.on('data', (d) => { testOutput += d })
+
+ const [[exitCode]] = await Promise.all([
+ once(childProcess, 'exit'),
+ receiverPromise,
+ ])
+
+ assert.strictEqual(exitCode, 0, `cypress process should exit successfully\n${testOutput}`)
+ })
+
+ over10It('reports tests when cypress.run is called twice (multi-run state reset)', async () => {
+ const receiverPromise = receiver
+ .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), (payloads) => {
+ const passedTests = payloads
+ .flatMap(({ payload }) => payload.events)
+ .filter(event => event.type === 'test')
+ .filter(event => event.content.resource === 'cypress/e2e/basic-pass.js.basic pass suite can pass')
+
+ assert.strictEqual(passedTests.length, 2)
+ passedTests.forEach((passedTest) => {
+ assertObjectContains(passedTest.content, {
+ meta: {
+ [TEST_STATUS]: 'pass',
+ [TEST_FRAMEWORK]: 'cypress',
+ },
+ })
+ })
+ }, 60000)
+
+ const envVars = getCiVisAgentlessConfig(receiver.port)
+
+ const doubleRunScript = type === 'esm'
+ ? 'node ./cypress-double-run.mjs'
+ : 'node ./cypress-double-run.js'
+
+ childProcess = exec(
+ doubleRunScript,
+ {
+ cwd,
+ env: {
+ ...envVars,
+ CYPRESS_BASE_URL: `http://localhost:${webAppPort}`,
+ SPEC_PATTERN: 'cypress/e2e/basic-pass.js',
+ },
+ }
+ )
+
+ const [[exitCode]] = await Promise.all([
+ once(childProcess, 'exit'),
+ receiverPromise,
+ ])
+
+ assert.strictEqual(exitCode, 0, 'cypress process should exit successfully')
+ })
+
+ over10It(
+ 'reports tests with a plain-object config when dd-trace is manually configured',
+ async () => {
+ const receiverPromise = receiver
+ .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), (payloads) => {
+ const events = payloads
+ .flatMap(({ payload }) => payload.events)
+ .filter(event => event.type === 'test')
+ const passedTest = events.find(event =>
+ event.content.resource === 'cypress/e2e/basic-pass.js.basic pass suite can pass'
+ )
+
+ assertObjectContains(passedTest?.content, {
+ meta: {
+ [TEST_STATUS]: 'pass',
+ [TEST_FRAMEWORK]: 'cypress',
+ },
+ })
+ }, 60000)
+
+ const envVars = getCiVisAgentlessConfig(receiver.port)
+
+ const plainObjectConfigFile = type === 'esm'
+ ? 'cypress-plain-object-manual.config.mjs'
+ : 'cypress-plain-object-manual.config.js'
+
+ childProcess = exec(
+ `./node_modules/.bin/cypress run --config-file ${plainObjectConfigFile}`,
+ {
+ cwd,
+ env: {
+ ...envVars,
+ CYPRESS_BASE_URL: `http://localhost:${webAppPort}`,
+ SPEC_PATTERN: 'cypress/e2e/basic-pass.js',
+ },
+ }
+ )
+
+ const [[exitCode]] = await Promise.all([
+ once(childProcess, 'exit'),
+ receiverPromise,
+ ])
+
+ assert.strictEqual(exitCode, 0, 'cypress process should exit successfully')
+ }
+ )
+
+ over10It(
+ 'auto-instruments a plain-object config without defineConfig or manual plugin',
+ async () => {
+ const receiverPromise = receiver
+ .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), (payloads) => {
+ const events = payloads
+ .flatMap(({ payload }) => payload.events)
+ .filter(event => event.type === 'test')
+ const passedTest = events.find(event =>
+ event.content.resource === 'cypress/e2e/basic-pass.js.basic pass suite can pass'
+ )
+
+ assertObjectContains(passedTest?.content, {
+ meta: {
+ [TEST_STATUS]: 'pass',
+ [TEST_FRAMEWORK]: 'cypress',
+ },
+ })
+ }, 20000)
+
+ const envVars = getCiVisAgentlessConfig(receiver.port)
+
+ const plainObjectAutoConfigFile = type === 'esm'
+ ? 'cypress-plain-object-auto.config.mjs'
+ : 'cypress-plain-object-auto.config.js'
+
+ childProcess = exec(
+ `./node_modules/.bin/cypress run --config-file ${plainObjectAutoConfigFile}`,
+ {
+ cwd,
+ env: {
+ ...envVars,
+ CYPRESS_BASE_URL: `http://localhost:${webAppPort}`,
+ SPEC_PATTERN: 'cypress/e2e/basic-pass.js',
+ },
+ }
+ )
+
+ const [[exitCode]] = await Promise.all([
+ once(childProcess, 'exit'),
+ receiverPromise,
+ ])
+
+ assert.strictEqual(exitCode, 0, 'cypress process should exit successfully')
+ }
+ )
+
+ over10It(
+ 'auto-instruments a plain-object default config (no --config-file)',
+ async () => {
+ const originalConfig = path.join(cwd, 'cypress.config.js')
+ const backupConfig = path.join(cwd, 'cypress.config.js.bak')
+ const plainObjectConfig = path.join(cwd, 'cypress-plain-object-auto.config.js')
+
+ // Replace default cypress.config.js with the plain-object config
+ fs.renameSync(originalConfig, backupConfig)
+ fs.copyFileSync(plainObjectConfig, originalConfig)
+
+ try {
+ const receiverPromise = receiver
+ .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), (payloads) => {
+ const events = payloads
+ .flatMap(({ payload }) => payload.events)
+ .filter(event => event.type === 'test')
+ const passedTest = events.find(event =>
+ event.content.resource === 'cypress/e2e/basic-pass.js.basic pass suite can pass'
+ )
+
+ assertObjectContains(passedTest?.content, {
+ meta: {
+ [TEST_STATUS]: 'pass',
+ [TEST_FRAMEWORK]: 'cypress',
+ },
+ })
+ }, 20000)
+
+ const envVars = getCiVisAgentlessConfig(receiver.port)
+
+ childProcess = exec(
+ './node_modules/.bin/cypress run',
+ {
+ cwd,
+ env: {
+ ...envVars,
+ CYPRESS_BASE_URL: `http://localhost:${webAppPort}`,
+ SPEC_PATTERN: 'cypress/e2e/basic-pass.js',
+ },
+ }
+ )
+
+ const [[exitCode]] = await Promise.all([
+ once(childProcess, 'exit'),
+ receiverPromise,
+ ])
+
+ assert.strictEqual(exitCode, 0, 'cypress process should exit successfully')
+ } finally {
+ fs.renameSync(backupConfig, originalConfig)
+ }
+ }
+ )
+
+ over10It('reports tests with a TypeScript config file', async () => {
+ const receiverPromise = receiver
+ .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), (payloads) => {
+ const events = payloads
+ .flatMap(({ payload }) => payload.events)
+ .filter(event => event.type === 'test')
+ const passedTest = events.find(event =>
+ event.content.resource === 'cypress/e2e/basic-pass.js.basic pass suite can pass'
+ )
+
+ assertObjectContains(passedTest?.content, {
+ meta: {
+ [TEST_STATUS]: 'pass',
+ [TEST_FRAMEWORK]: 'cypress',
+ },
+ })
+ }, 20000)
+
+ const envVars = getCiVisAgentlessConfig(receiver.port)
+
+ childProcess = exec(
+ './node_modules/.bin/cypress run --config-file cypress-typescript.config.ts',
+ {
+ cwd,
+ env: {
+ ...envVars,
+ CYPRESS_BASE_URL: `http://localhost:${webAppPort}`,
+ SPEC_PATTERN: 'cypress/e2e/basic-pass.js',
+ },
+ }
+ )
+
+ const [[exitCode]] = await Promise.all([
+ once(childProcess, 'exit'),
+ receiverPromise,
+ ])
+
+ assert.strictEqual(exitCode, 0, 'cypress process should exit successfully')
+ })
+
+ over10It('does not modify the user support file and cleans up the injected wrapper', async () => {
+ const supportFilePath = path.join(cwd, 'cypress/support/e2e.js')
+ const originalSupportContent = fs.readFileSync(supportFilePath, 'utf8')
+ const supportContentWithoutDdTrace = originalSupportContent
+ .split('\n')
+ .filter(line => !line.includes("require('dd-trace/ci/cypress/support')"))
+ .join('\n')
+
+ const getSupportWrappers = () => fs.readdirSync(os.tmpdir())
+ .filter(filename => filename.startsWith('dd-cypress-support-'))
+ .sort()
+
+ fs.writeFileSync(supportFilePath, supportContentWithoutDdTrace)
+
+ const receiverPromise = receiver
+ .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), (payloads) => {
+ const events = payloads
+ .flatMap(({ payload }) => payload.events)
+ .filter(event => event.type === 'test')
+ const passedTest = events.find(event =>
+ event.content.resource === 'cypress/e2e/basic-pass.js.basic pass suite can pass'
+ )
+
+ assertObjectContains(passedTest?.content, {
+ meta: {
+ [TEST_STATUS]: 'pass',
+ [TEST_FRAMEWORK]: 'cypress',
+ },
+ })
+ }, 60000)
+
+ const envVars = getCiVisAgentlessConfig(receiver.port)
+ const wrapperFilesBefore = getSupportWrappers()
+
+ try {
+ childProcess = exec(testCommand, {
+ cwd,
+ env: {
+ ...envVars,
+ CYPRESS_BASE_URL: `http://localhost:${webAppPort}`,
+ SPEC_PATTERN: 'cypress/e2e/basic-pass.js',
+ },
+ })
+
+ const [[exitCode]] = await Promise.all([
+ once(childProcess, 'exit'),
+ receiverPromise,
+ ])
+
+ assert.strictEqual(exitCode, 0, 'cypress process should exit successfully')
+ assert.strictEqual(fs.readFileSync(supportFilePath, 'utf8'), supportContentWithoutDdTrace)
+ assert.doesNotMatch(fs.readFileSync(supportFilePath, 'utf8'), /dd-trace\/ci\/cypress\/support/)
+ assert.deepStrictEqual(getSupportWrappers(), wrapperFilesBefore)
+ } finally {
+ fs.writeFileSync(supportFilePath, originalSupportContent)
+ }
+ })
+
+ over10It('preserves config returned from setupNodeEvents', async () => {
+ const receiverPromise = receiver
+ .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), (payloads) => {
+ const events = payloads
+ .flatMap(({ payload }) => payload.events)
+ .filter(event => event.type === 'test')
+ const passedTest = events.find(event =>
+ event.content.resource ===
+ 'cypress/e2e/returned-config.cy.js.returned config uses env from setupNodeEvents return value'
+ )
+
+ assertObjectContains(passedTest?.content, {
+ meta: {
+ [TEST_STATUS]: 'pass',
+ [TEST_FRAMEWORK]: 'cypress',
+ },
+ })
+ }, 60000)
+
+ const envVars = getCiVisAgentlessConfig(receiver.port)
+
+ const returnConfigFile = type === 'esm'
+ ? 'cypress-return-config.config.mjs'
+ : 'cypress-return-config.config.js'
+
+ childProcess = exec(
+ `./node_modules/.bin/cypress run --config-file ${returnConfigFile}`,
+ {
+ cwd,
+ env: envVars,
+ }
+ )
+
+ const [[exitCode]] = await Promise.all([
+ once(childProcess, 'exit'),
+ receiverPromise,
+ ])
+
+ assert.strictEqual(exitCode, 0, 'cypress process should exit successfully')
+ })
+
+ over10It('custom after:spec and after:run handlers are chained with dd-trace instrumentation', async () => {
+ const receiverPromise = receiver
+ .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), (payloads) => {
+ const events = payloads
+ .flatMap(({ payload }) => payload.events)
+ .filter(event => event.type === 'test')
+ const passedTest = events.find(event =>
+ event.content.resource === 'cypress/e2e/basic-pass.js.basic pass suite can pass'
+ )
+ assertObjectContains(passedTest?.content, {
+ meta: {
+ [TEST_STATUS]: 'pass',
+ [TEST_FRAMEWORK]: 'cypress',
+ },
+ })
+ }, 60000)
+
+ const envVars = getCiVisAgentlessConfig(receiver.port)
+
+ let testOutput = ''
+ const customHooksConfigFile = type === 'esm'
+ ? 'cypress-custom-after-hooks.config.mjs'
+ : 'cypress-custom-after-hooks.config.js'
+
+ childProcess = exec(
+ `./node_modules/.bin/cypress run --config-file ${customHooksConfigFile}`,
+ {
+ cwd,
+ env: {
+ ...envVars,
+ CYPRESS_BASE_URL: `http://localhost:${webAppPort}`,
+ SPEC_PATTERN: 'cypress/e2e/basic-pass.js',
+ },
+ }
+ )
+ childProcess.stdout?.on('data', (d) => { testOutput += d })
+ childProcess.stderr?.on('data', (d) => { testOutput += d })
+
+ await Promise.all([
+ once(childProcess, 'exit'),
+ once(childProcess.stdout, 'end'),
+ once(childProcess.stderr, 'end'),
+ receiverPromise,
+ ])
+
+ // Verify both dd-trace spans AND the custom handlers ran (including their async resolutions)
+ assert.match(testOutput, /\[custom:after:spec\]/)
+ assert.match(testOutput, /\[custom:after:spec:resolved\]/)
+ assert.match(testOutput, /\[custom:after:run\]/)
+ assert.match(testOutput, /\[custom:after:run:resolved\]/)
+ })
+
+ // Tests the old manual API: dd-trace/ci/cypress/after-run and after-spec
+ // used alongside the manual plugin, without NODE_OPTIONS auto-instrumentation.
+ over10It('works if after:run and after:spec are explicitly used with the manual plugin', async () => {
+ const receiverPromise = receiver
+ .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), (payloads) => {
+ const events = payloads.flatMap(({ payload }) => payload.events)
+ const testSessionEvent = events.find(event => event.type === 'test_session_end')
+ assert.ok(testSessionEvent)
+ const testEvents = events.filter(event => event.type === 'test')
+ assert.ok(testEvents.length > 0)
+ }, 30000)
+
+ const envVars = getCiVisAgentlessConfig(receiver.port)
childProcess = exec(
- command,
+ testCommand,
{
cwd,
env: {
- ...restEnvVars,
+ ...envVars,
CYPRESS_BASE_URL: `http://localhost:${webAppPort}`,
- SPEC_PATTERN: specToRun,
+ CYPRESS_ENABLE_AFTER_RUN_CUSTOM: '1',
+ CYPRESS_ENABLE_AFTER_SPEC_CUSTOM: '1',
+ CYPRESS_ENABLE_MANUAL_PLUGIN: '1',
+ SPEC_PATTERN: 'cypress/e2e/basic-pass.js',
},
}
)
@@ -319,16 +863,14 @@ moduleTypes.forEach(({
})
over12It('reports correct source file and line for pre-compiled typescript test files', async function () {
- const { NODE_OPTIONS, ...restEnvVars } = getCiVisAgentlessConfig(receiver.port)
+ const envVars = getCiVisAgentlessConfig(receiver.port)
try {
cleanupPrecompiledSourceLineDist(cwd)
// Compile the TypeScript spec to JS + source map so the plugin can resolve
// the original TypeScript source file and line via the adjacent .js.map file.
- // We intentionally run with NODE_OPTIONS removed because sandboxed CWDs may not
- // have local preload paths (e.g. -r ./ci/init) set by outer test environments.
- compilePrecompiledTypeScriptSpecs(cwd, restEnvVars)
+ compilePrecompiledTypeScriptSpecs(cwd, envVars)
const receiverPromise = receiver
.gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), (payloads) => {
@@ -379,7 +921,7 @@ moduleTypes.forEach(({
childProcess = exec(testCommand, {
cwd,
env: {
- ...restEnvVars,
+ ...envVars,
CYPRESS_BASE_URL: `http://localhost:${webAppPort}`,
SPEC_PATTERN: 'cypress/e2e/dist/spec-source-line.cy.js',
},
@@ -434,12 +976,12 @@ moduleTypes.forEach(({
})
over12It('uses declaration scanning fallback when invocationDetails line is invalid', async function () {
- const { NODE_OPTIONS, ...restEnvVars } = getCiVisAgentlessConfig(receiver.port)
+ const envVars = getCiVisAgentlessConfig(receiver.port)
try {
cleanupPrecompiledSourceLineDist(cwd)
- compilePrecompiledTypeScriptSpecs(cwd, restEnvVars)
+ compilePrecompiledTypeScriptSpecs(cwd, envVars)
const receiverPromise = receiver
.gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), (payloads) => {
@@ -465,7 +1007,7 @@ moduleTypes.forEach(({
childProcess = exec(testCommand, {
cwd,
env: {
- ...restEnvVars,
+ ...envVars,
CYPRESS_BASE_URL: `http://localhost:${webAppPort}`,
SPEC_PATTERN: 'cypress/e2e/dist/spec-source-line-fallback.cy.js',
},
@@ -480,12 +1022,12 @@ moduleTypes.forEach(({
over12It('keeps original invocationDetails line when no declaration match is found', async function () {
this.timeout(140000)
- const { NODE_OPTIONS, ...restEnvVars } = getCiVisAgentlessConfig(receiver.port)
+ const envVars = getCiVisAgentlessConfig(receiver.port)
try {
cleanupPrecompiledSourceLineDist(cwd)
- compilePrecompiledTypeScriptSpecs(cwd, restEnvVars)
+ compilePrecompiledTypeScriptSpecs(cwd, envVars)
const receiverPromise = receiver
.gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), (payloads) => {
@@ -513,7 +1055,7 @@ moduleTypes.forEach(({
childProcess = exec(testCommand, {
cwd,
env: {
- ...restEnvVars,
+ ...envVars,
CYPRESS_BASE_URL: `http://localhost:${webAppPort}`,
SPEC_PATTERN: 'cypress/e2e/dist/spec-source-line-no-match.cy.js',
},
@@ -527,7 +1069,7 @@ moduleTypes.forEach(({
})
over12It('uses invocationDetails line directly for plain javascript specs without source maps', async function () {
- const { NODE_OPTIONS, ...restEnvVars } = getCiVisAgentlessConfig(receiver.port)
+ const envVars = getCiVisAgentlessConfig(receiver.port)
const receiverPromise = receiver
.gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), (payloads) => {
@@ -555,7 +1097,7 @@ moduleTypes.forEach(({
childProcess = exec(testCommand, {
cwd,
env: {
- ...restEnvVars,
+ ...envVars,
CYPRESS_BASE_URL: `http://localhost:${webAppPort}`,
SPEC_PATTERN: 'cypress/e2e/spec-source-line-invocation.cy.js',
},
@@ -609,14 +1151,14 @@ moduleTypes.forEach(({
)
}, 60000)
- const { NODE_OPTIONS, ...restEnvVars } = getCiVisAgentlessConfig(receiver.port)
+ const envVars = getCiVisAgentlessConfig(receiver.port)
// Run Cypress directly with the TypeScript spec file — no manual compilation step.
// Cypress compiles .cy.ts files on the fly via its own preprocessor/bundler.
childProcess = exec(testCommand, {
cwd,
env: {
- ...restEnvVars,
+ ...envVars,
CYPRESS_BASE_URL: `http://localhost:${webAppPort}`,
SPEC_PATTERN: 'cypress/e2e/spec-source-line.cy.ts',
},
@@ -626,46 +1168,8 @@ moduleTypes.forEach(({
assert.strictEqual(exitCode, 0, 'cypress process should exit successfully')
})
- if (version === '6.7.0') {
- // to be removed when we drop support for cypress@6.7.0
- it('logs a warning if using a deprecated version of cypress', async () => {
- let stdout = ''
- const {
- NODE_OPTIONS,
- ...restEnvVars
- } = getCiVisEvpProxyConfig(receiver.port)
-
- childProcess = exec(
- `${testCommand} --spec cypress/e2e/spec.cy.js`,
- {
- cwd,
- env: {
- ...restEnvVars,
- CYPRESS_BASE_URL: `http://localhost:${webAppPort}`,
- },
- }
- )
-
- childProcess.stdout?.on('data', (chunk) => {
- stdout += chunk.toString()
- })
-
- await Promise.all([
- once(childProcess, 'exit'),
- once(childProcess.stdout, 'end'),
- ])
- assert.match(
- stdout,
- /WARNING: dd-trace support for Cypress<10.2.0 is deprecated and will not be supported in future versions of dd-trace./
- )
- })
- }
-
it('tags session and children with _dd.ci.library_configuration_error when settings fails 4xx', async () => {
- const {
- NODE_OPTIONS, // NODE_OPTIONS dd-trace config does not work with cypress
- ...restEnvVars
- } = getCiVisAgentlessConfig(receiver.port)
+ const envVars = getCiVisAgentlessConfig(receiver.port)
receiver.setSettingsResponseCode(404)
const eventsPromise = receiver
@@ -685,7 +1189,7 @@ moduleTypes.forEach(({
{
cwd,
env: {
- ...restEnvVars,
+ ...envVars,
CYPRESS_BASE_URL: `http://localhost:${webAppPort}`,
SPEC_PATTERN: 'cypress/e2e/spec.cy.js',
},
@@ -697,9 +1201,8 @@ moduleTypes.forEach(({
it('does not crash if badly init', async () => {
const {
- NODE_OPTIONS, // NODE_OPTIONS dd-trace config does not work with cypress
DD_CIVISIBILITY_AGENTLESS_URL,
- ...restEnvVars
+ ...envVars
} = getCiVisAgentlessConfig(receiver.port)
let hasReceivedEvents = false
@@ -715,7 +1218,7 @@ moduleTypes.forEach(({
{
cwd,
env: {
- ...restEnvVars,
+ ...envVars,
CYPRESS_BASE_URL: `http://localhost:${webAppPort}`,
DD_SITE: '= invalid = url',
SPEC_PATTERN: 'cypress/e2e/spec.cy.js',
@@ -755,14 +1258,15 @@ moduleTypes.forEach(({
it('can run and report tests', async () => {
const receiverPromise = receiver
.gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), payloads => {
- const metadataDicts = payloads.flatMap(({ payload }) => payload.metadata)
+ const ciVisPayloads = payloads.filter(({ payload }) => payload.metadata?.test)
+ const ciVisMetadataDicts = ciVisPayloads.flatMap(({ payload }) => payload.metadata)
- metadataDicts.forEach(metadata => {
+ ciVisMetadataDicts.forEach(metadata => {
for (const testLevel of TEST_LEVEL_EVENT_TYPES) {
assert.strictEqual(metadata[testLevel][TEST_SESSION_NAME], 'my-test-session')
}
})
- const events = payloads.flatMap(({ payload }) => payload.events)
+ const events = ciVisPayloads.flatMap(({ payload }) => payload.events)
const testSessionEvent = events.find(event => event.type === 'test_session_end')
const testModuleEvent = events.find(event => event.type === 'test_module_end')
@@ -896,17 +1400,14 @@ moduleTypes.forEach(({
assert.match(describeHookSuite.content.meta[ERROR_MESSAGE], /error in after hook/)
}, 25000)
- const {
- NODE_OPTIONS, // NODE_OPTIONS dd-trace config does not work with cypress
- ...restEnvVars
- } = getCiVisEvpProxyConfig(receiver.port)
+ const envVars = getCiVisEvpProxyConfig(receiver.port)
childProcess = exec(
testCommand,
{
cwd,
env: {
- ...restEnvVars,
+ ...envVars,
CYPRESS_BASE_URL: `http://localhost:${webAppPort}`,
DD_TAGS: 'test.customtag:customvalue,test.customtag2:customvalue2',
DD_TEST_SESSION_NAME: 'my-test-session',
@@ -923,10 +1424,7 @@ moduleTypes.forEach(({
})
it('can report code coverage if it is available', async () => {
- const {
- NODE_OPTIONS, // NODE_OPTIONS dd-trace config does not work with cypress
- ...restEnvVars
- } = getCiVisAgentlessConfig(receiver.port)
+ const envVars = getCiVisAgentlessConfig(receiver.port)
const receiverPromise = receiver.gatherPayloadsMaxTimeout(({ url }) => url === '/api/v2/citestcov', payloads => {
const [{ payload: coveragePayloads }] = payloads
@@ -953,7 +1451,7 @@ moduleTypes.forEach(({
{
cwd,
env: {
- ...restEnvVars,
+ ...envVars,
CYPRESS_BASE_URL: `http://localhost:${webAppPort}`,
SPEC_PATTERN: 'cypress/e2e/spec.cy.js',
},
@@ -975,17 +1473,14 @@ moduleTypes.forEach(({
const packfileRequestPromise = receiver
.payloadReceived(({ url }) => url.endsWith('/api/v2/git/repository/packfile'), 25000)
- const {
- NODE_OPTIONS, // NODE_OPTIONS dd-trace config does not work with cypress
- ...restEnvVars
- } = getCiVisAgentlessConfig(receiver.port)
+ const envVars = getCiVisAgentlessConfig(receiver.port)
childProcess = exec(
testCommand,
{
cwd,
env: {
- ...restEnvVars,
+ ...envVars,
CYPRESS_BASE_URL: `http://localhost:${webAppPort}`,
SPEC_PATTERN: 'cypress/e2e/spec.cy.js',
},
@@ -1023,17 +1518,14 @@ moduleTypes.forEach(({
assertObjectContains(eventTypes, ['test', 'test_session_end', 'test_module_end', 'test_suite_end'])
}, 25000)
- const {
- NODE_OPTIONS,
- ...restEnvVars
- } = getCiVisAgentlessConfig(receiver.port)
+ const envVars = getCiVisAgentlessConfig(receiver.port)
childProcess = exec(
testCommand,
{
cwd,
env: {
- ...restEnvVars,
+ ...envVars,
CYPRESS_BASE_URL: `http://localhost:${webAppPort}`,
SPEC_PATTERN: 'cypress/e2e/spec.cy.js',
},
@@ -1094,17 +1586,14 @@ moduleTypes.forEach(({
assert.strictEqual(skippableRequest.headers['dd-api-key'], '1')
})
- const {
- NODE_OPTIONS,
- ...restEnvVars
- } = getCiVisAgentlessConfig(receiver.port)
+ const envVars = getCiVisAgentlessConfig(receiver.port)
childProcess = exec(
testCommand,
{
cwd,
env: {
- ...restEnvVars,
+ ...envVars,
CYPRESS_BASE_URL: `http://localhost:${webAppPort}`,
SPEC_PATTERN: 'cypress/e2e/{other,spec}.cy.js',
},
@@ -1152,17 +1641,14 @@ moduleTypes.forEach(({
assert.strictEqual(notSkippedTest.content.meta[TEST_STATUS], 'pass')
}, 25000)
- const {
- NODE_OPTIONS,
- ...restEnvVars
- } = getCiVisAgentlessConfig(receiver.port)
+ const envVars = getCiVisAgentlessConfig(receiver.port)
childProcess = exec(
testCommand,
{
cwd,
env: {
- ...restEnvVars,
+ ...envVars,
CYPRESS_BASE_URL: `http://localhost:${webAppPort}`,
SPEC_PATTERN: 'cypress/e2e/other.cy.js',
},
@@ -1230,17 +1716,14 @@ moduleTypes.forEach(({
assert.ok(!(TEST_ITR_FORCED_RUN in unskippableFailedTest.content.meta))
}, 25000)
- const {
- NODE_OPTIONS,
- ...restEnvVars
- } = getCiVisAgentlessConfig(receiver.port)
+ const envVars = getCiVisAgentlessConfig(receiver.port)
childProcess = exec(
testCommand,
{
cwd,
env: {
- ...restEnvVars,
+ ...envVars,
CYPRESS_BASE_URL: `http://localhost:${webAppPort}`,
SPEC_PATTERN: 'cypress/e2e/{other,spec}.cy.js',
},
@@ -1302,17 +1785,14 @@ moduleTypes.forEach(({
assert.ok(!(TEST_ITR_FORCED_RUN in unskippableFailedTest.content.meta))
}, 25000)
- const {
- NODE_OPTIONS,
- ...restEnvVars
- } = getCiVisAgentlessConfig(receiver.port)
+ const envVars = getCiVisAgentlessConfig(receiver.port)
childProcess = exec(
testCommand,
{
cwd,
env: {
- ...restEnvVars,
+ ...envVars,
CYPRESS_BASE_URL: `http://localhost:${webAppPort}`,
SPEC_PATTERN: 'cypress/e2e/{other,spec}.cy.js',
},
@@ -1358,17 +1838,14 @@ moduleTypes.forEach(({
assert.strictEqual(skippableRequest.headers['dd-api-key'], '1')
})
- const {
- NODE_OPTIONS,
- ...restEnvVars
- } = getCiVisAgentlessConfig(receiver.port)
+ const envVars = getCiVisAgentlessConfig(receiver.port)
childProcess = exec(
testCommand,
{
cwd,
env: {
- ...restEnvVars,
+ ...envVars,
CYPRESS_BASE_URL: `http://localhost:${webAppPort}`,
SPEC_PATTERN: 'cypress/e2e/spec.cy.js',
},
@@ -1398,17 +1875,14 @@ moduleTypes.forEach(({
})
}, 25000)
- const {
- NODE_OPTIONS,
- ...restEnvVars
- } = getCiVisAgentlessConfig(receiver.port)
+ const envVars = getCiVisAgentlessConfig(receiver.port)
childProcess = exec(
testCommand,
{
cwd,
env: {
- ...restEnvVars,
+ ...envVars,
CYPRESS_BASE_URL: `http://localhost:${webAppPort}`,
SPEC_PATTERN: 'cypress/e2e/spec.cy.js',
},
@@ -1438,10 +1912,7 @@ moduleTypes.forEach(({
command = `node --loader=${hookFile} ../../cypress-esm-config.mjs`
}
- const {
- NODE_OPTIONS, // NODE_OPTIONS dd-trace config does not work with cypress
- ...restEnvVars
- } = getCiVisAgentlessConfig(receiver.port)
+ const envVars = getCiVisAgentlessConfig(receiver.port)
const eventsPromise = receiver
.gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcov'), (payloads) => {
@@ -1464,7 +1935,7 @@ moduleTypes.forEach(({
{
cwd: `${cwd}/ci-visibility/subproject`,
env: {
- ...restEnvVars,
+ ...envVars,
CYPRESS_BASE_URL: `http://localhost:${webAppPort}`,
},
}
@@ -1482,10 +1953,7 @@ moduleTypes.forEach(({
})
it('still reports correct format if there is a plugin incompatibility', async () => {
- const {
- NODE_OPTIONS, // NODE_OPTIONS dd-trace config does not work with cypress
- ...restEnvVars
- } = getCiVisEvpProxyConfig(receiver.port)
+ const envVars = getCiVisEvpProxyConfig(receiver.port)
const receiverPromise = receiver
.gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), payloads => {
@@ -1507,7 +1975,7 @@ moduleTypes.forEach(({
{
cwd,
env: {
- ...restEnvVars,
+ ...envVars,
CYPRESS_BASE_URL: `http://localhost:${webAppPort}`,
CYPRESS_ENABLE_INCOMPATIBLE_PLUGIN: '1',
SPEC_PATTERN: 'cypress/e2e/spec.cy.js',
@@ -1521,45 +1989,6 @@ moduleTypes.forEach(({
])
})
- it('works if after:run and after:spec are explicitly used', async () => {
- const receiverPromise = receiver
- .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), payloads => {
- const events = payloads.flatMap(({ payload }) => payload.events)
- const testSessionEvent = events.find(event => event.type === 'test_session_end')
- assert.ok(testSessionEvent)
- const testModuleEvent = events.find(event => event.type === 'test_module_end')
- assert.ok(testModuleEvent)
- const testSuiteEvents = events.filter(event => event.type === 'test_suite_end')
- assert.strictEqual(testSuiteEvents.length, 4)
- const testEvents = events.filter(event => event.type === 'test')
- assert.strictEqual(testEvents.length, 9)
- }, 30000)
-
- const {
- NODE_OPTIONS, // NODE_OPTIONS dd-trace config does not work with cypress
- ...restEnvVars
- } = getCiVisEvpProxyConfig(receiver.port)
-
- childProcess = exec(
- testCommand,
- {
- cwd,
- env: {
- ...restEnvVars,
- CYPRESS_BASE_URL: `http://localhost:${webAppPort}`,
- CYPRESS_ENABLE_AFTER_RUN_CUSTOM: '1',
- CYPRESS_ENABLE_AFTER_SPEC_CUSTOM: '1',
- SPEC_PATTERN: 'cypress/e2e/{spec,other,hook-describe-error,hook-test-error}.cy.js',
- },
- }
- )
-
- await Promise.all([
- once(childProcess, 'exit'),
- receiverPromise,
- ])
- })
-
context('early flake detection', () => {
it('retries new tests', async () => {
receiver.setSettings({
@@ -1609,10 +2038,7 @@ moduleTypes.forEach(({
assert.strictEqual(testSession.meta[TEST_EARLY_FLAKE_ENABLED], 'true')
}, 25000)
- const {
- NODE_OPTIONS, // NODE_OPTIONS dd-trace config does not work with cypress
- ...restEnvVars
- } = getCiVisEvpProxyConfig(receiver.port)
+ const envVars = getCiVisEvpProxyConfig(receiver.port)
const specToRun = 'cypress/e2e/spec.cy.js'
@@ -1621,7 +2047,7 @@ moduleTypes.forEach(({
{
cwd,
env: {
- ...restEnvVars,
+ ...envVars,
CYPRESS_BASE_URL: `http://localhost:${webAppPort}`,
SPEC_PATTERN: specToRun,
},
@@ -1654,10 +2080,7 @@ moduleTypes.forEach(({
},
})
- const {
- NODE_OPTIONS, // NODE_OPTIONS dd-trace config does not work with cypress
- ...restEnvVars
- } = getCiVisEvpProxyConfig(receiver.port)
+ const envVars = getCiVisEvpProxyConfig(receiver.port)
const receiverPromise = receiver
.gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), payloads => {
@@ -1682,7 +2105,7 @@ moduleTypes.forEach(({
{
cwd,
env: {
- ...restEnvVars,
+ ...envVars,
CYPRESS_BASE_URL: `http://localhost:${webAppPort}`,
SPEC_PATTERN: specToRun,
DD_CIVISIBILITY_EARLY_FLAKE_DETECTION_ENABLED: 'false',
@@ -1711,10 +2134,7 @@ moduleTypes.forEach(({
cypress: {},
})
- const {
- NODE_OPTIONS, // NODE_OPTIONS dd-trace config does not work with cypress
- ...restEnvVars
- } = getCiVisEvpProxyConfig(receiver.port)
+ const envVars = getCiVisEvpProxyConfig(receiver.port)
const receiverPromise = receiver
.gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), payloads => {
@@ -1739,7 +2159,7 @@ moduleTypes.forEach(({
{
cwd,
env: {
- ...restEnvVars,
+ ...envVars,
CYPRESS_BASE_URL: `http://localhost:${webAppPort}`,
SPEC_PATTERN: 'cypress/e2e/skipped-test.js',
},
@@ -1768,10 +2188,7 @@ moduleTypes.forEach(({
cypress: {},
})
- const {
- NODE_OPTIONS, // NODE_OPTIONS dd-trace config does not work with cypress
- ...restEnvVars
- } = getCiVisEvpProxyConfig(receiver.port)
+ const envVars = getCiVisEvpProxyConfig(receiver.port)
// Request module waits before retrying; browser runs are slow — need longer gather timeout
const receiverPromise = receiver
@@ -1796,7 +2213,7 @@ moduleTypes.forEach(({
{
cwd,
env: {
- ...restEnvVars,
+ ...envVars,
CYPRESS_BASE_URL: `http://localhost:${webAppPort}`,
SPEC_PATTERN: specToRun,
},
@@ -1829,10 +2246,7 @@ moduleTypes.forEach(({
},
})
- const {
- NODE_OPTIONS, // NODE_OPTIONS dd-trace config does not work with cypress
- ...restEnvVars
- } = getCiVisEvpProxyConfig(receiver.port)
+ const envVars = getCiVisEvpProxyConfig(receiver.port)
const receiverPromise = receiver
.gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), payloads => {
@@ -1857,7 +2271,7 @@ moduleTypes.forEach(({
{
cwd,
env: {
- ...restEnvVars,
+ ...envVars,
CYPRESS_BASE_URL: `http://localhost:${webAppPort}`,
SPEC_PATTERN: specToRun,
DD_CIVISIBILITY_EARLY_FLAKE_DETECTION_ENABLED: 'false',
@@ -1890,10 +2304,7 @@ moduleTypes.forEach(({
},
})
- const {
- NODE_OPTIONS, // NODE_OPTIONS dd-trace config does not work with cypress
- ...restEnvVars
- } = getCiVisEvpProxyConfig(receiver.port)
+ const envVars = getCiVisEvpProxyConfig(receiver.port)
const receiverPromise = receiver
.gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), payloads => {
@@ -1919,7 +2330,7 @@ moduleTypes.forEach(({
{
cwd,
env: {
- ...restEnvVars,
+ ...envVars,
CYPRESS_BASE_URL: `http://localhost:${webAppPort}`,
SPEC_PATTERN: specToRun,
},
@@ -1976,10 +2387,7 @@ moduleTypes.forEach(({
})
}, 25000)
- const {
- NODE_OPTIONS, // NODE_OPTIONS dd-trace config does not work with cypress
- ...restEnvVars
- } = getCiVisEvpProxyConfig(receiver.port)
+ const envVars = getCiVisEvpProxyConfig(receiver.port)
const specToRun = 'cypress/e2e/spec.cy.js'
@@ -1988,7 +2396,7 @@ moduleTypes.forEach(({
{
cwd,
env: {
- ...restEnvVars,
+ ...envVars,
CYPRESS_BASE_URL: `http://localhost:${webAppPort}`,
SPEC_PATTERN: specToRun,
CYPRESS_TEST_ISOLATION: 'false',
@@ -2076,10 +2484,7 @@ moduleTypes.forEach(({
})
}, 25000)
- const {
- NODE_OPTIONS,
- ...restEnvVars
- } = getCiVisEvpProxyConfig(receiver.port)
+ const envVars = getCiVisEvpProxyConfig(receiver.port)
const specToRun = 'cypress/e2e/spec.cy.js'
@@ -2088,7 +2493,7 @@ moduleTypes.forEach(({
{
cwd,
env: {
- ...restEnvVars,
+ ...envVars,
CYPRESS_BASE_URL: `http://localhost:${webAppPort}`,
SPEC_PATTERN: specToRun,
},
@@ -2195,10 +2600,7 @@ moduleTypes.forEach(({
assert.equal(testExecutionOrder[9].isRetry, false)
}, 30000)
- const {
- NODE_OPTIONS, // NODE_OPTIONS dd-trace config does not work with cypress
- ...restEnvVars
- } = getCiVisEvpProxyConfig(receiver.port)
+ const envVars = getCiVisEvpProxyConfig(receiver.port)
const specToRun = 'cypress/e2e/flaky-test-retries.js'
@@ -2207,7 +2609,7 @@ moduleTypes.forEach(({
{
cwd,
env: {
- ...restEnvVars,
+ ...envVars,
CYPRESS_BASE_URL: `http://localhost:${webAppPort}`,
SPEC_PATTERN: specToRun,
},
@@ -2253,10 +2655,7 @@ moduleTypes.forEach(({
assert.ok(!tests.some(test => test.meta[TEST_RETRY_REASON] === TEST_RETRY_REASON_TYPES.atr))
}, 25000)
- const {
- NODE_OPTIONS, // NODE_OPTIONS dd-trace config does not work with cypress
- ...restEnvVars
- } = getCiVisEvpProxyConfig(receiver.port)
+ const envVars = getCiVisEvpProxyConfig(receiver.port)
const specToRun = 'cypress/e2e/flaky-test-retries.js'
@@ -2265,7 +2664,7 @@ moduleTypes.forEach(({
{
cwd,
env: {
- ...restEnvVars,
+ ...envVars,
CYPRESS_BASE_URL: `http://localhost:${webAppPort}`,
DD_CIVISIBILITY_FLAKY_RETRY_ENABLED: 'false',
SPEC_PATTERN: specToRun,
@@ -2314,10 +2713,7 @@ moduleTypes.forEach(({
)
}, 25000)
- const {
- NODE_OPTIONS, // NODE_OPTIONS dd-trace config does not work with cypress
- ...restEnvVars
- } = getCiVisEvpProxyConfig(receiver.port)
+ const envVars = getCiVisEvpProxyConfig(receiver.port)
const specToRun = 'cypress/e2e/flaky-test-retries.js'
@@ -2326,7 +2722,7 @@ moduleTypes.forEach(({
{
cwd,
env: {
- ...restEnvVars,
+ ...envVars,
CYPRESS_BASE_URL: `http://localhost:${webAppPort}`,
DD_CIVISIBILITY_FLAKY_RETRY_COUNT: '1',
SPEC_PATTERN: specToRun,
@@ -2368,10 +2764,7 @@ moduleTypes.forEach(({
assert.strictEqual(lastFailed.meta[TEST_RETRY_REASON], TEST_RETRY_REASON_TYPES.atr)
}, 25000)
- const {
- NODE_OPTIONS,
- ...restEnvVars
- } = getCiVisEvpProxyConfig(receiver.port)
+ const envVars = getCiVisEvpProxyConfig(receiver.port)
const specToRun = 'cypress/e2e/flaky-test-retries.js'
@@ -2380,7 +2773,7 @@ moduleTypes.forEach(({
{
cwd,
env: {
- ...restEnvVars,
+ ...envVars,
CYPRESS_BASE_URL: `http://localhost:${webAppPort}`,
DD_CIVISIBILITY_FLAKY_RETRY_COUNT: '1',
SPEC_PATTERN: specToRun,
@@ -2418,10 +2811,7 @@ moduleTypes.forEach(({
assert.equal(tests.filter(test => test.meta[TEST_RETRY_REASON] === TEST_RETRY_REASON_TYPES.atr).length, 0)
}, 30000)
- const {
- NODE_OPTIONS, // NODE_OPTIONS dd-trace config does not work with cypress
- ...restEnvVars
- } = getCiVisEvpProxyConfig(receiver.port)
+ const envVars = getCiVisEvpProxyConfig(receiver.port)
const specToRun = 'cypress/e2e/flaky-test-retries.js'
@@ -2430,7 +2820,7 @@ moduleTypes.forEach(({
{
cwd,
env: {
- ...restEnvVars,
+ ...envVars,
CYPRESS_BASE_URL: `http://localhost:${webAppPort}`,
SPEC_PATTERN: specToRun,
CYPRESS_TEST_ISOLATION: 'false',
@@ -2457,10 +2847,7 @@ moduleTypes.forEach(({
command = `node --loader=${hookFile} ../../cypress-esm-config.mjs`
}
- const {
- NODE_OPTIONS, // NODE_OPTIONS dd-trace config does not work with cypress
- ...restEnvVars
- } = getCiVisAgentlessConfig(receiver.port)
+ const envVars = getCiVisAgentlessConfig(receiver.port)
const eventsPromise = receiver
.gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), (payloads) => {
@@ -2479,7 +2866,7 @@ moduleTypes.forEach(({
{
cwd: `${cwd}/ci-visibility/subproject`,
env: {
- ...restEnvVars,
+ ...envVars,
CYPRESS_BASE_URL: `http://localhost:${webAppPort}`,
},
}
@@ -2530,11 +2917,6 @@ moduleTypes.forEach(({
25000
)
- const {
- NODE_OPTIONS, // NODE_OPTIONS dd-trace config does not work with cypress
- ...restEnvVars
- } = getCiVisEvpProxyConfig(receiver.port)
-
const specToRun = 'cypress/e2e/dynamic-name-test.cy.js'
childProcess = exec(
@@ -2542,7 +2924,7 @@ moduleTypes.forEach(({
{
cwd,
env: {
- ...restEnvVars,
+ ...getCiVisEvpProxyConfig(receiver.port),
CYPRESS_BASE_URL: `http://localhost:${webAppPort}`,
SPEC_PATTERN: specToRun,
},
@@ -2573,10 +2955,7 @@ moduleTypes.forEach(({
},
})
- const {
- NODE_OPTIONS, // NODE_OPTIONS dd-trace config does not work with cypress
- ...restEnvVars
- } = getCiVisEvpProxyConfig(receiver.port)
+ const envVars = getCiVisEvpProxyConfig(receiver.port)
const receiverPromise = receiver
.gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), payloads => {
@@ -2601,7 +2980,7 @@ moduleTypes.forEach(({
{
cwd,
env: {
- ...restEnvVars,
+ ...envVars,
CYPRESS_BASE_URL: `http://localhost:${webAppPort}`,
SPEC_PATTERN: specToRun,
DD_CIVISIBILITY_EARLY_FLAKE_DETECTION_ENABLED: 'false',
@@ -2619,14 +2998,12 @@ moduleTypes.forEach(({
// cy.origin is not available in old versions of Cypress
if (version === 'latest') {
it('does not crash for multi origin tests', async () => {
- const {
- NODE_OPTIONS, // NODE_OPTIONS dd-trace config does not work with cypress
- ...restEnvVars
- } = getCiVisEvpProxyConfig(receiver.port)
+ const envVars = getCiVisEvpProxyConfig(receiver.port)
const receiverPromise = receiver
.gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), payloads => {
const events = payloads.flatMap(({ payload }) => payload.events)
+ .filter(event => event.type !== 'span')
assert.strictEqual(events.length, 4)
const test = events.find(event => event.type === 'test').content
@@ -2656,7 +3033,7 @@ moduleTypes.forEach(({
{
cwd,
env: {
- ...restEnvVars,
+ ...envVars,
CYPRESS_BASE_URL: `http://localhost:${webAppPort}`,
CYPRESS_BASE_URL_SECOND: `http://localhost:${secondWebAppPort}`,
SPEC_PATTERN: specToRun,
@@ -2686,17 +3063,14 @@ moduleTypes.forEach(({
})
}, 25000)
- const {
- NODE_OPTIONS, // NODE_OPTIONS dd-trace config does not work with cypress
- ...restEnvVars
- } = getCiVisEvpProxyConfig(receiver.port)
+ const envVars = getCiVisEvpProxyConfig(receiver.port)
childProcess = exec(
testCommand,
{
cwd,
env: {
- ...restEnvVars,
+ ...envVars,
CYPRESS_BASE_URL: `http://localhost:${webAppPort}`,
DD_SERVICE: 'my-service',
SPEC_PATTERN: 'cypress/e2e/spec.cy.js',
@@ -2835,10 +3209,7 @@ moduleTypes.forEach(({
isDisabled,
})
- const {
- NODE_OPTIONS,
- ...restEnvVars
- } = getCiVisEvpProxyConfig(receiver.port)
+ const envVars = getCiVisEvpProxyConfig(receiver.port)
const specToRun = 'cypress/e2e/attempt-to-fix.js'
@@ -2847,7 +3218,7 @@ moduleTypes.forEach(({
{
cwd,
env: {
- ...restEnvVars,
+ ...envVars,
CYPRESS_BASE_URL: `http://localhost:${webAppPort}`,
SPEC_PATTERN: specToRun,
...extraEnvVars,
@@ -2903,6 +3274,62 @@ moduleTypes.forEach(({
await runAttemptToFixTest({ extraEnvVars: { DD_TEST_MANAGEMENT_ENABLED: '0' } })
})
+ it('does not tag known attempt to fix tests as new', async () => {
+ receiver.setKnownTests({
+ cypress: {
+ 'cypress/e2e/attempt-to-fix.js': [
+ 'attempt to fix is attempt to fix',
+ ],
+ },
+ })
+ receiver.setSettings({
+ test_management: { enabled: true, attempt_to_fix_retries: 2 },
+ early_flake_detection: {
+ enabled: true,
+ slow_test_retries: { '5s': 2 },
+ faulty_session_threshold: 100,
+ },
+ known_tests_enabled: true,
+ })
+
+ const eventsPromise = receiver
+ .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), (payloads) => {
+ const events = payloads.flatMap(({ payload }) => payload.events)
+ const tests = events.filter(event => event.type === 'test').map(event => event.content)
+ const atfTests = tests.filter(
+ t => t.meta[TEST_MANAGEMENT_IS_ATTEMPT_TO_FIX] === 'true'
+ )
+ assert.ok(atfTests.length > 0)
+ for (const test of atfTests) {
+ assert.ok(
+ !(TEST_IS_NEW in test.meta),
+ 'ATF test that is in known tests should not be tagged as new'
+ )
+ }
+ }, 25000)
+
+ const envVars = getCiVisEvpProxyConfig(receiver.port)
+ const specToRun = 'cypress/e2e/attempt-to-fix.js'
+
+ childProcess = exec(
+ version === 'latest' ? testCommand : `${testCommand} --spec ${specToRun}`,
+ {
+ cwd,
+ env: {
+ ...envVars,
+ CYPRESS_BASE_URL: `http://localhost:${webAppPort}`,
+ SPEC_PATTERN: specToRun,
+ CYPRESS_SHOULD_ALWAYS_PASS: '1',
+ },
+ }
+ )
+
+ await Promise.all([
+ once(childProcess, 'exit'),
+ eventsPromise,
+ ])
+ })
+
/**
* TODO:
* The spec says that quarantined tests that are not attempted to fix should be run and their result ignored.
@@ -3004,10 +3431,7 @@ moduleTypes.forEach(({
const runDisableTest = async (isDisabling, extraEnvVars = {}) => {
const testAssertionsPromise = getTestAssertions(isDisabling)
- const {
- NODE_OPTIONS,
- ...restEnvVars
- } = getCiVisEvpProxyConfig(receiver.port)
+ const envVars = getCiVisEvpProxyConfig(receiver.port)
const specToRun = 'cypress/e2e/disable.js'
@@ -3016,7 +3440,7 @@ moduleTypes.forEach(({
{
cwd,
env: {
- ...restEnvVars,
+ ...envVars,
CYPRESS_BASE_URL: `http://localhost:${webAppPort}`,
SPEC_PATTERN: specToRun,
...extraEnvVars,
@@ -3107,10 +3531,7 @@ moduleTypes.forEach(({
const runQuarantineTest = async (isQuarantining, extraEnvVars = {}) => {
const testAssertionsPromise = getTestAssertions(isQuarantining)
- const {
- NODE_OPTIONS,
- ...restEnvVars
- } = getCiVisEvpProxyConfig(receiver.port)
+ const envVars = getCiVisEvpProxyConfig(receiver.port)
const specToRun = 'cypress/e2e/quarantine.js'
@@ -3119,7 +3540,7 @@ moduleTypes.forEach(({
{
cwd,
env: {
- ...restEnvVars,
+ ...envVars,
CYPRESS_BASE_URL: `http://localhost:${webAppPort}`,
SPEC_PATTERN: specToRun,
...extraEnvVars,
@@ -3178,10 +3599,7 @@ moduleTypes.forEach(({
assert.strictEqual(tests.length, 1)
}, 60000)
- const {
- NODE_OPTIONS,
- ...restEnvVars
- } = getCiVisEvpProxyConfig(receiver.port)
+ const envVars = getCiVisEvpProxyConfig(receiver.port)
const specToRun = 'cypress/e2e/attempt-to-fix.js'
@@ -3190,7 +3608,7 @@ moduleTypes.forEach(({
{
cwd,
env: {
- ...restEnvVars,
+ ...envVars,
CYPRESS_BASE_URL: `http://localhost:${webAppPort}`,
SPEC_PATTERN: specToRun,
DD_TRACE_DEBUG: '1',
@@ -3250,10 +3668,7 @@ moduleTypes.forEach(({
})
}, 25000)
- const {
- NODE_OPTIONS,
- ...restEnvVars
- } = getCiVisEvpProxyConfig(receiver.port)
+ const envVars = getCiVisEvpProxyConfig(receiver.port)
const specToRun = 'cypress/e2e/attempt-to-fix.js'
@@ -3262,7 +3677,7 @@ moduleTypes.forEach(({
{
cwd,
env: {
- ...restEnvVars,
+ ...envVars,
CYPRESS_BASE_URL: `http://localhost:${webAppPort}`,
SPEC_PATTERN: specToRun,
CYPRESS_SHOULD_ALWAYS_PASS: '1',
@@ -3341,10 +3756,7 @@ moduleTypes.forEach(({
})
}, 25000)
- const {
- NODE_OPTIONS,
- ...restEnvVars
- } = getCiVisEvpProxyConfig(receiver.port)
+ const envVars = getCiVisEvpProxyConfig(receiver.port)
const specToRun = 'cypress/e2e/attempt-to-fix-order.js'
@@ -3353,7 +3765,7 @@ moduleTypes.forEach(({
{
cwd,
env: {
- ...restEnvVars,
+ ...envVars,
CYPRESS_BASE_URL: `http://localhost:${webAppPort}`,
SPEC_PATTERN: specToRun,
},
@@ -3382,7 +3794,9 @@ moduleTypes.forEach(({
it('adds capabilities to tests', async () => {
const receiverPromise = receiver
.gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), payloads => {
- const metadataDicts = payloads.flatMap(({ payload }) => payload.metadata)
+ const metadataDicts = payloads
+ .filter(({ payload }) => payload.metadata?.test)
+ .flatMap(({ payload }) => payload.metadata)
assert.ok(metadataDicts.length > 0)
metadataDicts.forEach(metadata => {
@@ -3399,10 +3813,7 @@ moduleTypes.forEach(({
})
}, 25000)
- const {
- NODE_OPTIONS,
- ...restEnvVars
- } = getCiVisEvpProxyConfig(receiver.port)
+ const envVars = getCiVisEvpProxyConfig(receiver.port)
const specToRun = 'cypress/e2e/spec.cy.js'
@@ -3411,7 +3822,7 @@ moduleTypes.forEach(({
{
cwd,
env: {
- ...restEnvVars,
+ ...envVars,
CYPRESS_BASE_URL: `http://localhost:${webAppPort}`,
DD_TEST_SESSION_NAME: 'my-test-session-name',
SPEC_PATTERN: specToRun,
@@ -3542,10 +3953,7 @@ moduleTypes.forEach(({
) => {
const testAssertionsPromise = getTestAssertions({ isModified, isEfd, isNew })
- const {
- NODE_OPTIONS,
- ...restEnvVars
- } = getCiVisEvpProxyConfig(receiver.port)
+ const envVars = getCiVisEvpProxyConfig(receiver.port)
const specToRun = 'cypress/e2e/impacted-test.js'
@@ -3554,7 +3962,7 @@ moduleTypes.forEach(({
{
cwd,
env: {
- ...restEnvVars,
+ ...envVars,
CYPRESS_BASE_URL: `http://localhost:${webAppPort}`,
SPEC_PATTERN: specToRun,
GITHUB_BASE_REF: '',
@@ -3661,10 +4069,7 @@ moduleTypes.forEach(({
assert.equal(retriedTests.length, 0)
}, 25000)
- const {
- NODE_OPTIONS,
- ...restEnvVars
- } = getCiVisEvpProxyConfig(receiver.port)
+ const envVars = getCiVisEvpProxyConfig(receiver.port)
const specToRun = 'cypress/e2e/impacted-test.js'
@@ -3673,7 +4078,7 @@ moduleTypes.forEach(({
{
cwd,
env: {
- ...restEnvVars,
+ ...envVars,
CYPRESS_BASE_URL: `http://localhost:${webAppPort}`,
SPEC_PATTERN: specToRun,
GITHUB_BASE_REF: '',
@@ -3754,10 +4159,7 @@ moduleTypes.forEach(({
})
}, 25000)
- const {
- NODE_OPTIONS,
- ...restEnvVars
- } = getCiVisEvpProxyConfig(receiver.port)
+ const envVars = getCiVisEvpProxyConfig(receiver.port)
const specToRun = 'cypress/e2e/impacted-test-order.js'
@@ -3766,7 +4168,7 @@ moduleTypes.forEach(({
{
cwd,
env: {
- ...restEnvVars,
+ ...envVars,
CYPRESS_BASE_URL: `http://localhost:${webAppPort}`,
SPEC_PATTERN: specToRun,
GITHUB_BASE_REF: '',
diff --git a/integration-tests/cypress/e2e/returned-config.cy.js b/integration-tests/cypress/e2e/returned-config.cy.js
new file mode 100644
index 00000000000..b8206ac74ba
--- /dev/null
+++ b/integration-tests/cypress/e2e/returned-config.cy.js
@@ -0,0 +1,6 @@
+/* eslint-disable */
+describe('returned config', () => {
+ it('uses env from setupNodeEvents return value', () => {
+ expect(Cypress.env('RETURNED_CONFIG_FLAG')).to.equal('true')
+ })
+})
diff --git a/integration-tests/debugger/tracing-integration.spec.js b/integration-tests/debugger/tracing-integration.spec.js
index c654c88f382..048ba645b76 100644
--- a/integration-tests/debugger/tracing-integration.spec.js
+++ b/integration-tests/debugger/tracing-integration.spec.js
@@ -4,6 +4,45 @@ const assert = require('assert')
const { setup, testBasicInput, testBasicInputWithoutDD } = require('./utils')
describe('Dynamic Instrumentation', function () {
+ describe('DD_TRACE_ENABLED=true, DD_TRACE_128_BIT_TRACEID_GENERATION_ENABLED=true', function () {
+ const t = setup({
+ testApp: 'target-app/basic.js',
+ env: { DD_TRACE_ENABLED: 'true', DD_TRACE_128_BIT_TRACEID_GENERATION_ENABLED: 'true' },
+ dependencies: ['fastify'],
+ })
+
+ describe('input messages', function () {
+ it('should capture and send expected payload when a log line probe is triggered', testBasicInput.bind(null, t))
+ })
+ })
+
+ describe('DD_TRACE_ENABLED=true, DD_TRACE_128_BIT_TRACEID_GENERATION_ENABLED=false', function () {
+ const t = setup({
+ testApp: 'target-app/basic.js',
+ env: { DD_TRACE_ENABLED: 'true', DD_TRACE_128_BIT_TRACEID_GENERATION_ENABLED: 'false' },
+ dependencies: ['fastify'],
+ })
+
+ describe('input messages', function () {
+ it('should capture and send expected payload when a log line probe is triggered', testBasicInput.bind(null, t))
+ })
+ })
+
+ describe('DD_TRACE_ENABLED=false', function () {
+ const t = setup({
+ testApp: 'target-app/basic.js',
+ env: { DD_TRACE_ENABLED: 'false' },
+ dependencies: ['fastify'],
+ })
+
+ describe('input messages', function () {
+ it(
+ 'should capture and send expected payload when a log line probe is triggered',
+ testBasicInputWithoutDD.bind(null, t)
+ )
+ })
+ })
+
describe('DD_TRACING_ENABLED=true, DD_TRACE_128_BIT_TRACEID_GENERATION_ENABLED=true', function () {
const t = setup({
testApp: 'target-app/basic.js',
diff --git a/integration-tests/debugger/utils.js b/integration-tests/debugger/utils.js
index 0074f88943d..1af0b399653 100644
--- a/integration-tests/debugger/utils.js
+++ b/integration-tests/debugger/utils.js
@@ -304,14 +304,15 @@ function setupAssertionListeners (t, done, probe) {
let traceId, spanId, dd
const messageListener = ({ payload }) => {
- const span = payload.find((arr) => arr[0].name === 'fastify.request')?.[0]
+ const span = payload
+ .flat()
+ .find((span) => span.name === 'fastify.request' && (!dd || span.span_id.toString() === dd.span_id))
+
if (!span) return
traceId = span.trace_id.toString()
spanId = span.span_id.toString()
- t.agent.removeListener('message', messageListener)
-
assertDD()
}
@@ -336,6 +337,7 @@ function setupAssertionListeners (t, done, probe) {
if (!traceId || !spanId || !dd) return
assert.strictEqual(dd.trace_id, traceId)
assert.strictEqual(dd.span_id, spanId)
+ t.agent.removeListener('message', messageListener)
done()
}
}
diff --git a/integration-tests/init.spec.js b/integration-tests/init.spec.js
index 30674a338d8..79340120571 100644
--- a/integration-tests/init.spec.js
+++ b/integration-tests/init.spec.js
@@ -273,7 +273,9 @@ describe('init.js', () => {
// or on 18.0.0 in particular.
if (semver.satisfies(process.versions.node, '>=14.13.1')) {
describe('initialize.mjs', () => {
- setShouldKill(false)
+ // Node 20.0.0 can leave short-lived loader-based children alive after they
+ // print the expected output, so terminate them after a short grace period.
+ setShouldKill(process.versions.node === '20.0.0')
useSandbox()
stubTracerIfNeeded()
diff --git a/integration-tests/jest/jest.spec.js b/integration-tests/jest/jest.spec.js
index bd69529db43..fbb3260ad7d 100644
--- a/integration-tests/jest/jest.spec.js
+++ b/integration-tests/jest/jest.spec.js
@@ -996,6 +996,36 @@ describe(`jest@${JEST_VERSION} commonJS`, () => {
}).catch(done)
})
})
+
+ onlyLatestIt('does not hang when tests use fake timers and Failed Test Replay is enabled', async () => {
+ receiver.setSettings({
+ flaky_test_retries_enabled: true,
+ di_enabled: true,
+ })
+
+ const eventsPromise = receiver
+ .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), (payloads) => {
+ const events = payloads.flatMap(({ payload }) => payload.events)
+ const tests = events.filter(event => event.type === 'test').map(event => event.content)
+ // Must have 2 tests: 1 original + 1 ATR retry
+ assert.strictEqual(tests.length, 2)
+ const retriedTests = tests.filter(t => t.meta[TEST_IS_RETRY] === 'true')
+ assert.strictEqual(retriedTests.length, 1)
+ })
+
+ childProcess = exec(runTestsCommand, {
+ cwd,
+ env: {
+ ...getCiVisAgentlessConfig(receiver.port),
+ TESTS_TO_RUN: 'jest-flaky/fake-timers-flaky-fails',
+ DD_CIVISIBILITY_FLAKY_RETRY_COUNT: '1',
+ SHOULD_CHECK_RESULTS: '1',
+ },
+ })
+
+ const [[exitCode]] = await Promise.all([once(childProcess, 'exit'), eventsPromise])
+ assert.strictEqual(exitCode, 1)
+ })
})
context('when jest is using worker threads', () => {
@@ -5560,6 +5590,74 @@ describe(`jest@${JEST_VERSION} commonJS`, () => {
])
})
+ it('does not tag known attempt to fix tests as new', async () => {
+ receiver.setKnownTests({
+ jest: {
+ 'ci-visibility/jest-flaky/flaky-fails.js': [
+ 'test-flaky-test-retries can retry failed tests',
+ ],
+ },
+ })
+ receiver.setSettings({
+ test_management: { enabled: true, attempt_to_fix_retries: 2 },
+ early_flake_detection: {
+ enabled: true,
+ slow_test_retries: {
+ '5s': 2,
+ },
+ faulty_session_threshold: 100,
+ },
+ known_tests_enabled: true,
+ })
+
+ receiver.setTestManagementTests({
+ jest: {
+ suites: {
+ 'ci-visibility/jest-flaky/flaky-fails.js': {
+ tests: {
+ 'test-flaky-test-retries can retry failed tests': {
+ properties: {
+ attempt_to_fix: true,
+ },
+ },
+ },
+ },
+ },
+ },
+ })
+ const eventsPromise = receiver
+ .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), (payloads) => {
+ const events = payloads.flatMap(({ payload }) => payload.events)
+ const tests = events.filter(event => event.type === 'test').map(event => event.content)
+ const atfTests = tests.filter(
+ t => t.meta[TEST_MANAGEMENT_IS_ATTEMPT_TO_FIX] === 'true'
+ )
+ assert.ok(atfTests.length > 0)
+ for (const test of atfTests) {
+ assert.ok(
+ !(TEST_IS_NEW in test.meta),
+ 'ATF test that is in known tests should not be tagged as new'
+ )
+ }
+ })
+
+ childProcess = exec(
+ runTestsCommand,
+ {
+ cwd,
+ env: {
+ ...getCiVisAgentlessConfig(receiver.port),
+ TESTS_TO_RUN: 'jest-flaky/flaky-fails.js',
+ },
+ }
+ )
+
+ await Promise.all([
+ once(childProcess, 'exit'),
+ eventsPromise,
+ ])
+ })
+
it('resets mock state between attempt to fix retries', async () => {
const NUM_RETRIES = 3
receiver.setSettings({ test_management: { enabled: true, attempt_to_fix_retries: NUM_RETRIES } })
diff --git a/integration-tests/mocha/mocha.spec.js b/integration-tests/mocha/mocha.spec.js
index 68c6a93f176..9ed82aa46d4 100644
--- a/integration-tests/mocha/mocha.spec.js
+++ b/integration-tests/mocha/mocha.spec.js
@@ -105,6 +105,7 @@ describe(`mocha@${MOCHA_VERSION}`, function () {
'nyc',
'mocha-each',
'workerpool',
+ 'sinon',
],
true
)
@@ -3811,6 +3812,41 @@ describe(`mocha@${MOCHA_VERSION}`, function () {
})
})
+ onlyLatestIt('does not hang when tests use fake timers and Failed Test Replay is enabled', async () => {
+ receiver.setSettings({
+ flaky_test_retries_enabled: true,
+ di_enabled: true,
+ })
+
+ const eventsPromise = receiver
+ .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), (payloads) => {
+ const events = payloads.flatMap(({ payload }) => payload.events)
+ const tests = events.filter(event => event.type === 'test').map(event => event.content)
+ assert.strictEqual(tests.length, 2)
+ const retriedTests = tests.filter(
+ t => t.meta[TEST_IS_RETRY] === 'true'
+ )
+ assert.strictEqual(retriedTests.length, 1)
+ })
+
+ childProcess = exec(
+ runTestsCommand,
+ {
+ cwd,
+ env: {
+ ...getCiVisAgentlessConfig(receiver.port),
+ TESTS_TO_RUN: JSON.stringify([
+ './dynamic-instrumentation/fake-timers-test-hit-breakpoint',
+ ]),
+ DD_CIVISIBILITY_FLAKY_RETRY_COUNT: '1',
+ },
+ }
+ )
+
+ const [[exitCode]] = await Promise.all([once(childProcess, 'exit'), eventsPromise])
+ assert.strictEqual(exitCode, 0)
+ })
+
it('tags new tests with dynamic names and logs a warning', async () => {
receiver.setKnownTests({ mocha: {} })
receiver.setSettings({
@@ -4153,6 +4189,59 @@ describe(`mocha@${MOCHA_VERSION}`, function () {
runAttemptToFixTest(done, { extraEnvVars: { DD_TEST_MANAGEMENT_ENABLED: '0' } })
})
+ onlyLatestIt('does not tag known attempt to fix tests as new', async () => {
+ receiver.setKnownTests({
+ mocha: {
+ 'ci-visibility/test-management/test-attempt-to-fix-1.js': [
+ 'attempt to fix tests can attempt to fix a test',
+ ],
+ },
+ })
+ receiver.setSettings({
+ test_management: { enabled: true, attempt_to_fix_retries: 2 },
+ early_flake_detection: {
+ enabled: true,
+ slow_test_retries: { '5s': 2 },
+ faulty_session_threshold: 100,
+ },
+ known_tests_enabled: true,
+ })
+
+ const eventsPromise = receiver
+ .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), (payloads) => {
+ const events = payloads.flatMap(({ payload }) => payload.events)
+ const tests = events.filter(event => event.type === 'test').map(event => event.content)
+ const atfTests = tests.filter(
+ t => t.meta[TEST_MANAGEMENT_IS_ATTEMPT_TO_FIX] === 'true'
+ )
+ assert.ok(atfTests.length > 0)
+ for (const test of atfTests) {
+ assert.ok(
+ !(TEST_IS_NEW in test.meta),
+ 'ATF test that is in known tests should not be tagged as new'
+ )
+ }
+ })
+
+ childProcess = exec(
+ runTestsCommand,
+ {
+ cwd,
+ env: {
+ ...getCiVisAgentlessConfig(receiver.port),
+ TESTS_TO_RUN: JSON.stringify([
+ './test-management/test-attempt-to-fix-1.js',
+ ]),
+ },
+ }
+ )
+
+ await Promise.all([
+ once(childProcess, 'exit'),
+ eventsPromise,
+ ])
+ })
+
onlyLatestIt('does not fail retry if a test is quarantined', (done) => {
receiver.setSettings({ test_management: { enabled: true, attempt_to_fix_retries: 3 } })
receiver.setTestManagementTests({
diff --git a/integration-tests/opentelemetry.spec.js b/integration-tests/opentelemetry.spec.js
index 2e4e93e1f84..b0feff6328e 100644
--- a/integration-tests/opentelemetry.spec.js
+++ b/integration-tests/opentelemetry.spec.js
@@ -50,10 +50,12 @@ function nearNow (ts, now = Date.now(), range = 1000) {
return delta < range && delta >= 0
}
-describe('opentelemetry', () => {
- let agent
+describe('opentelemetry', function () {
+ this.timeout(20000)
+
+ let agent = /** @type {FakeAgent | null} */ (null)
let proc
- let cwd
+ let cwd = /** @type {string} */ ('')
const timeout = 5000
const dependencies = [
'@opentelemetry/api@1.8.0',
@@ -75,14 +77,14 @@ describe('opentelemetry', () => {
after(async () => {
await stopProc(proc)
- await agent.stop()
+ await agent?.stop()
})
it("should not capture telemetry DD and OTEL vars don't conflict", async () => {
proc = fork(join(cwd, 'opentelemetry/basic.js'), {
cwd,
env: {
- DD_TRACE_AGENT_PORT: agent.port,
+ DD_TRACE_AGENT_PORT: agent?.port,
DD_TRACE_OTEL_ENABLED: '1',
DD_TELEMETRY_HEARTBEAT_INTERVAL: '1',
TIMEOUT: '1500',
@@ -114,7 +116,7 @@ describe('opentelemetry', () => {
proc = fork(join(cwd, 'opentelemetry/basic.js'), {
cwd,
env: {
- DD_TRACE_AGENT_PORT: agent.port,
+ DD_TRACE_AGENT_PORT: agent?.port,
DD_TRACE_OTEL_ENABLED: '1',
DD_TELEMETRY_HEARTBEAT_INTERVAL: '1',
TIMEOUT: '1500',
@@ -147,42 +149,20 @@ describe('opentelemetry', () => {
const otelHiding = metrics.series.filter(({ metric }) => metric === 'otel.env.hiding')
const otelInvalid = metrics.series.filter(({ metric }) => metric === 'otel.env.invalid')
- assert.strictEqual(otelHiding.length, 9)
- assert.strictEqual(otelInvalid.length, 0)
-
- assert.deepStrictEqual(otelHiding[0].tags, [
- 'config_datadog:dd_trace_log_level', 'config_opentelemetry:otel_log_level',
- ])
- assert.deepStrictEqual(otelHiding[1].tags, [
- 'config_datadog:dd_trace_propagation_style', 'config_opentelemetry:otel_propagators',
- ])
- assert.deepStrictEqual(otelHiding[2].tags, [
- 'config_datadog:dd_service', 'config_opentelemetry:otel_service_name',
- ])
-
- assert.deepStrictEqual(otelHiding[3].tags, [
- 'config_datadog:dd_trace_sample_rate', 'config_opentelemetry:otel_traces_sampler',
- ])
-
- assert.deepStrictEqual(otelHiding[4].tags, [
- 'config_datadog:dd_trace_sample_rate', 'config_opentelemetry:otel_traces_sampler_arg',
- ])
-
- assert.deepStrictEqual(otelHiding[5].tags, [
- 'config_datadog:dd_trace_enabled', 'config_opentelemetry:otel_traces_exporter',
- ])
-
- assert.deepStrictEqual(otelHiding[6].tags, [
- 'config_datadog:dd_runtime_metrics_enabled', 'config_opentelemetry:otel_metrics_exporter',
- ])
- assert.deepStrictEqual(otelHiding[7].tags, [
- 'config_datadog:dd_tags', 'config_opentelemetry:otel_resource_attributes',
- ])
+ assert.deepStrictEqual(sortMetricTags(otelHiding), sortMetricTags([
+ ['config_datadog:dd_trace_log_level', 'config_opentelemetry:otel_log_level'],
+ ['config_datadog:dd_trace_propagation_style', 'config_opentelemetry:otel_propagators'],
+ ['config_datadog:dd_service', 'config_opentelemetry:otel_service_name'],
+ ['config_datadog:dd_trace_sample_rate', 'config_opentelemetry:otel_traces_sampler'],
+ ['config_datadog:dd_trace_sample_rate', 'config_opentelemetry:otel_traces_sampler_arg'],
+ ['config_datadog:dd_trace_enabled', 'config_opentelemetry:otel_traces_exporter'],
+ ['config_datadog:dd_runtime_metrics_enabled', 'config_opentelemetry:otel_metrics_exporter'],
+ ['config_datadog:dd_tags', 'config_opentelemetry:otel_resource_attributes'],
+ ['config_datadog:dd_trace_otel_enabled', 'config_opentelemetry:otel_sdk_disabled'],
+ ]))
- assert.deepStrictEqual(otelHiding[8].tags, [
- 'config_datadog:dd_trace_otel_enabled', 'config_opentelemetry:otel_sdk_disabled',
- ])
+ assert.deepStrictEqual(sortMetricTags(otelInvalid), [])
for (const metric of otelHiding) {
assert.strictEqual(metric.points[0][1], 1)
@@ -194,7 +174,7 @@ describe('opentelemetry', () => {
proc = fork(join(cwd, 'opentelemetry/basic.js'), {
cwd,
env: {
- DD_TRACE_AGENT_PORT: agent.port,
+ DD_TRACE_AGENT_PORT: agent?.port,
DD_TRACE_OTEL_ENABLED: '1',
DD_TELEMETRY_HEARTBEAT_INTERVAL: '1',
TIMEOUT: '1500',
@@ -221,47 +201,20 @@ describe('opentelemetry', () => {
const otelHiding = metrics.series.filter(({ metric }) => metric === 'otel.env.hiding')
const otelInvalid = metrics.series.filter(({ metric }) => metric === 'otel.env.invalid')
- assert.strictEqual(otelHiding.length, 1)
- assert.strictEqual(otelInvalid.length, 8)
-
- assert.deepStrictEqual(otelHiding[0].tags, [
- 'config_datadog:dd_trace_otel_enabled', 'config_opentelemetry:otel_sdk_disabled',
- ])
-
- assert.deepStrictEqual(otelInvalid[0].tags, [
- 'config_datadog:dd_trace_log_level', 'config_opentelemetry:otel_log_level',
- ])
-
- assert.deepStrictEqual(otelInvalid[1].tags, [
- 'config_datadog:dd_trace_sample_rate',
- 'config_opentelemetry:otel_traces_sampler',
- ])
-
- assert.deepStrictEqual(otelInvalid[2].tags, [
- 'config_datadog:dd_trace_sample_rate',
- 'config_opentelemetry:otel_traces_sampler_arg',
- ])
- assert.deepStrictEqual(otelInvalid[3].tags, [
- 'config_datadog:dd_trace_enabled', 'config_opentelemetry:otel_traces_exporter',
- ])
-
- assert.deepStrictEqual(otelInvalid[4].tags, [
- 'config_datadog:dd_runtime_metrics_enabled',
- 'config_opentelemetry:otel_metrics_exporter',
- ])
-
- assert.deepStrictEqual(otelInvalid[5].tags, [
- 'config_datadog:dd_trace_otel_enabled', 'config_opentelemetry:otel_sdk_disabled',
- ])
-
- assert.deepStrictEqual(otelInvalid[6].tags, [
- 'config_opentelemetry:otel_logs_exporter',
- ])
-
- assert.deepStrictEqual(otelInvalid[7].tags, [
- 'config_datadog:dd_trace_propagation_style',
- 'config_opentelemetry:otel_propagators',
- ])
+ assert.deepStrictEqual(sortMetricTags(otelHiding), sortMetricTags([
+ ['config_datadog:dd_trace_otel_enabled', 'config_opentelemetry:otel_sdk_disabled'],
+ ]))
+
+ assert.deepStrictEqual(sortMetricTags(otelInvalid), sortMetricTags([
+ ['config_datadog:dd_trace_log_level', 'config_opentelemetry:otel_log_level'],
+ ['config_datadog:dd_trace_propagation_style', 'config_opentelemetry:otel_propagators'],
+ ['config_opentelemetry:otel_logs_exporter'],
+ ['config_datadog:dd_trace_sample_rate', 'config_opentelemetry:otel_traces_sampler'],
+ ['config_datadog:dd_trace_sample_rate', 'config_opentelemetry:otel_traces_sampler_arg'],
+ ['config_datadog:dd_trace_enabled', 'config_opentelemetry:otel_traces_exporter'],
+ ['config_datadog:dd_runtime_metrics_enabled', 'config_opentelemetry:otel_metrics_exporter'],
+ ['config_datadog:dd_trace_otel_enabled', 'config_opentelemetry:otel_sdk_disabled'],
+ ]))
for (const metric of otelInvalid) {
assert.strictEqual(metric.points[0][1], 1)
@@ -273,7 +226,7 @@ describe('opentelemetry', () => {
proc = fork(join(cwd, 'opentelemetry/basic.js'), {
cwd,
env: {
- DD_TRACE_AGENT_PORT: agent.port,
+ DD_TRACE_AGENT_PORT: agent?.port,
},
})
await check(agent, proc, timeout, ({ payload }) => {
@@ -292,7 +245,7 @@ describe('opentelemetry', () => {
proc = fork(join(cwd, 'opentelemetry/basic.js'), {
cwd,
env: {
- DD_TRACE_AGENT_PORT: agent.port,
+ DD_TRACE_AGENT_PORT: agent?.port,
DD_TRACE_OTEL_ENABLED: '1',
DD_TELEMETRY_HEARTBEAT_INTERVAL: '1',
TIMEOUT: '1500',
@@ -334,7 +287,7 @@ describe('opentelemetry', () => {
proc = fork(join(cwd, 'opentelemetry/auto-instrumentation.js'), {
cwd,
env: {
- DD_TRACE_AGENT_PORT: agent.port,
+ DD_TRACE_AGENT_PORT: agent?.port,
DD_TRACE_OTEL_ENABLED: '1',
SERVER_PORT,
DD_TRACE_DISABLED_INSTRUMENTATIONS: 'http,dns,express,net',
@@ -378,7 +331,7 @@ describe('opentelemetry', () => {
proc = fork(join(cwd, 'opentelemetry/server.js'), {
cwd,
env: {
- DD_TRACE_AGENT_PORT: agent.port,
+ DD_TRACE_AGENT_PORT: agent?.port,
},
})
await check(agent, proc, timeout, ({ payload }) => {
@@ -407,7 +360,7 @@ describe('opentelemetry', () => {
proc = fork(join(cwd, 'opentelemetry/auto-instrumentation.js'), {
cwd,
env: {
- DD_TRACE_AGENT_PORT: agent.port,
+ DD_TRACE_AGENT_PORT: agent?.port,
DD_TRACE_OTEL_ENABLED: '1',
SERVER_PORT,
DD_TRACE_DISABLED_INSTRUMENTATIONS: 'http,dns,express,net',
@@ -456,18 +409,12 @@ describe('opentelemetry', () => {
proc = fork(join(cwd, 'opentelemetry/env-var.js'), {
cwd,
env: {
- DD_TRACE_AGENT_PORT: agent.port,
+ DD_TRACE_AGENT_PORT: agent?.port,
},
})
await check(agent, proc, timeout, ({ payload }) => {
- // Should have a single trace with a single span
- assert.strictEqual(payload.length, 1)
- const [trace] = payload
- assert.strictEqual(trace.length, 1)
- const [span] = trace
-
- // Should be the expected otel span
- assert.strictEqual(span.name, 'otel-sub')
+ const trace = payload.find(trace => trace.length === 1 && trace[0].name === 'otel-sub')
+ assert.ok(trace)
})
})
})
@@ -477,3 +424,9 @@ function isChildOf (childSpan, parentSpan) {
assert.notStrictEqual(childSpan.span_id.toString(), parentSpan.span_id.toString())
assert.strictEqual(childSpan.parent_id.toString(), parentSpan.span_id.toString())
}
+
+function sortMetricTags (metrics) {
+ return metrics
+ .map(metric => Array.isArray(metric) ? metric : metric.tags)
+ .sort((a, b) => a.join(',').localeCompare(b.join(',')))
+}
diff --git a/integration-tests/package-guardrails.spec.js b/integration-tests/package-guardrails.spec.js
index 7b8ec191930..563a5beab41 100644
--- a/integration-tests/package-guardrails.spec.js
+++ b/integration-tests/package-guardrails.spec.js
@@ -13,7 +13,8 @@ const {
const NODE_OPTIONS = '--require dd-trace/init.js'
const DD_TRACE_DEBUG = 'true'
const DD_INJECTION_ENABLED = 'tracing'
-const DD_LOG_LEVEL = 'error'
+const DD_LOG_LEVEL = 'info'
+const DD_TRACE_FLUSH_INTERVAL = '0'
const NODE_MAJOR = Number(process.versions.node.split('.')[0])
const FASTIFY_DEP = NODE_MAJOR < 20 ? 'fastify@4' : 'fastify'
@@ -41,6 +42,17 @@ describe('package guardrails', () => {
))
})
+ context('when flushing and DD_INJECTION_ENABLED', () => {
+ useEnv({ DD_INJECTION_ENABLED, DD_TRACE_FLUSH_INTERVAL })
+
+ it('should send abort.integration on first flush via diagnostic channel', () =>
+ testFile('package-guardrails/flush.js', 'false\n',
+ ['complete', 'injection_forced:false',
+ 'abort.integration', 'integration:bluebird,integration_version:1.0.0',
+ ]
+ ))
+ })
+
context('with logging disabled', () => {
it('should not instrument the package', () => runTest('false\n', []))
})
@@ -50,8 +62,9 @@ describe('package guardrails', () => {
it('should not instrument the package', () =>
runTest(`Application instrumentation bootstrapping complete
-Found incompatible integration version: bluebird@1.0.0
false
+instrumentation source: manual
+Found incompatible integration version: bluebird@1.0.0
`, []))
})
})
diff --git a/integration-tests/package-guardrails/flush.js b/integration-tests/package-guardrails/flush.js
new file mode 100644
index 00000000000..d7943e7a4c8
--- /dev/null
+++ b/integration-tests/package-guardrails/flush.js
@@ -0,0 +1,19 @@
+'use strict'
+
+// Remove only the register.js beforeExit handler so this test verifies
+// that abort.integration comes from the first flush diagnostic channel.
+const beforeExitHandlers = globalThis[Symbol.for('dd-trace')].beforeExitHandlers
+for (const handler of beforeExitHandlers) {
+ if (handler.name === 'logAbortedIntegrations') {
+ beforeExitHandlers.delete(handler)
+ }
+}
+
+const tracer = require('dd-trace')
+const P = require('bluebird')
+
+const isWrapped = P.prototype._then.toString().includes('AsyncResource')
+tracer.trace('first.flush.guardrails', () => {})
+
+// eslint-disable-next-line no-console
+console.log(isWrapped)
diff --git a/integration-tests/playwright/playwright.spec.js b/integration-tests/playwright/playwright.spec.js
index 030ce61b34e..b1c7010c7e3 100644
--- a/integration-tests/playwright/playwright.spec.js
+++ b/integration-tests/playwright/playwright.spec.js
@@ -84,7 +84,8 @@ versions.forEach((version) => {
this.retries(2)
this.timeout(80000)
- useSandbox([`@playwright/test@${version}`, '@types/node', 'typescript'], true)
+      // TODO: Update test files accordingly and test with different TS versions
+ useSandbox([`@playwright/test@${version}`, '@types/node', 'typescript@5'], true)
before(function (done) {
// Increase timeout for this hook specifically to account for slow chromium installation in CI
@@ -1659,6 +1660,59 @@ versions.forEach((version) => {
await runAttemptToFixTest({ extraEnvVars: { DD_TEST_MANAGEMENT_ENABLED: '0' } })
})
+ it('does not tag known attempt to fix tests as new', async () => {
+ receiver.setKnownTests({
+ playwright: {
+ 'attempt-to-fix-test.js': [
+ 'attempt to fix should attempt to fix failed test',
+ 'attempt to fix should attempt to fix passed test',
+ ],
+ },
+ })
+ receiver.setSettings({
+ test_management: { enabled: true, attempt_to_fix_retries: 2 },
+ early_flake_detection: {
+ enabled: true,
+ slow_test_retries: { '5s': 2 },
+ faulty_session_threshold: 100,
+ },
+ known_tests_enabled: true,
+ })
+
+ const eventsPromise = receiver
+ .gatherPayloadsMaxTimeout(({ url }) => url === '/api/v2/citestcycle', (payloads) => {
+ const events = payloads.flatMap(({ payload }) => payload.events)
+ const tests = events.filter(event => event.type === 'test').map(event => event.content)
+ const atfTests = tests.filter(
+ t => t.meta[TEST_MANAGEMENT_IS_ATTEMPT_TO_FIX] === 'true'
+ )
+ assert.ok(atfTests.length > 0)
+ for (const test of atfTests) {
+ assert.ok(
+ !(TEST_IS_NEW in test.meta),
+ 'ATF test that is in known tests should not be tagged as new'
+ )
+ }
+ })
+
+ childProcess = exec(
+ './node_modules/.bin/playwright test -c playwright.config.js attempt-to-fix-test.js',
+ {
+ cwd,
+ env: {
+ ...getCiVisAgentlessConfig(receiver.port),
+ PW_BASE_URL: `http://localhost:${webAppPort}`,
+ TEST_DIR: './ci-visibility/playwright-tests-test-management',
+ },
+ }
+ )
+
+ await Promise.all([
+ once(childProcess, 'exit'),
+ eventsPromise,
+ ])
+ })
+
it('does not fail retry if a test is quarantined', async () => {
receiver.setSettings({
test_management: { enabled: true, attempt_to_fix_retries: ATTEMPT_TO_FIX_NUM_RETRIES },
diff --git a/integration-tests/telemetry.spec.js b/integration-tests/telemetry.spec.js
index 462b36bc777..617d45c3486 100644
--- a/integration-tests/telemetry.spec.js
+++ b/integration-tests/telemetry.spec.js
@@ -26,7 +26,7 @@ describe('telemetry', () => {
proc = await spawnProc(startupTestFile, {
cwd,
env: {
- AGENT_PORT: agent.port,
+ AGENT_PORT: String(agent.port),
DD_LOGS_INJECTION: 'true',
},
})
@@ -66,9 +66,9 @@ describe('telemetry', () => {
await agent.assertTelemetryReceived(msg => {
const { configuration } = msg.payload.payload
assertObjectContains(configuration, [
- { name: 'DD_LOG_INJECTION', value: true, origin: 'default' },
- { name: 'DD_LOG_INJECTION', value: true, origin: 'env_var' },
- { name: 'DD_LOG_INJECTION', value: false, origin: 'code' },
+ { name: 'DD_LOGS_INJECTION', value: true, origin: 'default' },
+ { name: 'DD_LOGS_INJECTION', value: true, origin: 'env_var' },
+ { name: 'DD_LOGS_INJECTION', value: false, origin: 'code' },
])
}, 'app-started', 5_000, 1)
})
diff --git a/integration-tests/vitest/vitest.spec.js b/integration-tests/vitest/vitest.spec.js
index 105f67b46a6..20f37a296e0 100644
--- a/integration-tests/vitest/vitest.spec.js
+++ b/integration-tests/vitest/vitest.spec.js
@@ -1639,6 +1639,37 @@ versions.forEach((version) => {
}).catch(done)
})
})
+
+ it('does not hang when tests use fake timers and Failed Test Replay is enabled', async () => {
+ receiver.setSettings({
+ flaky_test_retries_enabled: true,
+ di_enabled: true,
+ })
+
+ const eventsPromise = receiver
+ .gatherPayloadsMaxTimeout(({ url }) => url === '/api/v2/citestcycle', (payloads) => {
+ const events = payloads.flatMap(({ payload }) => payload.events)
+ const tests = events.filter(event => event.type === 'test').map(event => event.content)
+ assert.strictEqual(tests.length, 2)
+ const retriedTests = tests.filter(t => t.meta[TEST_IS_RETRY] === 'true')
+ assert.strictEqual(retriedTests.length, 1)
+ })
+
+ childProcess = exec(
+ './node_modules/.bin/vitest run --retry=1',
+ {
+ cwd,
+ env: {
+ ...getCiVisAgentlessConfig(receiver.port),
+ TEST_DIR: 'ci-visibility/vitest-tests/fake-timers-di*',
+ NODE_OPTIONS: '--import dd-trace/register.js -r dd-trace/ci/init',
+ },
+ }
+ )
+
+ const [[exitCode]] = await Promise.all([once(childProcess, 'exit'), eventsPromise])
+ assert.strictEqual(exitCode, 1)
+ })
})
}
@@ -1931,6 +1962,58 @@ versions.forEach((version) => {
runAttemptToFixTest(done, { extraEnvVars: { DD_TEST_MANAGEMENT_ENABLED: '0' } })
})
+ it('does not tag known attempt to fix tests as new', async () => {
+ receiver.setKnownTests({
+ vitest: {
+ 'ci-visibility/vitest-tests/test-attempt-to-fix.mjs': [
+ 'attempt to fix tests can attempt to fix a test',
+ ],
+ },
+ })
+ receiver.setSettings({
+ test_management: { enabled: true, attempt_to_fix_retries: 2 },
+ early_flake_detection: {
+ enabled: true,
+ slow_test_retries: { '5s': 2 },
+ faulty_session_threshold: 100,
+ },
+ known_tests_enabled: true,
+ })
+
+ const eventsPromise = receiver
+ .gatherPayloadsMaxTimeout(({ url }) => url === '/api/v2/citestcycle', (payloads) => {
+ const events = payloads.flatMap(({ payload }) => payload.events)
+ const tests = events.filter(event => event.type === 'test').map(event => event.content)
+ const atfTests = tests.filter(
+ t => t.meta[TEST_MANAGEMENT_IS_ATTEMPT_TO_FIX] === 'true'
+ )
+ assert.ok(atfTests.length > 0)
+ for (const test of atfTests) {
+ assert.ok(
+ !(TEST_IS_NEW in test.meta),
+ 'ATF test that is in known tests should not be tagged as new'
+ )
+ }
+ })
+
+ childProcess = exec(
+ './node_modules/.bin/vitest run',
+ {
+ cwd,
+ env: {
+ ...getCiVisAgentlessConfig(receiver.port),
+ TEST_DIR: 'ci-visibility/vitest-tests/test-attempt-to-fix*',
+ NODE_OPTIONS: '--import dd-trace/register.js -r dd-trace/ci/init --no-warnings',
+ },
+ }
+ )
+
+ await Promise.all([
+ once(childProcess, 'exit'),
+ eventsPromise,
+ ])
+ })
+
it('does not fail retry if a test is quarantined', (done) => {
receiver.setSettings({ test_management: { enabled: true, attempt_to_fix_retries: 3 } })
receiver.setTestManagementTests({
diff --git a/integration-tests/webpack/package.json b/integration-tests/webpack/package.json
index f0f0c768f2a..4ee7e9e0196 100644
--- a/integration-tests/webpack/package.json
+++ b/integration-tests/webpack/package.json
@@ -15,7 +15,7 @@
"author": "Thomas Hunter II ",
"license": "ISC",
"dependencies": {
- "axios": "1.13.5",
+ "axios": "1.15.0",
"express": "4.22.1",
"knex": "3.1.0"
}
diff --git a/package.json b/package.json
index 73a378a25cc..88f1b374514 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
{
"name": "dd-trace",
- "version": "5.96.0",
+ "version": "5.97.0",
"description": "Datadog APM tracing client for JavaScript",
"main": "index.js",
"typings": "index.d.ts",
@@ -11,6 +11,8 @@
"bench": "node benchmark/index.js",
"bench:e2e:test-optimization": "node benchmark/e2e-test-optimization/benchmark-run.js",
"dependencies:dedupe": "yarn-deduplicate yarn.lock",
+ "generate:config:types": "node scripts/generate-config-types.js",
+ "verify:config:types": "node scripts/generate-config-types.js --check",
"type:check": "tsc --noEmit -p tsconfig.dev.json",
"type:doc:build": "cd docs && yarn && yarn build",
"type:doc:test": "cd docs && yarn && yarn test",
@@ -142,7 +144,7 @@
"import-in-the-middle": "^3.0.1"
},
"optionalDependencies": {
- "@datadog/libdatadog": "0.9.2",
+ "@datadog/libdatadog": "0.9.3",
"@datadog/native-appsec": "11.0.1",
"@datadog/native-iast-taint-tracking": "4.1.0",
"@datadog/native-metrics": "3.1.1",
@@ -166,19 +168,19 @@
"@types/mocha": "^10.0.10",
"@types/node": "^18.19.106",
"@types/sinon": "^21.0.0",
- "axios": "^1.13.4",
+ "axios": "^1.15.0",
"benchmark": "^2.1.4",
"body-parser": "^2.2.2",
"bun": "1.3.11",
"codeowners-audit": "^2.9.0",
"eslint": "^9.39.2",
- "eslint-plugin-cypress": "^6.2.1",
+ "eslint-plugin-cypress": "^6.2.2",
"eslint-plugin-import": "^2.32.0",
- "eslint-plugin-jsdoc": "^62.8.1",
+ "eslint-plugin-jsdoc": "^62.9.0",
"eslint-plugin-mocha": "^11.2.0",
"eslint-plugin-n": "^17.23.2",
"eslint-plugin-promise": "^7.2.1",
- "eslint-plugin-unicorn": "^63.0.0",
+ "eslint-plugin-unicorn": "^64.0.0",
"express": "^5.1.0",
"glob": "^10.4.5",
"globals": "^17.2.0",
@@ -200,7 +202,7 @@
"semver": "^7.7.2",
"sinon": "^21.0.3",
"tiktoken": "^1.0.21",
- "typescript": "^5.9.2",
+ "typescript": "^6.0.2",
"workerpool": "^10.0.0",
"yaml": "^2.8.3",
"yarn-deduplicate": "^6.0.2"
diff --git a/packages/datadog-esbuild/index.js b/packages/datadog-esbuild/index.js
index 8e42a7fbf46..21f6600f3f1 100644
--- a/packages/datadog-esbuild/index.js
+++ b/packages/datadog-esbuild/index.js
@@ -2,7 +2,6 @@
const { execSync } = require('node:child_process')
const fs = require('node:fs')
-const RAW_BUILTINS = require('node:module').builtinModules
const path = require('node:path')
const { pathToFileURL, fileURLToPath } = require('node:url')
@@ -25,15 +24,27 @@ for (const hook of Object.values(hooks)) {
}
}
+function moduleOfInterestKey (name, file) {
+ return file ? `${name}/${file}` : name
+}
+
+const builtinModules = new Set(require('module').builtinModules)
+
+function addModuleOfInterest (name, file) {
+ if (!name) return
+
+ modulesOfInterest.add(moduleOfInterestKey(name, file))
+
+ if (builtinModules.has(name)) {
+ modulesOfInterest.add(moduleOfInterestKey(`node:${name}`, file))
+ }
+}
+
const modulesOfInterest = new Set()
-for (const instrumentation of Object.values(instrumentations)) {
+for (const [name, instrumentation] of Object.entries(instrumentations)) {
for (const entry of instrumentation) {
- if (entry.file) {
- modulesOfInterest.add(`${entry.name}/${entry.file}`) // e.g. "redis/my/file.js"
- } else {
- modulesOfInterest.add(entry.name) // e.g. "redis"
- }
+ addModuleOfInterest(name, entry.file)
}
}
@@ -41,7 +52,7 @@ const CHANNEL = 'dd-trace:bundler:load'
const builtins = new Set()
-for (const builtin of RAW_BUILTINS) {
+for (const builtin of builtinModules) {
builtins.add(builtin)
builtins.add(`node:${builtin}`)
}
@@ -247,7 +258,7 @@ ${build.initialOptions.banner.js}`
}
try {
- const packageJson = JSON.parse(fs.readFileSync(/** @type {string} */ (pathToPackageJson)).toString())
+ const packageJson = JSON.parse(fs.readFileSync(/** @type {string} */(pathToPackageJson)).toString())
const isESM = isESMFile(fullPathToModule, pathToPackageJson, packageJson)
if (isESM && !interceptedESMModules.has(fullPathToModule)) {
diff --git a/packages/datadog-instrumentations/src/child_process.js b/packages/datadog-instrumentations/src/child_process.js
index dd58deae249..d91a5a0ab29 100644
--- a/packages/datadog-instrumentations/src/child_process.js
+++ b/packages/datadog-instrumentations/src/child_process.js
@@ -14,11 +14,6 @@ const childProcessChannel = dc.tracingChannel('datadog:child_process:execution')
// ignored exec method because it calls to execFile directly
const execAsyncMethods = ['execFile', 'spawn', 'fork']
-const names = ['child_process', 'node:child_process']
-
-// child_process and node:child_process returns the same object instance, we only want to add hooks once
-let patched = false
-
function throwSyncError (error) {
throw error
}
@@ -37,19 +32,14 @@ function returnSpawnSyncError (error, context) {
return context.result
}
-for (const name of names) {
- addHook({ name }, childProcess => {
- if (!patched) {
- patched = true
- shimmer.massWrap(childProcess, execAsyncMethods, wrapChildProcessAsyncMethod(childProcess.ChildProcess))
- shimmer.wrap(childProcess, 'execSync', wrapChildProcessSyncMethod(throwSyncError, true))
- shimmer.wrap(childProcess, 'execFileSync', wrapChildProcessSyncMethod(throwSyncError))
- shimmer.wrap(childProcess, 'spawnSync', wrapChildProcessSyncMethod(returnSpawnSyncError))
- }
+addHook({ name: 'child_process' }, childProcess => {
+ shimmer.massWrap(childProcess, execAsyncMethods, wrapChildProcessAsyncMethod(childProcess.ChildProcess))
+ shimmer.wrap(childProcess, 'execSync', wrapChildProcessSyncMethod(throwSyncError, true))
+ shimmer.wrap(childProcess, 'execFileSync', wrapChildProcessSyncMethod(throwSyncError))
+ shimmer.wrap(childProcess, 'spawnSync', wrapChildProcessSyncMethod(returnSpawnSyncError))
- return childProcess
- })
-}
+ return childProcess
+})
function normalizeArgs (args, shell) {
const childProcessInfo = {
diff --git a/packages/datadog-instrumentations/src/crypto.js b/packages/datadog-instrumentations/src/crypto.js
index a0ca705434e..69955b08e8b 100644
--- a/packages/datadog-instrumentations/src/crypto.js
+++ b/packages/datadog-instrumentations/src/crypto.js
@@ -11,9 +11,8 @@ const cryptoCipherCh = channel('datadog:crypto:cipher:start')
const hashMethods = ['createHash', 'createHmac', 'createSign', 'createVerify', 'sign', 'verify']
const cipherMethods = ['createCipheriv', 'createDecipheriv']
-const names = ['crypto', 'node:crypto']
-addHook({ name: names }, crypto => {
+addHook({ name: 'crypto' }, crypto => {
shimmer.massWrap(crypto, hashMethods, wrapCryptoMethod(cryptoHashCh))
shimmer.massWrap(crypto, cipherMethods, wrapCryptoMethod(cryptoCipherCh))
return crypto
diff --git a/packages/datadog-instrumentations/src/cucumber.js b/packages/datadog-instrumentations/src/cucumber.js
index 36054ac579b..95320bb9a06 100644
--- a/packages/datadog-instrumentations/src/cucumber.js
+++ b/packages/datadog-instrumentations/src/cucumber.js
@@ -61,6 +61,8 @@ const numRetriesByPickleId = new Map()
const numAttemptToCtx = new Map()
const newTestsByTestFullname = new Map()
const modifiedTestsByPickleId = new Map()
+// Pickle IDs for tests that are genuinely new (not in known tests list).
+const newTestPickleIds = new Set()
let eventDataCollector = null
let pickleByFile = {}
@@ -359,7 +361,7 @@ function wrapRun (pl, isLatestVersion, version) {
}
if (isKnownTestsEnabled && status !== 'skip') {
- isNew = numRetries !== undefined
+ isNew = newTestPickleIds.has(this.pickle.id)
}
if (isNew || isModified) {
@@ -714,6 +716,7 @@ function getWrappedRunTestCase (runTestCaseFunction, isNewerCucumberVersion = fa
if (isKnownTestsEnabled && !isAttemptToFix) {
isNew = isNewTest(testSuitePath, pickle.name)
if (isNew) {
+ newTestPickleIds.add(pickle.id)
numRetriesByPickleId.set(pickle.id, 0)
}
}
diff --git a/packages/datadog-instrumentations/src/cypress-config.js b/packages/datadog-instrumentations/src/cypress-config.js
new file mode 100644
index 00000000000..ce479d08c66
--- /dev/null
+++ b/packages/datadog-instrumentations/src/cypress-config.js
@@ -0,0 +1,324 @@
+'use strict'
+
+const fs = require('fs')
+const os = require('os')
+const path = require('path')
+const { pathToFileURL } = require('url')
+
+const DD_CONFIG_WRAPPED = Symbol('dd-trace.cypress.config.wrapped')
+
+const noopTask = {
+ 'dd:testSuiteStart': () => null,
+ 'dd:beforeEach': () => ({}),
+ 'dd:afterEach': () => null,
+ 'dd:addTags': () => null,
+ 'dd:log': () => null,
+}
+
+/**
+ * @param {unknown} value
+ * @returns {boolean}
+ */
+function isPlainObject (value) {
+ if (!value || typeof value !== 'object') return false
+ const prototype = Object.getPrototypeOf(value)
+ return prototype === Object.prototype || prototype === null
+}
+
+/**
+ * Cypress allows setupNodeEvents to return partial config fragments that it
+ * diffs and merges into the resolved config. Preserve that behavior here so
+ * the wrapper does not drop user-provided config updates.
+ *
+ * @param {object} config Cypress resolved config object
+ * @param {unknown} updatedConfig value returned from setupNodeEvents
+ * @returns {object} resolved config with returned overrides applied
+ */
+function mergeReturnedConfig (config, updatedConfig) {
+ if (!isPlainObject(updatedConfig) || updatedConfig === config) {
+ return config
+ }
+
+ const mergedConfig = { ...config }
+
+ for (const [key, value] of Object.entries(updatedConfig)) {
+ mergedConfig[key] = isPlainObject(value) && isPlainObject(mergedConfig[key])
+ ? mergeReturnedConfig(mergedConfig[key], value)
+ : value
+ }
+
+ return mergedConfig
+}
+
+/**
+ * Creates a temporary wrapper support file under os.tmpdir() that loads
+ * dd-trace's browser-side hooks before the user's original support file.
+ * Returns the wrapper path (for cleanup) or undefined if injection was skipped.
+ *
+ * @param {object} config Cypress resolved config object
+ * @returns {string|undefined} wrapper file path, or undefined if skipped
+ */
+function injectSupportFile (config) {
+ const originalSupportFile = config.supportFile
+ if (!originalSupportFile || originalSupportFile === false) return
+
+ try {
+ const content = fs.readFileSync(originalSupportFile, 'utf8')
+ // Naive check: skip lines starting with // or * to avoid matching commented-out imports.
+ const hasActiveDdTraceImport = content.split('\n').some(line => {
+ const trimmed = line.trim()
+ return trimmed.includes('dd-trace/ci/cypress/support') &&
+ !trimmed.startsWith('//') && !trimmed.startsWith('*')
+ })
+ if (hasActiveDdTraceImport) return
+ } catch {
+ return
+ }
+
+ const ddSupportFile = require.resolve('../../../ci/cypress/support')
+ const wrapperFile = path.join(os.tmpdir(), `dd-cypress-support-${process.pid}.mjs`)
+
+ // Always use ESM: it can import both CJS and ESM support files.
+ const wrapperContent =
+ `import ${JSON.stringify(ddSupportFile)}\nimport ${JSON.stringify(originalSupportFile)}\n`
+
+ try {
+ fs.writeFileSync(wrapperFile, wrapperContent)
+ config.supportFile = wrapperFile
+ return wrapperFile
+ } catch {
+ // Can't write wrapper - skip injection
+ }
+}
+
+/**
+ * Registers dd-trace's Cypress hooks (before:run, after:spec, after:run, tasks)
+ * and injects the support file. Handles chaining with user-registered handlers
+ * for after:spec/after:run so both the user's code and dd-trace's run in sequence.
+ *
+ * @param {Function} on Cypress event registration function
+ * @param {object} config Cypress resolved config object
+ * @param {Function[]} userAfterSpecHandlers user's after:spec handlers collected from wrappedOn
+ * @param {Function[]} userAfterRunHandlers user's after:run handlers collected from wrappedOn
+ * @returns {object} the config object (possibly modified)
+ */
+function registerDdTraceHooks (on, config, userAfterSpecHandlers, userAfterRunHandlers) {
+ const wrapperFile = injectSupportFile(config)
+
+ const cleanupWrapper = () => {
+ if (wrapperFile) {
+ try { fs.unlinkSync(wrapperFile) } catch { /* best effort */ }
+ }
+ }
+
+ const tracer = global._ddtrace
+
+ const registerAfterRunWithCleanup = () => {
+ on('after:run', (results) => {
+ const chain = userAfterRunHandlers.reduce(
+ (p, h) => p.then(() => h(results)),
+ Promise.resolve()
+ )
+ return chain.finally(cleanupWrapper)
+ })
+ }
+
+ const registerNoopHandlers = () => {
+ for (const h of userAfterSpecHandlers) on('after:spec', h)
+ registerAfterRunWithCleanup()
+ on('task', noopTask)
+ }
+
+ if (!tracer || !tracer._initialized) {
+ registerNoopHandlers()
+ return config
+ }
+
+ const NoopTracer = require('../../../packages/dd-trace/src/noop/tracer')
+
+ if (tracer._tracer instanceof NoopTracer) {
+ registerNoopHandlers()
+ return config
+ }
+
+ const cypressPlugin = require('../../../packages/datadog-plugin-cypress/src/cypress-plugin')
+
+ if (cypressPlugin._isInit) {
+ for (const h of userAfterSpecHandlers) on('after:spec', h)
+ registerAfterRunWithCleanup()
+ return config
+ }
+
+ on('before:run', cypressPlugin.beforeRun.bind(cypressPlugin))
+
+ on('after:spec', (spec, results) => {
+ const chain = userAfterSpecHandlers.reduce(
+ (p, h) => p.then(() => h(spec, results)),
+ Promise.resolve()
+ )
+ return chain.then(() => cypressPlugin.afterSpec(spec, results))
+ })
+
+ on('after:run', (results) => {
+ const chain = userAfterRunHandlers.reduce(
+ (p, h) => p.then(() => h(results)),
+ Promise.resolve()
+ )
+ return chain
+ .then(() => cypressPlugin.afterRun(results))
+ .finally(cleanupWrapper)
+ })
+
+ on('task', cypressPlugin.getTasks())
+
+ return Promise.resolve(cypressPlugin.init(tracer, config)).then(() => config)
+}
+
+/**
+ * @param {Function|undefined} originalSetupNodeEvents
+ * @returns {Function}
+ */
+function wrapSetupNodeEvents (originalSetupNodeEvents) {
+ return function ddSetupNodeEvents (on, config) {
+ const userAfterSpecHandlers = []
+ const userAfterRunHandlers = []
+
+ const wrappedOn = (event, handler) => {
+ if (event === 'after:spec') {
+ userAfterSpecHandlers.push(handler)
+ } else if (event === 'after:run') {
+ userAfterRunHandlers.push(handler)
+ } else {
+ on(event, handler)
+ }
+ }
+
+ const maybePromise = originalSetupNodeEvents
+ ? originalSetupNodeEvents.call(this, wrappedOn, config)
+ : undefined
+
+ if (maybePromise && typeof maybePromise.then === 'function') {
+ return maybePromise.then((result) => {
+ return registerDdTraceHooks(
+ on,
+ mergeReturnedConfig(config, result),
+ userAfterSpecHandlers,
+ userAfterRunHandlers
+ )
+ })
+ }
+
+ return registerDdTraceHooks(
+ on,
+ mergeReturnedConfig(config, maybePromise),
+ userAfterSpecHandlers,
+ userAfterRunHandlers
+ )
+ }
+}
+
+/**
+ * @param {object} config
+ * @returns {object}
+ */
+function wrapConfig (config) {
+ if (!config || config[DD_CONFIG_WRAPPED]) return config
+ config[DD_CONFIG_WRAPPED] = true
+
+ if (config.e2e) {
+ config.e2e.setupNodeEvents = wrapSetupNodeEvents(config.e2e.setupNodeEvents)
+ }
+ if (config.component) {
+ config.component.setupNodeEvents = wrapSetupNodeEvents(config.component.setupNodeEvents)
+ }
+
+ return config
+}
+
+/**
+ * @param {string} originalConfigFile absolute path to the original config file
+ * @returns {string} path to the generated wrapper file
+ */
+function createConfigWrapper (originalConfigFile) {
+ const wrapperFile = path.join(
+ path.dirname(originalConfigFile),
+ `.dd-cypress-config-${process.pid}.mjs`
+ )
+
+ const cypressConfigPath = require.resolve('./cypress-config')
+
+ // Always use ESM: it can import both CJS and ESM configs, so it works
+ // regardless of the original file's extension or "type": "module" in package.json.
+ // Import cypress-config.js directly (CJS default = module.exports object).
+ fs.writeFileSync(wrapperFile, [
+ `import originalConfig from ${JSON.stringify(pathToFileURL(originalConfigFile).href)}`,
+ `import cypressConfig from ${JSON.stringify(pathToFileURL(cypressConfigPath).href)}`,
+ '',
+ 'export default cypressConfig.wrapConfig(originalConfig)',
+ '',
+ ].join('\n'))
+
+ return wrapperFile
+}
+
+/**
+ * Wraps the Cypress config file for a CLI start() call. When an explicit
+ * configFile is provided, creates a temp wrapper that imports the original
+ * and passes it through wrapConfig. This handles ESM configs (.mjs) and
+ * plain-object configs (without defineConfig) that can't be intercepted
+ * via the defineConfig shimmer.
+ *
+ * @param {object|undefined} options
+ * @returns {{ options: object|undefined, cleanup: Function }}
+ */
+function wrapCliConfigFileOptions (options) {
+ const noop = { options, cleanup: () => {} }
+
+ if (!options) return noop
+
+ const projectRoot = typeof options.project === 'string' ? options.project : process.cwd()
+ let configFilePath
+
+ if (options.configFile === false) {
+ // configFile: false means "no config file" — respect Cypress's semantics
+ return noop
+ } else if (typeof options.configFile === 'string') {
+ configFilePath = path.isAbsolute(options.configFile)
+ ? options.configFile
+ : path.resolve(projectRoot, options.configFile)
+ } else {
+ // No explicit --config-file: resolve the default cypress.config.{js,ts,cjs,mjs}
+ for (const ext of ['.js', '.ts', '.cjs', '.mjs']) {
+ const candidate = path.join(projectRoot, `cypress.config${ext}`)
+ if (fs.existsSync(candidate)) {
+ configFilePath = candidate
+ break
+ }
+ }
+ }
+
+ // Skip .ts files — Cypress transpiles them internally via its own loader.
+ // The ESM wrapper can't import .ts directly. The defineConfig shimmer
+ // handles .ts configs since they're transpiled to CJS by Cypress.
+ if (!configFilePath || !fs.existsSync(configFilePath) || path.extname(configFilePath) === '.ts') return noop
+
+ try {
+ const wrapperFile = createConfigWrapper(configFilePath)
+
+ return {
+ options: { ...options, configFile: wrapperFile },
+ cleanup: () => {
+ try { fs.unlinkSync(wrapperFile) } catch { /* best effort */ }
+ },
+ }
+ } catch {
+ // Config directory may be read-only — fall back to no wrapping.
+ // The defineConfig shimmer will still handle configs that use defineConfig.
+ return noop
+ }
+}
+
+module.exports = {
+ wrapCliConfigFileOptions,
+ wrapConfig,
+}
diff --git a/packages/datadog-instrumentations/src/cypress.js b/packages/datadog-instrumentations/src/cypress.js
index 1d22ffe0a42..5acd4b89e75 100644
--- a/packages/datadog-instrumentations/src/cypress.js
+++ b/packages/datadog-instrumentations/src/cypress.js
@@ -1,11 +1,93 @@
'use strict'
+const shimmer = require('../../datadog-shimmer')
const { DD_MAJOR } = require('../../../version')
const { addHook } = require('./helpers/instrument')
+const {
+ wrapCliConfigFileOptions,
+ wrapConfig,
+} = require('./cypress-config')
-// No handler because this is only useful for testing.
-// Cypress plugin does not patch any library.
+// Wrap defineConfig() so configs are instrumented when loaded in Cypress's
+// config child process. This covers both CLI and programmatic usage with CJS configs.
addHook({
name: 'cypress',
- versions: DD_MAJOR >= 6 ? ['>=10.2.0'] : ['>=6.7.0'],
-}, lib => lib)
+ versions: ['>=10.2.0'],
+}, (cypress) => {
+ if (typeof cypress.defineConfig === 'function') {
+ shimmer.wrap(cypress, 'defineConfig', (defineConfig) => function (config) {
+ wrapConfig(config)
+ return defineConfig(config)
+ })
+ }
+ return cypress
+})
+
+// Wrap the CLI entry points (cypress run / cypress open) to handle config files
+// that can't be intercepted via the defineConfig shimmer: ESM configs (.mjs)
+// and plain-object configs (without defineConfig).
+function getCliStartWrapper (start) {
+ return function ddTraceCliStart (options) {
+ const { options: wrappedOptions, cleanup } = wrapCliConfigFileOptions(options)
+ const result = start.call(this, wrappedOptions)
+
+ if (result && typeof result.then === 'function') {
+ return result.finally(cleanup)
+ }
+
+ cleanup()
+ return result
+ }
+}
+
+/**
+ * Wraps `start` on an object (or its `.default`) if present.
+ *
+ * @param {object} mod module exports
+ * @returns {object} mod
+ */
+function wrapStartOnModule (mod) {
+ const target = mod.default || mod
+ if (typeof target.start === 'function') {
+ shimmer.wrap(target, 'start', getCliStartWrapper)
+ }
+ return mod
+}
+
+// Hook the CLI entry points where Cypress resolves and executes `run`/`open`.
+// Cypress 10-14: lib/exec/{run,open}.js as separate files.
+// Cypress 15-15.10: dist/exec/{run,open}.js as separate files.
+// Cypress >=15.11: bundled into dist/cli-<hash>.js exporting runModule/openModule.
+for (const file of ['lib/exec/run.js', 'lib/exec/open.js', 'dist/exec/run.js', 'dist/exec/open.js']) {
+ addHook({
+ name: 'cypress',
+ versions: ['>=10.2.0'],
+ file,
+ }, wrapStartOnModule)
+}
+
+// Cypress >=15.11 bundles run/open into a single CLI chunk (dist/cli-<hash>.js).
+// The chunk exports runModule and openModule, each with a start() method.
+addHook({
+ name: 'cypress',
+ versions: ['>=10.2.0'],
+ filePattern: 'dist/cli.*',
+}, (cliChunk) => {
+ if (cliChunk.runModule?.start) {
+ shimmer.wrap(cliChunk.runModule, 'start', getCliStartWrapper)
+ }
+ if (cliChunk.openModule?.start) {
+ shimmer.wrap(cliChunk.openModule, 'start', getCliStartWrapper)
+ }
+ return cliChunk
+})
+
+// Cypress <10 uses the old pluginsFile approach. No auto-instrumentation;
+// users must use the manual dd-trace/ci/cypress/plugin setup.
+// This hook is kept so the plugin system registers Cypress for version tracking.
+if (DD_MAJOR < 6) {
+ addHook({
+ name: 'cypress',
+ versions: ['>=6.7.0 <10.2.0'],
+ }, lib => lib)
+}
diff --git a/packages/datadog-instrumentations/src/dns.js b/packages/datadog-instrumentations/src/dns.js
index 1c1f1f2b619..21c70853338 100644
--- a/packages/datadog-instrumentations/src/dns.js
+++ b/packages/datadog-instrumentations/src/dns.js
@@ -18,9 +18,8 @@ const rrtypes = {
}
const rrtypeMap = new WeakMap()
-const names = ['dns', 'node:dns']
-addHook({ name: names }, dns => {
+addHook({ name: 'dns' }, dns => {
shimmer.wrap(dns, 'lookup', fn => wrap('apm:dns:lookup', fn, 2))
shimmer.wrap(dns, 'lookupService', fn => wrap('apm:dns:lookup_service', fn, 2))
shimmer.wrap(dns, 'resolve', fn => wrap('apm:dns:resolve', fn, 2))
diff --git a/packages/datadog-instrumentations/src/express.js b/packages/datadog-instrumentations/src/express.js
index 13daeba0c22..49951a0808b 100644
--- a/packages/datadog-instrumentations/src/express.js
+++ b/packages/datadog-instrumentations/src/express.js
@@ -146,7 +146,7 @@ function wrapAppUse (use) {
}
}
-addHook({ name: 'express', versions: ['>=4'], file: ['lib/express.js'] }, express => {
+addHook({ name: 'express', versions: ['>=4'], file: 'lib/express.js' }, express => {
shimmer.wrap(express.application, 'handle', wrapHandle)
shimmer.wrap(express.application, 'all', wrapAppAll)
shimmer.wrap(express.application, 'route', wrapAppRoute)
@@ -224,19 +224,19 @@ function wrapProcessParamsMethod (requestPositionInArguments) {
}
}
-addHook({ name: 'express', versions: ['>=4.0.0 <4.3.0'], file: ['lib/express.js'] }, express => {
+addHook({ name: 'express', versions: ['>=4.0.0 <4.3.0'], file: 'lib/express.js' }, express => {
shimmer.wrap(express.Router, 'process_params', wrapProcessParamsMethod(1))
return express
})
-addHook({ name: 'express', versions: ['>=4.3.0 <5.0.0'], file: ['lib/express.js'] }, express => {
+addHook({ name: 'express', versions: ['>=4.3.0 <5.0.0'], file: 'lib/express.js' }, express => {
shimmer.wrap(express.Router, 'process_params', wrapProcessParamsMethod(2))
return express
})
const queryReadCh = channel('datadog:express:query:finish')
-addHook({ name: 'express', file: ['lib/request.js'], versions: ['>=5.0.0'] }, request => {
+addHook({ name: 'express', file: 'lib/request.js', versions: ['>=5.0.0'] }, request => {
shimmer.wrap(request, 'query', function (originalGet) {
return function wrappedGet () {
const query = originalGet.call(this)
diff --git a/packages/datadog-instrumentations/src/fs.js b/packages/datadog-instrumentations/src/fs.js
index 16f01ea145c..1a50d4d6a87 100644
--- a/packages/datadog-instrumentations/src/fs.js
+++ b/packages/datadog-instrumentations/src/fs.js
@@ -84,37 +84,35 @@ const paramsByFileHandleMethods = {
writeFile: ['data', 'options'],
writev: ['buffers', 'position'],
}
-const names = ['fs', 'node:fs']
-for (const name of names) {
- addHook({ name }, fs => {
- const asyncMethods = Object.keys(paramsByMethod)
- const syncMethods = asyncMethods.map(name => `${name}Sync`)
-
- massWrap(fs, asyncMethods, createWrapFunction())
- massWrap(fs, syncMethods, createWrapFunction())
- massWrap(fs.promises, asyncMethods, createWrapFunction('promises.'))
-
- wrap(fs.realpath, 'native', createWrapFunction('', 'realpath.native'))
- wrap(fs.realpathSync, 'native', createWrapFunction('', 'realpath.native'))
- wrap(fs.promises.realpath, 'native', createWrapFunction('', 'realpath.native'))
-
- wrap(fs, 'createReadStream', wrapCreateStream)
- wrap(fs, 'createWriteStream', wrapCreateStream)
- if (fs.Dir) {
- wrap(fs.Dir.prototype, 'close', createWrapFunction('dir.'))
- wrap(fs.Dir.prototype, 'closeSync', createWrapFunction('dir.'))
- wrap(fs.Dir.prototype, 'read', createWrapFunction('dir.'))
- wrap(fs.Dir.prototype, 'readSync', createWrapFunction('dir.'))
- wrap(fs.Dir.prototype, Symbol.asyncIterator, createWrapDirAsyncIterator())
- }
+addHook({ name: 'fs' }, fs => {
+ const asyncMethods = Object.keys(paramsByMethod)
+ const syncMethods = asyncMethods.map(name => `${name}Sync`)
+
+ massWrap(fs, asyncMethods, createWrapFunction())
+ massWrap(fs, syncMethods, createWrapFunction())
+ massWrap(fs.promises, asyncMethods, createWrapFunction('promises.'))
+
+ wrap(fs.realpath, 'native', createWrapFunction('', 'realpath.native'))
+ wrap(fs.realpathSync, 'native', createWrapFunction('', 'realpath.native'))
+ wrap(fs.promises.realpath, 'native', createWrapFunction('', 'realpath.native'))
+
+ wrap(fs, 'createReadStream', wrapCreateStream)
+ wrap(fs, 'createWriteStream', wrapCreateStream)
+ if (fs.Dir) {
+ wrap(fs.Dir.prototype, 'close', createWrapFunction('dir.'))
+ wrap(fs.Dir.prototype, 'closeSync', createWrapFunction('dir.'))
+ wrap(fs.Dir.prototype, 'read', createWrapFunction('dir.'))
+ wrap(fs.Dir.prototype, 'readSync', createWrapFunction('dir.'))
+ wrap(fs.Dir.prototype, Symbol.asyncIterator, createWrapDirAsyncIterator())
+ }
- wrap(fs, 'unwatchFile', createWatchWrapFunction())
- wrap(fs, 'watch', createWatchWrapFunction())
- wrap(fs, 'watchFile', createWatchWrapFunction())
+ wrap(fs, 'unwatchFile', createWatchWrapFunction())
+ wrap(fs, 'watch', createWatchWrapFunction())
+ wrap(fs, 'watchFile', createWatchWrapFunction())
+
+ return fs
+})
- return fs
- })
-}
function isFirstMethodReturningFileHandle (original) {
return !kHandle && original.name === 'open'
}
diff --git a/packages/datadog-instrumentations/src/graphql.js b/packages/datadog-instrumentations/src/graphql.js
index 52562c8316c..1ec3ce0a564 100644
--- a/packages/datadog-instrumentations/src/graphql.js
+++ b/packages/datadog-instrumentations/src/graphql.js
@@ -171,7 +171,7 @@ function wrapExecute (execute) {
args,
docSource: documentSources.get(document),
source,
- fields: {},
+ fields: Object.create(null),
abortController: new AbortController(),
}
diff --git a/packages/datadog-instrumentations/src/helpers/bundler-register.js b/packages/datadog-instrumentations/src/helpers/bundler-register.js
index 99e6675c1d6..57da69cda5e 100644
--- a/packages/datadog-instrumentations/src/helpers/bundler-register.js
+++ b/packages/datadog-instrumentations/src/helpers/bundler-register.js
@@ -45,45 +45,73 @@ if (!dc.unsubscribe) {
dc.unsubscribe = (channel, cb) => {
if (dc.channel(channel).hasSubscribers) {
dc.channel(channel).unsubscribe(cb)
+ return true
}
+ return false
}
}
-function doHook (payload) {
- const hook = hooks[payload.package]
+/**
+ * @param {string} name
+ */
+function doHook (name) {
+ const hook = hooks[name] ?? hooks[`node:${name}`]
if (!hook) {
- log.error('esbuild-wrapped %s missing in list of hooks', payload.package)
+ log.error('esbuild-wrapped %s missing in list of hooks', name)
return
}
const hookFn = hook.fn ?? hook
if (typeof hookFn !== 'function') {
- log.error('esbuild-wrapped hook %s is not a function', payload.package)
+ log.error('esbuild-wrapped hook %s is not a function', name)
return
}
try {
hookFn()
} catch {
- log.error('esbuild-wrapped %s hook failed', payload.package)
+ log.error('esbuild-wrapped %s hook failed', name)
}
}
-dc.subscribe(CHANNEL, (payload) => {
- doHook(payload)
+/** @type {Set} */
+const instrumentedNodeModules = new Set()
- if (!instrumentations[payload.package]) {
- log.error('esbuild-wrapped %s missing in list of instrumentations', payload.package)
+/** @typedef {{ package: string, module: unknown, version: string, path: string }} Payload */
+dc.subscribe(CHANNEL, (message) => {
+ const payload = /** @type {Payload} */ (message)
+ const name = payload.package
+
+ const isPrefixedWithNode = name.startsWith('node:')
+
+ const isNodeModule = isPrefixedWithNode || !hooks[name]
+
+ if (isNodeModule) {
+ const nodeName = isPrefixedWithNode ? name.slice(5) : name
+ // Used for node: prefixed modules to prevent double instrumentation.
+ if (instrumentedNodeModules.has(nodeName)) {
+ return
+ }
+ instrumentedNodeModules.add(nodeName)
+ }
+
+ doHook(name)
+
+ const instrumentation = instrumentations[name] ?? instrumentations[`node:${name}`]
+
+ if (!instrumentation) {
+ log.error('esbuild-wrapped %s missing in list of instrumentations', name)
return
}
- for (const { name, file, versions, hook } of instrumentations[payload.package]) {
- if (payload.path !== filename(name, file)) continue
- if (!matchVersion(payload.version, versions)) continue
+ for (const { file, versions, hook } of instrumentation) {
+ if (payload.path !== filename(name, file) || !matchVersion(payload.version, versions)) {
+ continue
+ }
try {
loadChannel.publish({ name, version: payload.version, file })
- payload.module = hook(payload.module, payload.version)
+ payload.module = hook(payload.module, payload.version) ?? payload.module
} catch (e) {
log.error('Error executing bundler hook', e)
}
diff --git a/packages/datadog-instrumentations/src/helpers/hook.js b/packages/datadog-instrumentations/src/helpers/hook.js
index c1b03fea446..b5db3a487ff 100644
--- a/packages/datadog-instrumentations/src/helpers/hook.js
+++ b/packages/datadog-instrumentations/src/helpers/hook.js
@@ -1,17 +1,41 @@
'use strict'
+
const path = require('path')
+
const iitm = require('../../../dd-trace/src/iitm')
const ritm = require('../../../dd-trace/src/ritm')
+const log = require('../../../dd-trace/src/log')
+const requirePackageJson = require('../../../dd-trace/src/require-package-json')
+
+/**
+ * @param {string} moduleBaseDir
+ * @returns {string|undefined}
+ */
+function getVersion (moduleBaseDir) {
+ if (moduleBaseDir) {
+ return requirePackageJson(moduleBaseDir, /** @type {import('module').Module} */ (module)).version
+ }
+
+ return process.version
+}
/**
* This is called for every package/internal-module that dd-trace supports instrumentation for
* In practice, `modules` is always an array with a single entry.
*
+ * @overload
+ * @param {string[]} modules list of modules to hook into
+ * @param {object} hookOptions hook options
+ * @param {Function} onrequire callback to be executed upon encountering module
+ */
+/**
+ * @overload
* @param {string[]} modules list of modules to hook into
* @param {object} hookOptions hook options
* @param {Function} onrequire callback to be executed upon encountering module
*/
function Hook (modules, hookOptions, onrequire) {
+ // TODO: Rewrite this to use class syntax. The same should be done for ritm.
if (!(this instanceof Hook)) return new Hook(modules, hookOptions, onrequire)
if (typeof hookOptions === 'function') {
@@ -42,6 +66,13 @@ function Hook (modules, hookOptions, onrequire) {
return result
}
+ try {
+ moduleVersion ||= getVersion(moduleBaseDir)
+ } catch (error) {
+ log.error('Error getting version for "%s": %s', moduleName, error.message, error)
+ return
+ }
+
if (
isIitm &&
moduleExports.default &&
@@ -66,10 +97,4 @@ function Hook (modules, hookOptions, onrequire) {
})
}
-Hook.prototype.unhook = function () {
- this._ritmHook.unhook()
- this._iitmHook.unhook()
- this._patched = Object.create(null)
-}
-
module.exports = Hook
diff --git a/packages/datadog-instrumentations/src/helpers/hooks.js b/packages/datadog-instrumentations/src/helpers/hooks.js
index 3d83b13e406..3d648a03ffa 100644
--- a/packages/datadog-instrumentations/src/helpers/hooks.js
+++ b/packages/datadog-instrumentations/src/helpers/hooks.js
@@ -1,6 +1,18 @@
'use strict'
module.exports = {
+ // Only list unprefixed node modules. They will automatically be instrumented as prefixed and unprefixed.
+ child_process: () => require('../child_process'),
+ crypto: () => require('../crypto'),
+ dns: () => require('../dns'),
+ fs: { serverless: false, fn: () => require('../fs') },
+ http: () => require('../http'),
+ http2: () => require('../http2'),
+ https: () => require('../http'),
+ net: () => require('../net'),
+ url: () => require('../url'),
+ vm: () => require('../vm'),
+ // Non Node.js modules
'@anthropic-ai/sdk': { esmFirst: true, fn: () => require('../anthropic') },
'@apollo/server': () => require('../apollo-server'),
'@apollo/gateway': () => require('../apollo'),
@@ -47,31 +59,24 @@ module.exports = {
bullmq: () => require('../bullmq'),
bunyan: () => require('../bunyan'),
'cassandra-driver': () => require('../cassandra-driver'),
- child_process: () => require('../child_process'),
connect: () => require('../connect'),
cookie: () => require('../cookie'),
'cookie-parser': () => require('../cookie-parser'),
couchbase: () => require('../couchbase'),
- crypto: () => require('../crypto'),
cypress: () => require('../cypress'),
'dd-trace-api': () => require('../dd-trace-api'),
- dns: () => require('../dns'),
elasticsearch: () => require('../elasticsearch'),
express: () => require('../express'),
'express-mongo-sanitize': () => require('../express-mongo-sanitize'),
'express-session': () => require('../express-session'),
fastify: () => require('../fastify'),
'find-my-way': () => require('../find-my-way'),
- fs: { serverless: false, fn: () => require('../fs') },
'generic-pool': () => require('../generic-pool'),
graphql: () => require('../graphql'),
grpc: () => require('../grpc'),
handlebars: () => require('../handlebars'),
hapi: () => require('../hapi'),
hono: { esmFirst: true, fn: () => require('../hono') },
- http: () => require('../http'),
- http2: () => require('../http2'),
- https: () => require('../http'),
ioredis: () => require('../ioredis'),
iovalkey: () => require('../iovalkey'),
'jest-circus': () => require('../jest'),
@@ -103,18 +108,8 @@ module.exports = {
multer: () => require('../multer'),
mysql: () => require('../mysql'),
mysql2: () => require('../mysql2'),
- net: () => require('../net'),
next: () => require('../next'),
'node-serialize': () => require('../node-serialize'),
- 'node:child_process': () => require('../child_process'),
- 'node:crypto': () => require('../crypto'),
- 'node:dns': () => require('../dns'),
- 'node:http': () => require('../http'),
- 'node:http2': () => require('../http2'),
- 'node:https': () => require('../http'),
- 'node:net': () => require('../net'),
- 'node:url': () => require('../url'),
- 'node:vm': () => require('../vm'),
nyc: () => require('../nyc'),
oracledb: () => require('../oracledb'),
openai: { esmFirst: true, fn: () => require('../openai') },
@@ -142,9 +137,7 @@ module.exports = {
tedious: () => require('../tedious'),
tinypool: { esmFirst: true, fn: () => require('../vitest') },
undici: () => require('../undici'),
- url: () => require('../url'),
vitest: { esmFirst: true, fn: () => require('../vitest') },
- vm: () => require('../vm'),
when: () => require('../when'),
winston: () => require('../winston'),
workerpool: () => require('../mocha'),
diff --git a/packages/datadog-instrumentations/src/helpers/instrument.js b/packages/datadog-instrumentations/src/helpers/instrument.js
index 6c7c493233e..2695807757c 100644
--- a/packages/datadog-instrumentations/src/helpers/instrument.js
+++ b/packages/datadog-instrumentations/src/helpers/instrument.js
@@ -1,11 +1,23 @@
'use strict'
const { AsyncResource } = require('async_hooks')
-const dc = require('dc-polyfill')
+const dc = /** @type {typeof import('node:diagnostics_channel')} */ (require('dc-polyfill'))
const instrumentations = require('./instrumentations')
const rewriterInstrumentations = require('./rewriter/instrumentations')
+/**
+ * @typedef {import('node:diagnostics_channel').Channel} Channel
+ * @typedef {import('node:diagnostics_channel').TracingChannel} TracingChannel
+ */
+
+/**
+ * @type {Record}
+ */
const channelMap = {}
+/**
+ * @param {string} name
+ * @returns {Channel}
+ */
exports.channel = function (name) {
const maybe = channelMap[name]
if (maybe) return maybe
@@ -14,7 +26,14 @@ exports.channel = function (name) {
return ch
}
+/**
+ * @type {Record}
+ */
const tracingChannelMap = {}
+/**
+ * @param {string} name
+ * @returns {TracingChannel}
+ */
exports.tracingChannel = function (name) {
const maybe = tracingChannelMap[name]
if (maybe) return maybe
@@ -34,24 +53,19 @@ exports.getHooks = function getHooks (names) {
/**
* @param {object} args
- * @param {string|string[]} args.name module name
- * @param {string[]} args.versions array of semver range strings
+ * @param {string} args.name module name
+ * @param {string[]} [args.versions] array of semver range strings
* @param {string} [args.file='index.js'] path to file within package to instrument
* @param {string} [args.filePattern] pattern to match files within package to instrument
- * @param {boolean} [args.patchDefault] whether to patch the default export
- * @param {(moduleExports: unknown, version: string) => unknown} hook
+ * @param {boolean} [args.patchDefault=true] whether to patch the default export
+ * @param {(moduleExports: unknown, version: string, isIitm?: boolean) => unknown} [hook] Patches module exports
*/
exports.addHook = function addHook ({ name, versions, file, filePattern, patchDefault }, hook) {
- if (typeof name === 'string') {
- name = [name]
+ if (!instrumentations[name]) {
+ instrumentations[name] = []
}
- for (const val of name) {
- if (!instrumentations[val]) {
- instrumentations[val] = []
- }
- instrumentations[val].push({ name: val, versions, file, filePattern, hook, patchDefault })
- }
+ instrumentations[name].push({ versions, file, filePattern, hook, patchDefault })
}
exports.AsyncResource = AsyncResource
diff --git a/packages/datadog-instrumentations/src/helpers/register.js b/packages/datadog-instrumentations/src/helpers/register.js
index 3785777cd07..66c2de19f47 100644
--- a/packages/datadog-instrumentations/src/helpers/register.js
+++ b/packages/datadog-instrumentations/src/helpers/register.js
@@ -1,9 +1,9 @@
'use strict'
+const { builtinModules } = require('module')
const path = require('path')
const { channel } = require('dc-polyfill')
const satisfies = require('../../../../vendor/dist/semifies')
-const requirePackageJson = require('../../../dd-trace/src/require-package-json')
const log = require('../../../dd-trace/src/log')
const telemetry = require('../../../dd-trace/src/guardrails/telemetry')
const { IS_SERVERLESS } = require('../../../dd-trace/src/serverless')
@@ -36,27 +36,47 @@ if (!disabledInstrumentations.has('process')) {
require('../process')
}
-const HOOK_SYMBOL = Symbol('hookExportsSet')
-
if (DD_TRACE_DEBUG && DD_TRACE_DEBUG.toLowerCase() !== 'false') {
checkRequireCache.checkForRequiredModules()
setImmediate(checkRequireCache.checkForPotentialConflicts)
}
-const seenCombo = new Set()
-const allInstrumentations = {}
-
for (const inst of disabledInstrumentations) {
rewriter.disable(inst)
}
-// TODO: make this more efficient
-for (const packageName of names) {
- if (disabledInstrumentations.has(packageName)) continue
+/** @type {Map} */
+const instrumentedNodeModules = new Map()
+/** @type {Map} */
+const instrumentedIntegrationsSuccess = new Map()
+/** @type {Set} */
+const alreadyLoggedIncompatibleIntegrations = new Set()
+
+// Always disable prefixed and unprefixed node modules if one is disabled.
+if (disabledInstrumentations.size) {
+ const builtinsSet = new Set(builtinModules)
+ for (const name of disabledInstrumentations) {
+ const hasPrefix = name.startsWith('node:')
+ if (hasPrefix || builtinsSet.has(name)) {
+ if (hasPrefix) {
+ const unprefixedName = name.slice(5)
+ if (!disabledInstrumentations.has(unprefixedName)) {
+ disabledInstrumentations.add(unprefixedName)
+ }
+ } else if (!disabledInstrumentations.has(`node:${name}`)) {
+ disabledInstrumentations.add(`node:${name}`)
+ }
+ }
+ }
+ builtinsSet.clear()
+}
+
+for (const name of names) {
+ if (disabledInstrumentations.has(name)) continue
const hookOptions = {}
- let hook = hooks[packageName]
+ let hook = hooks[name]
if (hook !== null && typeof hook === 'object') {
if (hook.serverless === false && IS_SERVERLESS) continue
@@ -65,173 +85,114 @@ for (const packageName of names) {
hook = hook.fn
}
- // get the instrumentation file name to save all hooked versions
- const instrumentationFileName = parseHookInstrumentationFileName(packageName)
-
- Hook([packageName], hookOptions, (moduleExports, moduleName, moduleBaseDir, moduleVersion, isIitm) => {
- moduleName = moduleName.replace(pathSepExpr, '/')
+ Hook([name], hookOptions, (moduleExports, moduleName, moduleBaseDir, moduleVersion, isIitm) => {
+ // All loaded versions are first expected to fail instrumentation.
+ if (!instrumentedIntegrationsSuccess.has(`${name}@${moduleVersion}`)) {
+ instrumentedIntegrationsSuccess.set(`${name}@${moduleVersion}`, false)
+ }
// This executes the integration file thus adding its entries to `instrumentations`
hook()
- if (!instrumentations[packageName]) {
+ if (!instrumentations[name] || moduleExports === instrumentedNodeModules.get(name)) {
return moduleExports
}
- const namesAndSuccesses = {}
- for (const { name, file, versions, hook, filePattern, patchDefault } of instrumentations[packageName]) {
- if (patchDefault === false && !moduleExports.default && isIitm) {
- return moduleExports
- } else if (patchDefault === true && moduleExports.default && isIitm) {
- moduleExports = moduleExports.default
+ // Used for node: prefixed modules to prevent double instrumentation.
+ if (moduleBaseDir) {
+ moduleName = moduleName.replace(pathSepExpr, '/')
+ } else {
+ instrumentedNodeModules.set(name, moduleExports)
+ }
+
+ for (const { file, versions, hook, filePattern, patchDefault } of instrumentations[name]) {
+ if (isIitm && patchDefault === !!moduleExports.default) {
+ if (patchDefault) {
+ moduleExports = moduleExports.default
+ } else {
+ return moduleExports
+ }
}
- let fullFilePattern = filePattern
const fullFilename = filename(name, file)
- if (fullFilePattern) {
- fullFilePattern = filename(name, fullFilePattern)
- }
- // Create a WeakSet associated with the hook function so that patches on the same moduleExport only happens once
- // for example by instrumenting both dns and node:dns double the spans would be created
- // since they both patch the same moduleExport, this WeakSet is used to mitigate that
- // TODO(BridgeAR): Instead of using a WeakSet here, why not just use aliases for the hook in register?
- // That way it would also not be duplicated. The actual name being used has to be identified else wise.
- // Maybe it is also not important to know what name was actually used?
- hook[HOOK_SYMBOL] ??= new WeakSet()
let matchesFile = moduleName === fullFilename
if (!matchesFile && isRelativeRequire(name)) matchesFile = true
+ const fullFilePattern = filePattern && filename(name, filePattern)
if (fullFilePattern) {
// Some libraries include a hash in their filenames when installed,
// so our instrumentation has to include a '.*' to match them for more than a single version.
- matchesFile = matchesFile || new RegExp(fullFilePattern).test(moduleName)
+ matchesFile ||= new RegExp(fullFilePattern).test(moduleName)
}
- if (matchesFile) {
- let version = moduleVersion
+ if (matchesFile && matchVersion(moduleVersion, versions)) {
+ // Do not log in case of an error to prevent duplicate telemetry for the same integration version.
+ instrumentedIntegrationsSuccess.set(`${name}@${moduleVersion}`, true)
try {
- version = version || getVersion(moduleBaseDir)
- allInstrumentations[instrumentationFileName] = allInstrumentations[instrumentationFileName] || false
- } catch (e) {
- log.error('Error getting version for "%s": %s', name, e.message, e)
- continue
- }
- if (namesAndSuccesses[`${name}@${version}`] === undefined && !file) {
- // TODO If `file` is present, we might elsewhere instrument the result of the module
- // for a version range that actually matches, so we can't assume that we're _not_
- // going to instrument that. However, the way the data model around instrumentation
- // works, we can't know either way just yet, so to avoid false positives, we'll just
- // ignore this if there is a `file` in the hook. The thing to do here is rework
- // everything so that we can be sure that there are _no_ instrumentations that it
- // could match.
- namesAndSuccesses[`${name}@${version}`] = false
- }
-
- if (matchVersion(version, versions)) {
- allInstrumentations[instrumentationFileName] = true
-
- // Check if the hook already has a set moduleExport
- if (hook[HOOK_SYMBOL].has(moduleExports)) {
- namesAndSuccesses[`${name}@${version}`] = true
- return moduleExports
- }
-
- try {
- loadChannel.publish({ name, version, file })
- // Send the name and version of the module back to the callback because now addHook
- // takes in an array of names so by passing the name the callback will know which module name is being used
- // TODO(BridgeAR): This is only true in case the name is identical
- // in all loads. If they deviate, the deviating name would not be
- // picked up due to the unification. Check what modules actually use the name.
- // TODO(BridgeAR): Only replace moduleExports if the hook returns a new value.
- // This allows to reduce the instrumentation code (no return needed).
-
- moduleExports = hook(moduleExports, version, name, isIitm) ?? moduleExports
- // Set the moduleExports in the hooks WeakSet
- hook[HOOK_SYMBOL].add(moduleExports)
- } catch (e) {
- log.info('Error during ddtrace instrumentation of application, aborting.', e)
- telemetry('error', [
- `error_type:${e.constructor.name}`,
- `integration:${name}`,
- `integration_version:${version}`,
- ], {
- result: 'error',
- result_class: 'internal_error',
- result_reason: `Error during instrumentation of ${name}@${version}: ${e.message}`,
- })
- }
- namesAndSuccesses[`${name}@${version}`] = true
+ loadChannel.publish({ name })
+
+ moduleExports = hook(moduleExports, moduleVersion, isIitm) ?? moduleExports
+ } catch (error) {
+ log.info('Error during ddtrace instrumentation of application, aborting.', error)
+ telemetry('error', [
+ `error_type:${error.constructor.name}`,
+ `integration:${name}`,
+ `integration_version:${moduleVersion}`,
+ ], {
+ result: 'error',
+ result_class: 'internal_error',
+ result_reason: `Error during instrumentation of ${name}@${moduleVersion}: ${error.message}`,
+ })
}
}
}
- for (const nameVersion of Object.keys(namesAndSuccesses)) {
- const [name, version] = nameVersion.split('@')
- const success = namesAndSuccesses[nameVersion]
- // we check allVersions to see if any version of the integration was successfully instrumented
- if (!success && !seenCombo.has(nameVersion) && !allInstrumentations[instrumentationFileName]) {
- telemetry('abort.integration', [
- `integration:${name}`,
- `integration_version:${version}`,
- ], {
- result: 'abort',
- result_class: 'incompatible_library',
- result_reason: `Incompatible integration version: ${name}@${version}`,
- })
- log.info('Found incompatible integration version: %s', nameVersion)
- seenCombo.add(nameVersion)
- }
- }
return moduleExports
})
}
-function matchVersion (version, ranges) {
- return !version || !ranges || ranges.some(range => satisfies(version, range))
-}
+globalThis[Symbol.for('dd-trace')]?.beforeExitHandlers.add(logAbortedIntegrations)
+// TODO: check if we want to stop using channels for single subscriber tasks
+channel('dd-trace:exporter:first-flush').subscribe(logAbortedIntegrations)
-function getVersion (moduleBaseDir) {
- if (moduleBaseDir) {
- return requirePackageJson(moduleBaseDir, module).version
+function logAbortedIntegrations () {
+ for (const [nameVersion, success] of instrumentedIntegrationsSuccess) {
+ // Only ever log a single version of an integration, even if it is loaded later.
+ if (!success && !alreadyLoggedIncompatibleIntegrations.has(nameVersion)) {
+ const [name, version] = nameVersion.split('@')
+ telemetry('abort.integration', [
+ `integration:${name}`,
+ `integration_version:${version}`,
+ ], {
+ result: 'abort',
+ result_class: 'incompatible_library',
+ result_reason: `Incompatible integration version: ${name}@${version}`,
+ })
+ log.info('Found incompatible integration version: %s', nameVersion)
+ alreadyLoggedIncompatibleIntegrations.add(nameVersion)
+ }
}
+ // Clear the map to avoid reporting the same integration version again.
+ instrumentedIntegrationsSuccess.clear()
}
-function filename (name, file) {
- return [name, file].filter(Boolean).join('/')
+/**
+ * @param {string|undefined} version
+ * @param {string[]|undefined} ranges
+ */
+function matchVersion (version, ranges) {
+ return !version || !ranges || ranges.some(range => satisfies(version, range))
}
-// This function captures the instrumentation file name for a given package by parsing the hook require
-// function given the module name. It is used to ensure that instrumentations such as redis
-// that have several different modules being hooked, ie: 'redis' main package, and @redis/client submodule
-// return a consistent instrumentation name. This is used later to ensure that at least some portion of
-// the integration was successfully instrumented. Prevents incorrect `Found incompatible integration version: ` messages
-// Example:
-// redis -> "() => require('../redis')" -> redis
-// @redis/client -> "() => require('../redis')" -> redis
-//
-function parseHookInstrumentationFileName (packageName) {
- let hook = hooks[packageName]
- if (hook.fn) {
- hook = hook.fn
- }
- const hookString = hook.toString()
- const regex = /require\('([^']*)'\)/
- const match = hookString.match(regex)
-
- // try to capture the hook require file location.
- if (match && match[1]) {
- let moduleName = match[1]
- // Remove leading '../' if present
- if (moduleName.startsWith('../')) {
- moduleName = moduleName.slice(3)
- }
- return moduleName
- }
-
- return null
+/**
+ * @param {string} name
+ * @param {string} [file]
+ * @returns {string}
+ */
+function filename (name, file) {
+ return file ? `${name}/${file}` : name
}
module.exports = {
diff --git a/packages/datadog-instrumentations/src/http/client.js b/packages/datadog-instrumentations/src/http/client.js
index 3625f384de4..15c5693efef 100644
--- a/packages/datadog-instrumentations/src/http/client.js
+++ b/packages/datadog-instrumentations/src/http/client.js
@@ -16,9 +16,8 @@ const asyncStartChannel = channel('apm:http:client:request:asyncStart')
const errorChannel = channel('apm:http:client:request:error')
const responseFinishChannel = channel('apm:http:client:response:finish')
-const names = ['http', 'https', 'node:http', 'node:https']
-
-addHook({ name: names }, hookFn)
+addHook({ name: 'http' }, hookFn)
+addHook({ name: 'https' }, hookFn)
function hookFn (http) {
patch(http, 'request')
diff --git a/packages/datadog-instrumentations/src/http/server.js b/packages/datadog-instrumentations/src/http/server.js
index adf61ab86da..51a488c3a85 100644
--- a/packages/datadog-instrumentations/src/http/server.js
+++ b/packages/datadog-instrumentations/src/http/server.js
@@ -16,10 +16,7 @@ const startSetHeaderCh = channel('datadog:http:server:response:set-header:start'
const requestFinishedSet = new WeakSet()
-const httpNames = ['http', 'node:http']
-const httpsNames = ['https', 'node:https']
-
-addHook({ name: httpNames }, http => {
+addHook({ name: 'http' }, http => {
shimmer.wrap(http.ServerResponse.prototype, 'emit', wrapResponseEmit)
shimmer.wrap(http.Server.prototype, 'emit', wrapEmit)
shimmer.wrap(http.ServerResponse.prototype, 'writeHead', wrapWriteHead)
@@ -34,7 +31,7 @@ addHook({ name: httpNames }, http => {
return http
})
-addHook({ name: httpsNames }, http => {
+addHook({ name: 'https' }, http => {
// http.ServerResponse not present on https
shimmer.wrap(http.Server.prototype, 'emit', wrapEmit)
return http
diff --git a/packages/datadog-instrumentations/src/http2/client.js b/packages/datadog-instrumentations/src/http2/client.js
index b335df7c4cd..d38837fb2d9 100644
--- a/packages/datadog-instrumentations/src/http2/client.js
+++ b/packages/datadog-instrumentations/src/http2/client.js
@@ -10,8 +10,6 @@ const asyncStartChannel = channel('apm:http2:client:request:asyncStart')
const asyncEndChannel = channel('apm:http2:client:request:asyncEnd')
const errorChannel = channel('apm:http2:client:request:error')
-const names = ['http2', 'node:http2']
-
function createWrapEmit (ctx) {
return function wrapEmit (emit) {
return function (event, arg1) {
@@ -68,7 +66,7 @@ function wrapConnect (connect) {
}
}
-addHook({ name: names }, http2 => {
+addHook({ name: 'http2' }, http2 => {
shimmer.wrap(http2, 'connect', wrapConnect)
if (http2.default) http2.default.connect = http2.connect
diff --git a/packages/datadog-instrumentations/src/http2/server.js b/packages/datadog-instrumentations/src/http2/server.js
index f5e7d961f84..878b9fa7f22 100644
--- a/packages/datadog-instrumentations/src/http2/server.js
+++ b/packages/datadog-instrumentations/src/http2/server.js
@@ -13,9 +13,7 @@ const startServerCh = channel('apm:http2:server:request:start')
const errorServerCh = channel('apm:http2:server:request:error')
const emitCh = channel('apm:http2:server:response:emit')
-const names = ['http2', 'node:http2']
-
-addHook({ name: names }, http2 => {
+addHook({ name: 'http2' }, http2 => {
shimmer.wrap(http2, 'createSecureServer', wrapCreateServer)
shimmer.wrap(http2, 'createServer', wrapCreateServer)
})
diff --git a/packages/datadog-instrumentations/src/jest.js b/packages/datadog-instrumentations/src/jest.js
index 3c575100bad..794d5525ba1 100644
--- a/packages/datadog-instrumentations/src/jest.js
+++ b/packages/datadog-instrumentations/src/jest.js
@@ -1,5 +1,8 @@
'use strict'
+// Capture real timers at module load time, before any test can install fake timers.
+const realSetTimeout = setTimeout
+
const path = require('path')
const shimmer = require('../../datadog-shimmer')
const log = require('../../dd-trace/src/log')
@@ -111,6 +114,8 @@ const efdDeterminedRetries = new Map()
const efdSlowAbortedTests = new Set()
// Tests added as EFD new-test candidates (not ATF, not impacted).
const efdNewTestCandidates = new Set()
+// Tests that are genuinely new (not in known tests list).
+const newTests = new Set()
const testSuiteAbsolutePathsWithFastCheck = new Set()
const testSuiteJestObjects = new Map()
@@ -485,7 +490,7 @@ function getWrappedEnvironment (BaseEnvironment, jestVersion) {
}
if (this.isKnownTestsEnabled) {
- isNewTest = retriedTestsToNumAttempts.has(testName)
+ isNewTest = newTests.has(testName)
}
const willRunEfd = this.isEarlyFlakeDetectionEnabled && (isNewTest || isModified)
@@ -605,6 +610,9 @@ function getWrappedEnvironment (BaseEnvironment, jestVersion) {
}
if (!isAttemptToFix && this.isKnownTestsEnabled) {
const isNew = !this.knownTestsForThisSuite.includes(testFullName)
+ if (isNew && !isSkipped) {
+ newTests.add(testFullName)
+ }
if (isNew && !isSkipped && !retriedTestsToNumAttempts.has(testFullName)) {
if (DYNAMIC_NAME_RE.test(testFullName)) {
// Populated directly for runInBand; for parallel workers the main process
@@ -715,7 +723,7 @@ function getWrappedEnvironment (BaseEnvironment, jestVersion) {
let isEfdRetry = false
// We'll store the test statuses of the retries
if (this.isKnownTestsEnabled) {
- const isNewTest = retriedTestsToNumAttempts.has(testName)
+ const isNewTest = newTests.has(testName)
if (isNewTest) {
if (newTestsTestStatuses.has(testName)) {
newTestsTestStatuses.get(testName).push(status)
@@ -776,7 +784,7 @@ function getWrappedEnvironment (BaseEnvironment, jestVersion) {
// This means that tests retried with DI are BREAKPOINT_HIT_GRACE_PERIOD_MS slower at least.
if (status === 'fail' && mightHitBreakpoint) {
await new Promise(resolve => {
- setTimeout(() => {
+ realSetTimeout(() => {
resolve()
}, BREAKPOINT_HIT_GRACE_PERIOD_MS)
})
@@ -811,6 +819,7 @@ function getWrappedEnvironment (BaseEnvironment, jestVersion) {
efdDeterminedRetries.clear()
efdSlowAbortedTests.clear()
efdNewTestCandidates.clear()
+ newTests.clear()
retriedTestsToNumAttempts.clear()
attemptToFixRetriedTestsStatuses.clear()
testsToBeRetried.clear()
@@ -1345,7 +1354,7 @@ function getCliWrapper (isNewJestVersion) {
})
const timeoutPromise = new Promise((resolve) => {
- timeoutId = setTimeout(() => {
+ timeoutId = realSetTimeout(() => {
resolve('timeout')
}, FLUSH_TIMEOUT).unref()
})
diff --git a/packages/datadog-instrumentations/src/limitd-client.js b/packages/datadog-instrumentations/src/limitd-client.js
index 2b519a29bf5..c843345d6da 100644
--- a/packages/datadog-instrumentations/src/limitd-client.js
+++ b/packages/datadog-instrumentations/src/limitd-client.js
@@ -14,7 +14,7 @@ function wrapRequest (original) {
addHook({
name: 'limitd-client',
versions: ['>=2.8'],
- file: ['client.js'],
+ file: 'client.js',
}, LimitdClient => {
shimmer.wrap(LimitdClient.prototype, '_directRequest', wrapRequest)
shimmer.wrap(LimitdClient.prototype, '_retriedRequest', wrapRequest)
diff --git a/packages/datadog-instrumentations/src/mocha/utils.js b/packages/datadog-instrumentations/src/mocha/utils.js
index c3afc807b3f..4a91b6754c5 100644
--- a/packages/datadog-instrumentations/src/mocha/utils.js
+++ b/packages/datadog-instrumentations/src/mocha/utils.js
@@ -1,5 +1,8 @@
'use strict'
+// Capture real timers at module load time, before any test can install fake timers.
+const realSetTimeout = setTimeout
+
const { getTestSuitePath, DYNAMIC_NAME_RE } = require('../../../dd-trace/src/plugins/util/test')
const { channel } = require('../helpers/instrument')
const shimmer = require('../../../datadog-shimmer')
@@ -293,7 +296,7 @@ function getOnTestEndHandler (config) {
// This means that tests retried with DI are BREAKPOINT_HIT_GRACE_PERIOD_MS slower at least.
if (test._ddShouldWaitForHitProbe || test._retriedTest?._ddShouldWaitForHitProbe) {
await new Promise((resolve) => {
- setTimeout(() => {
+ realSetTimeout(() => {
resolve()
}, BREAKPOINT_HIT_GRACE_PERIOD_MS)
})
diff --git a/packages/datadog-instrumentations/src/net.js b/packages/datadog-instrumentations/src/net.js
index 977dc61eb30..255ebe7f607 100644
--- a/packages/datadog-instrumentations/src/net.js
+++ b/packages/datadog-instrumentations/src/net.js
@@ -16,16 +16,10 @@ const errorTCPCh = channel('apm:net:tcp:error')
const readyCh = channel('apm:net:tcp:ready')
const connectionCh = channel('apm:net:tcp:connection')
-const names = ['net', 'node:net']
-
-addHook({ name: names }, (net, version, name) => {
+addHook({ name: 'net' }, (net) => {
// explicitly require dns so that net gets an instrumented instance
// so that we don't miss the dns calls
- if (name === 'net') {
- require('dns')
- } else {
- require('node:dns')
- }
+ require('node:dns')
shimmer.wrap(net.Socket.prototype, 'connect', connect => function () {
if (!startICPCh.hasSubscribers || !startTCPCh.hasSubscribers) {
diff --git a/packages/datadog-instrumentations/src/pino.js b/packages/datadog-instrumentations/src/pino.js
index 043d97ac9d4..3d44cb7a181 100644
--- a/packages/datadog-instrumentations/src/pino.js
+++ b/packages/datadog-instrumentations/src/pino.js
@@ -97,7 +97,7 @@ addHook({ name: 'pino', versions: ['>=5.14.0 <6.8.0'] }, (pino) => {
return wrapped
})
-addHook({ name: 'pino', versions: ['>=6.8.0'], patchDefault: false }, (pino, _1, _2, isIitm) => {
+addHook({ name: 'pino', versions: ['>=6.8.0'], patchDefault: false }, (pino) => {
const mixinSym = pino.symbols.mixinSym
const wrapped = shimmer.wrapFunction(pino, pino => wrapPino(mixinSym, wrapMixin, pino))
diff --git a/packages/datadog-instrumentations/src/playwright.js b/packages/datadog-instrumentations/src/playwright.js
index 5a98b56dfb2..31b04eee3a6 100644
--- a/packages/datadog-instrumentations/src/playwright.js
+++ b/packages/datadog-instrumentations/src/playwright.js
@@ -1,5 +1,8 @@
'use strict'
+// Capture real timers at module load time, before any test can install fake timers.
+const realSetTimeout = setTimeout
+
const satisfies = require('../../../vendor/dist/semifies')
const shimmer = require('../../datadog-shimmer')
@@ -1216,7 +1219,7 @@ addHook({
if (isRumActive) {
// Give some time RUM to flush data, similar to what we do in selenium
- await new Promise(resolve => setTimeout(resolve, RUM_FLUSH_WAIT_TIME))
+ await new Promise(resolve => realSetTimeout(resolve, RUM_FLUSH_WAIT_TIME))
const url = page.url()
if (url) {
const domain = new URL(url).hostname
diff --git a/packages/datadog-instrumentations/src/prisma.js b/packages/datadog-instrumentations/src/prisma.js
index 6dd40aac3c0..fcbf0916ee3 100644
--- a/packages/datadog-instrumentations/src/prisma.js
+++ b/packages/datadog-instrumentations/src/prisma.js
@@ -136,11 +136,10 @@ function resolveClientDbConfig (clientConfig, datasourceName, runtimeDbConfig) {
/**
* @param {unknown} runtime
* @param {string} versions
- * @param {string} [name]
* @param {boolean} [isIitm]
* @returns {object}
*/
-const prismaHook = (runtime, versions, name, isIitm) => {
+const prismaHook = (runtime, versions, isIitm) => {
/**
* @typedef {{ getPrismaClient?: (config: PrismaRuntimeConfig, ...args: unknown[]) => Function }} PrismaRuntime
*/
diff --git a/packages/datadog-instrumentations/src/selenium.js b/packages/datadog-instrumentations/src/selenium.js
index 88f54c9debf..0046edd7e3e 100644
--- a/packages/datadog-instrumentations/src/selenium.js
+++ b/packages/datadog-instrumentations/src/selenium.js
@@ -1,5 +1,8 @@
'use strict'
+// Capture real timers at module load time, before any test can install fake timers.
+const realSetTimeout = setTimeout
+
const shimmer = require('../../datadog-shimmer')
const { getValueFromEnvSources } = require('../../dd-trace/src/config/helper')
const { addHook, channel } = require('./helpers/instrument')
@@ -66,7 +69,7 @@ addHook({
if (isRumActive) {
// We'll have time for RUM to flush the events (there's no callback to know when it's done)
await new Promise(resolve => {
- setTimeout(() => {
+ realSetTimeout(() => {
resolve()
}, DD_CIVISIBILITY_RUM_FLUSH_WAIT_MILLIS)
})
diff --git a/packages/datadog-instrumentations/src/sequelize.js b/packages/datadog-instrumentations/src/sequelize.js
index 29b9ccdfd77..74fa264f52d 100644
--- a/packages/datadog-instrumentations/src/sequelize.js
+++ b/packages/datadog-instrumentations/src/sequelize.js
@@ -6,7 +6,7 @@ const {
addHook,
} = require('./helpers/instrument')
-addHook({ name: 'sequelize', versions: ['>=4'], file: ['lib/sequelize.js'] }, Sequelize => {
+addHook({ name: 'sequelize', versions: ['>=4'], file: 'lib/sequelize.js' }, Sequelize => {
const startCh = channel('datadog:sequelize:query:start')
const finishCh = channel('datadog:sequelize:query:finish')
diff --git a/packages/datadog-instrumentations/src/url.js b/packages/datadog-instrumentations/src/url.js
index 654898a826d..6c19cb9060a 100644
--- a/packages/datadog-instrumentations/src/url.js
+++ b/packages/datadog-instrumentations/src/url.js
@@ -2,13 +2,11 @@
const shimmer = require('../../datadog-shimmer')
const { addHook, channel } = require('./helpers/instrument')
-const names = ['url', 'node:url']
-
const parseFinishedChannel = channel('datadog:url:parse:finish')
const urlGetterChannel = channel('datadog:url:getter:finish')
const instrumentedGetters = ['host', 'origin', 'hostname']
-addHook({ name: names }, function (url) {
+addHook({ name: 'url' }, function (url) {
shimmer.wrap(url, 'parse', (parse) => {
return function wrappedParse (input) {
const parsedValue = parse.apply(this, arguments)
diff --git a/packages/datadog-instrumentations/src/vitest.js b/packages/datadog-instrumentations/src/vitest.js
index 4b19bb8ee02..a83093973f3 100644
--- a/packages/datadog-instrumentations/src/vitest.js
+++ b/packages/datadog-instrumentations/src/vitest.js
@@ -1,4 +1,8 @@
'use strict'
+
+// Capture real timers at module load time, before any test can install fake timers.
+const realSetTimeout = setTimeout
+
const path = require('node:path')
const shimmer = require('../../datadog-shimmer')
@@ -83,7 +87,7 @@ function getTestCommand () {
function waitForHitProbe () {
return new Promise(resolve => {
- setTimeout(() => {
+ realSetTimeout(() => {
resolve()
}, BREAKPOINT_HIT_GRACE_PERIOD_MS)
})
diff --git a/packages/datadog-instrumentations/src/vm.js b/packages/datadog-instrumentations/src/vm.js
index 545e797085f..46159c855f5 100644
--- a/packages/datadog-instrumentations/src/vm.js
+++ b/packages/datadog-instrumentations/src/vm.js
@@ -2,12 +2,10 @@
const shimmer = require('../../datadog-shimmer')
const { channel, addHook } = require('./helpers/instrument')
-const names = ['vm', 'node:vm']
-
const runScriptStartChannel = channel('datadog:vm:run-script:start')
const sourceTextModuleStartChannel = channel('datadog:vm:source-text-module:start')
-addHook({ name: names }, function (vm) {
+addHook({ name: 'vm' }, function (vm) {
vm.Script = class extends vm.Script {
constructor (code) {
super(...arguments)
diff --git a/packages/datadog-plugin-aws-sdk/src/base.js b/packages/datadog-plugin-aws-sdk/src/base.js
index da8e40a56f1..b349ba99a80 100644
--- a/packages/datadog-plugin-aws-sdk/src/base.js
+++ b/packages/datadog-plugin-aws-sdk/src/base.js
@@ -23,12 +23,13 @@ class BaseAwsSdkPlugin extends ClientPlugin {
return id
}
+ /** @type {import('../../dd-trace/src/config/config-types').ConfigProperties['cloudPayloadTagging']} */
get cloudTaggingConfig () {
return this._tracerConfig.cloudPayloadTagging
}
get payloadTaggingRules () {
- return this.cloudTaggingConfig.rules.aws?.[this.constructor.id]
+ return this.cloudTaggingConfig.rules?.aws?.[this.constructor.id]
}
constructor (...args) {
@@ -78,7 +79,7 @@ class BaseAwsSdkPlugin extends ClientPlugin {
this.requestInject(span, request)
})
- if (this.constructor.isPayloadReporter && this.cloudTaggingConfig.requestsEnabled) {
+ if (this.constructor.isPayloadReporter && this.cloudTaggingConfig.request) {
const maxDepth = this.cloudTaggingConfig.maxDepth
const requestTags = tagsFromRequest(this.payloadTaggingRules, request.params, { maxDepth })
span.addTags(requestTags)
@@ -215,7 +216,7 @@ class BaseAwsSdkPlugin extends ClientPlugin {
span.addTags(tags)
- if (this.constructor.isPayloadReporter && this.cloudTaggingConfig.responsesEnabled) {
+ if (this.constructor.isPayloadReporter && this.cloudTaggingConfig.response) {
const maxDepth = this.cloudTaggingConfig.maxDepth
const responseBody = this.extractResponseBody(response)
const responseTags = tagsFromResponse(this.payloadTaggingRules, responseBody, { maxDepth })
diff --git a/packages/datadog-plugin-confluentinc-kafka-javascript/test/index.spec.js b/packages/datadog-plugin-confluentinc-kafka-javascript/test/index.spec.js
index b1454be7359..011fd1aaaf9 100644
--- a/packages/datadog-plugin-confluentinc-kafka-javascript/test/index.spec.js
+++ b/packages/datadog-plugin-confluentinc-kafka-javascript/test/index.spec.js
@@ -252,6 +252,85 @@ describe('Plugin', () => {
return expectedSpanPromise
})
})
+
+ describe('consumer (eachBatch)', () => {
+ let consumer
+ let batchMessages
+
+ beforeEach(async () => {
+ batchMessages = [{ key: 'key1', value: 'test2' }, { key: 'key2', value: 'test3' }]
+ consumer = kafka.consumer({
+ kafkaJS: { groupId, fromBeginning: true, autoCommit: false },
+ })
+ await consumer.connect()
+ await consumer.subscribe({ topic: testTopic })
+ })
+
+ afterEach(async () => {
+ await consumer.disconnect()
+ })
+
+ it('should be instrumented', async () => {
+ const expectedSpanPromise = expectSpanWithDefaults({
+ name: expectedSchema.receive.opName,
+ service: expectedSchema.receive.serviceName,
+ meta: {
+ 'span.kind': 'consumer',
+ component: 'confluentinc-kafka-javascript',
+ 'kafka.topic': testTopic,
+ 'messaging.destination.name': testTopic,
+ 'messaging.system': 'kafka',
+ },
+ resource: testTopic,
+ error: 0,
+ type: 'worker',
+ })
+
+ await consumer.run({ eachBatch: () => {} })
+ return Promise.all([sendMessages(kafka, testTopic, batchMessages), expectedSpanPromise])
+ })
+
+ it('should run the consumer in the context of the consumer span', done => {
+ const firstSpan = tracer.scope().active()
+ let eachBatch = async ({ batch }) => {
+ const currentSpan = tracer.scope().active()
+
+ try {
+ assert.notEqual(currentSpan, firstSpan)
+ assert.strictEqual(currentSpan.context()._name, expectedSchema.receive.opName)
+ done()
+ } catch (e) {
+ done(e)
+ } finally {
+ eachBatch = () => {} // avoid being called for each message
+ }
+ }
+
+ consumer.run({ eachBatch: (...args) => eachBatch(...args) })
+ .then(() => sendMessages(kafka, testTopic, batchMessages))
+ .catch(done)
+ })
+
+ it('should propagate context via span links', async () => {
+ const expectedSpanPromise = agent.assertSomeTraces(traces => {
+ const span = traces[0][0]
+ const links = span.meta['_dd.span_links'] ? JSON.parse(span.meta['_dd.span_links']) : []
+
+ assertObjectContains(span, {
+ name: expectedSchema.receive.opName,
+ service: expectedSchema.receive.serviceName,
+ resource: testTopic,
+ })
+
+ // librdkafka may deliver messages across multiple batches,
+ // so each batch span will have links for the messages it received.
+ assert.ok(links.length >= 1, `expected at least 1 span link, got ${links.length}`)
+ })
+
+ await consumer.run({ eachBatch: () => {} })
+ await Promise.all([sendMessages(kafka, testTopic, batchMessages), expectedSpanPromise])
+ })
+ })
})
// Adding tests for the native API
diff --git a/packages/datadog-plugin-cucumber/src/index.js b/packages/datadog-plugin-cucumber/src/index.js
index 3687fc90a09..e49befa4396 100644
--- a/packages/datadog-plugin-cucumber/src/index.js
+++ b/packages/datadog-plugin-cucumber/src/index.js
@@ -1,5 +1,9 @@
'use strict'
+// Capture real timers at module load time, before any test can install fake timers.
+const realDateNow = Date.now.bind(Date)
+const realSetTimeout = setTimeout
+
const CiPlugin = require('../../dd-trace/src/plugins/ci_plugin')
const { storage } = require('../../datadog-core')
const { getEnvironmentVariable, getValueFromEnvSources } = require('../../dd-trace/src/config/helper')
@@ -229,7 +233,7 @@ class CucumberPlugin extends CiPlugin {
// Time we give the breakpoint to be hit
if (promises && this.runningTestProbe) {
promises.hitBreakpointPromise = new Promise((resolve) => {
- setTimeout(resolve, BREAKPOINT_HIT_GRACE_PERIOD_MS)
+ realSetTimeout(resolve, BREAKPOINT_HIT_GRACE_PERIOD_MS)
})
}
@@ -252,8 +256,8 @@ class CucumberPlugin extends CiPlugin {
const { file, line, stackIndex } = probeInformation
this.runningTestProbe = { file, line }
this.testErrorStackIndex = stackIndex
- const waitUntil = Date.now() + BREAKPOINT_SET_GRACE_PERIOD_MS
- while (Date.now() < waitUntil) {
+ const waitUntil = realDateNow() + BREAKPOINT_SET_GRACE_PERIOD_MS
+ while (realDateNow() < waitUntil) {
// TODO: To avoid a race condition, we should wait until `probeInformation.setProbePromise` has resolved.
// However, Cucumber doesn't have a mechanism for waiting asyncrounously here, so for now, we'll have to
// fall back to a fixed syncronous delay.
diff --git a/packages/datadog-plugin-cypress/src/cypress-plugin.js b/packages/datadog-plugin-cypress/src/cypress-plugin.js
index 015a6307024..52bb099345e 100644
--- a/packages/datadog-plugin-cypress/src/cypress-plugin.js
+++ b/packages/datadog-plugin-cypress/src/cypress-plugin.js
@@ -307,10 +307,55 @@ class CypressPlugin {
}
}
+ /**
+ * Resets state that is scoped to a single Cypress run so the singleton plugin
+ * can be reused safely across multiple programmatic cypress.run() calls.
+ *
+ * @returns {void}
+ */
+ resetRunState () {
+ this._isInit = false
+ this.finishedTestsByFile = {}
+ this.testStatuses = {}
+ this.isTestsSkipped = false
+ this.isSuitesSkippingEnabled = false
+ this.isCodeCoverageEnabled = false
+ this.isFlakyTestRetriesEnabled = false
+ this.flakyTestRetriesCount = 0
+ this.isEarlyFlakeDetectionEnabled = false
+ this.isKnownTestsEnabled = false
+ this.earlyFlakeDetectionNumRetries = 0
+ this.testsToSkip = []
+ this.skippedTests = []
+ this.hasForcedToRunSuites = false
+ this.hasUnskippableSuites = false
+ this.unskippableSuites = []
+ this.knownTests = []
+ this.knownTestsByTestSuite = undefined
+ this.isTestManagementTestsEnabled = false
+ this.testManagementAttemptToFixRetries = 0
+ this.testManagementTests = undefined
+ this.isImpactedTestsEnabled = false
+ this.modifiedFiles = []
+ this.activeTestSpan = null
+ this.testSuiteSpan = null
+ this.testModuleSpan = null
+ this.testSessionSpan = null
+ this.command = undefined
+ this.frameworkVersion = undefined
+ this.rootDir = undefined
+ this.itrCorrelationId = undefined
+ this.isTestIsolationEnabled = undefined
+ this.rumFlushWaitMillis = undefined
+ this._pendingRequestErrorTags = []
+ this.libraryConfigurationPromise = undefined
+ }
+
// Init function returns a promise that resolves with the Cypress configuration
// Depending on the received configuration, the Cypress configuration can be modified:
// for example, to enable retries for failed tests.
init (tracer, cypressConfig) {
+ this.resetRunState()
this._isInit = true
this.tracer = tracer
this.cypressConfig = cypressConfig
@@ -694,20 +739,27 @@ class CypressPlugin {
}
return new Promise(resolve => {
+ const finishAfterRun = () => {
+ this._isInit = false
+ appClosingTelemetry()
+ resolve(null)
+ }
+
const exporter = this.tracer._tracer._exporter
if (!exporter) {
- return resolve(null)
+ finishAfterRun()
+ return
}
if (exporter.flush) {
exporter.flush(() => {
- appClosingTelemetry()
- resolve(null)
+ finishAfterRun()
})
} else if (exporter._writer) {
exporter._writer.flush(() => {
- appClosingTelemetry()
- resolve(null)
+ finishAfterRun()
})
+ } else {
+ finishAfterRun()
}
})
}
diff --git a/packages/datadog-plugin-graphql/src/resolve.js b/packages/datadog-plugin-graphql/src/resolve.js
index c92b828f422..3597f2002ae 100644
--- a/packages/datadog-plugin-graphql/src/resolve.js
+++ b/packages/datadog-plugin-graphql/src/resolve.js
@@ -28,7 +28,7 @@ class GraphQLResolvePlugin extends TracingPlugin {
if (rootCtx.fields[computedPathString]) return
if (!rootCtx[collapsedPathSym]) {
- rootCtx[collapsedPathSym] = {}
+ rootCtx[collapsedPathSym] = Object.create(null)
} else if (rootCtx[collapsedPathSym][computedPathString]) {
return
}
diff --git a/packages/datadog-plugin-graphql/test/index.spec.js b/packages/datadog-plugin-graphql/test/index.spec.js
index 73385f89f7d..9d943372a45 100644
--- a/packages/datadog-plugin-graphql/test/index.spec.js
+++ b/packages/datadog-plugin-graphql/test/index.spec.js
@@ -446,6 +446,36 @@ describe('Plugin', () => {
graphql.graphql({ schema, source }).catch(done)
})
+ it('should trace aliased __proto__ fields with default collapsing', async () => {
+ const source = '{ hello(name: "world") __proto__: hello(name: "alias") }'
+
+ const [, result] = await Promise.all([
+ agent.assertSomeTraces(traces => {
+ const spans = sort(traces[0])
+ const resolveSpans = spans.filter(span => span.name === 'graphql.resolve')
+
+ assert.strictEqual(resolveSpans.length, 2)
+
+ const paths = resolveSpans
+ .map(span => span.meta['graphql.field.path'])
+ .sort()
+
+ assert.deepStrictEqual(paths, ['__proto__', 'hello'])
+
+ for (const span of resolveSpans) {
+ assert.strictEqual(span.error, 0)
+ assert.strictEqual(span.resource, 'hello:String')
+ }
+ }),
+ graphql.graphql({ schema, source }),
+ ])
+
+ assert.ok(!result.errors || result.errors.length === 0)
+ assert.strictEqual(result.data.hello, 'world')
+ // eslint-disable-next-line no-proto
+ assert.strictEqual(result.data.__proto__, 'alias')
+ })
+
it('should instrument each field resolver duration independently', done => {
const source = `
{
@@ -1667,6 +1697,31 @@ describe('Plugin', () => {
graphql.graphql({ schema, source }).catch(done)
})
+
+ it('should trace aliased __proto__ fields when collapsing is disabled', async () => {
+ const source = '{ __proto__: hello(name: "alias") }'
+
+ const [, result] = await Promise.all([
+ agent.assertSomeTraces(traces => {
+ const spans = sort(traces[0])
+ const resolveSpans = spans.filter(span => span.name === 'graphql.resolve')
+
+ assert.strictEqual(resolveSpans.length, 1)
+ assertObjectContains(resolveSpans[0], {
+ resource: 'hello:String',
+ error: 0,
+ meta: {
+ 'graphql.field.path': '__proto__',
+ },
+ })
+ }),
+ graphql.graphql({ schema, source }),
+ ])
+
+ assert.ok(!result.errors || result.errors.length === 0)
+ // eslint-disable-next-line no-proto
+ assert.strictEqual(result.data.__proto__, 'alias')
+ })
})
describe('with signature calculation disabled', () => {
diff --git a/packages/datadog-plugin-grpc/test/client.spec.js b/packages/datadog-plugin-grpc/test/client.spec.js
index 6c1eef00667..bad1443d9f4 100644
--- a/packages/datadog-plugin-grpc/test/client.spec.js
+++ b/packages/datadog-plugin-grpc/test/client.spec.js
@@ -12,7 +12,7 @@ const loader = require('../../../versions/@grpc/proto-loader').get()
const { withNamingSchema, withPeerService, withVersions } = require('../../dd-trace/test/setup/mocha')
const agent = require('../../dd-trace/test/plugins/agent')
const { ERROR_MESSAGE, ERROR_TYPE, ERROR_STACK } = require('../../dd-trace/src/constants')
-const defaults = require('../../dd-trace/src/config/defaults')
+const { defaults } = require('../../dd-trace/src/config/defaults')
const { NODE_MAJOR } = require('../../../version')
const getService = require('./service')
diff --git a/packages/datadog-plugin-grpc/test/server.spec.js b/packages/datadog-plugin-grpc/test/server.spec.js
index 1c75183879d..f7c638d22fb 100644
--- a/packages/datadog-plugin-grpc/test/server.spec.js
+++ b/packages/datadog-plugin-grpc/test/server.spec.js
@@ -11,7 +11,7 @@ const { assertObjectContains } = require('../../../integration-tests/helpers')
const { withNamingSchema, withVersions } = require('../../dd-trace/test/setup/mocha')
const agent = require('../../dd-trace/test/plugins/agent')
const { ERROR_MESSAGE, ERROR_TYPE, ERROR_STACK } = require('../../dd-trace/src/constants')
-const defaults = require('../../dd-trace/src/config/defaults')
+const { defaults } = require('../../dd-trace/src/config/defaults')
const { NODE_MAJOR } = require('../../../version')
const GRPC_SERVER_ERROR_STATUSES = defaults['grpc.server.error.statuses']
diff --git a/packages/datadog-plugin-jest/src/index.js b/packages/datadog-plugin-jest/src/index.js
index 45cfb7d2ec5..e73767b6a33 100644
--- a/packages/datadog-plugin-jest/src/index.js
+++ b/packages/datadog-plugin-jest/src/index.js
@@ -1,5 +1,8 @@
'use strict'
+// Capture real timers at module load time, before any test can install fake timers.
+const realSetTimeout = setTimeout
+
const CiPlugin = require('../../dd-trace/src/plugins/ci_plugin')
const { storage } = require('../../datadog-core')
const { getEnvironmentVariable, getValueFromEnvSources } = require('../../dd-trace/src/config/helper')
@@ -60,8 +63,7 @@ const CHILD_MESSAGE_END = 2
function withTimeout (promise, timeoutMs) {
return new Promise(resolve => {
- // Set a timeout to resolve after 1s
- setTimeout(resolve, timeoutMs)
+ realSetTimeout(resolve, timeoutMs)
// Also resolve if the original promise resolves
promise.then(resolve)
diff --git a/packages/datadog-plugin-kafkajs/src/batch-consumer.js b/packages/datadog-plugin-kafkajs/src/batch-consumer.js
index 7cc07bbac12..0c89d0e4147 100644
--- a/packages/datadog-plugin-kafkajs/src/batch-consumer.js
+++ b/packages/datadog-plugin-kafkajs/src/batch-consumer.js
@@ -8,20 +8,47 @@ class KafkajsBatchConsumerPlugin extends ConsumerPlugin {
static id = 'kafkajs'
static operation = 'consume-batch'
- start (ctx) {
- const { topic, messages, groupId, clusterId } = ctx.extractedArgs || ctx
+ bindStart (ctx) {
+ const { topic, partition, messages, groupId, clusterId } = ctx.extractedArgs || ctx
+
+ const span = this.startSpan({
+ resource: topic,
+ type: 'worker',
+ meta: {
+ component: this.constructor.id,
+ 'kafka.topic': topic,
+ 'kafka.cluster_id': clusterId,
+ 'messaging.destination.name': topic,
+ 'messaging.system': 'kafka',
+ },
+ metrics: {
+ 'kafka.partition': partition,
+ 'messaging.batch.message_count': messages.length,
+ },
+ }, ctx)
- if (!this.config.dsmEnabled) return
for (const message of messages) {
if (!message || !message.headers) continue
+
+ const headers = convertToTextMap(message.headers)
+ if (headers) {
+ const childOf = this.tracer.extract('text_map', headers)
+ if (childOf) {
+ span.addLink(childOf)
+ }
+ }
+
+ if (!this.config.dsmEnabled) continue
const payloadSize = getMessageSize(message)
- this.tracer.decodeDataStreamsContext(convertToTextMap(message.headers))
+ this.tracer.decodeDataStreamsContext(headers)
const edgeTags = ['direction:in', `group:${groupId}`, `topic:${topic}`, 'type:kafka']
if (clusterId) {
edgeTags.push(`kafka_cluster_id:${clusterId}`)
}
this.tracer.setCheckpoint(edgeTags, null, payloadSize)
}
+
+ return ctx.currentStore
}
}
diff --git a/packages/datadog-plugin-kafkajs/test/index.spec.js b/packages/datadog-plugin-kafkajs/test/index.spec.js
index 3bd95902c56..596482937ce 100644
--- a/packages/datadog-plugin-kafkajs/test/index.spec.js
+++ b/packages/datadog-plugin-kafkajs/test/index.spec.js
@@ -404,6 +404,123 @@ describe('Plugin', () => {
rawExpectedSchema.receive
)
})
+
+ describe('consumer (eachBatch)', () => {
+ let consumer
+ const batchMessages = [{ key: 'key1', value: 'test2' }, { key: 'key2', value: 'test3' }]
+
+ beforeEach(async () => {
+ consumer = kafka.consumer({ groupId: 'test-group' })
+ await consumer.connect()
+ await consumer.subscribe({ topic: testTopic, fromBeginning: true })
+ })
+
+ afterEach(async () => {
+ await consumer.disconnect()
+ })
+
+ it('should be instrumented', async () => {
+ const meta = {
+ 'span.kind': 'consumer',
+ component: 'kafkajs',
+ 'kafka.topic': testTopic,
+ 'messaging.destination.name': testTopic,
+ 'messaging.system': 'kafka',
+ }
+ if (clusterIdAvailable) meta['kafka.cluster_id'] = testKafkaClusterId
+
+ const expectedSpanPromise = expectSpanWithDefaults({
+ name: expectedSchema.receive.opName,
+ service: expectedSchema.receive.serviceName,
+ meta,
+ metrics: {
+ 'messaging.batch.message_count': batchMessages.length,
+ },
+ resource: testTopic,
+ error: 0,
+ type: 'worker',
+ })
+
+ await consumer.run({
+ eachBatch: () => {},
+ })
+ return Promise.all([sendMessages(kafka, testTopic, batchMessages), expectedSpanPromise])
+ })
+
+ it('should run the consumer in the context of the consumer span', done => {
+ const firstSpan = tracer.scope().active()
+
+ let eachBatch = async ({ batch }) => {
+ const currentSpan = tracer.scope().active()
+
+ try {
+ assert.notEqual(currentSpan, firstSpan)
+ assert.strictEqual(currentSpan.context()._name, expectedSchema.receive.opName)
+ done()
+ } catch (e) {
+ done(e)
+ } finally {
+ eachBatch = () => {} // avoid being called for each message
+ }
+ }
+
+ consumer.run({ eachBatch: (...args) => eachBatch(...args) })
+ .then(() => sendMessages(kafka, testTopic, batchMessages))
+ .catch(done)
+ })
+
+ it('should propagate context via span links', async () => {
+ const expectedSpanPromise = agent.assertSomeTraces(traces => {
+ const span = traces[0][0]
+ const links = span.meta['_dd.span_links'] ? JSON.parse(span.meta['_dd.span_links']) : []
+
+ assertObjectContains(span, {
+ name: expectedSchema.receive.opName,
+ service: expectedSchema.receive.serviceName,
+ resource: testTopic,
+ })
+
+ assert.strictEqual(links.length, batchMessages.length)
+ })
+
+ await consumer.run({ eachBatch: () => {} })
+ await Promise.all([sendMessages(kafka, testTopic, batchMessages), expectedSpanPromise])
+ })
+
+ it('should not fail when messages have headers without trace context', async () => {
+ const messagesWithHeaders = [
+ { key: 'key1', value: 'test1', headers: { 'x-custom-header': 'value' } },
+ ]
+ const meta = {
+ 'span.kind': 'consumer',
+ component: 'kafkajs',
+ 'kafka.topic': testTopic,
+ 'messaging.destination.name': testTopic,
+ 'messaging.system': 'kafka',
+ }
+ if (clusterIdAvailable) meta['kafka.cluster_id'] = testKafkaClusterId
+
+ const expectedSpanPromise = expectSpanWithDefaults({
+ name: expectedSchema.receive.opName,
+ service: expectedSchema.receive.serviceName,
+ meta,
+ resource: testTopic,
+ error: 0,
+ type: 'worker',
+ })
+
+ await consumer.run({ eachBatch: () => {} })
+ return Promise.all([sendMessages(kafka, testTopic, messagesWithHeaders), expectedSpanPromise])
+ })
+
+ withNamingSchema(
+ async () => {
+ await consumer.run({ eachBatch: () => {} })
+ await sendMessages(kafka, testTopic, batchMessages)
+ },
+ rawExpectedSchema.receive
+ )
+ })
})
})
})
diff --git a/packages/datadog-plugin-mocha/src/index.js b/packages/datadog-plugin-mocha/src/index.js
index b108e5c2476..4a108ecdd29 100644
--- a/packages/datadog-plugin-mocha/src/index.js
+++ b/packages/datadog-plugin-mocha/src/index.js
@@ -1,5 +1,8 @@
'use strict'
+// Capture real Date.now at module load time, before any test can install fake timers.
+const realDateNow = Date.now.bind(Date)
+
const CiPlugin = require('../../dd-trace/src/plugins/ci_plugin')
const { storage } = require('../../datadog-core')
const { getValueFromEnvSources } = require('../../dd-trace/src/config/helper')
@@ -323,8 +326,8 @@ class MochaPlugin extends CiPlugin {
this.runningTestProbe = { file, line }
this.testErrorStackIndex = stackIndex
test._ddShouldWaitForHitProbe = true
- const waitUntil = Date.now() + BREAKPOINT_SET_GRACE_PERIOD_MS
- while (Date.now() < waitUntil) {
+ const waitUntil = realDateNow() + BREAKPOINT_SET_GRACE_PERIOD_MS
+ while (realDateNow() < waitUntil) {
// TODO: To avoid a race condition, we should wait until `probeInformation.setProbePromise` has resolved.
// However, Mocha doesn't have a mechanism for waiting asyncrounously here, so for now, we'll have to
// fall back to a fixed syncronous delay.
diff --git a/packages/datadog-plugin-next/src/index.js b/packages/datadog-plugin-next/src/index.js
index fa4216cdb7b..04aea364971 100644
--- a/packages/datadog-plugin-next/src/index.js
+++ b/packages/datadog-plugin-next/src/index.js
@@ -10,7 +10,6 @@ const errorPages = new Set(['/404', '/500', '/_error', '/_not-found', '/_not-fou
class NextPlugin extends ServerPlugin {
static id = 'next'
- #requestsBySpanId = new WeakMap()
constructor (...args) {
super(...args)
@@ -35,11 +34,7 @@ class NextPlugin extends ServerPlugin {
analyticsSampler.sample(span, this.config.measured, true)
- // Store request by span ID to handle cases where child spans are activated
- const spanId = span.context()._spanId
- this.#requestsBySpanId.set(spanId, req)
-
- return { ...store, span }
+ return { ...store, span, req }
}
error ({ span, error }) {
@@ -90,14 +85,7 @@ class NextPlugin extends ServerPlugin {
if (!store) return
- const span = store.span
-
- const spanId = span.context()._spanId
- const parentSpanId = span.context()._parentId
-
- // Try current span first, then parent span.
- // This handles cases where pageLoad runs in a child span context
- const req = this.#requestsBySpanId.get(spanId) ?? this.#requestsBySpanId.get(parentSpanId)
+ const { span, req } = store
// safeguard against missing req in complicated timeout scenarios
if (!req) return
diff --git a/packages/datadog-plugin-openai/src/services.js b/packages/datadog-plugin-openai/src/services.js
index f3ca56911af..917aa260544 100644
--- a/packages/datadog-plugin-openai/src/services.js
+++ b/packages/datadog-plugin-openai/src/services.js
@@ -20,6 +20,7 @@ module.exports.init = function (tracerConfig) {
`env:${tracerConfig.tags.env}`,
`version:${tracerConfig.tags.version}`,
],
+ lookup: tracerConfig.lookup,
})
: new NoopDogStatsDClient()
diff --git a/packages/datadog-plugin-openai/test/services.spec.js b/packages/datadog-plugin-openai/test/services.spec.js
index 99673240f49..cd984d320e9 100644
--- a/packages/datadog-plugin-openai/test/services.spec.js
+++ b/packages/datadog-plugin-openai/test/services.spec.js
@@ -1,15 +1,60 @@
'use strict'
+const sinon = require('sinon')
+const proxyquire = require('proxyquire')
+
const services = require('../src/services')
const { getConfigFresh } = require('../../dd-trace/test/helpers/config')
describe('Plugin', () => {
describe('openai services', () => {
- describe('when unconfigured', () => {
- afterEach(() => {
- services.shutdown()
+ afterEach(() => {
+ services.shutdown()
+ })
+
+ it('should initialize DogStatsDClient with explicit config values', () => {
+ const flush = sinon.stub()
+ const DogStatsDClient = sinon.stub().returns({
+ flush,
+ })
+ const ExternalLogger = sinon.stub().returns({
+ log: sinon.stub(),
+ })
+ const NoopDogStatsDClient = sinon.stub()
+ const NoopExternalLogger = sinon.stub()
+ const proxiedServices = proxyquire('../src/services', {
+ '../../dd-trace/src/dogstatsd': { DogStatsDClient },
+ '../../dd-trace/src/noop/dogstatsd': NoopDogStatsDClient,
+ '../../dd-trace/src/external-logger/src': {
+ ExternalLogger,
+ NoopExternalLogger,
+ },
+ })
+ const config = getConfigFresh({
+ env: 'prod',
+ hostname: 'foo',
+ service: 'bar',
+ version: '1.2.3',
})
+ proxiedServices.init(config)
+
+ sinon.assert.calledOnceWithExactly(DogStatsDClient, {
+ host: config.dogstatsd.hostname,
+ lookup: config.lookup,
+ port: config.dogstatsd.port,
+ tags: [
+ 'service:bar',
+ 'env:prod',
+ 'version:1.2.3',
+ ],
+ })
+ sinon.assert.notCalled(NoopDogStatsDClient)
+
+ proxiedServices.shutdown()
+ })
+
+ describe('when unconfigured', () => {
it('dogstatsd does not throw when missing .dogstatsd', () => {
const service = services.init(getConfigFresh({
hostname: 'foo',
diff --git a/packages/datadog-plugin-prisma/test/index.spec.js b/packages/datadog-plugin-prisma/test/index.spec.js
index 6cd6ec503fc..89a054cab46 100644
--- a/packages/datadog-plugin-prisma/test/index.spec.js
+++ b/packages/datadog-plugin-prisma/test/index.spec.js
@@ -326,16 +326,23 @@ describe('Plugin', () => {
supportedRange = '>=6.16.0 <7.0.0'
}
withVersions('prisma', ['@prisma/client'], supportedRange, async (range, _moduleName_, version) => {
+ // Run prisma generate once per (config, version) pair instead of once per describe block.
+ // All three describe blocks below use the same schema + version, so the output is identical.
+ before(async function () {
+ this.timeout(10000)
+ clearPrismaEnv()
+ setPrismaEnv(config)
+ const cwd = await copySchemaToVersionDir(config.schema, range)
+ execPrismaGenerate(config, cwd)
+ })
+
describe(`without configuration ${config.schema}`, () => {
before(async function () {
this.timeout(10000)
clearPrismaEnv()
setPrismaEnv(config)
- const cwd = await copySchemaToVersionDir(config.schema, range)
-
await agent.load(['prisma', 'pg'])
- execPrismaGenerate(config, cwd)
prisma = loadPrismaModule(config, range)
prismaClient = createPrismaClient(prisma, config)
@@ -514,10 +521,6 @@ describe('Plugin', () => {
clearPrismaEnv()
setPrismaEnv(config)
- const cwd = await copySchemaToVersionDir(config.schema, range)
-
- execPrismaGenerate(config, cwd)
-
require('../../dd-trace')
prisma = loadPrismaModule(config, range)
@@ -537,10 +540,6 @@ describe('Plugin', () => {
clearPrismaEnv()
setPrismaEnv(config)
- const cwd = await copySchemaToVersionDir(config.schema, range)
-
- execPrismaGenerate(config, cwd)
-
const pluginConfig = {
service: 'custom',
}
diff --git a/packages/datadog-plugin-prisma/test/integration-test/client.spec.js b/packages/datadog-plugin-prisma/test/integration-test/client.spec.js
index 063ab2bd6b2..0ba0625656c 100644
--- a/packages/datadog-plugin-prisma/test/integration-test/client.spec.js
+++ b/packages/datadog-plugin-prisma/test/integration-test/client.spec.js
@@ -207,7 +207,6 @@ describe('esm', () => {
let agent
let proc
prismaClientConfigs.forEach(config => {
- // if (!config.name.includes('prisma-generator v7 mssql adapter (url)')) return
describe(config.name, () => {
const isNodeSupported = semifies(semver.clean(process.version), '>=20.19.0')
const isPrismaV7 = config.configFile
@@ -272,7 +271,7 @@ describe('esm', () => {
' --target ES2023' +
' --module ESNext' +
' --strict true' +
- ' --moduleResolution node' +
+ ' --moduleResolution bundler' +
' --esModuleInterop true'
)
}
diff --git a/packages/datadog-plugin-prisma/test/integration-test/server-output.mjs b/packages/datadog-plugin-prisma/test/integration-test/server-output.mjs
index 762e29b4843..0b1d135011e 100644
--- a/packages/datadog-plugin-prisma/test/integration-test/server-output.mjs
+++ b/packages/datadog-plugin-prisma/test/integration-test/server-output.mjs
@@ -14,3 +14,5 @@ await prismaClient.user.findUnique({
id: user.id,
},
})
+
+await prismaClient.$disconnect()
diff --git a/packages/datadog-plugin-prisma/test/integration-test/server-ts-v6.mjs b/packages/datadog-plugin-prisma/test/integration-test/server-ts-v6.mjs
index f51427a7282..3342a206731 100644
--- a/packages/datadog-plugin-prisma/test/integration-test/server-ts-v6.mjs
+++ b/packages/datadog-plugin-prisma/test/integration-test/server-ts-v6.mjs
@@ -15,3 +15,5 @@ await prismaClient.user.findUnique({
id: user.id,
},
})
+
+await prismaClient.$disconnect()
diff --git a/packages/datadog-plugin-prisma/test/integration-test/server-ts-v7-mssql.mjs b/packages/datadog-plugin-prisma/test/integration-test/server-ts-v7-mssql.mjs
index 52875851ed5..5366072409e 100644
--- a/packages/datadog-plugin-prisma/test/integration-test/server-ts-v7-mssql.mjs
+++ b/packages/datadog-plugin-prisma/test/integration-test/server-ts-v7-mssql.mjs
@@ -31,3 +31,5 @@ await prismaClient.user.findUnique({
id: user.id,
},
})
+
+await prismaClient.$disconnect()
diff --git a/packages/datadog-plugin-prisma/test/integration-test/server-ts-v7.mjs b/packages/datadog-plugin-prisma/test/integration-test/server-ts-v7.mjs
index e2ff203e663..b39dd37785b 100644
--- a/packages/datadog-plugin-prisma/test/integration-test/server-ts-v7.mjs
+++ b/packages/datadog-plugin-prisma/test/integration-test/server-ts-v7.mjs
@@ -25,3 +25,5 @@ await prismaClient.user.findUnique({
id: user.id,
},
})
+
+await prismaClient.$disconnect()
diff --git a/packages/datadog-plugin-prisma/test/integration-test/server.mjs b/packages/datadog-plugin-prisma/test/integration-test/server.mjs
index 61f8d117aa8..28eb0909f49 100644
--- a/packages/datadog-plugin-prisma/test/integration-test/server.mjs
+++ b/packages/datadog-plugin-prisma/test/integration-test/server.mjs
@@ -14,3 +14,5 @@ await prismaClient.user.findUnique({
id: user.id,
},
})
+
+await prismaClient.$disconnect()
diff --git a/packages/datadog-webpack/index.js b/packages/datadog-webpack/index.js
index 6ee82386e6c..0a5f91be719 100644
--- a/packages/datadog-webpack/index.js
+++ b/packages/datadog-webpack/index.js
@@ -21,12 +21,12 @@ for (const hook of Object.values(hooks)) {
const modulesOfInterest = new Set()
-for (const instrumentation of Object.values(instrumentations)) {
+for (const [name, instrumentation] of Object.entries(instrumentations)) {
for (const entry of instrumentation) {
if (entry.file) {
- modulesOfInterest.add(`${entry.name}/${entry.file}`) // e.g. "redis/my/file.js"
+ modulesOfInterest.add(`${name}/${entry.file}`) // e.g. "redis/my/file.js"
} else {
- modulesOfInterest.add(entry.name) // e.g. "redis"
+ modulesOfInterest.add(name) // e.g. "redis"
}
}
}
diff --git a/packages/dd-trace/index.js b/packages/dd-trace/index.js
index 6f7ee42d5a6..c4175c20a30 100644
--- a/packages/dd-trace/index.js
+++ b/packages/dd-trace/index.js
@@ -1,23 +1,16 @@
'use strict'
if (!global._ddtrace) {
- const TracerProxy = require('./src')
-
- Object.defineProperty(global, '_ddtrace', {
- value: new TracerProxy(),
- enumerable: false,
- configurable: true,
- writable: true,
- })
-
const ddTraceSymbol = Symbol.for('dd-trace')
+ // Set up beforeExitHandlers before loading the tracer so that modules loaded
+ // during require('./src') can register handlers.
Object.defineProperty(globalThis, ddTraceSymbol, {
value: {
beforeExitHandlers: new Set(),
},
enumerable: false,
- configurable: true, // Allow this to be overridden by loading the tracer
+ configurable: true,
writable: false,
})
@@ -29,6 +22,15 @@ if (!global._ddtrace) {
}
})
+ const TracerProxy = require('./src')
+
+ Object.defineProperty(global, '_ddtrace', {
+ value: new TracerProxy(),
+ enumerable: false,
+ configurable: true,
+ writable: true,
+ })
+
global._ddtrace.default = global._ddtrace
global._ddtrace.tracer = global._ddtrace
}
diff --git a/packages/dd-trace/src/agent/url.js b/packages/dd-trace/src/agent/url.js
index 82f734d9a9a..f2460ce24d6 100644
--- a/packages/dd-trace/src/agent/url.js
+++ b/packages/dd-trace/src/agent/url.js
@@ -1,7 +1,7 @@
'use strict'
const { URL, format } = require('url')
-const defaults = require('../config/defaults')
+const { defaults } = require('../config/defaults')
module.exports = { getAgentUrl }
@@ -12,7 +12,7 @@ module.exports = { getAgentUrl }
/**
* Gets the agent URL from config, constructing it from hostname/port if needed
- * @param {ReturnType} config - Tracer configuration object
+ * @param {Partial} config - Tracer configuration object
* @returns {URL} The agent URL
*/
function getAgentUrl (config) {
diff --git a/packages/dd-trace/src/aiguard/sdk.js b/packages/dd-trace/src/aiguard/sdk.js
index 64886ba092a..cbd3a486199 100644
--- a/packages/dd-trace/src/aiguard/sdk.js
+++ b/packages/dd-trace/src/aiguard/sdk.js
@@ -57,6 +57,10 @@ class AIGuard extends NoopAIGuard {
#maxContentSize
#meta
+ /**
+ * @param {import('../tracer')} tracer - Tracer instance
+ * @param {import('../config/config-base')} config - Tracer configuration
+ */
constructor (tracer, config) {
super()
diff --git a/packages/dd-trace/src/appsec/blocking.js b/packages/dd-trace/src/appsec/blocking.js
index 3615e7ef2dc..a21aab0b76d 100644
--- a/packages/dd-trace/src/appsec/blocking.js
+++ b/packages/dd-trace/src/appsec/blocking.js
@@ -164,6 +164,9 @@ function getBlockingAction (actions) {
return actions?.redirect_request || actions?.block_request
}
+/**
+ * @param {import('../config/config-base')} config - Tracer configuration
+ */
function setTemplates (config) {
templateHtml = config.appsec.blockedTemplateHtml || blockedTemplates.html
diff --git a/packages/dd-trace/src/appsec/iast/iast-plugin.js b/packages/dd-trace/src/appsec/iast/iast-plugin.js
index cbabe49ff3c..3323f8b62e6 100644
--- a/packages/dd-trace/src/appsec/iast/iast-plugin.js
+++ b/packages/dd-trace/src/appsec/iast/iast-plugin.js
@@ -168,7 +168,7 @@ class IastPlugin extends Plugin {
loadChannel.subscribe(this.onInstrumentationLoadedListener)
// check for already instrumented modules
- for (const name in instrumentations) {
+ for (const name of Object.keys(instrumentations)) {
this._onInstrumentationLoaded(name)
}
}
diff --git a/packages/dd-trace/src/appsec/iast/vulnerabilities-formatter/evidence-redaction/sensitive-handler.js b/packages/dd-trace/src/appsec/iast/vulnerabilities-formatter/evidence-redaction/sensitive-handler.js
index 5039f2bb544..1964e333d98 100644
--- a/packages/dd-trace/src/appsec/iast/vulnerabilities-formatter/evidence-redaction/sensitive-handler.js
+++ b/packages/dd-trace/src/appsec/iast/vulnerabilities-formatter/evidence-redaction/sensitive-handler.js
@@ -3,7 +3,7 @@
const log = require('../../../../log')
const vulnerabilities = require('../../vulnerabilities')
-const defaults = require('../../../../config/defaults')
+const { defaults } = require('../../../../config/defaults')
const { contains, intersects, remove } = require('./range-utils')
diff --git a/packages/dd-trace/src/appsec/iast/vulnerabilities-formatter/utils.js b/packages/dd-trace/src/appsec/iast/vulnerabilities-formatter/utils.js
index 6e1c483a967..f9628c2673b 100644
--- a/packages/dd-trace/src/appsec/iast/vulnerabilities-formatter/utils.js
+++ b/packages/dd-trace/src/appsec/iast/vulnerabilities-formatter/utils.js
@@ -2,7 +2,7 @@
const crypto = require('crypto')
-const defaults = require('../../../config/defaults')
+const { defaults } = require('../../../config/defaults')
const STRINGIFY_RANGE_KEY = 'DD_' + crypto.randomBytes(20).toString('hex')
const STRINGIFY_SENSITIVE_KEY = STRINGIFY_RANGE_KEY + 'SENSITIVE'
diff --git a/packages/dd-trace/src/appsec/remote_config.js b/packages/dd-trace/src/appsec/remote_config.js
index a56465bd3a8..5db2d30f8d8 100644
--- a/packages/dd-trace/src/appsec/remote_config.js
+++ b/packages/dd-trace/src/appsec/remote_config.js
@@ -76,6 +76,7 @@ function enableOrDisableAppsec (action, rcConfig, config, appsec) {
appsec.disable()
}
+  // TODO: Use configWithOrigin / generateTelemetry instead of manually constructing the change.
updateConfig([
{
name: 'appsec.enabled',
diff --git a/packages/dd-trace/src/appsec/sdk/index.js b/packages/dd-trace/src/appsec/sdk/index.js
index 1b07e25c902..499079c2b4f 100644
--- a/packages/dd-trace/src/appsec/sdk/index.js
+++ b/packages/dd-trace/src/appsec/sdk/index.js
@@ -26,6 +26,10 @@ class EventTrackingV2 {
}
class AppsecSdk {
+ /**
+ * @param {import('../../tracer')} tracer - Tracer instance
+ * @param {import('../../config/config-base')} config - Tracer configuration
+ */
constructor (tracer, config) {
this._tracer = tracer
if (config) {
diff --git a/packages/dd-trace/src/ci-visibility/dynamic-instrumentation/index.js b/packages/dd-trace/src/ci-visibility/dynamic-instrumentation/index.js
index 273d9091b98..5df5b9c8b59 100644
--- a/packages/dd-trace/src/ci-visibility/dynamic-instrumentation/index.js
+++ b/packages/dd-trace/src/ci-visibility/dynamic-instrumentation/index.js
@@ -11,6 +11,9 @@ const probeIdToResolveBreakpointSet = new Map()
const probeIdToResolveBreakpointRemove = new Map()
class TestVisDynamicInstrumentation {
+ /**
+ * @param {import('../../config/config-base')} config - Tracer configuration
+ */
constructor (config) {
this._config = config
this.worker = null
@@ -83,7 +86,6 @@ class TestVisDynamicInstrumentation {
DD_TRACE_ENABLED: 'false',
DD_TEST_FAILED_TEST_REPLAY_ENABLED: 'false',
DD_CIVISIBILITY_MANUAL_API_ENABLED: 'false',
- DD_TRACING_ENABLED: 'false',
DD_INSTRUMENTATION_TELEMETRY_ENABLED: 'false',
},
workerData: {
@@ -150,6 +152,9 @@ class TestVisDynamicInstrumentation {
let dynamicInstrumentation
+/**
+ * @param {import('../../config/config-base')} config - Tracer configuration
+ */
module.exports = function createAndGetTestVisDynamicInstrumentation (config) {
if (dynamicInstrumentation) {
return dynamicInstrumentation
diff --git a/packages/dd-trace/src/ci-visibility/test-api-manual/test-api-manual-plugin.js b/packages/dd-trace/src/ci-visibility/test-api-manual/test-api-manual-plugin.js
index 90602ba0a1a..c6883eb0c8d 100644
--- a/packages/dd-trace/src/ci-visibility/test-api-manual/test-api-manual-plugin.js
+++ b/packages/dd-trace/src/ci-visibility/test-api-manual/test-api-manual-plugin.js
@@ -54,6 +54,10 @@ class TestApiManualPlugin extends CiPlugin {
})
}
+ /**
+ * @param {import('../../config/config-base')} config - Tracer configuration
+ * @param {boolean} shouldGetEnvironmentData - Whether to get environment data
+ */
configure (config, shouldGetEnvironmentData) {
this._config = config
super.configure(config, shouldGetEnvironmentData)
diff --git a/packages/dd-trace/src/config/defaults.js b/packages/dd-trace/src/config/defaults.js
index 4c2af2c9bc2..041783dceda 100644
--- a/packages/dd-trace/src/config/defaults.js
+++ b/packages/dd-trace/src/config/defaults.js
@@ -1,177 +1,347 @@
'use strict'
-const pkg = require('../pkg')
-const { isFalse, isTrue } = require('../util')
-const { DD_MAJOR } = require('../../../../version')
-const { getEnvironmentVariable: getEnv } = require('./helper')
+const dns = require('dns')
+const util = require('util')
+const { DD_MAJOR } = require('../../../../version')
+const { parsers, transformers, telemetryTransformers, setWarnInvalidValue } = require('./parsers')
const {
supportedConfigurations,
} = /** @type {import('./helper').SupportedConfigurationsJson} */ (require('./supported-configurations.json'))
-const service = getEnv('AWS_LAMBDA_FUNCTION_NAME') ||
- getEnv('FUNCTION_NAME') || // Google Cloud Function Name set by deprecated runtimes
- getEnv('K_SERVICE') || // Google Cloud Function Name set by newer runtimes
- getEnv('WEBSITE_SITE_NAME') || // set by Azure Functions
- pkg.name ||
- 'node'
+let log
+let seqId = 0
+const configWithOrigin = new Map()
+const parseErrors = new Map()
+
+if (DD_MAJOR >= 6) {
+ // Programmatic configuration of DD_IAST_SECURITY_CONTROLS_CONFIGURATION is not supported
+ // in newer major versions. This is special handled here until a better solution is found.
+ // TODO: Remove the programmatic configuration from supported-configurations.json once v5 is not supported anymore.
+ supportedConfigurations.DD_IAST_SECURITY_CONTROLS_CONFIGURATION[0].internalPropertyName =
+ supportedConfigurations.DD_IAST_SECURITY_CONTROLS_CONFIGURATION[0].configurationNames?.[0]
+ delete supportedConfigurations.DD_IAST_SECURITY_CONTROLS_CONFIGURATION[0].configurationNames
+} else {
+ // Default value for DD_TRACE_STARTUP_LOGS is 'false' in older major versions.
+ // This is special handled here until a better solution is found.
+ // TODO: Remove this here once v5 is not supported anymore.
+ supportedConfigurations.DD_TRACE_STARTUP_LOGS[0].default = 'false'
+}
/**
- * @param {string|null} raw
- * @param {string} type
- * @returns {string|number|boolean|Record|unknown[]|undefined}
+ * Warns about an invalid value for an option and records the error so it can be attached to the telemetry entry.
+ * Logging and recording happen at most once per canonical option name + source combination.
+ *
+ * @param {unknown} value - The value that is invalid.
+ * @param {string} optionName - The name of the option.
+ * @param {string} source - The source of the value.
+ * @param {string} baseMessage - The base message to use for the warning.
+ * @param {Error} [error] - An error that was thrown while parsing the value.
*/
-function parseDefaultByType (raw, type) {
- if (raw === null) {
- return
+function warnInvalidValue (value, optionName, source, baseMessage, error) {
+ const canonicalName = (optionsTable[optionName]?.canonicalName ?? optionName) + source
+ // Warn and record at most once per canonical option name + source combination.
+ if (!parseErrors.has(canonicalName)) {
+ // TODO: Rephrase: It will fallback to former source (or default if not set)
+ let message = `${baseMessage}: ${util.inspect(value)} for ${optionName} (source: ${source}), picked default`
+ if (error) {
+ error.stack = error.toString()
+ message += `\n\n${util.inspect(error)}`
+ }
+ parseErrors.set(canonicalName, { message })
+ log ??= require('../log')
+ const logLevel = error ? 'error' : 'warn'
+ log[logLevel](message)
}
+}
+setWarnInvalidValue(warnInvalidValue)
- switch (type) {
- case 'boolean':
- if (isTrue(raw)) return true
- if (isFalse(raw)) return false
- // TODO: What should we do with these?
- return
- case 'int':
- case 'decimal': {
- return Number(raw)
- }
- case 'array': {
- if (!raw || raw.length === 0) return []
- // TODO: Make the parsing a helper that is reused.
- return raw.split(',').map(item => {
- const colonIndex = item.indexOf(':')
- if (colonIndex === -1) {
- return item.trim()
- }
- const key = item.slice(0, colonIndex).trim()
- const value = item.slice(colonIndex + 1).trim()
- return `${key}:${value}`
- })
+/** @type {import('./config-types').ConfigDefaults} */
+const defaults = {
+ instrumentationSource: 'manual',
+ isServiceUserProvided: false,
+ isServiceNameInferred: true,
+ plugins: true,
+ isCiVisibility: false,
+ lookup: dns.lookup,
+ logger: undefined,
+}
+
+for (const [name, value] of Object.entries(defaults)) {
+ configWithOrigin.set(`${name}default`, {
+ name,
+ value: value ?? null,
+ origin: 'default',
+ seq_id: seqId++,
+ })
+}
+
+/**
+ * @param {unknown} value
+ * @param {string} origin
+ * @param {string} optionName
+ */
+function generateTelemetry (value = null, origin, optionName) {
+ const { type, canonicalName = optionName } = configurationsTable[optionName] ?? { type: typeof value }
+ // TODO: Consider adding a preParser hook to the parsers object.
+ if (canonicalName === 'OTEL_RESOURCE_ATTRIBUTES') {
+ value = telemetryTransformers.MAP(value)
+ }
+ // TODO: Should we not send defaults to telemetry to reduce size?
+ // TODO: How to handle aliases/actual names in the future? Optional fields? Normalize the name at intake?
+ // TODO: Validate that space separated tags are parsed by the backend. Optimizations would be possible with that.
+ // TODO: How to handle telemetry reporting for aliases?
+ if (value !== null) {
+ if (telemetryTransformers[type]) {
+ value = telemetryTransformers[type](value)
+ } else if (typeof value === 'object' && value !== null) {
+ value = value instanceof URL
+ ? String(value)
+ : JSON.stringify(value)
+ } else if (typeof value === 'function') {
+ value = value.name || 'function'
}
- case 'map': {
- if (!raw || raw.length === 0) return {}
- // TODO: Make the parsing a helper that is reused.
- /** @type {Record} */
- const entries = {}
- for (const item of raw.split(',')) {
- const colonIndex = item.indexOf(':')
- if (colonIndex === -1) {
- const key = item.trim()
- if (key.length > 0) {
- entries[key] = ''
+ }
+ const telemetryEntry = {
+ name: canonicalName,
+ value,
+ origin,
+ seq_id: seqId++,
+ }
+ const error = parseErrors.get(`${canonicalName}${origin}`)
+ if (error) {
+ parseErrors.delete(`${canonicalName}${origin}`)
+ telemetryEntry.error = error
+ }
+ configWithOrigin.set(`${canonicalName}${origin}`, telemetryEntry)
+}
+
+// Iterate over the object and always handle the leaf properties as lookup.
+// Example entries:
+//
+// cloudPayloadTagging: {
+// nestedProperties: [
+// 'rules',
+// 'requestsEnabled',
+// 'responses',
+// ],
+// option: {
+// property: 'rules',
+// parser: parsers.JSON,
+// canonicalName: 'DD_TRACE_CLOUD_REQUEST_PAYLOAD_TAGGING',
+// transformer: transformers.toCamelCase,
+// },
+// },
+// 'cloudPayloadTagging.responses': {
+// nestedProperties: [
+// 'enabled',
+// ],
+// },
+// 'cloudPayloadTagging.rules': {},
+// 'cloudPayloadTagging.requestsEnabled': {},
+// 'cloudPayloadTagging.responses.enabled': {}
+const optionsTable = {
+ // Additional properties that are not supported by the supported-configurations.json file.
+ lookup: {
+ transformer (value) {
+ if (typeof value === 'function') {
+ return value
+ }
+ },
+ property: 'lookup',
+ },
+ logger: {
+ transformer (object) {
+ // Create lazily to avoid the overhead when not used.
+ // Match at least one log level.
+ const knownLogLevels = new Set(supportedConfigurations.DD_TRACE_LOG_LEVEL[0].allowed?.split('|'))
+ if (typeof object !== 'object' || object === null) {
+ return object
+ }
+ let matched = false
+ for (const logLevel of knownLogLevels) {
+ if (object[logLevel] !== undefined) {
+ if (typeof object[logLevel] !== 'function') {
+ warnInvalidValue(object[logLevel], 'logger', 'default', `Invalid log level ${logLevel}`)
+ return
}
- continue
- }
- const key = item.slice(0, colonIndex).trim()
- const value = item.slice(colonIndex + 1).trim()
- if (key.length > 0) {
- entries[key] = value
+ matched = true
}
}
- return entries
- }
- default:
- return raw
+ if (matched) {
+ return object
+ }
+ },
+ property: 'logger',
+ },
+ isCiVisibility: {
+ property: 'isCiVisibility',
+ },
+ plugins: {
+ property: 'plugins',
+ },
+}
+
+const parser = (value, optionName, source) => {
+ const { type, canonicalName = optionName } = configurationsTable[optionName]
+ const parsed = parsers[type](value, canonicalName)
+ if (parsed === undefined) {
+ warnInvalidValue(value, optionName, source, `Invalid ${type} input`)
}
+ return parsed
}
-/** @type {Record} */
-const metadataDefaults = {}
-for (const entries of Object.values(supportedConfigurations)) {
+/**
+ * @template {import('./config-types').ConfigPath} TPath
+ * @type {Partial<Record<TPath, { parser: (value: unknown, optionName: string, source: string) => unknown,
+ * canonicalName?: string,
+ * transformer?: (value: unknown, optionName: string, source: string) => unknown,
+ * telemetryTransformer?: (value: unknown) => unknown
+ * }>>} ConfigurationsTable
+ */
+const configurationsTable = {}
+
+// One way aliases. Must be applied in apply calculated entries.
+const fallbackConfigurations = new Map()
+
+const regExps = {}
+
+for (const [canonicalName, entries] of Object.entries(supportedConfigurations)) {
+ if (entries.length !== 1) {
+ // TODO: Determine if we really want to support multiple entries for a canonical name.
+ // This would be needed to show official support for multiple diverging implementations
+ // at a time by checking for another configuration that is not the canonical name.
+ throw new Error(
+ `Multiple entries found for canonical name: ${canonicalName}. ` +
+ 'This is currently not supported and must be implemented, if needed.'
+ )
+ }
for (const entry of entries) {
- // TODO: Replace $dynamic with method names that would be called and that
- // are also called when the user passes through the value. That way the
- // handling is unified and methods can be declared as default.
- // The name of that method should be expressive for users.
- // TODO: Add handling for all environment variable names. They should not
- // need a configuration name for being listed with their default.
- if (!Array.isArray(entry.configurationNames)) {
- continue
+ const configurationNames = entry.internalPropertyName ? [entry.internalPropertyName] : entry.configurationNames
+ const fullPropertyName = configurationNames?.[0] ?? canonicalName
+ const type = entry.type.toUpperCase()
+
+ let transformer = transformers[entry.transform]
+ if (entry.allowed) {
+ regExps[entry.allowed] ??= new RegExp(`^(${entry.allowed})$`, 'i')
+ const allowed = regExps[entry.allowed]
+ const originalTransform = transformer
+ transformer = (value, optionName, source) => {
+ if (!allowed.test(value)) {
+ warnInvalidValue(value, optionName, source, 'Invalid value')
+ return
+ }
+ if (originalTransform) {
+ value = originalTransform(value)
+ }
+ return value
+ }
}
- const parsedValue = parseDefaultByType(entry.default, entry.type)
- for (const configurationName of entry.configurationNames) {
- metadataDefaults[configurationName] = entry.default === null ? undefined : parsedValue
+ const option = { parser, type }
+
+ if (fullPropertyName !== canonicalName) {
+ option.property = fullPropertyName
+ option.canonicalName = canonicalName
+ configurationsTable[fullPropertyName] = option
+ }
+ if (transformer) {
+ option.transformer = transformer
+ }
+ if (entry.configurationNames) {
+ addOption(option, type, entry.configurationNames)
+ }
+ configurationsTable[canonicalName] = option
+
+ if (entry.default === null) {
+ defaults[fullPropertyName] = undefined
+ } else {
+ let parsedDefault = parser(entry.default, fullPropertyName, 'default')
+ if (entry.transform) {
+ parsedDefault = transformer(parsedDefault, fullPropertyName, 'default')
+ }
+ defaults[fullPropertyName] = parsedDefault
+ }
+ generateTelemetry(defaults[fullPropertyName], 'default', fullPropertyName)
+
+ if (entry.aliases) {
+ for (const alias of entry.aliases) {
+ if (!supportedConfigurations[alias]) {
+ // An actual alias has no matching entry
+ continue
+ }
+ if (!supportedConfigurations[alias].aliases?.includes(canonicalName)) {
+ // Alias will be replaced with the full property name of the alias, if it exists.
+ fallbackConfigurations.set(fullPropertyName, alias)
+ }
+ }
}
}
}
-// Defaults required by JS config merge/applyCalculated that are not represented in supported-configurations.
-const defaultsWithoutSupportedConfigurationEntry = {
- 'cloudPayloadTagging.rules': [],
- 'cloudPayloadTagging.requestsEnabled': false,
- 'cloudPayloadTagging.responsesEnabled': false,
- isAzureFunction: false,
- isCiVisibility: false,
- isGCPFunction: false,
- instrumentationSource: 'manual',
- isServiceUserProvided: false,
- isServiceNameInferred: true,
- lookup: undefined,
- plugins: true,
+// Replace the alias with the canonical property name.
+for (const [fullPropertyName, alias] of fallbackConfigurations) {
+ if (configurationsTable[alias].property) {
+ fallbackConfigurations.set(fullPropertyName, configurationsTable[alias].property)
+ }
}
-// These values are documented in supported-configurations as CI Visibility
-// defaults. Keep startup baseline false and let #applyCalculated() switch them
-// when CI Visibility is active.
-// TODO: These entries should be removed. They are off by default
-// because they rely on other configs.
-const defaultsWithConditionalRuntimeBehavior = {
- startupLogs: DD_MAJOR >= 6,
- isGitUploadEnabled: false,
- isImpactedTestsEnabled: false,
- isIntelligentTestRunnerEnabled: false,
- isManualApiEnabled: false,
- isTestManagementEnabled: false,
- // TODO: These are not conditional, they would just be of type number.
- 'dogstatsd.port': '8125',
- port: '8126',
- // Override due to expecting numbers, not strings. TODO: Replace later.
- 'grpc.client.error.statuses': [
- 1,
- 2,
- 3,
- 4,
- 5,
- 6,
- 7,
- 8,
- 9,
- 10,
- 11,
- 12,
- 13,
- 14,
- 15,
- 16,
- ],
- 'grpc.server.error.statuses': [
- 2,
- 3,
- 4,
- 5,
- 6,
- 7,
- 8,
- 9,
- 10,
- 11,
- 12,
- 13,
- 14,
- 15,
- 16,
- ],
-}
+function addOption (option, type, configurationNames) {
+ for (const name of configurationNames) {
+ let index = -1
+ let lastNestedProperties
+ while (true) {
+ const nextIndex = name.indexOf('.', index + 1)
+ const intermediateName = nextIndex === -1 ? name : name.slice(0, nextIndex)
+ if (lastNestedProperties) {
+ lastNestedProperties.add(intermediateName.slice(index + 1))
+ }
-/** @type {Record} */
-const defaults = {
- ...defaultsWithoutSupportedConfigurationEntry,
- ...metadataDefaults,
- ...defaultsWithConditionalRuntimeBehavior,
- service,
- version: pkg.version,
+ if (nextIndex === -1) {
+ if (optionsTable[name]) {
+ if (optionsTable[name].nestedProperties && !optionsTable[name].option) {
+ optionsTable[name].option = option
+ break
+ }
+ throw new Error(`Duplicate configuration name: ${name}`)
+ }
+ optionsTable[name] = option
+ break
+ }
+
+ lastNestedProperties = new Set()
+ index = nextIndex
+
+ if (!optionsTable[intermediateName]) {
+ optionsTable[intermediateName] = {
+ nestedProperties: lastNestedProperties,
+ }
+ } else if (optionsTable[intermediateName].nestedProperties) {
+ lastNestedProperties = optionsTable[intermediateName].nestedProperties
+ } else {
+ optionsTable[intermediateName] = {
+ nestedProperties: lastNestedProperties,
+ option: optionsTable[intermediateName],
+ }
+ }
+ }
+ }
}
-module.exports = defaults
+module.exports = {
+ configurationsTable,
+
+ defaults,
+
+ fallbackConfigurations,
+
+ optionsTable,
+
+ configWithOrigin,
+
+ parseErrors,
+
+ generateTelemetry,
+}
diff --git a/packages/dd-trace/src/config/generated-config-types.d.ts b/packages/dd-trace/src/config/generated-config-types.d.ts
index 71394bbd911..7b79f8c03b5 100644
--- a/packages/dd-trace/src/config/generated-config-types.d.ts
+++ b/packages/dd-trace/src/config/generated-config-types.d.ts
@@ -88,10 +88,12 @@ export interface GeneratedConfig {
DD_CIVISIBILITY_TEST_MODULE_ID: string | undefined;
DD_CIVISIBILITY_TEST_SESSION_ID: string | undefined;
DD_CUSTOM_TRACE_ID: string | undefined;
+ DD_ENABLE_LAGE_PACKAGE_NAME: boolean;
DD_ENABLE_NX_SERVICE_NAME: boolean;
DD_EXPERIMENTAL_TEST_OPT_GIT_CACHE_DIR: string;
DD_EXPERIMENTAL_TEST_OPT_GIT_CACHE_ENABLED: boolean;
DD_EXPERIMENTAL_TEST_OPT_SETTINGS_CACHE: string;
+ DD_EXPERIMENTAL_TEST_REQUESTS_FS_CACHE: boolean;
DD_EXTERNAL_ENV: string | undefined;
DD_GIT_BRANCH: string | undefined;
DD_GIT_COMMIT_AUTHOR_DATE: string | undefined;
@@ -379,6 +381,7 @@ export interface GeneratedConfig {
env: string | undefined;
experimental: {
aiguard: {
+ block: boolean;
enabled: boolean;
endpoint: string | undefined;
maxContentSize: number;
@@ -475,7 +478,6 @@ export interface GeneratedConfig {
};
openAiLogsEnabled: boolean;
OTEL_EXPORTER_OTLP_ENDPOINT: string | undefined;
- OTEL_LOG_LEVEL: "debug" | "info" | "warn" | "error" | undefined;
OTEL_LOGS_EXPORTER: "none" | "otlp" | undefined;
OTEL_METRICS_EXPORTER: "none" | "otlp" | undefined;
OTEL_RESOURCE_ATTRIBUTES: Record;
@@ -547,6 +549,7 @@ export interface GeneratedConfig {
debug: boolean;
dependencyCollection: boolean;
enabled: boolean;
+ extendedHeartbeatInterval: number;
heartbeatInterval: number;
logCollection: boolean;
metrics: boolean;
diff --git a/packages/dd-trace/src/config/helper.js b/packages/dd-trace/src/config/helper.js
index 0ba7b197758..011fa0caa1a 100644
--- a/packages/dd-trace/src/config/helper.js
+++ b/packages/dd-trace/src/config/helper.js
@@ -9,6 +9,9 @@
* @property {string|number|boolean|null|object|unknown[]} default
* @property {string[]} [aliases]
* @property {string[]} [configurationNames]
+ * @property {string} [internalPropertyName]
+ * @property {string} [transform]
+ * @property {string} [allowed]
* @property {string|boolean} [deprecated]
*/
@@ -57,6 +60,13 @@ for (const [canonical, configuration] of Object.entries(supportedConfigurations)
const aliasToCanonical = {}
for (const canonical of Object.keys(aliases)) {
for (const alias of aliases[canonical]) {
+ if (supportedConfigurations[alias]) {
+ // Allow 'fallback' aliases to be used for other configurations.
+ // This is used to handle the case where an alias could be used for multiple configurations.
+ // For example, OTEL_EXPORTER_OTLP_ENDPOINT is used for OTEL_EXPORTER_OTLP_LOGS_ENDPOINT
+ // and OTEL_EXPORTER_OTLP_METRICS_ENDPOINT.
+ continue
+ }
if (aliasToCanonical[alias]) {
throw new Error(`The alias ${alias} is already used for ${aliasToCanonical[alias]}.`)
}
@@ -99,22 +109,37 @@ function loadStableConfig () {
}
function getValueFromSource (name, source) {
- const value = source[name]
+ if (source[name] !== undefined) {
+ return source[name]
+ }
- if (value === undefined && aliases[name]) {
+ if (aliases[name]) {
for (const alias of aliases[name]) {
if (source[alias] !== undefined) {
return source[alias]
}
}
}
+}
- return value
+function getEnvNameFromSource (name, source) {
+ if (source[name] !== undefined) {
+ return name
+ }
+
+ if (aliases[name]) {
+ for (const alias of aliases[name]) {
+ if (source[alias] !== undefined) {
+ return alias
+ }
+ }
+ }
}
function validateAccess (name) {
- if ((name.startsWith('DD_') || name.startsWith('OTEL_') || aliasToCanonical[name]) &&
- !supportedConfigurations[name]) {
+ if ((name.startsWith('DD_') || name.startsWith('OTEL_')) &&
+ !supportedConfigurations[name] &&
+ !aliasToCanonical[name]) {
throw new Error(`Missing ${name} env/configuration in "supported-configurations.json" file.`)
}
}
@@ -144,10 +169,9 @@ module.exports = {
*
* @returns {TracerEnv} The environment variables
*/
- getEnvironmentVariables () {
+ getEnvironmentVariables (source = process.env, internalOnly = false) {
const configs = {}
- for (const [key, value] of Object.entries(process.env)) {
- // TODO(BridgeAR): Handle telemetry reporting for aliases.
+ for (const [key, value] of Object.entries(source)) {
if (key.startsWith('DD_') || key.startsWith('OTEL_') || aliasToCanonical[key]) {
if (supportedConfigurations[key]) {
configs[key] = value
@@ -155,7 +179,7 @@ module.exports = {
// The alias should only be used if the actual configuration is not set
// In case that more than a single alias exist, use the one defined first in our own order
for (const alias of aliases[aliasToCanonical[key]]) {
- if (process.env[alias] !== undefined) {
+ if (source[alias] !== undefined) {
configs[aliasToCanonical[key]] = value
break
}
@@ -165,9 +189,10 @@ module.exports = {
// debug(
// `Missing configuration ${env} in supported-configurations file. The environment variable is ignored.`
// )
+ // This could be moved inside the main config logic.
}
deprecationMethods[key]?.()
- } else {
+ } else if (!internalOnly) {
configs[key] = value
}
}
@@ -211,4 +236,28 @@ module.exports = {
return getValueFromSource(name, localStableConfig)
}
},
+
+ /**
+ * Returns the actual environment variable name used for a supported configuration
+ * from a specific environment-based source.
+ *
+ * @param {string} name Environment variable name
+ * @returns {string|undefined}
+ */
+ getConfiguredEnvName (name) {
+ validateAccess(name)
+
+ if (!stableConfigLoaded) {
+ loadStableConfig()
+ }
+
+ for (const source of [fleetStableConfig, process.env, localStableConfig]) {
+ if (source !== undefined) {
+ const fromSource = getEnvNameFromSource(name, source)
+ if (fromSource !== undefined) {
+ return fromSource
+ }
+ }
+ }
+ },
}
diff --git a/packages/dd-trace/src/config/index.js b/packages/dd-trace/src/config/index.js
index ed0428b26a6..93bed28e84a 100644
--- a/packages/dd-trace/src/config/index.js
+++ b/packages/dd-trace/src/config/index.js
@@ -5,1685 +5,749 @@ const os = require('node:os')
const { URL } = require('node:url')
const path = require('node:path')
+const rfdc = require('../../../../vendor/dist/rfdc')({ proto: false, circles: false })
const uuid = require('../../../../vendor/dist/crypto-randomuuid') // we need to keep the old uuid dep because of cypress
-
const set = require('../../../datadog-core/src/utils/src/set')
const { DD_MAJOR } = require('../../../../version')
const log = require('../log')
-const tagger = require('../tagger')
-const { isTrue, isFalse, normalizeProfilingEnabledValue } = require('../util')
+const pkg = require('../pkg')
+const { isTrue } = require('../util')
const { GIT_REPOSITORY_URL, GIT_COMMIT_SHA } = require('../plugins/util/tags')
-const { updateConfig } = require('../telemetry')
+const telemetry = require('../telemetry')
const telemetryMetrics = require('../telemetry/metrics')
const {
IS_SERVERLESS,
getIsGCPFunction,
getIsAzureFunction,
- enableGCPPubSubPushSubscription,
} = require('../serverless')
const { ORIGIN_KEY, DATADOG_MINI_AGENT_PATH } = require('../constants')
const { appendRules } = require('../payload-tagging/config')
const { getGitMetadataFromGitProperties, removeUserSensitiveInfo, getRemoteOriginURL, resolveGitHeadSHA } =
require('./git_properties')
-const { getEnvironmentVariable: getEnv, getEnvironmentVariables, getStableConfigSources } = require('./helper')
-const defaults = require('./defaults')
-
-const TELEMETRY_COUNTERS = new Map([
- ['otel.env.hiding', {}],
- ['otel.env.invalid', {}],
-])
-const OTEL_DD_ENV_MAPPING = new Map([
- ['OTEL_LOG_LEVEL', 'DD_TRACE_LOG_LEVEL'],
- ['OTEL_PROPAGATORS', 'DD_TRACE_PROPAGATION_STYLE'],
- ['OTEL_SERVICE_NAME', 'DD_SERVICE'],
- ['OTEL_TRACES_SAMPLER', 'DD_TRACE_SAMPLE_RATE'],
- ['OTEL_TRACES_SAMPLER_ARG', 'DD_TRACE_SAMPLE_RATE'],
- ['OTEL_TRACES_EXPORTER', 'DD_TRACE_ENABLED'],
- ['OTEL_METRICS_EXPORTER', 'DD_RUNTIME_METRICS_ENABLED'],
- ['OTEL_RESOURCE_ATTRIBUTES', 'DD_TAGS'],
- ['OTEL_SDK_DISABLED', 'DD_TRACE_OTEL_ENABLED'],
- ['OTEL_LOGS_EXPORTER', undefined],
-])
-const VALID_PROPAGATION_STYLES = new Set(['datadog', 'tracecontext', 'b3', 'b3 single header', 'none'])
-const VALID_PROPAGATION_BEHAVIOR_EXTRACT = new Set(['continue', 'restart', 'ignore'])
-const VALID_LOG_LEVELS = new Set(['debug', 'info', 'warn', 'error'])
-const DEFAULT_OTLP_PORT = 4318
+const ConfigBase = require('./config-base')
+const {
+ getEnvironmentVariable,
+ getEnvironmentVariables,
+ getStableConfigSources,
+ getValueFromEnvSources,
+} = require('./helper')
+const {
+ defaults,
+ fallbackConfigurations,
+ configurationsTable,
+ optionsTable,
+ configWithOrigin,
+ parseErrors,
+ generateTelemetry,
+} = require('./defaults')
+const { transformers } = require('./parsers')
+
const RUNTIME_ID = uuid()
-// eslint-disable-next-line eslint-rules/eslint-process-env -- internal propagation, not user config
-const ROOT_SESSION_ID = process.env.DD_ROOT_JS_SESSION_ID || RUNTIME_ID
-const NAMING_VERSIONS = new Set(['v0', 'v1'])
-const DEFAULT_NAMING_VERSION = 'v0'
const tracerMetrics = telemetryMetrics.manager.namespace('tracers')
-const changeTracker = {}
+/**
+ * @typedef {'default'
+ * | 'code'
+ * | 'remote_config'
+ * | 'calculated'
+ * | 'env_var'
+ * | 'local_stable_config'
+ * | 'fleet_stable_config'} TelemetrySource
+ * @typedef {'remote_config' | 'calculated'} RevertibleTelemetrySource
+ * @typedef {import('../../../../index').TracerOptions} TracerOptions
+ * @typedef {import('./config-types').ConfigKey} ConfigKey
+ * @typedef {import('./config-types').ConfigPath} ConfigPath
+ * @typedef {{
+ * value: import('./config-types').ConfigPathValue,
+ * source: TelemetrySource
+ * }} TrackedConfigEntry
+ * @typedef {{
+ * baseValuesByPath: Partial>,
+ * remote_config: Set,
+ * calculated: Set,
+ * }} ChangeTracker
+ */
+
+/** @type {Config | null} */
let configInstance = null
+// An entry that is undefined means it is the default value.
+/** @type {Map} */
+const trackedConfigOrigins = new Map()
+
+// ChangeTracker tracks the changes to the config up to programmatic options (code).
+/** @type {ChangeTracker} */
+const changeTracker = {
+ baseValuesByPath: {},
+ remote_config: new Set(),
+ calculated: new Set(),
+}
+
+/**
+ * @param {Config} config
+ * @param {RevertibleTelemetrySource} source
+ */
+function undo (config, source) {
+ for (const name of changeTracker[source]) {
+ const entry = changeTracker.baseValuesByPath[name] ?? { source: 'default', value: defaults[name] }
+ setAndTrack(config, name, entry.value, undefined, entry.source)
+ }
+}
+
+function get (object, path) {
+ // Fast path for simple property access.
+ if (object[path] !== undefined) {
+ return object[path]
+ }
+ let index = 0
+ while (true) {
+ const nextIndex = path.indexOf('.', index)
+ if (nextIndex === -1) {
+ return object[path.slice(index)]
+ }
+ object = object[path.slice(index, nextIndex)]
+ index = nextIndex + 1
+ }
+}
+
+/**
+ * @param {Config} config
+ * @template {ConfigPath} TPath
+ * @param {TPath} name
+ * @param {import('./config-types').ConfigPathValue} value
+ * @param {unknown} [rawValue]
+ * @param {TelemetrySource} [source]
+ */
+function setAndTrack (config, name, value, rawValue = value, source = 'calculated') {
+ // envs can not be undefined
+ if (value == null) {
+ // TODO: This matches previous behavior, though silently ignoring undefined programmatic options is not ideal.
+ if (source !== 'default') {
+ return
+ }
+ } else if (source === 'calculated' || source === 'remote_config') {
+ if (source === 'calculated' && value === get(config, name)) {
+ return
+ }
+ changeTracker[source].add(name)
+ } else {
+ const copy = typeof value === 'object' && value !== null ? rfdc(value) : value
+ changeTracker.baseValuesByPath[name] = { value: copy, source }
+ }
+ set(config, name, value)
+
+ generateTelemetry(rawValue, source, name)
+ if (source === 'default') {
+ trackedConfigOrigins.delete(name)
+ } else {
+ trackedConfigOrigins.set(name, source)
+ }
+}
+
module.exports = getConfig
-class Config {
+// We extend from ConfigBase to make our types work
+class Config extends ConfigBase {
/**
* parsed DD_TAGS, usable as a standalone tag set across products
* @type {Record}
*/
- #parsedDdTags = {}
- #envUnprocessed = {}
- #optsUnprocessed = {}
- #remoteUnprocessed = {}
- #env = {}
- #options = {}
- #remote = {}
- #defaults = {}
- #optionsArg = {}
- #localStableConfig = {}
- #fleetStableConfig = {}
- #calculated = {}
+ #parsedDdTags
+ /**
+ * @type {Record}
+ */
+ get parsedDdTags () {
+ return this.#parsedDdTags
+ }
+
+ /**
+ * @param {TracerOptions} [options={}]
+ */
constructor (options = {}) {
- if (!IS_SERVERLESS) {
- const configEnvSources = getStableConfigSources()
- this.stableConfig = {
- fleetEntries: configEnvSources.fleetStableConfig,
- localEntries: configEnvSources.localStableConfig,
- warnings: configEnvSources.stableConfigWarnings,
- }
- }
+ super()
- options = {
- ...options,
- // TODO(BridgeAR): Remove the experimental prefix once we have a major version.
- // That also applies to index.d.ts
- appsec: options.appsec == null ? options.experimental?.appsec : options.appsec,
- iast: options.iast == null ? options.experimental?.iast : options.iast,
+ const configEnvSources = getStableConfigSources()
+ this.stableConfig = {
+ fleetEntries: configEnvSources.fleetStableConfig ?? {},
+ localEntries: configEnvSources.localStableConfig ?? {},
+ warnings: configEnvSources.stableConfigWarnings,
}
// Configure the logger first so it can be used to warn about other configs
- const logConfig = log.getConfig()
- this.debug = log.isEnabled(
- this.stableConfig?.fleetEntries?.DD_TRACE_DEBUG,
- this.stableConfig?.localEntries?.DD_TRACE_DEBUG
- )
- this.logger = options.logger ?? logConfig.logger
- this.logLevel = log.getLogLevel(
- options.logLevel,
- this.stableConfig?.fleetEntries?.DD_TRACE_LOG_LEVEL,
- this.stableConfig?.localEntries?.DD_TRACE_LOG_LEVEL
- )
- log.use(this.logger)
- log.toggle(this.debug, this.logLevel)
+ // TODO: Implement auto buffering of inside of log module before first
+ // configure call. That way the logger is always available and the
+ // application doesn't need to configure it first and the configuration
+ // happens inside of config instead of inside of log module. If the logger
+ // is not deactivated, the buffered logs would be discarded. That way stable
+ // config warnings can also be logged directly and do not need special
+ // handling.
+ this.debug = log.configure(options)
// Process stable config warnings, if any
for (const warning of this.stableConfig?.warnings ?? []) {
log.warn(warning)
}
- checkIfBothOtelAndDdEnvVarSet()
-
- if (typeof options.appsec === 'boolean') {
- options.appsec = {
- enabled: options.appsec,
- }
- }
-
- if (typeof options.runtimeMetrics === 'boolean') {
- options.runtimeMetrics = {
- enabled: options.runtimeMetrics,
- }
- }
-
- this.#defaults = defaults
this.#applyDefaults()
- this.#applyStableConfig(this.stableConfig?.localEntries ?? {}, this.#localStableConfig)
- this.#applyEnvironment()
- this.#applyStableConfig(this.stableConfig?.fleetEntries ?? {}, this.#fleetStableConfig)
- this.#applyOptions(options)
+ // TODO: Update origin documentation to list all valid sources. Add local_stable_config and fleet_stable_config.
+ this.#applyEnvs(getEnvironmentVariables(this.stableConfig.localEntries, true), 'local_stable_config')
+ this.#applyEnvs(getEnvironmentVariables(undefined, true), 'env_var')
+ this.#applyEnvs(getEnvironmentVariables(this.stableConfig.fleetEntries, true), 'fleet_stable_config')
+
+ // Experimental options are applied first, so they can be overridden by non-experimental options.
+ // TODO: When using programmatic options, check if there is a higher
+ // priority name in the same options object. Use the highest priority name.
+ const { experimental, ...rest } = options
+ if (experimental) {
+ // @ts-expect-error - Difficult to type this correctly.
+ this.#applyOptions(experimental, 'code', 'experimental')
+ }
+ this.#applyOptions(rest, 'code')
this.#applyCalculated()
- this.#merge()
- tagger.add(this.tags, {
- service: this.service,
- env: this.env,
- version: this.version,
- 'runtime-id': RUNTIME_ID,
- })
+ warnWrongOtelSettings()
+
+ if (this.gitMetadataEnabled) {
+ this.#loadGitMetadata()
+ }
- this.rootSessionId = ROOT_SESSION_ID
+ parseErrors.clear()
+ }
- if (this.isCiVisibility) {
- tagger.add(this.tags, {
- [ORIGIN_KEY]: 'ciapp-test',
- })
+ #applyDefaults () {
+ for (const [name, value] of Object.entries(defaults)) {
+ set(this, name, value)
}
+ }
- if (this.gitMetadataEnabled) {
- this.#loadGitMetadata()
+ /**
+ * @param {import('./helper').TracerEnv} envs
+ * @param {'env_var' | 'local_stable_config' | 'fleet_stable_config'} source
+ */
+ #applyEnvs (envs, source) {
+ for (const [name, value] of Object.entries(envs)) {
+ const entry = configurationsTable[name]
+ // TracePropagationStyle is a special case. It is a single option that is used to set both inject and extract.
+ // TODO: Consider what to do with this later
+ if (name === 'DD_TRACE_PROPAGATION_STYLE') {
+ if (
+ getValueFromEnvSources('DD_TRACE_PROPAGATION_STYLE_INJECT') !== undefined ||
+ getValueFromEnvSources('DD_TRACE_PROPAGATION_STYLE_EXTRACT') !== undefined
+ ) {
+ log.warn(
+ // eslint-disable-next-line @stylistic/max-len
+ 'Use either DD_TRACE_PROPAGATION_STYLE or separate DD_TRACE_PROPAGATION_STYLE_INJECT and DD_TRACE_PROPAGATION_STYLE_EXTRACT environment variables'
+ )
+ continue
+ }
+ this.#applyEnvs({ DD_TRACE_PROPAGATION_STYLE_INJECT: value, DD_TRACE_PROPAGATION_STYLE_EXTRACT: value }, source)
+ continue
+ }
+ const parsed = entry.parser(value, name, source)
+ const transformed = parsed !== undefined && entry.transformer ? entry.transformer(parsed, name, source) : parsed
+ const rawValue = transformed !== null && typeof transformed === 'object' ? value : parsed
+ setAndTrack(this, entry.property ?? name, transformed, rawValue, source)
}
}
- get parsedDdTags () {
- return this.#parsedDdTags
+ /**
+ * @param {TracerOptions} options
+ * @param {'code' | 'remote_config'} source
+ * @param {string} [root]
+ */
+ #applyOptions (options, source, root = '') {
+ for (const [name, value] of Object.entries(options)) {
+ const fullName = root ? `${root}.${name}` : name
+ let entry = optionsTable[fullName]
+ if (!entry) {
+ // TODO: Fix this by changing remote config to use env styles.
+ if (name !== 'tracing' || source !== 'remote_config') {
+ log.warn('Unknown option %s with value %o', fullName, value)
+ continue
+ }
+ // @ts-expect-error - The entry is defined in the configurationsTable.
+ entry = configurationsTable.tracing
+ }
+
+ if (entry.nestedProperties) {
+ let matched = false
+ if (typeof value === 'object' && value !== null) {
+ for (const nestedProperty of entry.nestedProperties) {
+ // WARNING: if the property name might be part of the value we look at, this could conflict!
+ // An option that receives an object as its value must not contain a
+ // property that could also be interpreted as a nested property!
+ if (Object.hasOwn(value, nestedProperty)) {
+ this.#applyOptions(value, source, fullName)
+ matched = true
+ break
+ }
+ }
+ }
+ if (matched) {
+ continue
+ }
+ if (entry.option) {
+ entry = entry.option
+ } else {
+ if (fullName === 'tracePropagationStyle') {
+ // TracePropagationStyle is special. It is a single option that is used to set both inject and extract.
+ // @ts-expect-error - Difficult to type this correctly.
+ this.#applyOptions({ inject: value, extract: value }, source, 'tracePropagationStyle')
+ } else {
+ log.warn('Unknown option %s with value %o', fullName, value)
+ }
+ continue
+ }
+ }
+ // TODO: Coerce mismatched types to the expected type, if possible. E.g., strings <> numbers
+ const transformed = value !== undefined && entry.transformer ? entry.transformer(value, fullName, source) : value
+ setAndTrack(this, entry.property, transformed, value, source)
+ }
}
/**
* Set the configuration with remote config settings.
* Applies remote configuration, recalculates derived values, and merges all configuration sources.
*
- * @param {import('./remote_config').RemoteConfigOptions|null} options - Configurations received via Remote
+ * @param {TracerOptions|null} options - Configurations received via Remote
* Config or null to reset all remote configuration
*/
setRemoteConfig (options) {
// Clear all RC-managed fields to ensure previous values don't persist.
// State is instead managed by the `RCClientLibConfigManager` class
- this.#remote = {}
- this.#remoteUnprocessed = {}
+ undo(this, 'remote_config')
// Special case: if options is null, nothing to apply
// This happens when all remote configs are removed
if (options !== null) {
- this.#applyRemoteConfig(options)
+ this.#applyOptions(options, 'remote_config')
}
this.#applyCalculated()
- this.#merge()
}
- // TODO: Remove the `updateOptions` method. We don't want to support updating the config this way
/**
- * Updates the configuration with new programmatic options.
- *
- * @deprecated This method should not be used and will be removed in a future version.
- * @param {object} options - Configuration options to apply (same format as tracer init options)
+ * @param {ConfigPath} name
*/
- updateOptions (options) {
- this.#applyOptions(options)
- this.#applyCalculated()
- this.#merge()
- }
-
getOrigin (name) {
- for (const { container, origin } of this.#getSourcesInOrder()) {
- const value = container[name]
- if (value != null || container === this.#defaults) {
- return origin
- }
- }
- }
-
- #getSourcesInOrder () {
- return [
- { container: this.#remote, origin: 'remote_config', unprocessed: this.#remoteUnprocessed },
- { container: this.#options, origin: 'code', unprocessed: this.#optsUnprocessed },
- { container: this.#fleetStableConfig, origin: 'fleet_stable_config' },
- { container: this.#env, origin: 'env_var', unprocessed: this.#envUnprocessed },
- { container: this.#localStableConfig, origin: 'local_stable_config' },
- { container: this.#calculated, origin: 'calculated' },
- { container: this.#defaults, origin: 'default' },
- ]
- }
-
- #applyStableConfig (config, obj) {
- this.#applyConfigValues(config, obj, {})
+ return trackedConfigOrigins.get(name) ?? 'default'
}
- // Set environment-dependent defaults that can be overridden by users
- #applyDefaults () {
- const defaults = this.#defaults
-
- if (IS_SERVERLESS) {
- setBoolean(defaults, 'crashtracking.enabled', false)
- setString(defaults, 'profiling.enabled', 'false')
- setBoolean(defaults, 'telemetry.enabled', false)
- setBoolean(defaults, 'remoteConfig.enabled', false)
- } else {
- setBoolean(defaults, 'crashtracking.enabled', true)
+ // Handles values calculated from a mixture of options and env vars
+ #applyCalculated () {
+ undo(this, 'calculated')
+
+ if (this.DD_CIVISIBILITY_AGENTLESS_URL ||
+ this.url ||
+ os.type() !== 'Windows_NT' &&
+ !trackedConfigOrigins.has('hostname') &&
+ !trackedConfigOrigins.has('port') &&
+ !this.DD_CIVISIBILITY_AGENTLESS_ENABLED &&
+ fs.existsSync('/var/run/datadog/apm.socket')) {
+ setAndTrack(
+ this,
+ 'url',
+ new URL(this.DD_CIVISIBILITY_AGENTLESS_URL || this.url || 'unix:///var/run/datadog/apm.socket')
+ )
}
- if (getEnv('JEST_WORKER_ID')) {
- setBoolean(defaults, 'telemetry.enabled', false)
+ if (this.isCiVisibility) {
+ setAndTrack(this, 'isServiceUserProvided', trackedConfigOrigins.has('service'))
+ this.tags[ORIGIN_KEY] = 'ciapp-test'
}
- }
+ // Compute OTLP logs and metrics URLs to send payloads to the active Datadog Agent
+ const agentHostname = this.hostname || /** @type {URL} */ (this.url).hostname
- #applyEnvironment () {
- this.#applyConfigValues(getEnvironmentVariables(), this.#env, this.#envUnprocessed)
- }
+ if (!trackedConfigOrigins.has('dogstatsd.hostname')) {
+ setAndTrack(this, 'dogstatsd.hostname', agentHostname)
+ }
+ // Disable log injection when OTEL logs are enabled
+ // OTEL logs and DD log injection are mutually exclusive
+ if (this.otelLogsEnabled) {
+ setAndTrack(this, 'logInjection', false)
+ }
+ if (this.otelMetricsEnabled &&
+ trackedConfigOrigins.has('OTEL_METRICS_EXPORTER') &&
+ this.OTEL_METRICS_EXPORTER === 'none') {
+ setAndTrack(this, 'otelMetricsEnabled', false)
+ }
- #applyConfigValues (source, target, unprocessedTarget) {
- const {
- AWS_LAMBDA_FUNCTION_NAME,
- DD_AGENT_HOST,
- DD_AI_GUARD_BLOCK,
- DD_AI_GUARD_ENABLED,
- DD_AI_GUARD_ENDPOINT,
- DD_AI_GUARD_MAX_CONTENT_SIZE,
- DD_AI_GUARD_MAX_MESSAGES_LENGTH,
- DD_AI_GUARD_TIMEOUT,
- DD_API_KEY,
- DD_API_SECURITY_ENABLED,
- DD_API_SECURITY_SAMPLE_DELAY,
- DD_API_SECURITY_ENDPOINT_COLLECTION_ENABLED,
- DD_API_SECURITY_ENDPOINT_COLLECTION_MESSAGE_LIMIT,
- DD_API_SECURITY_DOWNSTREAM_BODY_ANALYSIS_SAMPLE_RATE,
- DD_API_SECURITY_MAX_DOWNSTREAM_REQUEST_BODY_ANALYSIS,
- DD_APM_TRACING_ENABLED,
- DD_APP_KEY,
- DD_APPSEC_AUTO_USER_INSTRUMENTATION_MODE,
- DD_APPSEC_COLLECT_ALL_HEADERS,
- DD_APPSEC_ENABLED,
- DD_APPSEC_GRAPHQL_BLOCKED_TEMPLATE_JSON,
- DD_APPSEC_HEADER_COLLECTION_REDACTION_ENABLED,
- DD_APPSEC_HTTP_BLOCKED_TEMPLATE_HTML,
- DD_APPSEC_HTTP_BLOCKED_TEMPLATE_JSON,
- DD_APPSEC_MAX_COLLECTED_HEADERS,
- DD_APPSEC_MAX_STACK_TRACES,
- DD_APPSEC_MAX_STACK_TRACE_DEPTH,
- DD_APPSEC_OBFUSCATION_PARAMETER_KEY_REGEXP,
- DD_APPSEC_OBFUSCATION_PARAMETER_VALUE_REGEXP,
- DD_APPSEC_RULES,
- DD_APPSEC_SCA_ENABLED,
- DD_APPSEC_STACK_TRACE_ENABLED,
- DD_APPSEC_RASP_ENABLED,
- DD_APPSEC_RASP_COLLECT_REQUEST_BODY,
- DD_APPSEC_TRACE_RATE_LIMIT,
- DD_APPSEC_WAF_TIMEOUT,
- DD_CRASHTRACKING_ENABLED,
- DD_CODE_ORIGIN_FOR_SPANS_ENABLED,
- DD_CODE_ORIGIN_FOR_SPANS_EXPERIMENTAL_EXIT_SPANS_ENABLED,
- DD_DATA_STREAMS_ENABLED,
- DD_DBM_PROPAGATION_MODE,
- DD_DBM_INJECT_SQL_BASEHASH,
- DD_DOGSTATSD_HOST,
- DD_DOGSTATSD_PORT,
- DD_DYNAMIC_INSTRUMENTATION_CAPTURE_TIMEOUT_MS,
- DD_DYNAMIC_INSTRUMENTATION_ENABLED,
- DD_DYNAMIC_INSTRUMENTATION_PROBE_FILE,
- DD_DYNAMIC_INSTRUMENTATION_REDACTED_IDENTIFIERS,
- DD_DYNAMIC_INSTRUMENTATION_REDACTION_EXCLUDED_IDENTIFIERS,
- DD_DYNAMIC_INSTRUMENTATION_UPLOAD_INTERVAL_SECONDS,
- DD_ENV,
- DD_EXPERIMENTAL_APPSEC_STANDALONE_ENABLED,
- DD_EXPERIMENTAL_PROPAGATE_PROCESS_TAGS_ENABLED,
- DD_PROFILING_ENABLED,
- DD_GRPC_CLIENT_ERROR_STATUSES,
- DD_GRPC_SERVER_ERROR_STATUSES,
- DD_HEAP_SNAPSHOT_COUNT,
- DD_HEAP_SNAPSHOT_DESTINATION,
- DD_HEAP_SNAPSHOT_INTERVAL,
- DD_IAST_DB_ROWS_TO_TAINT,
- DD_IAST_DEDUPLICATION_ENABLED,
- DD_IAST_ENABLED,
- DD_IAST_MAX_CONCURRENT_REQUESTS,
- DD_IAST_MAX_CONTEXT_OPERATIONS,
- DD_IAST_REDACTION_ENABLED,
- DD_IAST_REDACTION_NAME_PATTERN,
- DD_IAST_REDACTION_VALUE_PATTERN,
- DD_IAST_REQUEST_SAMPLING,
- DD_IAST_SECURITY_CONTROLS_CONFIGURATION,
- DD_IAST_TELEMETRY_VERBOSITY,
- DD_IAST_STACK_TRACE_ENABLED,
- DD_INJECTION_ENABLED,
- DD_INJECT_FORCE,
- DD_ENABLE_NX_SERVICE_NAME,
- DD_INSTRUMENTATION_TELEMETRY_ENABLED,
- DD_INSTRUMENTATION_CONFIG_ID,
- DD_LOGS_INJECTION,
- DD_LOGS_OTEL_ENABLED,
- DD_METRICS_OTEL_ENABLED,
- DD_LANGCHAIN_SPAN_CHAR_LIMIT,
- DD_LANGCHAIN_SPAN_PROMPT_COMPLETION_SAMPLE_RATE,
- DD_LLMOBS_AGENTLESS_ENABLED,
- DD_LLMOBS_ENABLED,
- DD_LLMOBS_ML_APP,
- DD_OPENAI_LOGS_ENABLED,
- DD_OPENAI_SPAN_CHAR_LIMIT,
- DD_PROFILING_EXPORTERS,
- DD_PROFILING_SOURCE_MAP,
- DD_INTERNAL_PROFILING_LONG_LIVED_THRESHOLD,
- DD_INSTRUMENTATION_INSTALL_ID,
- DD_INSTRUMENTATION_INSTALL_TIME,
- DD_INSTRUMENTATION_INSTALL_TYPE,
- DD_REMOTE_CONFIGURATION_ENABLED,
- DD_REMOTE_CONFIG_POLL_INTERVAL_SECONDS,
- DD_RUNTIME_METRICS_ENABLED,
- DD_RUNTIME_METRICS_EVENT_LOOP_ENABLED,
- DD_RUNTIME_METRICS_GC_ENABLED,
- DD_SERVICE,
- DD_SERVICE_MAPPING,
- DD_SITE,
- DD_SPAN_SAMPLING_RULES,
- DD_SPAN_SAMPLING_RULES_FILE,
- DD_TAGS,
- DD_TELEMETRY_DEBUG,
- DD_TELEMETRY_DEPENDENCY_COLLECTION_ENABLED,
- DD_TELEMETRY_HEARTBEAT_INTERVAL,
- DD_TELEMETRY_LOG_COLLECTION_ENABLED,
- DD_TELEMETRY_METRICS_ENABLED,
- DD_TEST_TIA_KEEP_COV_CONFIG,
- DD_TRACE_128_BIT_TRACEID_GENERATION_ENABLED,
- DD_TRACE_128_BIT_TRACEID_LOGGING_ENABLED,
- DD_TRACE_AGENT_PORT,
- DD_TRACE_AGENT_PROTOCOL_VERSION,
- DD_TRACE_AWS_ADD_SPAN_POINTERS,
- DD_TRACE_BAGGAGE_MAX_BYTES,
- DD_TRACE_BAGGAGE_MAX_ITEMS,
- DD_TRACE_BAGGAGE_TAG_KEYS,
- DD_TRACE_CLIENT_IP_ENABLED,
- DD_TRACE_CLIENT_IP_HEADER,
- DD_TRACE_CLOUD_REQUEST_PAYLOAD_TAGGING,
- DD_TRACE_CLOUD_RESPONSE_PAYLOAD_TAGGING,
- DD_TRACE_CLOUD_PAYLOAD_TAGGING_MAX_DEPTH,
- DD_TRACE_DYNAMODB_TABLE_PRIMARY_KEYS,
- DD_TRACE_ENABLED,
- DD_TRACE_EXPERIMENTAL_EXPORTER,
- DD_TRACE_EXPERIMENTAL_GET_RUM_DATA_ENABLED,
- DD_RUNTIME_METRICS_RUNTIME_ID_ENABLED,
- DD_TRACE_GIT_METADATA_ENABLED,
- DD_TRACE_GRAPHQL_ERROR_EXTENSIONS,
- DD_TRACE_HEADER_TAGS,
- DD_TRACE_LEGACY_BAGGAGE_ENABLED,
- DD_TRACE_MEMCACHED_COMMAND_ENABLED,
- DD_TRACE_MIDDLEWARE_TRACING_ENABLED,
- DD_TRACE_OBFUSCATION_QUERY_STRING_REGEXP,
- DD_TRACE_PARTIAL_FLUSH_MIN_SPANS,
- DD_TRACE_FLUSH_INTERVAL,
- DD_TRACE_PEER_SERVICE_MAPPING,
- DD_TRACE_PROPAGATION_EXTRACT_FIRST,
- DD_TRACE_PROPAGATION_BEHAVIOR_EXTRACT,
- DD_TRACE_PROPAGATION_STYLE,
- DD_TRACE_PROPAGATION_STYLE_INJECT,
- DD_TRACE_PROPAGATION_STYLE_EXTRACT,
- DD_TRACE_RATE_LIMIT,
- DD_TRACE_REMOVE_INTEGRATION_SERVICE_NAMES_ENABLED,
- DD_TRACE_REPORT_HOSTNAME,
- DD_TRACE_RESOURCE_RENAMING_ENABLED,
- DD_TRACE_SAMPLE_RATE,
- DD_TRACE_SAMPLING_RULES,
- DD_TRACE_SCOPE,
- DD_TRACE_SPAN_ATTRIBUTE_SCHEMA,
- DD_TRACE_SPAN_LEAK_DEBUG,
- DD_TRACE_STARTUP_LOGS,
- DD_TRACE_TAGS,
- DD_TRACE_WEBSOCKET_MESSAGES_ENABLED,
- DD_TRACE_WEBSOCKET_MESSAGES_INHERIT_SAMPLING,
- DD_TRACE_WEBSOCKET_MESSAGES_SEPARATE_TRACES,
- DD_TRACE_X_DATADOG_TAGS_MAX_LENGTH,
- DD_TRACING_ENABLED,
- DD_VERSION,
- DD_VERTEXAI_SPAN_PROMPT_COMPLETION_SAMPLE_RATE,
- DD_VERTEXAI_SPAN_CHAR_LIMIT,
- DD_TRACE_INFERRED_PROXY_SERVICES_ENABLED,
- DD_TRACE_NATIVE_SPAN_EVENTS,
- OTEL_METRICS_EXPORTER,
- OTEL_PROPAGATORS,
- OTEL_RESOURCE_ATTRIBUTES,
- OTEL_SERVICE_NAME,
- OTEL_TRACES_SAMPLER,
- OTEL_TRACES_SAMPLER_ARG,
- DD_EXPERIMENTAL_FLAGGING_PROVIDER_ENABLED,
- DD_EXPERIMENTAL_FLAGGING_PROVIDER_INITIALIZATION_TIMEOUT_MS,
- OTEL_EXPORTER_OTLP_LOGS_ENDPOINT,
- OTEL_EXPORTER_OTLP_LOGS_HEADERS,
- OTEL_EXPORTER_OTLP_LOGS_PROTOCOL,
- OTEL_EXPORTER_OTLP_LOGS_TIMEOUT,
- OTEL_EXPORTER_OTLP_METRICS_ENDPOINT,
- OTEL_EXPORTER_OTLP_METRICS_HEADERS,
- OTEL_EXPORTER_OTLP_METRICS_PROTOCOL,
- OTEL_EXPORTER_OTLP_METRICS_TIMEOUT,
- OTEL_EXPORTER_OTLP_METRICS_TEMPORALITY_PREFERENCE,
- OTEL_METRIC_EXPORT_TIMEOUT,
- OTEL_EXPORTER_OTLP_PROTOCOL,
- OTEL_EXPORTER_OTLP_ENDPOINT,
- OTEL_EXPORTER_OTLP_HEADERS,
- OTEL_EXPORTER_OTLP_TIMEOUT,
- OTEL_BSP_SCHEDULE_DELAY,
- OTEL_BSP_MAX_EXPORT_BATCH_SIZE,
- OTEL_BSP_MAX_QUEUE_SIZE,
- OTEL_METRIC_EXPORT_INTERVAL,
- NX_TASK_TARGET_PROJECT,
- } = source
-
- const tags = {}
-
- tagger.add(tags, parseSpaceSeparatedTags(handleOtel(OTEL_RESOURCE_ATTRIBUTES)))
- tagger.add(tags, parseSpaceSeparatedTags(DD_TAGS))
- tagger.add(tags, DD_TRACE_TAGS)
-
- Object.assign(this.#parsedDdTags, tags)
-
- setString(target, 'apiKey', DD_API_KEY)
- setBoolean(target, 'otelLogsEnabled', DD_LOGS_OTEL_ENABLED)
- // Set OpenTelemetry logs configuration with specific _LOGS_ vars taking precedence over generic _EXPORTERS_ vars
- if (OTEL_EXPORTER_OTLP_ENDPOINT) {
- // Only set if there's a custom URL, otherwise let calc phase handle the default
- setString(target, 'otelUrl', OTEL_EXPORTER_OTLP_ENDPOINT)
+ if (this.telemetry.heartbeatInterval) {
+ setAndTrack(this, 'telemetry.heartbeatInterval', Math.floor(this.telemetry.heartbeatInterval * 1000))
}
- if (OTEL_EXPORTER_OTLP_ENDPOINT || OTEL_EXPORTER_OTLP_LOGS_ENDPOINT) {
- setString(target, 'otelLogsUrl', OTEL_EXPORTER_OTLP_LOGS_ENDPOINT || target.otelUrl)
+ if (this.telemetry.extendedHeartbeatInterval) {
+ setAndTrack(this, 'telemetry.extendedHeartbeatInterval',
+ Math.floor(this.telemetry.extendedHeartbeatInterval * 1000))
}
- setString(target, 'otelHeaders', OTEL_EXPORTER_OTLP_HEADERS)
- setString(target, 'otelLogsHeaders', OTEL_EXPORTER_OTLP_LOGS_HEADERS || target.otelHeaders)
- setString(target, 'otelProtocol', OTEL_EXPORTER_OTLP_PROTOCOL)
- setString(target, 'otelLogsProtocol', OTEL_EXPORTER_OTLP_LOGS_PROTOCOL || target.otelProtocol)
- const otelTimeout = nonNegInt(OTEL_EXPORTER_OTLP_TIMEOUT, 'OTEL_EXPORTER_OTLP_TIMEOUT')
- if (otelTimeout !== undefined) {
- target.otelTimeout = otelTimeout
+
+ // Enable resourceRenamingEnabled when appsec is enabled and only
+ // if DD_TRACE_RESOURCE_RENAMING_ENABLED is not explicitly set
+ if (!trackedConfigOrigins.has('resourceRenamingEnabled')) {
+ setAndTrack(this, 'resourceRenamingEnabled', this.appsec.enabled ?? false)
}
- const otelLogsTimeout = nonNegInt(OTEL_EXPORTER_OTLP_LOGS_TIMEOUT, 'OTEL_EXPORTER_OTLP_LOGS_TIMEOUT')
- target.otelLogsTimeout = otelLogsTimeout === undefined ? target.otelTimeout : otelLogsTimeout
- const otelBatchTimeout = nonNegInt(OTEL_BSP_SCHEDULE_DELAY, 'OTEL_BSP_SCHEDULE_DELAY', false)
- if (otelBatchTimeout !== undefined) {
- target.otelBatchTimeout = otelBatchTimeout
+
+ if (!trackedConfigOrigins.has('spanComputePeerService') && this.spanAttributeSchema !== 'v0') {
+ setAndTrack(this, 'spanComputePeerService', true)
}
- target.otelMaxExportBatchSize = nonNegInt(OTEL_BSP_MAX_EXPORT_BATCH_SIZE, 'OTEL_BSP_MAX_EXPORT_BATCH_SIZE', false)
- target.otelMaxQueueSize = nonNegInt(OTEL_BSP_MAX_QUEUE_SIZE, 'OTEL_BSP_MAX_QUEUE_SIZE', false)
-
- const otelMetricsExporterEnabled = OTEL_METRICS_EXPORTER?.toLowerCase() !== 'none'
- setBoolean(
- target,
- 'otelMetricsEnabled',
- DD_METRICS_OTEL_ENABLED && isTrue(DD_METRICS_OTEL_ENABLED) && otelMetricsExporterEnabled
- )
- // Set OpenTelemetry metrics configuration with specific _METRICS_ vars
- // taking precedence over generic _EXPORTERS_ vars
- if (OTEL_EXPORTER_OTLP_ENDPOINT || OTEL_EXPORTER_OTLP_METRICS_ENDPOINT) {
- setString(target, 'otelMetricsUrl', OTEL_EXPORTER_OTLP_METRICS_ENDPOINT || target.otelUrl)
+
+ if (!this.apmTracingEnabled) {
+ setAndTrack(this, 'stats.enabled', false)
+ } else if (!trackedConfigOrigins.has('stats.enabled')) {
+ setAndTrack(this, 'stats.enabled', getIsGCPFunction() || getIsAzureFunction())
}
- setString(target, 'otelMetricsHeaders', OTEL_EXPORTER_OTLP_METRICS_HEADERS || target.otelHeaders)
- setString(target, 'otelMetricsProtocol', OTEL_EXPORTER_OTLP_METRICS_PROTOCOL || target.otelProtocol)
- const otelMetricsTimeout = nonNegInt(OTEL_EXPORTER_OTLP_METRICS_TIMEOUT, 'OTEL_EXPORTER_OTLP_METRICS_TIMEOUT')
- target.otelMetricsTimeout = otelMetricsTimeout === undefined ? target.otelTimeout : otelMetricsTimeout
- target.otelMetricsExportTimeout = nonNegInt(OTEL_METRIC_EXPORT_TIMEOUT, 'OTEL_METRIC_EXPORT_TIMEOUT')
- target.otelMetricsExportInterval = nonNegInt(OTEL_METRIC_EXPORT_INTERVAL, 'OTEL_METRIC_EXPORT_INTERVAL', false)
-
- // Parse temporality preference (default to DELTA for Datadog)
- if (OTEL_EXPORTER_OTLP_METRICS_TEMPORALITY_PREFERENCE) {
- const temporalityPref = OTEL_EXPORTER_OTLP_METRICS_TEMPORALITY_PREFERENCE.toUpperCase()
- if (['DELTA', 'CUMULATIVE', 'LOWMEMORY'].includes(temporalityPref)) {
- setString(target, 'otelMetricsTemporalityPreference', temporalityPref)
+
+ // TODO: Remove the experimental env vars as a major or deprecate the option?
+ if (this.experimental?.b3) {
+ if (!this.tracePropagationStyle.inject.includes('b3')) {
+ this.tracePropagationStyle.inject.push('b3')
}
- }
- setBoolean(
- target,
- 'apmTracingEnabled',
- DD_APM_TRACING_ENABLED ??
- (DD_EXPERIMENTAL_APPSEC_STANDALONE_ENABLED && isFalse(DD_EXPERIMENTAL_APPSEC_STANDALONE_ENABLED))
- )
- setBoolean(target, 'propagateProcessTags.enabled', DD_EXPERIMENTAL_PROPAGATE_PROCESS_TAGS_ENABLED)
- setString(target, 'appKey', DD_APP_KEY)
- setBoolean(target, 'appsec.apiSecurity.enabled', DD_API_SECURITY_ENABLED && isTrue(DD_API_SECURITY_ENABLED))
- target['appsec.apiSecurity.sampleDelay'] = maybeFloat(DD_API_SECURITY_SAMPLE_DELAY)
- setBoolean(target, 'appsec.apiSecurity.endpointCollectionEnabled',
- DD_API_SECURITY_ENDPOINT_COLLECTION_ENABLED)
- target['appsec.apiSecurity.endpointCollectionMessageLimit'] =
- maybeInt(DD_API_SECURITY_ENDPOINT_COLLECTION_MESSAGE_LIMIT)
- target['appsec.blockedTemplateGraphql'] = maybeFile(DD_APPSEC_GRAPHQL_BLOCKED_TEMPLATE_JSON)
- target['appsec.blockedTemplateHtml'] = maybeFile(DD_APPSEC_HTTP_BLOCKED_TEMPLATE_HTML)
- unprocessedTarget['appsec.blockedTemplateHtml'] = DD_APPSEC_HTTP_BLOCKED_TEMPLATE_HTML
- target['appsec.blockedTemplateJson'] = maybeFile(DD_APPSEC_HTTP_BLOCKED_TEMPLATE_JSON)
- unprocessedTarget['appsec.blockedTemplateJson'] = DD_APPSEC_HTTP_BLOCKED_TEMPLATE_JSON
- setBoolean(target, 'appsec.enabled', DD_APPSEC_ENABLED)
- setString(target, 'appsec.eventTracking.mode', DD_APPSEC_AUTO_USER_INSTRUMENTATION_MODE)
- // TODO appsec.extendedHeadersCollection are deprecated, to delete in a major
- setBoolean(target, 'appsec.extendedHeadersCollection.enabled', DD_APPSEC_COLLECT_ALL_HEADERS)
- setBoolean(
- target,
- 'appsec.extendedHeadersCollection.redaction',
- DD_APPSEC_HEADER_COLLECTION_REDACTION_ENABLED
- )
- target['appsec.extendedHeadersCollection.maxHeaders'] = maybeInt(DD_APPSEC_MAX_COLLECTED_HEADERS)
- unprocessedTarget['appsec.extendedHeadersCollection.maxHeaders'] = DD_APPSEC_MAX_COLLECTED_HEADERS
- setString(target, 'appsec.obfuscatorKeyRegex', DD_APPSEC_OBFUSCATION_PARAMETER_KEY_REGEXP)
- setString(target, 'appsec.obfuscatorValueRegex', DD_APPSEC_OBFUSCATION_PARAMETER_VALUE_REGEXP)
- setBoolean(target, 'appsec.rasp.enabled', DD_APPSEC_RASP_ENABLED)
- // TODO Deprecated, to delete in a major
- setBoolean(target, 'appsec.rasp.bodyCollection', DD_APPSEC_RASP_COLLECT_REQUEST_BODY)
- target['appsec.rateLimit'] = maybeInt(DD_APPSEC_TRACE_RATE_LIMIT)
- unprocessedTarget['appsec.rateLimit'] = DD_APPSEC_TRACE_RATE_LIMIT
- setString(target, 'appsec.rules', DD_APPSEC_RULES)
- // DD_APPSEC_SCA_ENABLED is never used locally, but only sent to the backend
- setBoolean(target, 'appsec.sca.enabled', DD_APPSEC_SCA_ENABLED)
- setBoolean(target, 'appsec.stackTrace.enabled', DD_APPSEC_STACK_TRACE_ENABLED)
- target['appsec.stackTrace.maxDepth'] = maybeInt(DD_APPSEC_MAX_STACK_TRACE_DEPTH)
- unprocessedTarget['appsec.stackTrace.maxDepth'] = DD_APPSEC_MAX_STACK_TRACE_DEPTH
- target['appsec.stackTrace.maxStackTraces'] = maybeInt(DD_APPSEC_MAX_STACK_TRACES)
- unprocessedTarget['appsec.stackTrace.maxStackTraces'] = DD_APPSEC_MAX_STACK_TRACES
- target['appsec.wafTimeout'] = maybeInt(DD_APPSEC_WAF_TIMEOUT)
- unprocessedTarget['appsec.wafTimeout'] = DD_APPSEC_WAF_TIMEOUT
- target['appsec.apiSecurity.downstreamBodyAnalysisSampleRate'] =
- maybeFloat(DD_API_SECURITY_DOWNSTREAM_BODY_ANALYSIS_SAMPLE_RATE)
- target['appsec.apiSecurity.maxDownstreamRequestBodyAnalysis'] =
- maybeInt(DD_API_SECURITY_MAX_DOWNSTREAM_REQUEST_BODY_ANALYSIS)
- target.baggageMaxBytes = DD_TRACE_BAGGAGE_MAX_BYTES
- target.baggageMaxItems = DD_TRACE_BAGGAGE_MAX_ITEMS
- setArray(target, 'baggageTagKeys', DD_TRACE_BAGGAGE_TAG_KEYS)
- setBoolean(target, 'clientIpEnabled', DD_TRACE_CLIENT_IP_ENABLED)
- setString(target, 'clientIpHeader', DD_TRACE_CLIENT_IP_HEADER?.toLowerCase())
- if (DD_TRACE_CLOUD_REQUEST_PAYLOAD_TAGGING || DD_TRACE_CLOUD_RESPONSE_PAYLOAD_TAGGING) {
- if (DD_TRACE_CLOUD_REQUEST_PAYLOAD_TAGGING) {
- setBoolean(target, 'cloudPayloadTagging.requestsEnabled', true)
+ if (!this.tracePropagationStyle.extract.includes('b3')) {
+ this.tracePropagationStyle.extract.push('b3')
}
- if (DD_TRACE_CLOUD_RESPONSE_PAYLOAD_TAGGING) {
- setBoolean(target, 'cloudPayloadTagging.responsesEnabled', true)
+ if (!this.tracePropagationStyle.inject.includes('b3 single header')) {
+ this.tracePropagationStyle.inject.push('b3 single header')
}
- target['cloudPayloadTagging.rules'] = appendRules(
- splitJSONPathRules(DD_TRACE_CLOUD_REQUEST_PAYLOAD_TAGGING),
- splitJSONPathRules(DD_TRACE_CLOUD_RESPONSE_PAYLOAD_TAGGING)
- )
- }
- if (DD_TRACE_CLOUD_PAYLOAD_TAGGING_MAX_DEPTH) {
- target['cloudPayloadTagging.maxDepth'] = maybeInt(DD_TRACE_CLOUD_PAYLOAD_TAGGING_MAX_DEPTH)
- }
- setBoolean(target, 'crashtracking.enabled', DD_CRASHTRACKING_ENABLED)
- setBoolean(target, 'codeOriginForSpans.enabled', DD_CODE_ORIGIN_FOR_SPANS_ENABLED)
- setBoolean(
- target,
- 'codeOriginForSpans.experimental.exit_spans.enabled',
- DD_CODE_ORIGIN_FOR_SPANS_EXPERIMENTAL_EXIT_SPANS_ENABLED
- )
- setString(target, 'dbmPropagationMode', DD_DBM_PROPAGATION_MODE)
- setBoolean(target, 'dbm.injectSqlBaseHash', DD_DBM_INJECT_SQL_BASEHASH)
- setString(target, 'dogstatsd.hostname', DD_DOGSTATSD_HOST)
- setString(target, 'dogstatsd.port', DD_DOGSTATSD_PORT)
- setBoolean(target, 'dsmEnabled', DD_DATA_STREAMS_ENABLED)
- target['dynamicInstrumentation.captureTimeoutMs'] = maybeInt(DD_DYNAMIC_INSTRUMENTATION_CAPTURE_TIMEOUT_MS)
- unprocessedTarget['dynamicInstrumentation.captureTimeoutMs'] = DD_DYNAMIC_INSTRUMENTATION_CAPTURE_TIMEOUT_MS
- setBoolean(target, 'dynamicInstrumentation.enabled', DD_DYNAMIC_INSTRUMENTATION_ENABLED)
- setString(target, 'dynamicInstrumentation.probeFile', DD_DYNAMIC_INSTRUMENTATION_PROBE_FILE)
- setArray(target, 'dynamicInstrumentation.redactedIdentifiers',
- DD_DYNAMIC_INSTRUMENTATION_REDACTED_IDENTIFIERS)
- setArray(
- target,
- 'dynamicInstrumentation.redactionExcludedIdentifiers',
- DD_DYNAMIC_INSTRUMENTATION_REDACTION_EXCLUDED_IDENTIFIERS
- )
- target['dynamicInstrumentation.uploadIntervalSeconds'] =
- maybeFloat(DD_DYNAMIC_INSTRUMENTATION_UPLOAD_INTERVAL_SECONDS)
- unprocessedTarget['dynamicInstrumentation.uploadInterval'] = DD_DYNAMIC_INSTRUMENTATION_UPLOAD_INTERVAL_SECONDS
- setString(target, 'env', DD_ENV || tags.env)
- setBoolean(
- target,
- 'experimental.flaggingProvider.enabled',
- DD_EXPERIMENTAL_FLAGGING_PROVIDER_ENABLED
- )
- if (DD_EXPERIMENTAL_FLAGGING_PROVIDER_INITIALIZATION_TIMEOUT_MS != null) {
- target['experimental.flaggingProvider.initializationTimeoutMs'] =
- maybeInt(DD_EXPERIMENTAL_FLAGGING_PROVIDER_INITIALIZATION_TIMEOUT_MS)
+ if (!this.tracePropagationStyle.extract.includes('b3 single header')) {
+ this.tracePropagationStyle.extract.push('b3 single header')
+ }
+ setAndTrack(this, 'tracePropagationStyle.inject', this.tracePropagationStyle.inject)
+ setAndTrack(this, 'tracePropagationStyle.extract', this.tracePropagationStyle.extract)
}
- setBoolean(target, 'traceEnabled', DD_TRACE_ENABLED)
- setBoolean(target, 'experimental.aiguard.block', DD_AI_GUARD_BLOCK)
- setBoolean(target, 'experimental.aiguard.enabled', DD_AI_GUARD_ENABLED)
- setString(target, 'experimental.aiguard.endpoint', DD_AI_GUARD_ENDPOINT)
- target['experimental.aiguard.maxContentSize'] = maybeInt(DD_AI_GUARD_MAX_CONTENT_SIZE)
- unprocessedTarget['experimental.aiguard.maxContentSize'] = DD_AI_GUARD_MAX_CONTENT_SIZE
- target['experimental.aiguard.maxMessagesLength'] = maybeInt(DD_AI_GUARD_MAX_MESSAGES_LENGTH)
- unprocessedTarget['experimental.aiguard.maxMessagesLength'] = DD_AI_GUARD_MAX_MESSAGES_LENGTH
- target['experimental.aiguard.timeout'] = maybeInt(DD_AI_GUARD_TIMEOUT)
- unprocessedTarget['experimental.aiguard.timeout'] = DD_AI_GUARD_TIMEOUT
- setBoolean(target, 'experimental.enableGetRumData', DD_TRACE_EXPERIMENTAL_GET_RUM_DATA_ENABLED)
- setString(target, 'experimental.exporter', DD_TRACE_EXPERIMENTAL_EXPORTER)
- if (AWS_LAMBDA_FUNCTION_NAME && !fs.existsSync(DATADOG_MINI_AGENT_PATH)) {
- target.flushInterval = 0
- } else if (DD_TRACE_FLUSH_INTERVAL) {
- target.flushInterval = maybeInt(DD_TRACE_FLUSH_INTERVAL)
+
+ if (getEnvironmentVariable('AWS_LAMBDA_FUNCTION_NAME') && !fs.existsSync(DATADOG_MINI_AGENT_PATH)) {
+ setAndTrack(this, 'flushInterval', 0)
}
- target.flushMinSpans = maybeInt(DD_TRACE_PARTIAL_FLUSH_MIN_SPANS)
- unprocessedTarget.flushMinSpans = DD_TRACE_PARTIAL_FLUSH_MIN_SPANS
- setBoolean(target, 'gitMetadataEnabled', DD_TRACE_GIT_METADATA_ENABLED)
- setIntegerRangeSet(target, 'grpc.client.error.statuses', DD_GRPC_CLIENT_ERROR_STATUSES)
- setIntegerRangeSet(target, 'grpc.server.error.statuses', DD_GRPC_SERVER_ERROR_STATUSES)
- setArray(target, 'headerTags', DD_TRACE_HEADER_TAGS)
- target['heapSnapshot.count'] = maybeInt(DD_HEAP_SNAPSHOT_COUNT)
- setString(target, 'heapSnapshot.destination', DD_HEAP_SNAPSHOT_DESTINATION)
- target['heapSnapshot.interval'] = maybeInt(DD_HEAP_SNAPSHOT_INTERVAL)
- setString(target, 'hostname', DD_AGENT_HOST)
- target['iast.dbRowsToTaint'] = maybeInt(DD_IAST_DB_ROWS_TO_TAINT)
- setBoolean(target, 'iast.deduplicationEnabled', DD_IAST_DEDUPLICATION_ENABLED)
- setBoolean(target, 'iast.enabled', DD_IAST_ENABLED)
- target['iast.maxConcurrentRequests'] = maybeInt(DD_IAST_MAX_CONCURRENT_REQUESTS)
- unprocessedTarget['iast.maxConcurrentRequests'] = DD_IAST_MAX_CONCURRENT_REQUESTS
- target['iast.maxContextOperations'] = maybeInt(DD_IAST_MAX_CONTEXT_OPERATIONS)
- unprocessedTarget['iast.maxContextOperations'] = DD_IAST_MAX_CONTEXT_OPERATIONS
- setBoolean(target, 'iast.redactionEnabled', DD_IAST_REDACTION_ENABLED && !isFalse(DD_IAST_REDACTION_ENABLED))
- setString(target, 'iast.redactionNamePattern', DD_IAST_REDACTION_NAME_PATTERN)
- setString(target, 'iast.redactionValuePattern', DD_IAST_REDACTION_VALUE_PATTERN)
- const iastRequestSampling = maybeInt(DD_IAST_REQUEST_SAMPLING)
- if (iastRequestSampling !== undefined && iastRequestSampling > -1 && iastRequestSampling < 101) {
- target['iast.requestSampling'] = iastRequestSampling
+
+ if (!trackedConfigOrigins.has('apmTracingEnabled') &&
+ trackedConfigOrigins.has('experimental.appsec.standalone.enabled')) {
+ setAndTrack(this, 'apmTracingEnabled', !this.experimental.appsec.standalone.enabled)
}
- unprocessedTarget['iast.requestSampling'] = DD_IAST_REQUEST_SAMPLING
- setString(target, 'iast.securityControlsConfiguration', DD_IAST_SECURITY_CONTROLS_CONFIGURATION)
- setString(target, 'iast.telemetryVerbosity', DD_IAST_TELEMETRY_VERBOSITY)
- setBoolean(target, 'iast.stackTrace.enabled', DD_IAST_STACK_TRACE_ENABLED)
- setString(target, 'installSignature.id', DD_INSTRUMENTATION_INSTALL_ID)
- setString(target, 'installSignature.time', DD_INSTRUMENTATION_INSTALL_TIME)
- setString(target, 'installSignature.type', DD_INSTRUMENTATION_INSTALL_TYPE)
- // TODO: Why is DD_INJECTION_ENABLED a comma separated list?
- setArray(target, 'injectionEnabled', DD_INJECTION_ENABLED)
- if (DD_INJECTION_ENABLED !== undefined) {
- setString(target, 'instrumentationSource', DD_INJECTION_ENABLED ? 'ssi' : 'manual')
+
+ if (this.cloudPayloadTagging?.request || this.cloudPayloadTagging?.response) {
+ setAndTrack(this, 'cloudPayloadTagging.rules', appendRules(
+ this.cloudPayloadTagging.request,
+ this.cloudPayloadTagging.response
+ ))
}
- setBoolean(target, 'injectForce', DD_INJECT_FORCE)
- setBoolean(target, 'isAzureFunction', getIsAzureFunction())
- setBoolean(target, 'isGCPFunction', getIsGCPFunction())
- setBoolean(target, 'gcpPubSubPushSubscriptionEnabled', enableGCPPubSubPushSubscription())
- target['langchain.spanCharLimit'] = maybeInt(DD_LANGCHAIN_SPAN_CHAR_LIMIT)
- target['langchain.spanPromptCompletionSampleRate'] = maybeFloat(DD_LANGCHAIN_SPAN_PROMPT_COMPLETION_SAMPLE_RATE)
- setBoolean(target, 'legacyBaggageEnabled', DD_TRACE_LEGACY_BAGGAGE_ENABLED)
- setBoolean(target, 'llmobs.agentlessEnabled', DD_LLMOBS_AGENTLESS_ENABLED)
- setBoolean(target, 'llmobs.enabled', DD_LLMOBS_ENABLED)
- setString(target, 'llmobs.mlApp', DD_LLMOBS_ML_APP)
- setBoolean(target, 'logInjection', DD_LOGS_INJECTION)
- // Requires an accompanying DD_APM_OBFUSCATION_MEMCACHED_KEEP_COMMAND=true in the agent
- setBoolean(target, 'memcachedCommandEnabled', DD_TRACE_MEMCACHED_COMMAND_ENABLED)
- setBoolean(target, 'middlewareTracingEnabled', DD_TRACE_MIDDLEWARE_TRACING_ENABLED)
- setBoolean(target, 'openAiLogsEnabled', DD_OPENAI_LOGS_ENABLED)
- target['openai.spanCharLimit'] = maybeInt(DD_OPENAI_SPAN_CHAR_LIMIT)
- unprocessedTarget.openaiSpanCharLimit = DD_OPENAI_SPAN_CHAR_LIMIT
- if (DD_TRACE_PEER_SERVICE_MAPPING) {
- target.peerServiceMapping = Object.fromEntries(
- DD_TRACE_PEER_SERVICE_MAPPING.split(',').map(x => x.trim().split(':'))
- )
- unprocessedTarget.peerServiceMapping = DD_TRACE_PEER_SERVICE_MAPPING
+
+ if (this.injectionEnabled) {
+ setAndTrack(this, 'instrumentationSource', 'ssi')
}
- setString(target, 'port', DD_TRACE_AGENT_PORT)
- const profilingEnabled = normalizeProfilingEnabledValue(DD_PROFILING_ENABLED)
- setString(target, 'profiling.enabled', profilingEnabled)
- setString(target, 'profiling.exporters', DD_PROFILING_EXPORTERS)
- setBoolean(target, 'profiling.sourceMap', DD_PROFILING_SOURCE_MAP && !isFalse(DD_PROFILING_SOURCE_MAP))
- if (DD_INTERNAL_PROFILING_LONG_LIVED_THRESHOLD) {
- // This is only used in testing to not have to wait 30s
- target['profiling.longLivedThreshold'] = Number(DD_INTERNAL_PROFILING_LONG_LIVED_THRESHOLD)
+
+ if (!trackedConfigOrigins.has('runtimeMetrics.enabled') && this.OTEL_METRICS_EXPORTER === 'none') {
+ setAndTrack(this, 'runtimeMetrics.enabled', false)
}
- setString(target, 'protocolVersion', DD_TRACE_AGENT_PROTOCOL_VERSION)
- setString(target, 'queryStringObfuscation', DD_TRACE_OBFUSCATION_QUERY_STRING_REGEXP)
- setBoolean(target, 'remoteConfig.enabled', DD_REMOTE_CONFIGURATION_ENABLED)
- target['remoteConfig.pollInterval'] = maybeFloat(DD_REMOTE_CONFIG_POLL_INTERVAL_SECONDS)
- unprocessedTarget['remoteConfig.pollInterval'] = DD_REMOTE_CONFIG_POLL_INTERVAL_SECONDS
- setBoolean(target, 'reportHostname', DD_TRACE_REPORT_HOSTNAME)
- if (DD_TRACE_RESOURCE_RENAMING_ENABLED !== undefined) {
- setBoolean(target, 'resourceRenamingEnabled', DD_TRACE_RESOURCE_RENAMING_ENABLED)
+ if (!trackedConfigOrigins.has('sampleRate') && trackedConfigOrigins.has('OTEL_TRACES_SAMPLER')) {
+ setAndTrack(this, 'sampleRate', getFromOtelSamplerMap(this.OTEL_TRACES_SAMPLER, this.OTEL_TRACES_SAMPLER_ARG))
}
- // only used to explicitly set runtimeMetrics to false
- const otelSetRuntimeMetrics = String(OTEL_METRICS_EXPORTER).toLowerCase() === 'none'
- ? false
- : undefined
- setBoolean(target, 'runtimeMetrics.enabled', DD_RUNTIME_METRICS_ENABLED ||
- otelSetRuntimeMetrics)
- setBoolean(target, 'runtimeMetrics.eventLoop', DD_RUNTIME_METRICS_EVENT_LOOP_ENABLED)
- setBoolean(target, 'runtimeMetrics.gc', DD_RUNTIME_METRICS_GC_ENABLED)
- setBoolean(target, 'runtimeMetricsRuntimeId', DD_RUNTIME_METRICS_RUNTIME_ID_ENABLED)
- setArray(target, 'sampler.spanSamplingRules', reformatSpanSamplingRules(
- maybeJsonFile(DD_SPAN_SAMPLING_RULES_FILE) ??
- safeJsonParse(DD_SPAN_SAMPLING_RULES)
- ))
- setUnit(
- target,
- 'sampleRate',
- DD_TRACE_SAMPLE_RATE || getFromOtelSamplerMap(OTEL_TRACES_SAMPLER, OTEL_TRACES_SAMPLER_ARG)
- )
- target['sampler.rateLimit'] = DD_TRACE_RATE_LIMIT
- setSamplingRule(target, 'sampler.rules', safeJsonParse(DD_TRACE_SAMPLING_RULES))
- unprocessedTarget['sampler.rules'] = DD_TRACE_SAMPLING_RULES
- setString(target, 'scope', DD_TRACE_SCOPE)
- // Priority:
- // DD_SERVICE > tags.service > OTEL_SERVICE_NAME > NX_TASK_TARGET_PROJECT (if DD_ENABLE_NX_SERVICE_NAME) > default
- let serviceName = DD_SERVICE || tags.service || OTEL_SERVICE_NAME
- let isServiceNameInferred
- if (!serviceName && NX_TASK_TARGET_PROJECT) {
- if (isTrue(DD_ENABLE_NX_SERVICE_NAME)) {
- isServiceNameInferred = true
- serviceName = NX_TASK_TARGET_PROJECT
- } else if (DD_MAJOR < 6) {
- // Warn about v6 behavior change for Nx projects
- log.warn(
- // eslint-disable-next-line @stylistic/max-len
- 'NX_TASK_TARGET_PROJECT is set but no service name was configured. In v6, NX_TASK_TARGET_PROJECT will be used as the default service name. Set DD_ENABLE_NX_SERVICE_NAME=true to opt-in to this behavior now, or set a service name explicitly.'
- )
+
+ if (this.DD_SPAN_SAMPLING_RULES_FILE) {
+ try {
+ // TODO: Should we log a warning in case this is defined next to spanSamplingRules?
+ setAndTrack(this, 'spanSamplingRules', transformers.toCamelCase(JSON.parse(this.DD_SPAN_SAMPLING_RULES_FILE)))
+ } catch (error) {
+ log.warn('Error reading span sampling rules file %s; %o', this.DD_SPAN_SAMPLING_RULES_FILE, error)
}
}
- setString(target, 'service', serviceName)
- if (serviceName) setBoolean(target, 'isServiceNameInferred', isServiceNameInferred ?? false)
- if (DD_SERVICE_MAPPING) {
- target.serviceMapping = Object.fromEntries(
- DD_SERVICE_MAPPING.split(',').map(x => x.trim().split(':'))
- )
- }
- setString(target, 'site', DD_SITE)
- if (DD_TRACE_SPAN_ATTRIBUTE_SCHEMA) {
- setString(target, 'spanAttributeSchema', validateNamingVersion(DD_TRACE_SPAN_ATTRIBUTE_SCHEMA))
- unprocessedTarget.spanAttributeSchema = DD_TRACE_SPAN_ATTRIBUTE_SCHEMA
- }
- // 0: disabled, 1: logging, 2: garbage collection + logging
- target.spanLeakDebug = maybeInt(DD_TRACE_SPAN_LEAK_DEBUG)
- setBoolean(target, 'spanRemoveIntegrationFromService', DD_TRACE_REMOVE_INTEGRATION_SERVICE_NAMES_ENABLED)
- setBoolean(target, 'startupLogs', DD_TRACE_STARTUP_LOGS)
- setTags(target, 'tags', tags)
- target.tagsHeaderMaxLength = DD_TRACE_X_DATADOG_TAGS_MAX_LENGTH
- setBoolean(target, 'telemetry.enabled', DD_INSTRUMENTATION_TELEMETRY_ENABLED)
- setString(target, 'instrumentation_config_id', DD_INSTRUMENTATION_CONFIG_ID)
- setBoolean(target, 'telemetry.debug', DD_TELEMETRY_DEBUG)
- setBoolean(target, 'telemetry.dependencyCollection', DD_TELEMETRY_DEPENDENCY_COLLECTION_ENABLED)
- target['telemetry.heartbeatInterval'] = maybeInt(Math.floor(DD_TELEMETRY_HEARTBEAT_INTERVAL * 1000))
- unprocessedTarget['telemetry.heartbeatInterval'] = DD_TELEMETRY_HEARTBEAT_INTERVAL
- setBoolean(target, 'telemetry.logCollection', DD_TELEMETRY_LOG_COLLECTION_ENABLED)
- setBoolean(target, 'telemetry.metrics', DD_TELEMETRY_METRICS_ENABLED)
- setBoolean(target, 'isKeepingCoverageConfiguration', DD_TEST_TIA_KEEP_COV_CONFIG)
- setBoolean(target, 'traceId128BitGenerationEnabled', DD_TRACE_128_BIT_TRACEID_GENERATION_ENABLED)
- setBoolean(target, 'traceId128BitLoggingEnabled', DD_TRACE_128_BIT_TRACEID_LOGGING_ENABLED)
- warnIfPropagationStyleConflict(
- DD_TRACE_PROPAGATION_STYLE,
- DD_TRACE_PROPAGATION_STYLE_INJECT,
- DD_TRACE_PROPAGATION_STYLE_EXTRACT
- )
- if (DD_TRACE_PROPAGATION_STYLE !== undefined) {
- setArray(target, 'tracePropagationStyle.inject', normalizePropagationStyle(DD_TRACE_PROPAGATION_STYLE))
- setArray(target, 'tracePropagationStyle.extract', normalizePropagationStyle(DD_TRACE_PROPAGATION_STYLE))
- }
- if (DD_TRACE_PROPAGATION_STYLE_INJECT !== undefined) {
- setArray(target, 'tracePropagationStyle.inject',
- normalizePropagationStyle(DD_TRACE_PROPAGATION_STYLE_INJECT))
+
+ // All sampler options are tracked as individual values. No need to track the sampler object as a whole.
+ this.sampler = {
+ rules: this.samplingRules,
+ rateLimit: this.rateLimit,
+ sampleRate: this.sampleRate,
+ spanSamplingRules: this.spanSamplingRules,
}
- if (DD_TRACE_PROPAGATION_STYLE_EXTRACT !== undefined) {
- setArray(target, 'tracePropagationStyle.extract',
- normalizePropagationStyle(DD_TRACE_PROPAGATION_STYLE_EXTRACT))
+
+ // For LLMObs, we want to auto enable it when other llmobs options are defined.
+ if (!this.llmobs.enabled &&
+ !trackedConfigOrigins.has('llmobs.enabled') &&
+ (trackedConfigOrigins.has('llmobs.agentlessEnabled') ||
+ trackedConfigOrigins.has('llmobs.mlApp'))) {
+ setAndTrack(this, 'llmobs.enabled', true)
}
- setBoolean(target, 'tracePropagationExtractFirst', DD_TRACE_PROPAGATION_EXTRACT_FIRST)
- if (DD_TRACE_PROPAGATION_BEHAVIOR_EXTRACT !== undefined) {
- const stringPropagationBehaviorExtract = String(DD_TRACE_PROPAGATION_BEHAVIOR_EXTRACT)
- target.tracePropagationBehaviorExtract =
- VALID_PROPAGATION_BEHAVIOR_EXTRACT.has(stringPropagationBehaviorExtract)
- ? stringPropagationBehaviorExtract
- : 'continue'
+
+ if (this.OTEL_RESOURCE_ATTRIBUTES) {
+ for (const [key, value] of Object.entries(this.OTEL_RESOURCE_ATTRIBUTES)) {
+ // Not replacing existing tags keeps the order of the tags as before.
+ if (!this.tags[key]) {
+ this.tags[key] = value
+ }
+ }
}
- if (DD_TRACE_PROPAGATION_STYLE !== undefined ||
- DD_TRACE_PROPAGATION_STYLE_INJECT !== undefined ||
- DD_TRACE_PROPAGATION_STYLE_EXTRACT !== undefined ||
- OTEL_PROPAGATORS !== undefined) {
- // At least one var is defined, calculate value using truthy logic
- const useDdStyle = DD_TRACE_PROPAGATION_STYLE ||
- DD_TRACE_PROPAGATION_STYLE_INJECT ||
- DD_TRACE_PROPAGATION_STYLE_EXTRACT
- setBoolean(target, 'tracePropagationStyle.otelPropagators',
- useDdStyle ? false : !!OTEL_PROPAGATORS)
-
- // Use OTEL_PROPAGATORS if no DD-specific vars are set
- if (!useDdStyle && OTEL_PROPAGATORS) {
- const otelStyles = normalizePropagationStyle(OTEL_PROPAGATORS)
- // Validate OTEL propagators
- for (const style of otelStyles || []) {
- if (!VALID_PROPAGATION_STYLES.has(style)) {
- log.warn('unexpected value %s for OTEL_PROPAGATORS environment variable', style)
- getCounter('otel.env.invalid', 'DD_TRACE_PROPAGATION_STYLE', 'OTEL_PROPAGATORS').inc()
+ if (this.DD_TRACE_TAGS) {
+ // TODO: This is a hack to keep the order of the tags as before.
+ // That hack is not sufficient, since it does not handle other cases where the tags are set by the user.
+ if (trackedConfigOrigins.get('tags') === 'code') {
+ for (const [key, value] of Object.entries(this.DD_TRACE_TAGS)) {
+ // Not replacing existing tags keeps the order of the tags as before.
+ if (!this.tags[key]) {
+ this.tags[key] = value
}
}
- // Set inject/extract from OTEL_PROPAGATORS
- if (otelStyles) {
- setArray(target, 'tracePropagationStyle.inject', otelStyles)
- setArray(target, 'tracePropagationStyle.extract', otelStyles)
- }
+ } else {
+ Object.assign(this.tags, this.DD_TRACE_TAGS)
}
}
- setBoolean(target, 'traceWebsocketMessagesEnabled', DD_TRACE_WEBSOCKET_MESSAGES_ENABLED)
- setBoolean(target, 'traceWebsocketMessagesInheritSampling', DD_TRACE_WEBSOCKET_MESSAGES_INHERIT_SAMPLING)
- setBoolean(target, 'traceWebsocketMessagesSeparateTraces', DD_TRACE_WEBSOCKET_MESSAGES_SEPARATE_TRACES)
- setBoolean(target, 'tracing', DD_TRACING_ENABLED)
- setString(target, 'version', DD_VERSION || tags.version)
- setBoolean(target, 'inferredProxyServicesEnabled', DD_TRACE_INFERRED_PROXY_SERVICES_ENABLED)
- setBoolean(target, 'trace.aws.addSpanPointers', DD_TRACE_AWS_ADD_SPAN_POINTERS)
- setString(target, 'trace.dynamoDb.tablePrimaryKeys', DD_TRACE_DYNAMODB_TABLE_PRIMARY_KEYS)
- setArray(target, 'graphqlErrorExtensions', DD_TRACE_GRAPHQL_ERROR_EXTENSIONS)
- setBoolean(target, 'trace.nativeSpanEvents', DD_TRACE_NATIVE_SPAN_EVENTS)
- target['vertexai.spanPromptCompletionSampleRate'] = maybeFloat(DD_VERTEXAI_SPAN_PROMPT_COMPLETION_SAMPLE_RATE)
- target['vertexai.spanCharLimit'] = maybeInt(DD_VERTEXAI_SPAN_CHAR_LIMIT)
- }
- #applyOptions (options) {
- const opts = this.#options
- const tags = {}
-
- options = this.#optionsArg = { ingestion: {}, ...options, ...opts }
-
- tagger.add(tags, options.tags)
-
- setBoolean(opts, 'apmTracingEnabled', options.apmTracingEnabled ??
- (options.experimental?.appsec?.standalone && !options.experimental.appsec.standalone.enabled)
- )
- setBoolean(opts, 'appsec.apiSecurity.enabled', options.appsec?.apiSecurity?.enabled)
- setBoolean(opts, 'appsec.apiSecurity.endpointCollectionEnabled',
- options.appsec?.apiSecurity?.endpointCollectionEnabled)
- opts['appsec.apiSecurity.endpointCollectionMessageLimit'] =
- maybeInt(options.appsec?.apiSecurity?.endpointCollectionMessageLimit)
- opts['appsec.blockedTemplateGraphql'] = maybeFile(options.appsec?.blockedTemplateGraphql)
- opts['appsec.blockedTemplateHtml'] = maybeFile(options.appsec?.blockedTemplateHtml)
- this.#optsUnprocessed['appsec.blockedTemplateHtml'] = options.appsec?.blockedTemplateHtml
- opts['appsec.blockedTemplateJson'] = maybeFile(options.appsec?.blockedTemplateJson)
- this.#optsUnprocessed['appsec.blockedTemplateJson'] = options.appsec?.blockedTemplateJson
- setBoolean(opts, 'appsec.enabled', options.appsec?.enabled)
- setString(opts, 'appsec.eventTracking.mode', options.appsec?.eventTracking?.mode)
- setBoolean(
- opts,
- 'appsec.extendedHeadersCollection.enabled',
- options.appsec?.extendedHeadersCollection?.enabled
- )
- setBoolean(
- opts,
- 'appsec.extendedHeadersCollection.redaction',
- options.appsec?.extendedHeadersCollection?.redaction
- )
- opts['appsec.extendedHeadersCollection.maxHeaders'] = options.appsec?.extendedHeadersCollection?.maxHeaders
- setString(opts, 'appsec.obfuscatorKeyRegex', options.appsec?.obfuscatorKeyRegex)
- setString(opts, 'appsec.obfuscatorValueRegex', options.appsec?.obfuscatorValueRegex)
- setBoolean(opts, 'appsec.rasp.enabled', options.appsec?.rasp?.enabled)
- setBoolean(opts, 'appsec.rasp.bodyCollection', options.appsec?.rasp?.bodyCollection)
- opts['appsec.rateLimit'] = maybeInt(options.appsec?.rateLimit)
- this.#optsUnprocessed['appsec.rateLimit'] = options.appsec?.rateLimit
- setString(opts, 'appsec.rules', options.appsec?.rules)
- setBoolean(opts, 'appsec.stackTrace.enabled', options.appsec?.stackTrace?.enabled)
- opts['appsec.stackTrace.maxDepth'] = maybeInt(options.appsec?.stackTrace?.maxDepth)
- this.#optsUnprocessed['appsec.stackTrace.maxDepth'] = options.appsec?.stackTrace?.maxDepth
- opts['appsec.stackTrace.maxStackTraces'] = maybeInt(options.appsec?.stackTrace?.maxStackTraces)
- this.#optsUnprocessed['appsec.stackTrace.maxStackTraces'] = options.appsec?.stackTrace?.maxStackTraces
- opts['appsec.wafTimeout'] = maybeInt(options.appsec?.wafTimeout)
- this.#optsUnprocessed['appsec.wafTimeout'] = options.appsec?.wafTimeout
- setBoolean(opts, 'clientIpEnabled', options.clientIpEnabled)
- setString(opts, 'clientIpHeader', options.clientIpHeader?.toLowerCase())
- if (options.cloudPayloadTagging?.request || options.cloudPayloadTagging?.response) {
- if (options.cloudPayloadTagging.request) {
- setBoolean(opts, 'cloudPayloadTagging.requestsEnabled', true)
- }
- if (options.cloudPayloadTagging.response) {
- setBoolean(opts, 'cloudPayloadTagging.responsesEnabled', true)
- }
- opts['cloudPayloadTagging.rules'] = appendRules(
- splitJSONPathRules(options.cloudPayloadTagging.request),
- splitJSONPathRules(options.cloudPayloadTagging.response)
- )
- }
- if (options.cloudPayloadTagging?.requestsEnabled !== undefined) {
- setBoolean(opts, 'cloudPayloadTagging.requestsEnabled', options.cloudPayloadTagging.requestsEnabled)
- }
- if (options.cloudPayloadTagging?.responsesEnabled !== undefined) {
- setBoolean(opts, 'cloudPayloadTagging.responsesEnabled', options.cloudPayloadTagging.responsesEnabled)
- }
- opts['cloudPayloadTagging.maxDepth'] = maybeInt(options.cloudPayloadTagging?.maxDepth)
- opts.baggageMaxBytes = options.baggageMaxBytes
- opts.baggageMaxItems = options.baggageMaxItems
- setArray(opts, 'baggageTagKeys', options.baggageTagKeys)
- setBoolean(opts, 'codeOriginForSpans.enabled', options.codeOriginForSpans?.enabled)
- setBoolean(
- opts,
- 'codeOriginForSpans.experimental.exit_spans.enabled',
- options.codeOriginForSpans?.experimental?.exit_spans?.enabled
- )
- setString(opts, 'dbmPropagationMode', options.dbmPropagationMode)
- setBoolean(opts, 'dbm.injectSqlBaseHash', options.dbm?.injectSqlBaseHash)
- if (options.dogstatsd) {
- setString(opts, 'dogstatsd.hostname', options.dogstatsd.hostname)
- setString(opts, 'dogstatsd.port', options.dogstatsd.port)
+ if (!this.#parsedDdTags) {
+ this.#parsedDdTags = rfdc(this.tags)
}
- setBoolean(opts, 'dsmEnabled', options.dsmEnabled)
- opts['dynamicInstrumentation.captureTimeoutMs'] = maybeInt(options.dynamicInstrumentation?.captureTimeoutMs)
- this.#optsUnprocessed['dynamicInstrumentation.captureTimeoutMs'] = options.dynamicInstrumentation?.captureTimeoutMs
- setBoolean(opts, 'dynamicInstrumentation.enabled', options.dynamicInstrumentation?.enabled)
- setString(opts, 'dynamicInstrumentation.probeFile', options.dynamicInstrumentation?.probeFile)
- setArray(
- opts,
- 'dynamicInstrumentation.redactedIdentifiers',
- options.dynamicInstrumentation?.redactedIdentifiers
- )
- setArray(
- opts,
- 'dynamicInstrumentation.redactionExcludedIdentifiers',
- options.dynamicInstrumentation?.redactionExcludedIdentifiers
- )
- opts['dynamicInstrumentation.uploadIntervalSeconds'] =
- maybeFloat(options.dynamicInstrumentation?.uploadIntervalSeconds)
- this.#optsUnprocessed['dynamicInstrumentation.uploadIntervalSeconds'] =
- options.dynamicInstrumentation?.uploadIntervalSeconds
- setString(opts, 'env', options.env || tags.env)
- setBoolean(opts, 'experimental.aiguard.block', options.experimental?.aiguard?.block)
- setBoolean(opts, 'experimental.aiguard.enabled', options.experimental?.aiguard?.enabled)
- setString(opts, 'experimental.aiguard.endpoint', options.experimental?.aiguard?.endpoint)
- opts['experimental.aiguard.maxMessagesLength'] = maybeInt(options.experimental?.aiguard?.maxMessagesLength)
- this.#optsUnprocessed['experimental.aiguard.maxMessagesLength'] = options.experimental?.aiguard?.maxMessagesLength
- opts['experimental.aiguard.maxContentSize'] = maybeInt(options.experimental?.aiguard?.maxContentSize)
- this.#optsUnprocessed['experimental.aiguard.maxContentSize'] = options.experimental?.aiguard?.maxContentSize
- opts['experimental.aiguard.timeout'] = maybeInt(options.experimental?.aiguard?.timeout)
- this.#optsUnprocessed['experimental.aiguard.timeout'] = options.experimental?.aiguard?.timeout
- setBoolean(opts, 'experimental.enableGetRumData', options.experimental?.enableGetRumData)
- setString(opts, 'experimental.exporter', options.experimental?.exporter)
- setBoolean(opts, 'experimental.flaggingProvider.enabled', options.experimental?.flaggingProvider?.enabled)
- opts['experimental.flaggingProvider.initializationTimeoutMs'] = maybeInt(
- options.experimental?.flaggingProvider?.initializationTimeoutMs
- )
- this.#optsUnprocessed['experimental.flaggingProvider.initializationTimeoutMs'] =
- options.experimental?.flaggingProvider?.initializationTimeoutMs
- opts.flushInterval = maybeInt(options.flushInterval)
- this.#optsUnprocessed.flushInterval = options.flushInterval
- opts.flushMinSpans = maybeInt(options.flushMinSpans)
- this.#optsUnprocessed.flushMinSpans = options.flushMinSpans
- setArray(opts, 'headerTags', options.headerTags)
- setString(opts, 'hostname', options.hostname)
- opts['iast.dbRowsToTaint'] = maybeInt(options.iast?.dbRowsToTaint)
- setBoolean(opts, 'iast.deduplicationEnabled', options.iast && options.iast.deduplicationEnabled)
- setBoolean(opts, 'iast.enabled',
- options.iast && (options.iast === true || options.iast.enabled === true))
- opts['iast.maxConcurrentRequests'] = maybeInt(options.iast?.maxConcurrentRequests)
- this.#optsUnprocessed['iast.maxConcurrentRequests'] = options.iast?.maxConcurrentRequests
- opts['iast.maxContextOperations'] = maybeInt(options.iast?.maxContextOperations)
- this.#optsUnprocessed['iast.maxContextOperations'] = options.iast?.maxContextOperations
- setBoolean(opts, 'iast.redactionEnabled', options.iast?.redactionEnabled)
- setString(opts, 'iast.redactionNamePattern', options.iast?.redactionNamePattern)
- setString(opts, 'iast.redactionValuePattern', options.iast?.redactionValuePattern)
- const iastRequestSampling = maybeInt(options.iast?.requestSampling)
- if (iastRequestSampling !== undefined && iastRequestSampling > -1 && iastRequestSampling < 101) {
- opts['iast.requestSampling'] = iastRequestSampling
- this.#optsUnprocessed['iast.requestSampling'] = options.iast?.requestSampling
- }
- if (DD_MAJOR < 6) {
- opts['iast.securityControlsConfiguration'] = options.iast?.securityControlsConfiguration
+
+ if (!this.env && this.tags.env !== undefined) {
+ setAndTrack(this, 'env', this.tags.env)
}
- setBoolean(opts, 'iast.stackTrace.enabled', options.iast?.stackTrace?.enabled)
- setString(opts, 'iast.telemetryVerbosity', options.iast && options.iast.telemetryVerbosity)
- setBoolean(opts, 'isCiVisibility', options.isCiVisibility)
- setBoolean(opts, 'legacyBaggageEnabled', options.legacyBaggageEnabled)
- setBoolean(opts, 'llmobs.agentlessEnabled', options.llmobs?.agentlessEnabled)
- setString(opts, 'llmobs.mlApp', options.llmobs?.mlApp)
- setBoolean(opts, 'logInjection', options.logInjection)
- opts.lookup = options.lookup
- setBoolean(opts, 'middlewareTracingEnabled', options.middlewareTracingEnabled)
- setBoolean(opts, 'openAiLogsEnabled', options.openAiLogsEnabled)
- opts.peerServiceMapping = options.peerServiceMapping
- setBoolean(opts, 'plugins', options.plugins)
- setString(opts, 'port', options.port)
- const strProfiling = String(options.profiling)
- if (['true', 'false', 'auto'].includes(strProfiling)) {
- setString(opts, 'profiling.enabled', strProfiling)
+
+ if (!this.version) {
+ setAndTrack(this, 'version', this.tags.version || pkg.version)
+ this.tags.version ??= pkg.version
}
- setString(opts, 'protocolVersion', options.protocolVersion)
- if (options.remoteConfig) {
- opts['remoteConfig.pollInterval'] = maybeFloat(options.remoteConfig.pollInterval)
- this.#optsUnprocessed['remoteConfig.pollInterval'] = options.remoteConfig.pollInterval
+
+ let isServiceNameInferred = false
+ if (!trackedConfigOrigins.has('service')) {
+ if (this.tags.service) {
+ setAndTrack(this, 'service', this.tags.service)
+ } else {
+ const NX_TASK_TARGET_PROJECT = getEnvironmentVariable('NX_TASK_TARGET_PROJECT')
+ if (NX_TASK_TARGET_PROJECT) {
+ if (this.DD_ENABLE_NX_SERVICE_NAME) {
+ setAndTrack(this, 'service', NX_TASK_TARGET_PROJECT)
+ isServiceNameInferred = true
+ } else if (DD_MAJOR < 6) {
+ log.warn(
+ // eslint-disable-next-line eslint-rules/eslint-log-printf-style
+ 'NX_TASK_TARGET_PROJECT is set but no service name was configured. In v6, NX_TASK_TARGET_PROJECT will ' +
+ 'be used as the default service name. Set DD_ENABLE_NX_SERVICE_NAME=true to opt-in to this behavior ' +
+ 'now, or set a service name explicitly.'
+ )
+ }
+ }
+ }
+
+ if (!this.service) {
+ const serverlessName = IS_SERVERLESS
+ ? (
+ getEnvironmentVariable('AWS_LAMBDA_FUNCTION_NAME') ||
+ getEnvironmentVariable('FUNCTION_NAME') || // Google Cloud Function Name set by deprecated runtimes
+ getEnvironmentVariable('K_SERVICE') || // Google Cloud Function Name set by newer runtimes
+ getEnvironmentVariable('WEBSITE_SITE_NAME') // set by Azure Functions
+ )
+ : undefined
+
+ setAndTrack(this, 'service', serverlessName || pkg.name || 'node')
+ this.tags.service ??= /** @type {string} */ (this.service)
+ isServiceNameInferred = true
+ }
}
- setBoolean(opts, 'reportHostname', options.reportHostname)
- setBoolean(opts, 'runtimeMetrics.enabled', options.runtimeMetrics?.enabled)
- setBoolean(opts, 'runtimeMetrics.eventLoop', options.runtimeMetrics?.eventLoop)
- setBoolean(opts, 'runtimeMetrics.gc', options.runtimeMetrics?.gc)
- setBoolean(opts, 'runtimeMetricsRuntimeId', options.runtimeMetricsRuntimeId)
- setArray(opts, 'sampler.spanSamplingRules', reformatSpanSamplingRules(options.spanSamplingRules))
- setUnit(opts, 'sampleRate', options.sampleRate ?? options.ingestion.sampleRate)
- opts['sampler.rateLimit'] = maybeInt(options.rateLimit ?? options.ingestion.rateLimit)
- setSamplingRule(opts, 'sampler.rules', options.samplingRules)
- const optService = options.service || tags.service
- setString(opts, 'service', optService)
- if (optService) {
- setBoolean(opts, 'isServiceNameInferred', false)
+ setAndTrack(this, 'isServiceNameInferred', isServiceNameInferred)
+
+ // Add missing tags, in case they are defined otherwise.
+ if (this.service) {
+ this.tags.service = this.service
}
- opts.serviceMapping = options.serviceMapping
- setString(opts, 'site', options.site)
- if (options.spanAttributeSchema) {
- setString(opts, 'spanAttributeSchema', validateNamingVersion(options.spanAttributeSchema))
- this.#optsUnprocessed.spanAttributeSchema = options.spanAttributeSchema
+ if (this.env) {
+ this.tags.env = this.env
}
- setBoolean(opts, 'spanRemoveIntegrationFromService', options.spanRemoveIntegrationFromService)
- setBoolean(opts, 'startupLogs', options.startupLogs)
- setTags(opts, 'tags', tags)
- setBoolean(opts, 'traceId128BitGenerationEnabled', options.traceId128BitGenerationEnabled)
- setBoolean(opts, 'traceId128BitLoggingEnabled', options.traceId128BitLoggingEnabled)
- setBoolean(opts, 'traceWebsocketMessagesEnabled', options.traceWebsocketMessagesEnabled)
- setBoolean(opts, 'traceWebsocketMessagesInheritSampling', options.traceWebsocketMessagesInheritSampling)
- setBoolean(opts, 'traceWebsocketMessagesSeparateTraces', options.traceWebsocketMessagesSeparateTraces)
- setString(opts, 'version', options.version || tags.version)
- setBoolean(opts, 'inferredProxyServicesEnabled', options.inferredProxyServicesEnabled)
- setBoolean(opts, 'graphqlErrorExtensions', options.graphqlErrorExtensions)
- setBoolean(opts, 'trace.nativeSpanEvents', options.trace?.nativeSpanEvents)
- if (options.tracePropagationStyle) {
- setArray(opts, 'tracePropagationStyle.inject',
- normalizePropagationStyle(options.tracePropagationStyle.inject ?? options.tracePropagationStyle))
- setArray(opts, 'tracePropagationStyle.extract',
- normalizePropagationStyle(options.tracePropagationStyle.extract ?? options.tracePropagationStyle))
+ if (this.version) {
+ this.tags.version = this.version
}
+ this.tags['runtime-id'] = RUNTIME_ID
- // For LLMObs, we want the environment variable to take precedence over the options.
- // This is reliant on environment config being set before options.
- // This is to make sure the origins of each value are tracked appropriately for telemetry.
- // We'll only set `llmobs.enabled` on the opts when it's not set on the environment, and options.llmobs is provided.
- if (this.#env['llmobs.enabled'] == null && options.llmobs) {
- setBoolean(opts, 'llmobs.enabled', true)
+ if (IS_SERVERLESS) {
+ setAndTrack(this, 'telemetry.enabled', false)
+ setAndTrack(this, 'crashtracking.enabled', false)
+ setAndTrack(this, 'remoteConfig.enabled', false)
}
- }
-
- #isCiVisibility () {
- return this.#optionsArg.isCiVisibility ?? this.#defaults.isCiVisibility
- }
-
- #getHostname () {
- const DD_CIVISIBILITY_AGENTLESS_URL = getEnv('DD_CIVISIBILITY_AGENTLESS_URL')
- const url = DD_CIVISIBILITY_AGENTLESS_URL
- ? new URL(DD_CIVISIBILITY_AGENTLESS_URL)
- : getAgentUrl(this.#getTraceAgentUrl(), this.#optionsArg)
- const DD_AGENT_HOST = this.#optionsArg.hostname ??
- getEnv('DD_AGENT_HOST') ??
- defaults.hostname
- return DD_AGENT_HOST || url?.hostname
- }
-
- #getSpanComputePeerService () {
- const DD_TRACE_SPAN_ATTRIBUTE_SCHEMA = validateNamingVersion(
- this.#optionsArg.spanAttributeSchema ??
- getEnv('DD_TRACE_SPAN_ATTRIBUTE_SCHEMA')
- )
-
- const peerServiceSet = (
- this.#optionsArg.hasOwnProperty('spanComputePeerService') ||
- getEnv('DD_TRACE_PEER_SERVICE_DEFAULTS_ENABLED') !== undefined
- )
- const peerServiceValue = this.#optionsArg.spanComputePeerService ??
- getEnv('DD_TRACE_PEER_SERVICE_DEFAULTS_ENABLED')
-
- const spanComputePeerService = (
- DD_TRACE_SPAN_ATTRIBUTE_SCHEMA === 'v0'
- // In v0, peer service is computed only if it is explicitly set to true
- ? peerServiceSet && isTrue(peerServiceValue)
- // In >v0, peer service is false only if it is explicitly set to false
- : (peerServiceSet ? !isFalse(peerServiceValue) : true)
- )
-
- return spanComputePeerService
- }
- #isTraceStatsComputationEnabled () {
- const apmTracingEnabled = this.#options.apmTracingEnabled !== false &&
- this.#env.apmTracingEnabled !== false
-
- return apmTracingEnabled && (
- this.#optionsArg.stats ??
- getEnv('DD_TRACE_STATS_COMPUTATION_ENABLED') ??
- (getIsGCPFunction() || getIsAzureFunction())
- )
- }
-
- #getTraceAgentUrl () {
- return this.#optionsArg.url ??
- getEnv('DD_TRACE_AGENT_URL') ??
- null
- }
-
- // handles values calculated from a mixture of options and env vars
- #applyCalculated () {
- const calc = this.#calculated
-
- const DD_CIVISIBILITY_AGENTLESS_URL = getEnv('DD_CIVISIBILITY_AGENTLESS_URL')
-
- calc.url = DD_CIVISIBILITY_AGENTLESS_URL
- ? new URL(DD_CIVISIBILITY_AGENTLESS_URL)
- : getAgentUrl(this.#getTraceAgentUrl(), this.#optionsArg)
+ // TODO: Should this unconditionally be disabled?
+ if (getEnvironmentVariable('JEST_WORKER_ID') && !trackedConfigOrigins.has('telemetry.enabled')) {
+ setAndTrack(this, 'telemetry.enabled', false)
+ }
// Experimental agentless APM span intake
// When enabled, sends spans directly to Datadog intake without an agent
- const agentlessEnabled = isTrue(getEnv('_DD_APM_TRACING_AGENTLESS_ENABLED'))
+ // TODO: Replace this with a proper configuration
+ const agentlessEnabled = isTrue(getEnvironmentVariable('_DD_APM_TRACING_AGENTLESS_ENABLED'))
if (agentlessEnabled) {
- setString(calc, 'experimental.exporter', 'agentless')
- // Disable rate limiting - server-side sampling will be used
- calc['sampler.rateLimit'] = -1
+ setAndTrack(this, 'experimental.exporter', 'agentless')
// Disable client-side stats computation
- setBoolean(calc, 'stats.enabled', false)
+ setAndTrack(this, 'stats.enabled', false)
// Enable hostname reporting
- setBoolean(calc, 'reportHostname', true)
+ setAndTrack(this, 'reportHostname', true)
+ // Disable rate limiting - server-side sampling will be used
+ setAndTrack(this, 'sampler.rateLimit', -1)
// Clear sampling rules - server-side sampling handles this
- calc['sampler.rules'] = []
+ setAndTrack(this, 'sampler.rules', [])
// Agentless intake only accepts 64-bit trace IDs; disable 128-bit generation
- setBoolean(calc, 'traceId128BitGenerationEnabled', false)
- }
-
- if (this.#isCiVisibility()) {
- setBoolean(calc, 'isEarlyFlakeDetectionEnabled',
- getEnv('DD_CIVISIBILITY_EARLY_FLAKE_DETECTION_ENABLED') ?? true)
- setBoolean(calc, 'isFlakyTestRetriesEnabled', getEnv('DD_CIVISIBILITY_FLAKY_RETRY_ENABLED') ?? true)
- calc.flakyTestRetriesCount = maybeInt(getEnv('DD_CIVISIBILITY_FLAKY_RETRY_COUNT')) ?? 5
- setBoolean(calc, 'isIntelligentTestRunnerEnabled', isTrue(isCiVisibilityItrEnabled()))
- setBoolean(calc, 'isManualApiEnabled', !isFalse(getEnv('DD_CIVISIBILITY_MANUAL_API_ENABLED')))
- setString(calc, 'ciVisibilityTestSessionName', getEnv('DD_TEST_SESSION_NAME'))
- setBoolean(calc, 'ciVisAgentlessLogSubmissionEnabled',
- isTrue(getEnv('DD_AGENTLESS_LOG_SUBMISSION_ENABLED')))
- setBoolean(calc, 'isTestDynamicInstrumentationEnabled',
- !isFalse(getEnv('DD_TEST_FAILED_TEST_REPLAY_ENABLED')))
- setBoolean(calc, 'isServiceUserProvided', !!this.#env.service)
- setBoolean(calc, 'isTestManagementEnabled', !isFalse(getEnv('DD_TEST_MANAGEMENT_ENABLED')))
- calc.testManagementAttemptToFixRetries = maybeInt(getEnv('DD_TEST_MANAGEMENT_ATTEMPT_TO_FIX_RETRIES')) ?? 20
- setBoolean(calc, 'isImpactedTestsEnabled',
- !isFalse(getEnv('DD_CIVISIBILITY_IMPACTED_TESTS_DETECTION_ENABLED')))
- }
-
- // Disable log injection when OTEL logs are enabled
- // OTEL logs and DD log injection are mutually exclusive
- if (this.#env.otelLogsEnabled) {
- setBoolean(calc, 'logInjection', false)
+ if (!trackedConfigOrigins.has('traceId128BitGenerationEnabled')) {
+ setAndTrack(this, 'traceId128BitGenerationEnabled', false)
+ }
}
- calc['dogstatsd.hostname'] = this.#getHostname()
-
- // Compute OTLP logs and metrics URLs to send payloads to the active Datadog Agent
- const agentHostname = this.#getHostname()
- calc.otelLogsUrl = `http://${agentHostname}:${DEFAULT_OTLP_PORT}`
- calc.otelMetricsUrl = `http://${agentHostname}:${DEFAULT_OTLP_PORT}/v1/metrics`
- calc.otelUrl = `http://${agentHostname}:${DEFAULT_OTLP_PORT}`
- calc['telemetry.heartbeatInterval'] = maybeInt(Math.floor(this.#defaults['telemetry.heartbeatInterval'] * 1000))
-
- setBoolean(calc, 'isGitUploadEnabled',
- calc.isIntelligentTestRunnerEnabled && !isFalse(getEnv('DD_CIVISIBILITY_GIT_UPLOAD_ENABLED')))
-
- // Enable resourceRenamingEnabled when appsec is enabled and only
- // if DD_TRACE_RESOURCE_RENAMING_ENABLED is not explicitly set
- if (this.#env.resourceRenamingEnabled === undefined) {
- const appsecEnabled = this.#options['appsec.enabled'] ?? this.#env['appsec.enabled']
- if (appsecEnabled) {
- setBoolean(calc, 'resourceRenamingEnabled', true)
+ // Apply all fallbacks to the calculated config.
+ for (const [configName, alias] of fallbackConfigurations) {
+ if (!trackedConfigOrigins.has(configName) && trackedConfigOrigins.has(alias)) {
+ setAndTrack(this, configName, this[alias])
}
}
- setBoolean(calc, 'spanComputePeerService', this.#getSpanComputePeerService())
- setBoolean(calc, 'stats.enabled', this.#isTraceStatsComputationEnabled())
- const defaultPropagationStyle = getDefaultPropagationStyle(this.#optionsArg)
- if (defaultPropagationStyle.length > 2) {
- // b3 was added, so update defaults to include it
- // This will only be used if no other source (options, env, stable config) set the value
- calc['tracePropagationStyle.inject'] = defaultPropagationStyle
- calc['tracePropagationStyle.extract'] = defaultPropagationStyle
+ const DEFAULT_OTLP_PORT = '4318'
+ if (!this.otelLogsUrl) {
+ setAndTrack(this, 'otelLogsUrl', `http://${agentHostname}:${DEFAULT_OTLP_PORT}`)
}
- }
-
- /**
- * Applies remote configuration options from APM_TRACING configs.
- *
- * @param {import('./remote_config').RemoteConfigOptions} options - Configurations received via Remote Config
- */
- #applyRemoteConfig (options) {
- const opts = this.#remote
-
- setBoolean(opts, 'dynamicInstrumentation.enabled', options.dynamic_instrumentation_enabled)
- setBoolean(opts, 'codeOriginForSpans.enabled', options.code_origin_enabled)
- setUnit(opts, 'sampleRate', options.tracing_sampling_rate)
- setBoolean(opts, 'logInjection', options.log_injection_enabled)
- setBoolean(opts, 'tracing', options.tracing_enabled)
- this.#remoteUnprocessed['sampler.rules'] = options.tracing_sampling_rules
- setSamplingRule(opts, 'sampler.rules', reformatTagsFromRC(options.tracing_sampling_rules))
-
- opts.headerTags = options.tracing_header_tags?.map(tag => {
- return tag.tag_name ? `${tag.header}:${tag.tag_name}` : tag.header
- })
-
- const tags = {}
- tagger.add(tags, options.tracing_tags)
- if (Object.keys(tags).length) {
- tags['runtime-id'] = RUNTIME_ID
+ if (!this.otelMetricsUrl) {
+ setAndTrack(this, 'otelMetricsUrl', `http://${agentHostname}:${DEFAULT_OTLP_PORT}/v1/metrics`)
}
- setTags(opts, 'tags', tags)
- }
- #setAndTrackChange ({ name, value, origin, unprocessedValue, changes }) {
- set(this, name, value)
-
- if (!changeTracker[name]) {
- changeTracker[name] = {}
+ if (process.platform === 'win32') {
+ // OOM monitoring does not work properly on Windows, so it will be disabled.
+ deactivateIfEnabledAndWarnOnWindows(this, 'DD_PROFILING_EXPERIMENTAL_OOM_MONITORING_ENABLED')
+ // Profiler sampling contexts are not available on Windows, so features
+ // depending on those (code hotspots and endpoint collection) need to be disabled on Windows.
+ deactivateIfEnabledAndWarnOnWindows(this, 'DD_PROFILING_CODEHOTSPOTS_ENABLED')
+ deactivateIfEnabledAndWarnOnWindows(this, 'DD_PROFILING_ENDPOINT_COLLECTION_ENABLED')
+ deactivateIfEnabledAndWarnOnWindows(this, 'DD_PROFILING_CPU_ENABLED')
+ deactivateIfEnabledAndWarnOnWindows(this, 'DD_PROFILING_TIMELINE_ENABLED')
+ deactivateIfEnabledAndWarnOnWindows(this, 'DD_PROFILING_ASYNC_CONTEXT_FRAME_ENABLED')
}
- const originExists = origin in changeTracker[name]
- const oldValue = changeTracker[name][origin]
+ // Single tags update is tracked as a calculated value.
+ setAndTrack(this, 'tags', this.tags)
- if (!originExists || oldValue !== value) {
- changeTracker[name][origin] = value
- changes.push({
- name,
- value: unprocessedValue || value,
- origin,
- })
- }
- }
-
- // TODO: Report origin changes and errors to telemetry.
- // TODO: Deeply merge configurations.
- // TODO: Move change tracking to telemetry.
- // for telemetry reporting, `name`s in `containers` need to be keys from:
- // https://github.com/DataDog/dd-go/blob/prod/trace/apps/tracer-telemetry-intake/telemetry-payload/static/config_norm_rules.json
- #merge () {
- const changes = []
- const sources = this.#getSourcesInOrder()
-
- for (const name of Object.keys(this.#defaults)) {
- // Use reverse order for merge (lowest priority first)
- for (let i = sources.length - 1; i >= 0; i--) {
- const { container, origin, unprocessed } = sources[i]
- const value = container[name]
- if (value != null || container === this.#defaults) {
- this.#setAndTrackChange({
- name,
- value,
- origin,
- unprocessedValue: unprocessed?.[name],
- changes,
- })
- }
- }
- }
- this.sampler.sampleRate = this.sampleRate
- updateConfig(changes, this)
+ telemetry.updateConfig([...configWithOrigin.values()], this)
}
+ // TODO: Move outside of config. This is unrelated to the config system.
#loadGitMetadata () {
- // try to read Git metadata from the environment variables
- this.repositoryUrl = removeUserSensitiveInfo(
- getEnv('DD_GIT_REPOSITORY_URL') ?? this.tags[GIT_REPOSITORY_URL]
- )
- this.commitSHA = getEnv('DD_GIT_COMMIT_SHA') ?? this.tags[GIT_COMMIT_SHA]
+ // Try to read Git metadata from the environment variables
+ this.repositoryUrl = removeUserSensitiveInfo(this.DD_GIT_REPOSITORY_URL ?? this.tags[GIT_REPOSITORY_URL])
+ this.commitSHA = this.DD_GIT_COMMIT_SHA ?? this.tags[GIT_COMMIT_SHA]
- // otherwise, try to read Git metadata from the git.properties file
+ // Otherwise, try to read Git metadata from the git.properties file
if (!this.repositoryUrl || !this.commitSHA) {
- const DD_GIT_PROPERTIES_FILE = getEnv('DD_GIT_PROPERTIES_FILE')
+ const DD_GIT_PROPERTIES_FILE = this.DD_GIT_PROPERTIES_FILE
const gitPropertiesFile = DD_GIT_PROPERTIES_FILE ?? `${process.cwd()}/git.properties`
- let gitPropertiesString
try {
- gitPropertiesString = fs.readFileSync(gitPropertiesFile, 'utf8')
- } catch (e) {
+ const gitPropertiesString = fs.readFileSync(gitPropertiesFile, 'utf8')
+ const { commitSHA, repositoryUrl } = getGitMetadataFromGitProperties(gitPropertiesString)
+ this.commitSHA ??= commitSHA
+ this.repositoryUrl ??= repositoryUrl
+ } catch (error) {
// Only log error if the user has set a git.properties path
if (DD_GIT_PROPERTIES_FILE) {
- log.error('Error reading DD_GIT_PROPERTIES_FILE: %s', gitPropertiesFile, e)
+ log.error('Error reading DD_GIT_PROPERTIES_FILE: %s', gitPropertiesFile, error)
}
}
- if (gitPropertiesString) {
- const { commitSHA, repositoryUrl } = getGitMetadataFromGitProperties(gitPropertiesString)
- this.commitSHA = this.commitSHA || commitSHA
- this.repositoryUrl = this.repositoryUrl || repositoryUrl
- }
}
- // otherwise, try to read Git metadata from the .git/ folder
- if (!this.repositoryUrl || !this.commitSHA) {
- const DD_GIT_FOLDER_PATH = getEnv('DD_GIT_FOLDER_PATH')
- const gitFolderPath = DD_GIT_FOLDER_PATH ?? path.join(process.cwd(), '.git')
- if (!this.repositoryUrl) {
- // try to read git config (repository URL)
- const gitConfigPath = path.join(gitFolderPath, 'config')
- try {
- const gitConfigContent = fs.readFileSync(gitConfigPath, 'utf8')
- if (gitConfigContent) {
- this.repositoryUrl = getRemoteOriginURL(gitConfigContent)
- }
- } catch (e) {
- // Only log error if the user has set a .git/ path
- if (DD_GIT_FOLDER_PATH) {
- log.error('Error reading git config: %s', gitConfigPath, e)
- }
+
+ // Otherwise, try to read Git metadata from the .git/ folder
+ const DD_GIT_FOLDER_PATH = this.DD_GIT_FOLDER_PATH
+ const gitFolderPath = DD_GIT_FOLDER_PATH ?? path.join(process.cwd(), '.git')
+
+ if (!this.repositoryUrl) {
+ // Try to read git config (repository URL)
+ const gitConfigPath = path.join(gitFolderPath, 'config')
+ try {
+ const gitConfigContent = fs.readFileSync(gitConfigPath, 'utf8')
+ if (gitConfigContent) {
+ this.repositoryUrl = getRemoteOriginURL(gitConfigContent)
}
- }
- if (!this.commitSHA) {
- // try to read git HEAD (commit SHA)
- const gitHeadSha = resolveGitHeadSHA(gitFolderPath)
- if (gitHeadSha) {
- this.commitSHA = gitHeadSha
+ } catch (error) {
+ // Only log error if the user has set a .git/ path
+ if (DD_GIT_FOLDER_PATH) {
+ log.error('Error reading git config: %s', gitConfigPath, error)
}
}
}
+ // Try to read git HEAD (commit SHA)
+ this.commitSHA ??= resolveGitHeadSHA(gitFolderPath)
}
}
-function getCounter (event, ddVar, otelVar) {
- const counters = TELEMETRY_COUNTERS.get(event)
- const tags = []
- const ddVarPrefix = 'config_datadog:'
- const otelVarPrefix = 'config_opentelemetry:'
- if (ddVar) {
- ddVar = ddVarPrefix + ddVar.toLowerCase()
- tags.push(ddVar)
- }
- if (otelVar) {
- otelVar = otelVarPrefix + otelVar.toLowerCase()
- tags.push(otelVar)
- }
-
- if (!(otelVar in counters)) counters[otelVar] = {}
-
- const counter = tracerMetrics.count(event, tags)
- counters[otelVar][ddVar] = counter
- return counter
-}
-
-function getFromOtelSamplerMap (otelTracesSampler, otelTracesSamplerArg) {
- const OTEL_TRACES_SAMPLER_MAPPING = {
- always_on: '1.0',
- always_off: '0.0',
- traceidratio: otelTracesSamplerArg,
- parentbased_always_on: '1.0',
- parentbased_always_off: '0.0',
- parentbased_traceidratio: otelTracesSamplerArg,
- }
- return OTEL_TRACES_SAMPLER_MAPPING[otelTracesSampler]
-}
-
/**
- * Validate the type of an environment variable
- * @param {string} envVar - The name of the environment variable
- * @param {string} [value] - The value of the environment variable
- * @returns {boolean} - True if the value is valid, false otherwise
+ * @param {Config} config
+ * @param {ConfigKey} envVar
*/
-function isInvalidOtelEnvironmentVariable (envVar, value) {
- // Skip validation if the value is undefined (it was not set as environment variable)
- if (value === undefined) return false
-
- switch (envVar) {
- case 'OTEL_LOG_LEVEL':
- return !VALID_LOG_LEVELS.has(value)
- case 'OTEL_PROPAGATORS':
- case 'OTEL_RESOURCE_ATTRIBUTES':
- case 'OTEL_SERVICE_NAME':
- return typeof value !== 'string'
- case 'OTEL_TRACES_SAMPLER':
- return getFromOtelSamplerMap(value, getEnv('OTEL_TRACES_SAMPLER_ARG')) === undefined
- case 'OTEL_TRACES_SAMPLER_ARG':
- return Number.isNaN(Number.parseFloat(value))
- case 'OTEL_SDK_DISABLED':
- return value.toLowerCase() !== 'true' && value.toLowerCase() !== 'false'
- case 'OTEL_TRACES_EXPORTER':
- case 'OTEL_METRICS_EXPORTER':
- case 'OTEL_LOGS_EXPORTER':
- return value.toLowerCase() !== 'none'
- default:
- return true
- }
-}
-
-function checkIfBothOtelAndDdEnvVarSet () {
- for (const [otelEnvVar, ddEnvVar] of OTEL_DD_ENV_MAPPING) {
- const otelValue = getEnv(otelEnvVar)
-
- if (ddEnvVar && getEnv(ddEnvVar) && otelValue) {
- log.warn('both %s and %s environment variables are set', ddEnvVar, otelEnvVar)
- getCounter('otel.env.hiding', ddEnvVar, otelEnvVar).inc()
+function deactivateIfEnabledAndWarnOnWindows (config, envVar) {
+ if (config[envVar]) {
+ const source = trackedConfigOrigins.get(envVar)
+ setAndTrack(config, envVar, false)
+ // TODO: Should we log even for default values?
+ if (source) {
+ log.warn('%s is not supported on Windows. Deactivating. (source: %s)', envVar, source)
}
-
- if (isInvalidOtelEnvironmentVariable(otelEnvVar, otelValue)) {
- log.warn('unexpected value %s for %s environment variable', otelValue, otelEnvVar)
- getCounter('otel.env.invalid', ddEnvVar, otelEnvVar).inc()
- }
- }
-}
-
-function maybeFile (filepath) {
- if (!filepath) return
- try {
- return fs.readFileSync(filepath, 'utf8')
- } catch (e) {
- log.error('Error reading file %s', filepath, e)
- }
-}
-
-function maybeJsonFile (filepath) {
- const file = maybeFile(filepath)
- if (!file) return
- try {
- return JSON.parse(file)
- } catch (e) {
- log.error('Error parsing JSON file %s', filepath, e)
- }
-}
-
-function safeJsonParse (input) {
- try {
- return JSON.parse(input)
- } catch {}
-}
-
-function validateNamingVersion (versionString) {
- if (!versionString) {
- return DEFAULT_NAMING_VERSION
- }
- if (!NAMING_VERSIONS.has(versionString)) {
- log.warn('Unexpected input for config.spanAttributeSchema, picked default', DEFAULT_NAMING_VERSION)
- return DEFAULT_NAMING_VERSION
}
- return versionString
}
-/**
- * Given a string of comma-separated paths, return the array of paths.
- * If a blank path is provided a null is returned to signal that the feature is disabled.
- * An empty array means the feature is enabled but that no rules need to be applied.
- *
- * @param {string | string[]} input
- */
-function splitJSONPathRules (input) {
- if (!input || input === '$') return
- if (Array.isArray(input)) return input
- if (input === 'all') return []
- return input.split(',')
-}
-
-// Shallow clone with property name remapping
-function remapify (input, mappings) {
- if (!input) return
- const output = {}
- for (const [key, value] of Object.entries(input)) {
- output[key in mappings ? mappings[key] : key] = value
- }
- return output
-}
-
-/**
- * Normalizes propagation style values to a lowercase array.
- * Handles both string (comma-separated) and array inputs.
- */
-function normalizePropagationStyle (value) {
- if (Array.isArray(value)) {
- return value.map(v => v.toLowerCase())
- }
- if (typeof value === 'string') {
- return value.split(',')
- .filter(v => v !== '')
- .map(v => v.trim().toLowerCase())
- }
- if (value !== undefined) {
- log.warn('Unexpected input for config.tracePropagationStyle')
+function increaseCounter (event, ddVar, otelVar) {
+ const tags = []
+ if (ddVar) {
+ tags.push(`config_datadog:${ddVar.toLowerCase()}`)
}
+ tags.push(`config_opentelemetry:${otelVar.toLowerCase()}`)
+ tracerMetrics.count(event, tags).inc()
}
-/**
- * Warns if both DD_TRACE_PROPAGATION_STYLE and specific inject/extract vars are set.
- */
-function warnIfPropagationStyleConflict (general, inject, extract) {
- if (general && (inject || extract)) {
- log.warn(
- // eslint-disable-next-line @stylistic/max-len
- 'Use either the DD_TRACE_PROPAGATION_STYLE environment variable or separate DD_TRACE_PROPAGATION_STYLE_INJECT and DD_TRACE_PROPAGATION_STYLE_EXTRACT environment variables'
- )
+function getFromOtelSamplerMap (otelTracesSampler, otelTracesSamplerArg) {
+ const OTEL_TRACES_SAMPLER_MAPPING = {
+ always_on: 1,
+ always_off: 0,
+ parentbased_always_on: 1,
+ parentbased_always_off: 0,
}
-}
-
-function reformatSpanSamplingRules (rules) {
- if (!rules) return rules
- return rules.map(rule => {
- return remapify(rule, {
- sample_rate: 'sampleRate',
- max_per_second: 'maxPerSecond',
- })
- })
-}
-function getDefaultPropagationStyle (options) {
- // TODO: Remove the experimental env vars as a major?
- const DD_TRACE_B3_ENABLED = options.experimental?.b3 ??
- getEnv('DD_TRACE_EXPERIMENTAL_B3_ENABLED')
- const defaultPropagationStyle = ['datadog', 'tracecontext']
- if (isTrue(DD_TRACE_B3_ENABLED)) {
- defaultPropagationStyle.push('b3', 'b3 single header')
+ const result = OTEL_TRACES_SAMPLER_MAPPING[otelTracesSampler] ?? otelTracesSamplerArg
+ if (result === undefined) {
+ increaseCounter('otel.env.invalid', 'DD_TRACE_SAMPLE_RATE', 'OTEL_TRACES_SAMPLER')
}
- return defaultPropagationStyle
+ return result
}
-function isCiVisibilityItrEnabled () {
- return getEnv('DD_CIVISIBILITY_ITR_ENABLED') ?? true
-}
-
-function reformatTagsFromRC (samplingRules) {
- for (const rule of (samplingRules || [])) {
- if (rule.tags) {
- const reformattedTags = {}
- for (const tag of rule.tags) {
- reformattedTags[tag.key] = tag.value_glob
+function warnWrongOtelSettings () {
+  // This mostly works only for non-aliased environment variables.
+ // TODO: Adjust this to work across all sources.
+ for (const [otelEnvVar, ddEnvVar, key] of [
+ // eslint-disable-next-line eslint-rules/eslint-env-aliases
+ ['OTEL_LOG_LEVEL', 'DD_TRACE_LOG_LEVEL', 'logLevel'],
+ // eslint-disable-next-line eslint-rules/eslint-env-aliases
+ ['OTEL_PROPAGATORS', 'DD_TRACE_PROPAGATION_STYLE'],
+ // eslint-disable-next-line eslint-rules/eslint-env-aliases
+ ['OTEL_SERVICE_NAME', 'DD_SERVICE', 'service'],
+ ['OTEL_TRACES_SAMPLER', 'DD_TRACE_SAMPLE_RATE'],
+ ['OTEL_TRACES_SAMPLER_ARG', 'DD_TRACE_SAMPLE_RATE'],
+ ['OTEL_TRACES_EXPORTER', 'DD_TRACE_ENABLED'],
+ ['OTEL_METRICS_EXPORTER', 'DD_RUNTIME_METRICS_ENABLED'],
+ ['OTEL_RESOURCE_ATTRIBUTES', 'DD_TAGS'],
+ ['OTEL_SDK_DISABLED', 'DD_TRACE_OTEL_ENABLED'],
+ ['OTEL_LOGS_EXPORTER'],
+ ]) {
+ // eslint-disable-next-line eslint-rules/eslint-process-env
+ const envs = process.env
+ const otelSource = trackedConfigOrigins.get(/** @type {ConfigPath} */ (key ?? otelEnvVar))
+ const otelEnvValue = envs[otelEnvVar]
+ if (otelEnvValue) {
+ if (envs[ddEnvVar]) {
+ log.warn('Conflicting %s and %s environment variables are set for %s', ddEnvVar, otelEnvVar, otelSource)
+ increaseCounter('otel.env.hiding', ddEnvVar, otelEnvVar)
}
- rule.tags = reformattedTags
- }
- }
- return samplingRules
-}
-
-function setBoolean (obj, name, value) {
- if (value === undefined || value === null) {
- obj[name] = value
- } else if (isTrue(value)) {
- obj[name] = true
- } else if (isFalse(value)) {
- obj[name] = false
- }
-}
-
-function setUnit (obj, name, value) {
- if (value === null || value === undefined) {
- obj[name] = value
- return
- }
-
- value = Number.parseFloat(value)
-
- if (!Number.isNaN(value)) {
- // TODO: Ignore out of range values instead of normalizing them.
- obj[name] = Math.min(Math.max(value, 0), 1)
- }
-}
-
-function setArray (obj, name, value) {
- if (value == null) {
- obj[name] = null
- return
- }
-
- if (typeof value === 'string') {
- value = value.split(',').map(item => {
- // Trim each item and remove whitespace around the colon
- const [key, val] = item.split(':').map(part => part.trim())
- return val === undefined ? key : `${key}:${val}`
- })
- }
- if (Array.isArray(value)) {
- obj[name] = value
- }
-}
-
-function setIntegerRangeSet (obj, name, value) {
- if (value == null) {
- obj[name] = null
- return
- }
- value = value.split(',')
- const result = []
-
- for (const val of value) {
- if (val.includes('-')) {
- const [start, end] = val.split('-').map(Number)
- for (let i = start; i <= end; i++) {
- result.push(i)
+ // eslint-disable-next-line eslint-rules/eslint-env-aliases
+ const invalidOtelValue = otelEnvVar === 'OTEL_PROPAGATORS'
+ ? trackedConfigOrigins.get(/** @type {ConfigPath} */ ('tracePropagationStyle.inject')) !== otelSource &&
+ !envs[ddEnvVar]
+ : !otelSource
+ if (invalidOtelValue) {
+ increaseCounter('otel.env.invalid', ddEnvVar, otelEnvVar)
}
- } else {
- result.push(Number(val))
}
}
- obj[name] = result
-}
-
-function setSamplingRule (obj, name, value) {
- if (value == null) {
- obj[name] = null
- return
- }
-
- if (typeof value === 'string') {
- value = value.split(',')
- }
-
- if (Array.isArray(value)) {
- value = value.map(rule => {
- return remapify(rule, {
- sample_rate: 'sampleRate',
- })
- })
- obj[name] = value
- }
-}
-
-function setString (obj, name, value) {
- obj[name] = value ? String(value) : undefined // unset for empty strings
-}
-
-function setTags (obj, name, value) {
- if (!value || Object.keys(value).length === 0) {
- obj[name] = null
- return
- }
-
- obj[name] = value
-}
-
-function handleOtel (tagString) {
- return tagString
- ?.replace(/(^|,)deployment\.environment=/, '$1env:')
- .replace(/(^|,)service\.name=/, '$1service:')
- .replace(/(^|,)service\.version=/, '$1version:')
- .replaceAll('=', ':')
-}
-
-function parseSpaceSeparatedTags (tagString) {
- if (tagString && !tagString.includes(',')) {
- tagString = tagString.replaceAll(/\s+/g, ',')
- }
- return tagString
-}
-
-function maybeInt (number) {
- const parsed = Number.parseInt(number)
- return Number.isNaN(parsed) ? undefined : parsed
-}
-
-function maybeFloat (number) {
- const parsed = Number.parseFloat(number)
- return Number.isNaN(parsed) ? undefined : parsed
-}
-
-function nonNegInt (value, envVarName, allowZero = true) {
- if (value === undefined) return
- const parsed = Number.parseInt(value)
- if (Number.isNaN(parsed) || parsed < 0 || (parsed === 0 && !allowZero)) {
- log.warn('Invalid value %d for %s. Using default value.', parsed, envVarName)
- return
- }
- return parsed
-}
-
-function getAgentUrl (url, options) {
- if (url) return new URL(url)
-
- if (os.type() === 'Windows_NT') return
-
- if (
- !options.hostname &&
- !options.port &&
- !getEnv('DD_AGENT_HOST') &&
- !getEnv('DD_TRACE_AGENT_PORT') &&
- !isTrue(getEnv('DD_CIVISIBILITY_AGENTLESS_ENABLED')) &&
- fs.existsSync('/var/run/datadog/apm.socket')
- ) {
- return new URL('unix:///var/run/datadog/apm.socket')
- }
}
+/**
+ * @param {TracerOptions} [options]
+ */
function getConfig (options) {
if (!configInstance) {
configInstance = new Config(options)
diff --git a/packages/dd-trace/src/config/parsers.js b/packages/dd-trace/src/config/parsers.js
new file mode 100644
index 00000000000..7ddd29b24a3
--- /dev/null
+++ b/packages/dd-trace/src/config/parsers.js
@@ -0,0 +1,256 @@
+'use strict'
+
+const fs = require('fs')
+
+const tagger = require('../tagger')
+
+let warnInvalidValue
+function setWarnInvalidValue (fn) {
+ warnInvalidValue = fn
+}
+
+const VALID_PROPAGATION_STYLES = new Set([
+ 'datadog', 'tracecontext', 'b3', 'b3 single header', 'b3multi', 'baggage', 'none',
+])
+
+function toCase (value, methodName) {
+ if (Array.isArray(value)) {
+ return value.map(item => {
+ return transformers[methodName](item)
+ })
+ }
+ return value[methodName]()
+}
+
+const transformers = {
+ setGRPCRange (value) {
+ if (value == null) {
+ return
+ }
+ value = value.split(',')
+ const result = []
+
+ for (const val of value) {
+ const dashIndex = val.indexOf('-')
+ if (dashIndex === -1) {
+ result.push(Number(val))
+ } else {
+ const start = Number(val.slice(0, dashIndex))
+ const end = Number(val.slice(dashIndex + 1))
+ for (let i = start; i <= end; i++) {
+ result.push(i)
+ }
+ }
+ }
+ return result
+ },
+ toLowerCase (value) {
+ return toCase(value, 'toLowerCase')
+ },
+ toUpperCase (value) {
+ return toCase(value, 'toUpperCase')
+ },
+ toCamelCase (value) {
+ if (Array.isArray(value)) {
+ return value.map(item => {
+ return transformers.toCamelCase(item)
+ })
+ }
+ if (typeof value === 'object' && value !== null) {
+ const result = {}
+ for (const [key, innerValue] of Object.entries(value)) {
+ const camelCaseKey = key.replaceAll(/_(\w)/g, (_, letter) => letter.toUpperCase())
+ result[camelCaseKey] = transformers.toCamelCase(innerValue)
+ }
+ return result
+ }
+ return value
+ },
+ parseOtelTags (value, optionName) {
+ return parsers.MAP(value
+ ?.replace(/(^|,)deployment\.environment=/, '$1env:')
+ .replace(/(^|,)service\.name=/, '$1service:')
+ .replace(/(^|,)service\.version=/, '$1version:')
+ .replaceAll('=', ':'), optionName)
+ },
+ normalizeProfilingEnabled (configValue) {
+ if (configValue == null) {
+ return
+ }
+ if (configValue === 'true' || configValue === '1') {
+ return 'true'
+ }
+ if (configValue === 'false' || configValue === '0') {
+ return 'false'
+ }
+ const lowercased = String(configValue).toLowerCase()
+ if (lowercased !== configValue) {
+ return transformers.normalizeProfilingEnabled(lowercased)
+ }
+ return configValue
+ },
+ sampleRate (value, optionName, source) {
+ const number = Number(value)
+ if (Number.isNaN(number) || value === null) {
+ warnInvalidValue(value, optionName, source, 'Sample rate invalid')
+ return
+ }
+ const clamped = Math.min(Math.max(number, 0), 1)
+ if (clamped !== number) {
+ warnInvalidValue(value, optionName, source, 'Sample rate out of range between 0 and 1')
+ return clamped
+ }
+ return number
+ },
+ readFilePath (raw, optionName, source) {
+ const { stackTraceLimit } = Error
+ Error.stackTraceLimit = 0
+ try {
+ return fs.readFileSync(raw, 'utf8')
+ } catch (error) {
+ warnInvalidValue(raw, optionName, source, 'Error reading path', error)
+ } finally {
+ Error.stackTraceLimit = stackTraceLimit
+ }
+ },
+ /**
+ * Given a string of comma-separated paths, return the array of paths.
+ * If a blank path is provided a null is returned to signal that the feature is disabled.
+ * An empty array means the feature is enabled but that no rules need to be applied.
+ *
+ * @param {string | string[]} input
+ */
+ splitJSONPathRules (input) {
+ if (!input || input === '$') return
+ if (Array.isArray(input)) return input
+ if (input === 'all') return []
+ return input.split(',')
+ },
+ stripColonWhitespace (value) {
+ if (Array.isArray(value)) {
+ return value.map(item => {
+ return transformers.stripColonWhitespace(item)
+ })
+ }
+ return value.replaceAll(/\s*:\s*/g, ':')
+ },
+ validatePropagationStyles (value, optionName) {
+ value = transformers.toLowerCase(value)
+ for (const propagator of value) {
+ if (!VALID_PROPAGATION_STYLES.has(propagator)) {
+ warnInvalidValue(propagator, optionName, optionName, 'Invalid propagator')
+ return
+ }
+ }
+ return value
+ },
+}
+
+const telemetryTransformers = {
+ JSON (object) {
+ return (typeof object !== 'object' || object === null) ? object : JSON.stringify(object)
+ },
+ MAP (object) {
+ if (typeof object !== 'object' || object === null) {
+ return object
+ }
+ let result = ''
+ for (const [key, value] of Object.entries(object)) {
+ result += `${key}:${value},`
+ }
+ return result.slice(0, -1)
+ },
+ ARRAY (array) {
+ return Array.isArray(array) ? array.join(',') : array
+ },
+}
+
+const parsers = {
+ BOOLEAN (raw) {
+ if (raw === 'true' || raw === '1') {
+ return true
+ }
+ if (raw === 'false' || raw === '0') {
+ return false
+ }
+ const lowercased = raw.toLowerCase()
+ if (lowercased !== raw) {
+ return parsers.BOOLEAN(lowercased)
+ }
+ },
+ INT (raw) {
+ const parsed = Math.trunc(raw)
+ if (Number.isNaN(parsed)) {
+ return
+ }
+ return parsed
+ },
+ DECIMAL (raw) {
+ const parsed = Number(raw)
+ if (Number.isNaN(parsed)) {
+ return
+ }
+ return parsed
+ },
+ ARRAY (raw) {
+ // TODO: Make the parsing a helper that is reused everywhere.
+ const result = []
+ if (!raw) {
+ return result
+ }
+ let valueStart = 0
+ for (let i = 0; i < raw.length; i++) {
+ const char = raw[i]
+ if (char === ',') {
+ const value = raw.slice(valueStart, i).trim()
+ // Auto filter empty entries.
+ if (value.length > 0) {
+ result.push(value)
+ }
+ valueStart = i + 1
+ }
+ }
+ if (valueStart < raw.length) {
+ const value = raw.slice(valueStart).trim()
+ // Auto filter empty entries.
+ if (value.length > 0) {
+ result.push(value)
+ }
+ }
+ return result
+ },
+ MAP (raw, optionName) {
+ /** @type {Record} */
+ const entries = {}
+ if (!raw) {
+ return entries
+ }
+ // DD_TAGS is a special case. It may be a map of key-value pairs separated by spaces.
+ if (optionName === 'DD_TAGS' && !raw.includes(',')) {
+ raw = raw.replaceAll(/\s+/g, ',')
+ }
+ tagger.add(entries, raw)
+ return entries
+ },
+ JSON (raw) {
+ const { stackTraceLimit } = Error
+ Error.stackTraceLimit = 0
+ try {
+ return JSON.parse(raw)
+ } catch {
+ // ignore
+ } finally {
+ Error.stackTraceLimit = stackTraceLimit
+ }
+ },
+ STRING (raw) {
+ return raw
+ },
+}
+
+module.exports = {
+ parsers,
+ transformers,
+ telemetryTransformers,
+ setWarnInvalidValue,
+}
diff --git a/packages/dd-trace/src/config/remote_config.js b/packages/dd-trace/src/config/remote_config.js
index 54f4b3067b4..f981dd37fef 100644
--- a/packages/dd-trace/src/config/remote_config.js
+++ b/packages/dd-trace/src/config/remote_config.js
@@ -2,6 +2,7 @@
const RemoteConfigCapabilities = require('../remote_config/capabilities')
const log = require('../log')
+const tagger = require('../tagger')
module.exports = {
enable,
@@ -194,10 +195,66 @@ function enable (rc, config, onConfigUpdated) {
transaction.ack(item.path)
}
- // Get merged config and apply it
- const mergedLibConfig = rcClientLibConfigManager.getMergedLibConfig()
+ /** @type {import('../config').TracerOptions|null|RemoteConfigOptions} */
+ let mergedLibConfig = rcClientLibConfigManager.getMergedLibConfig()
+
+ if (mergedLibConfig) {
+ mergedLibConfig = transformRemoteConfigToLocalOption(mergedLibConfig)
+ }
+
config.setRemoteConfig(mergedLibConfig)
onConfigUpdated()
})
}
+
+/**
+ * @param {RemoteConfigOptions} libConfig
+ * @returns {import('../config').TracerOptions}
+ */
+function transformRemoteConfigToLocalOption (libConfig) {
+ const normalizedConfig = {}
+ for (const [name, value] of Object.entries(libConfig)) {
+ if (value !== null) {
+ normalizedConfig[optionLookupTable[name] ?? name] = transformers[name]?.(value) ?? value
+ }
+ }
+ return normalizedConfig
+}
+
+// This is an intermediate solution until remote config is reworked to handle all known entries with proper names
+const optionLookupTable = {
+ dynamic_instrumentation_enabled: 'dynamicInstrumentation.enabled',
+ code_origin_enabled: 'codeOriginForSpans.enabled',
+ tracing_sampling_rate: 'sampleRate',
+ log_injection_enabled: 'logInjection',
+ tracing_enabled: 'tracing',
+ tracing_sampling_rules: 'samplingRules',
+ tracing_header_tags: 'headerTags',
+ tracing_tags: 'tags',
+}
+
+const transformers = {
+ tracing_sampling_rules (samplingRules) {
+ for (const rule of (samplingRules || [])) {
+ if (rule.tags) {
+ const reformattedTags = {}
+ for (const tag of rule.tags) {
+ reformattedTags[tag.key] = tag.value_glob
+ }
+ rule.tags = reformattedTags
+ }
+ }
+ return samplingRules
+ },
+ tracing_header_tags (headerTags) {
+ return headerTags?.map(tag => {
+ return tag.tag_name ? `${tag.header}:${tag.tag_name}` : tag.header
+ })
+ },
+ tracing_tags (tags) {
+ const normalizedTags = {}
+ tagger.add(normalizedTags, tags)
+ return normalizedTags
+ },
+}
diff --git a/packages/dd-trace/src/config/supported-configurations.json b/packages/dd-trace/src/config/supported-configurations.json
index d99dc7bd585..63131eb0a37 100644
--- a/packages/dd-trace/src/config/supported-configurations.json
+++ b/packages/dd-trace/src/config/supported-configurations.json
@@ -13,9 +13,7 @@
"implementation": "A",
"type": "boolean",
"default": "false",
- "configurationNames": [
- "ciVisAgentlessLogSubmissionEnabled"
- ]
+ "internalPropertyName": "ciVisAgentlessLogSubmissionEnabled"
}
],
"DD_AGENTLESS_LOG_SUBMISSION_URL": [
@@ -114,9 +112,7 @@
"aliases": [
"DATADOG_API_KEY"
],
- "configurationNames": [
- "apiKey"
- ]
+ "internalPropertyName": "apiKey"
}
],
"DD_API_SECURITY_ENABLED": [
@@ -124,7 +120,8 @@
"implementation": "A",
"type": "boolean",
"configurationNames": [
- "appsec.apiSecurity.enabled"
+ "appsec.apiSecurity.enabled",
+ "experimental.appsec.apiSecurity.enabled"
],
"default": "true",
"aliases": [
@@ -137,7 +134,8 @@
"implementation": "A",
"type": "boolean",
"configurationNames": [
- "appsec.apiSecurity.endpointCollectionEnabled"
+ "appsec.apiSecurity.endpointCollectionEnabled",
+ "experimental.appsec.apiSecurity.endpointCollectionEnabled"
],
"default": "true"
}
@@ -147,7 +145,8 @@
"implementation": "A",
"type": "int",
"configurationNames": [
- "appsec.apiSecurity.endpointCollectionMessageLimit"
+ "appsec.apiSecurity.endpointCollectionMessageLimit",
+ "experimental.appsec.apiSecurity.endpointCollectionMessageLimit"
],
"default": "300"
}
@@ -156,9 +155,7 @@
{
"implementation": "A",
"type": "decimal",
- "configurationNames": [
- "appsec.apiSecurity.downstreamBodyAnalysisSampleRate"
- ],
+ "internalPropertyName": "appsec.apiSecurity.downstreamBodyAnalysisSampleRate",
"default": "0.5"
}
],
@@ -166,9 +163,7 @@
{
"implementation": "A",
"type": "int",
- "configurationNames": [
- "appsec.apiSecurity.maxDownstreamRequestBodyAnalysis"
- ],
+ "internalPropertyName": "appsec.apiSecurity.maxDownstreamRequestBodyAnalysis",
"default": "1"
}
],
@@ -177,9 +172,7 @@
"implementation": "A",
"type": "decimal",
"default": "30",
- "configurationNames": [
- "appsec.apiSecurity.sampleDelay"
- ]
+ "internalPropertyName": "appsec.apiSecurity.sampleDelay"
}
],
"DD_APM_FLUSH_DEADLINE_MILLISECONDS": [
@@ -204,7 +197,8 @@
"implementation": "E",
"type": "string",
"configurationNames": [
- "appsec.eventTracking.mode"
+ "appsec.eventTracking.mode",
+ "experimental.appsec.eventTracking.mode"
],
"default": "identification",
"aliases": [
@@ -217,7 +211,8 @@
"implementation": "A",
"type": "boolean",
"configurationNames": [
- "appsec.extendedHeadersCollection.enabled"
+ "appsec.extendedHeadersCollection.enabled",
+ "experimental.appsec.extendedHeadersCollection.enabled"
],
"default": "false",
"deprecated": true
@@ -228,7 +223,10 @@
"implementation": "C",
"type": "boolean",
"configurationNames": [
- "appsec.enabled"
+ "appsec.enabled",
+ "appsec",
+ "experimental.appsec.enabled",
+ "experimental.appsec"
],
"default": null
}
@@ -238,9 +236,11 @@
"implementation": "A",
"type": "string",
"configurationNames": [
- "appsec.blockedTemplateGraphql"
+ "appsec.blockedTemplateGraphql",
+ "experimental.appsec.blockedTemplateGraphql"
],
- "default": null
+ "default": null,
+ "transform": "readFilePath"
}
],
"DD_APPSEC_HEADER_COLLECTION_REDACTION_ENABLED": [
@@ -248,7 +248,8 @@
"implementation": "A",
"type": "boolean",
"configurationNames": [
- "appsec.extendedHeadersCollection.redaction"
+ "appsec.extendedHeadersCollection.redaction",
+ "experimental.appsec.extendedHeadersCollection.redaction"
],
"default": "true"
}
@@ -258,9 +259,11 @@
"implementation": "B",
"type": "string",
"configurationNames": [
- "appsec.blockedTemplateHtml"
+ "appsec.blockedTemplateHtml",
+ "experimental.appsec.blockedTemplateHtml"
],
- "default": null
+ "default": null,
+ "transform": "readFilePath"
}
],
"DD_APPSEC_HTTP_BLOCKED_TEMPLATE_JSON": [
@@ -268,9 +271,11 @@
"implementation": "B",
"type": "string",
"configurationNames": [
- "appsec.blockedTemplateJson"
+ "appsec.blockedTemplateJson",
+ "experimental.appsec.blockedTemplateJson"
],
- "default": null
+ "default": null,
+ "transform": "readFilePath"
}
],
"DD_APPSEC_MAX_COLLECTED_HEADERS": [
@@ -278,7 +283,8 @@
"implementation": "A",
"type": "int",
"configurationNames": [
- "appsec.extendedHeadersCollection.maxHeaders"
+ "appsec.extendedHeadersCollection.maxHeaders",
+ "experimental.appsec.extendedHeadersCollection.maxHeaders"
],
"default": "50"
}
@@ -288,7 +294,11 @@
"implementation": "A",
"type": "int",
"configurationNames": [
- "appsec.stackTrace.maxStackTraces"
+ "appsec.stackTrace.maxStackTraces",
+ "experimental.appsec.stackTrace.maxStackTraces"
+ ],
+ "aliases": [
+ "DD_APPSEC_MAX_STACKTRACES"
],
"default": "2"
}
@@ -298,7 +308,11 @@
"implementation": "A",
"type": "int",
"configurationNames": [
- "appsec.stackTrace.maxDepth"
+ "appsec.stackTrace.maxDepth",
+ "experimental.appsec.stackTrace.maxDepth"
+ ],
+ "aliases": [
+ "DD_APPSEC_MAX_STACKTRACE_DEPTH"
],
"default": "32"
}
@@ -308,7 +322,8 @@
"implementation": "B",
"type": "string",
"configurationNames": [
- "appsec.obfuscatorKeyRegex"
+ "appsec.obfuscatorKeyRegex",
+ "experimental.appsec.obfuscatorKeyRegex"
],
"default": "(?i)pass|pw(?:or)?d|secret|(?:api|private|public|access)[_-]?key|token|consumer[_-]?(?:id|key|secret)|sign(?:ed|ature)|bearer|authorization|jsessionid|phpsessid|asp\\.net[_-]sessionid|sid|jwt"
}
@@ -318,7 +333,8 @@
"implementation": "G",
"type": "string",
"configurationNames": [
- "appsec.obfuscatorValueRegex"
+ "appsec.obfuscatorValueRegex",
+ "experimental.appsec.obfuscatorValueRegex"
],
"default": "(?i)(?:p(?:ass)?w(?:or)?d|pass(?:[_-]?phrase)?|secret(?:[_-]?key)?|(?:(?:api|private|public|access)[_-]?)key(?:[_-]?id)?|(?:(?:auth|access|id|refresh)[_-]?)?token|consumer[_-]?(?:id|key|secret)|sign(?:ed|ature)?|auth(?:entication|orization)?|jsessionid|phpsessid|asp\\.net(?:[_-]|-)sessionid|sid|jwt)(?:\\s*=([^;&]+)|\"\\s*:\\s*(\"[^\"]+\"|\\d+))|bearer\\s+([a-z0-9\\._\\-]+)|token\\s*:\\s*([a-z0-9]{13})|gh[opsu]_([0-9a-zA-Z]{36})|ey[I-L][\\w=-]+\\.(ey[I-L][\\w=-]+(?:\\.[\\w.+\\/=-]+)?)|[\\-]{5}BEGIN[a-z\\s]+PRIVATE\\sKEY[\\-]{5}([^\\-]+)[\\-]{5}END[a-z\\s]+PRIVATE\\sKEY|ssh-rsa\\s*([a-z0-9\\/\\.+]{100,})"
}
@@ -328,7 +344,8 @@
"implementation": "A",
"type": "boolean",
"configurationNames": [
- "appsec.rasp.bodyCollection"
+ "appsec.rasp.bodyCollection",
+ "experimental.appsec.rasp.bodyCollection"
],
"default": "false",
"deprecated": true
@@ -339,7 +356,8 @@
"implementation": "A",
"type": "boolean",
"configurationNames": [
- "appsec.rasp.enabled"
+ "appsec.rasp.enabled",
+ "experimental.appsec.rasp.enabled"
],
"default": "true"
}
@@ -349,7 +367,8 @@
"implementation": "B",
"type": "string",
"configurationNames": [
- "appsec.rules"
+ "appsec.rules",
+ "experimental.appsec.rules"
],
"default": null
}
@@ -359,9 +378,7 @@
"implementation": "B",
"type": "boolean",
"default": null,
- "configurationNames": [
- "appsec.sca.enabled"
- ]
+ "internalPropertyName": "appsec.sca.enabled"
}
],
"DD_APPSEC_STACK_TRACE_ENABLED": [
@@ -369,7 +386,8 @@
"implementation": "A",
"type": "boolean",
"configurationNames": [
- "appsec.stackTrace.enabled"
+ "appsec.stackTrace.enabled",
+ "experimental.appsec.stackTrace.enabled"
],
"default": "true"
}
@@ -379,7 +397,8 @@
"implementation": "A",
"type": "int",
"configurationNames": [
- "appsec.rateLimit"
+ "appsec.rateLimit",
+ "experimental.appsec.rateLimit"
],
"default": "100"
}
@@ -389,7 +408,8 @@
"implementation": "E",
"type": "int",
"configurationNames": [
- "appsec.wafTimeout"
+ "appsec.wafTimeout",
+ "experimental.appsec.wafTimeout"
],
"default": "5000"
}
@@ -399,9 +419,7 @@
"implementation": "A",
"type": "string",
"default": null,
- "configurationNames": [
- "appKey"
- ]
+ "internalPropertyName": "appKey"
}
],
"DD_AZURE_RESOURCE_GROUP": [
@@ -451,9 +469,7 @@
"implementation": "B",
"type": "boolean",
"default": "true",
- "configurationNames": [
- "isEarlyFlakeDetectionEnabled"
- ]
+ "internalPropertyName": "isEarlyFlakeDetectionEnabled"
}
],
"DD_CIVISIBILITY_ENABLED": [
@@ -468,9 +484,7 @@
"implementation": "A",
"type": "int",
"default": "5",
- "configurationNames": [
- "flakyTestRetriesCount"
- ]
+ "internalPropertyName": "flakyTestRetriesCount"
}
],
"DD_CIVISIBILITY_FLAKY_RETRY_ENABLED": [
@@ -478,9 +492,7 @@
"implementation": "A",
"type": "boolean",
"default": "true",
- "configurationNames": [
- "isFlakyTestRetriesEnabled"
- ]
+ "internalPropertyName": "isFlakyTestRetriesEnabled"
}
],
"DD_CIVISIBILITY_GIT_UNSHALLOW_ENABLED": [
@@ -495,9 +507,7 @@
"implementation": "A",
"type": "boolean",
"default": "true",
- "configurationNames": [
- "isGitUploadEnabled"
- ]
+ "internalPropertyName": "isGitUploadEnabled"
}
],
"DD_CIVISIBILITY_IMPACTED_TESTS_DETECTION_ENABLED": [
@@ -505,9 +515,7 @@
"implementation": "A",
"type": "boolean",
"default": "true",
- "configurationNames": [
- "isImpactedTestsEnabled"
- ]
+ "internalPropertyName": "isImpactedTestsEnabled"
}
],
"DD_CIVISIBILITY_ITR_ENABLED": [
@@ -515,9 +523,7 @@
"implementation": "A",
"type": "boolean",
"default": "true",
- "configurationNames": [
- "isIntelligentTestRunnerEnabled"
- ]
+ "internalPropertyName": "isIntelligentTestRunnerEnabled"
}
],
"DD_ENABLE_LAGE_PACKAGE_NAME": [
@@ -532,9 +538,7 @@
"implementation": "A",
"type": "boolean",
"default": "true",
- "configurationNames": [
- "isManualApiEnabled"
- ]
+ "internalPropertyName": "isManualApiEnabled"
}
],
"DD_CIVISIBILITY_RUM_FLUSH_WAIT_MILLIS": [
@@ -590,9 +594,7 @@
"implementation": "A",
"type": "boolean",
"default": "true",
- "configurationNames": [
- "crashtracking.enabled"
- ]
+ "internalPropertyName": "crashtracking.enabled"
}
],
"DD_CUSTOM_TRACE_ID": [
@@ -707,19 +709,19 @@
],
"DD_DYNAMIC_INSTRUMENTATION_UPLOAD_INTERVAL_SECONDS": [
{
- "implementation": "A",
- "type": "int",
+ "implementation": "C",
+ "type": "decimal",
"configurationNames": [
"dynamicInstrumentation.uploadIntervalSeconds"
],
- "default": "1"
+ "default": "1.0"
}
],
"DD_ENABLE_NX_SERVICE_NAME": [
{
- "implementation": "A",
- "type": "string",
- "default": null
+ "implementation": "B",
+ "type": "boolean",
+ "default": "false"
}
],
"DD_ENV": [
@@ -734,9 +736,12 @@
],
"DD_EXPERIMENTAL_APPSEC_STANDALONE_ENABLED": [
{
- "implementation": "A",
+ "implementation": "B",
"type": "boolean",
- "default": "true"
+ "default": "false",
+ "configurationNames": [
+ "experimental.appsec.standalone.enabled"
+ ]
}
],
"DD_EXPERIMENTAL_FLAGGING_PROVIDER_INITIALIZATION_TIMEOUT_MS": [
@@ -764,9 +769,7 @@
"implementation": "B",
"type": "boolean",
"default": "true",
- "configurationNames": [
- "propagateProcessTags.enabled"
- ]
+ "internalPropertyName": "propagateProcessTags.enabled"
}
],
"DD_EXPERIMENTAL_TEST_OPT_SETTINGS_CACHE": [
@@ -918,23 +921,20 @@
],
"DD_GRPC_CLIENT_ERROR_STATUSES": [
{
- "implementation": "A",
- "type": "array",
- "default": "1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16",
- "configurationNames": [
- "grpc.client.error.statuses"
- ],
- "handler": "GRPC_HANDLER"
+ "implementation": "C",
+ "type": "string",
+ "default": "1-16",
+ "internalPropertyName": "grpc.client.error.statuses",
+ "transform": "setGRPCRange"
}
],
"DD_GRPC_SERVER_ERROR_STATUSES": [
{
- "implementation": "A",
- "type": "array",
- "default": "2,3,4,5,6,7,8,9,10,11,12,13,14,15,16",
- "configurationNames": [
- "grpc.server.error.statuses"
- ]
+ "implementation": "C",
+ "type": "string",
+ "default": "2-16",
+ "internalPropertyName": "grpc.server.error.statuses",
+ "transform": "setGRPCRange"
}
],
"DD_HEAP_SNAPSHOT_COUNT": [
@@ -942,9 +942,7 @@
"implementation": "A",
"type": "int",
"default": "0",
- "configurationNames": [
- "heapSnapshot.count"
- ]
+ "internalPropertyName": "heapSnapshot.count"
}
],
"DD_HEAP_SNAPSHOT_DESTINATION": [
@@ -952,9 +950,7 @@
"implementation": "A",
"type": "string",
"default": "",
- "configurationNames": [
- "heapSnapshot.destination"
- ]
+ "internalPropertyName": "heapSnapshot.destination"
}
],
"DD_HEAP_SNAPSHOT_INTERVAL": [
@@ -962,9 +958,7 @@
"implementation": "A",
"type": "int",
"default": "3600",
- "configurationNames": [
- "heapSnapshot.interval"
- ]
+ "internalPropertyName": "heapSnapshot.interval"
}
],
"DD_IAST_DB_ROWS_TO_TAINT": [
@@ -972,7 +966,8 @@
"implementation": "A",
"type": "int",
"configurationNames": [
- "iast.dbRowsToTaint"
+ "iast.dbRowsToTaint",
+ "experimental.iast.dbRowsToTaint"
],
"default": "1"
}
@@ -982,7 +977,8 @@
"implementation": "A",
"type": "boolean",
"configurationNames": [
- "iast.deduplicationEnabled"
+ "iast.deduplicationEnabled",
+ "experimental.iast.deduplicationEnabled"
],
"default": "true"
}
@@ -992,7 +988,10 @@
"implementation": "B",
"type": "boolean",
"configurationNames": [
- "iast.enabled"
+ "iast.enabled",
+ "iast",
+ "experimental.iast.enabled",
+ "experimental.iast"
],
"default": "false"
}
@@ -1002,7 +1001,8 @@
"implementation": "A",
"type": "int",
"configurationNames": [
- "iast.maxConcurrentRequests"
+ "iast.maxConcurrentRequests",
+ "experimental.iast.maxConcurrentRequests"
],
"default": "2"
}
@@ -1012,7 +1012,8 @@
"implementation": "A",
"type": "int",
"configurationNames": [
- "iast.maxContextOperations"
+ "iast.maxContextOperations",
+ "experimental.iast.maxContextOperations"
],
"default": "2"
}
@@ -1022,7 +1023,8 @@
"implementation": "A",
"type": "boolean",
"configurationNames": [
- "iast.redactionEnabled"
+ "iast.redactionEnabled",
+ "experimental.iast.redactionEnabled"
],
"default": "true"
}
@@ -1032,7 +1034,8 @@
"implementation": "A",
"type": "string",
"configurationNames": [
- "iast.redactionNamePattern"
+ "iast.redactionNamePattern",
+ "experimental.iast.redactionNamePattern"
],
"default": "(?:p(?:ass)?w(?:or)?d|pass(?:_?phrase)?|secret|(?:api_?|private_?|public_?|access_?|secret_?)key(?:_?id)?|token|consumer_?(?:id|key|secret)|sign(?:ed|ature)?|auth(?:entication|orization)?|(?:sur|last)name|user(?:name)?|address|e?mail)"
}
@@ -1042,7 +1045,8 @@
"implementation": "A",
"type": "string",
"configurationNames": [
- "iast.redactionValuePattern"
+ "iast.redactionValuePattern",
+ "experimental.iast.redactionValuePattern"
],
"default": "(?:bearer\\s+[a-z0-9\\._\\-]+|glpat-[\\w\\-]{20}|gh[opsu]_[0-9a-zA-Z]{36}|ey[I-L][\\w=\\-]+\\.ey[I-L][\\w=\\-]+(?:\\.[\\w.+/=\\-]+)?|(?:[\\-]{5}BEGIN[a-z\\s]+PRIVATE\\sKEY[\\-]{5}[^\\-]+[\\-]{5}END[a-z\\s]+PRIVATE\\sKEY[\\-]{5}|ssh-rsa\\s*[a-z0-9/\\.+]{100,})|[\\w\\.-]+@[a-zA-Z\\d\\.-]+\\.[a-zA-Z]{2,})"
}
@@ -1052,9 +1056,12 @@
"implementation": "A",
"type": "int",
"configurationNames": [
- "iast.requestSampling"
+ "iast.requestSampling",
+ "experimental.iast.requestSampling"
],
- "default": "30"
+ "default": "30",
+ "allowed": "100|[1-9]?\\d",
+ "transform": "iastRequestSampling"
}
],
"DD_IAST_SECURITY_CONTROLS_CONFIGURATION": [
@@ -1062,7 +1069,8 @@
"implementation": "B",
"type": "string",
"configurationNames": [
- "iast.securityControlsConfiguration"
+ "iast.securityControlsConfiguration",
+ "experimental.iast.securityControlsConfiguration"
],
"default": null
}
@@ -1072,7 +1080,8 @@
"implementation": "B",
"type": "boolean",
"configurationNames": [
- "iast.stackTrace.enabled"
+ "iast.stackTrace.enabled",
+ "experimental.iast.stackTrace.enabled"
],
"default": "true"
}
@@ -1082,19 +1091,18 @@
"implementation": "B",
"type": "string",
"configurationNames": [
- "iast.telemetryVerbosity"
+ "iast.telemetryVerbosity",
+ "experimental.iast.telemetryVerbosity"
],
"default": "INFORMATION"
}
],
"DD_INJECTION_ENABLED": [
{
- "implementation": "A",
- "type": "array",
- "default": "",
- "configurationNames": [
- "injectionEnabled"
- ]
+ "implementation": "C",
+ "type": "string",
+ "default": null,
+ "internalPropertyName": "injectionEnabled"
}
],
"DD_INJECT_FORCE": [
@@ -1102,9 +1110,7 @@
"implementation": "A",
"type": "boolean",
"default": "false",
- "configurationNames": [
- "injectForce"
- ]
+ "internalPropertyName": "injectForce"
}
],
"DD_INSTRUMENTATION_CONFIG_ID": [
@@ -1112,9 +1118,7 @@
"implementation": "A",
"type": "string",
"default": null,
- "configurationNames": [
- "instrumentation_config_id"
- ]
+ "internalPropertyName": "instrumentation_config_id"
}
],
"DD_INSTRUMENTATION_INSTALL_ID": [
@@ -1122,9 +1126,7 @@
"implementation": "A",
"type": "string",
"default": null,
- "configurationNames": [
- "installSignature.id"
- ]
+ "internalPropertyName": "installSignature.id"
}
],
"DD_INSTRUMENTATION_INSTALL_TIME": [
@@ -1132,9 +1134,7 @@
"implementation": "A",
"type": "string",
"default": null,
- "configurationNames": [
- "installSignature.time"
- ]
+ "internalPropertyName": "installSignature.time"
}
],
"DD_INSTRUMENTATION_INSTALL_TYPE": [
@@ -1142,9 +1142,7 @@
"implementation": "A",
"type": "string",
"default": null,
- "configurationNames": [
- "installSignature.type"
- ]
+ "internalPropertyName": "installSignature.type"
}
],
"DD_INSTRUMENTATION_TELEMETRY_ENABLED": [
@@ -1155,9 +1153,7 @@
"aliases": [
"DD_TRACE_TELEMETRY_ENABLED"
],
- "configurationNames": [
- "telemetry.enabled"
- ]
+ "internalPropertyName": "telemetry.enabled"
}
],
"DD_INTERNAL_PROFILING_LONG_LIVED_THRESHOLD": [
@@ -1165,9 +1161,7 @@
"implementation": "A",
"type": "int",
"default": "30000",
- "configurationNames": [
- "profiling.longLivedThreshold"
- ]
+ "internalPropertyName": "profiling.longLivedThreshold"
}
],
"DD_INTERNAL_PROFILING_TIMELINE_SAMPLING_ENABLED": [
@@ -1189,9 +1183,7 @@
"implementation": "A",
"type": "int",
"default": "128",
- "configurationNames": [
- "langchain.spanCharLimit"
- ]
+ "internalPropertyName": "langchain.spanCharLimit"
}
],
"DD_LANGCHAIN_SPAN_PROMPT_COMPLETION_SAMPLE_RATE": [
@@ -1199,9 +1191,7 @@
"implementation": "A",
"type": "decimal",
"default": "1",
- "configurationNames": [
- "langchain.spanPromptCompletionSampleRate"
- ]
+ "internalPropertyName": "langchain.spanPromptCompletionSampleRate"
}
],
"DD_LLMOBS_AGENTLESS_ENABLED": [
@@ -1219,9 +1209,7 @@
"implementation": "A",
"type": "boolean",
"default": "false",
- "configurationNames": [
- "llmobs.enabled"
- ]
+ "internalPropertyName": "llmobs.enabled"
}
],
"DD_LLMOBS_ML_APP": [
@@ -1249,16 +1237,22 @@
"implementation": "A",
"type": "boolean",
"default": "false",
- "configurationNames": [
- "otelLogsEnabled"
- ]
+ "internalPropertyName": "otelLogsEnabled"
}
],
- "DD_LOG_LEVEL": [
+ "DD_TRACE_LOG_LEVEL": [
{
- "implementation": "B",
+ "implementation": "C",
"type": "string",
- "default": null
+ "default": "debug",
+ "configurationNames": [
+ "logLevel"
+ ],
+ "aliases": [
+ "DD_LOG_LEVEL",
+ "OTEL_LOG_LEVEL"
+ ],
+ "allowed": "debug|info|warn|error"
}
],
"DD_METRICS_OTEL_ENABLED": [
@@ -1266,9 +1260,7 @@
"implementation": "A",
"type": "boolean",
"default": "false",
- "configurationNames": [
- "otelMetricsEnabled"
- ]
+ "internalPropertyName": "otelMetricsEnabled"
}
],
"DD_MINI_AGENT_PATH": [
@@ -1293,9 +1285,7 @@
"implementation": "A",
"type": "int",
"default": "128",
- "configurationNames": [
- "openai.spanCharLimit"
- ]
+ "internalPropertyName": "openai.spanCharLimit"
}
],
"DD_PIPELINE_EXECUTION_ID": [
@@ -1370,38 +1360,30 @@
],
"DD_PROFILING_DEBUG_UPLOAD_COMPRESSION": [
{
- "implementation": "A",
+ "implementation": "B",
"type": "string",
- "default": "zstd"
+ "default": "on",
+ "allowed": "on|off|(gzip|zstd)(-[1-9][0-9]?)?",
+ "transform": "toLowerCase"
}
],
"DD_PROFILING_ENABLED": [
{
- "implementation": "A",
- "type": "boolean",
+ "implementation": "B",
+ "type": "string",
+ "internalPropertyName": "profiling.enabled",
"configurationNames": [
- "profiling.enabled"
+ "profiling"
],
+ "allowed": "false|true|auto|1|0",
+ "transform": "normalizeProfilingEnabled",
"default": "false",
+        "__TODO__": "The alias is deprecated and should log. This needs a re-implementation.",
"aliases": [
"DD_EXPERIMENTAL_PROFILING_ENABLED"
]
}
],
- "DD_EXPERIMENTAL_PROFILING_ENABLED": [
- {
- "implementation": "A",
- "type": "boolean",
- "configurationNames": [
- "profiling.enabled"
- ],
- "default": "false",
- "aliases": [
- "DD_PROFILING_ENABLED"
- ],
- "deprecated": true
- }
- ],
"DD_PROFILING_ENDPOINT_COLLECTION_ENABLED": [
{
"implementation": "A",
@@ -1425,8 +1407,8 @@
],
"DD_PROFILING_EXPERIMENTAL_OOM_EXPORT_STRATEGIES": [
{
- "implementation": "A",
- "type": "string",
+ "implementation": "B",
+ "type": "array",
"default": "process"
}
],
@@ -1453,19 +1435,16 @@
],
"DD_PROFILING_EXPORTERS": [
{
- "implementation": "A",
- "type": "string",
- "default": "agent",
- "configurationNames": [
- "profiling.exporters"
- ]
+ "implementation": "B",
+ "type": "array",
+ "default": "agent"
}
],
"DD_PROFILING_HEAP_ENABLED": [
{
- "implementation": "A",
+ "implementation": "B",
"type": "boolean",
- "default": "false"
+ "default": null
}
],
"DD_PROFILING_HEAP_SAMPLING_INTERVAL": [
@@ -1484,8 +1463,8 @@
],
"DD_PROFILING_PROFILERS": [
{
- "implementation": "A",
- "type": "string",
+ "implementation": "B",
+ "type": "array",
"default": "space,wall"
}
],
@@ -1493,10 +1472,7 @@
{
"implementation": "A",
"type": "boolean",
- "default": "true",
- "configurationNames": [
- "profiling.sourceMap"
- ]
+ "default": "true"
}
],
"DD_PROFILING_TIMELINE_ENABLED": [
@@ -1543,9 +1519,9 @@
],
"DD_PROFILING_WALLTIME_ENABLED": [
{
- "implementation": "B",
+ "implementation": "A",
"type": "boolean",
- "default": "true"
+ "default": null
}
],
"DD_REMOTE_CONFIGURATION_ENABLED": [
@@ -1556,9 +1532,7 @@
"aliases": [
"DD_REMOTE_CONFIG_ENABLED"
],
- "configurationNames": [
- "remoteConfig.enabled"
- ]
+ "internalPropertyName": "remoteConfig.enabled"
}
],
"DD_REMOTE_CONFIG_POLL_INTERVAL_SECONDS": [
@@ -1576,7 +1550,8 @@
"implementation": "A",
"type": "boolean",
"configurationNames": [
- "runtimeMetrics.enabled"
+ "runtimeMetrics.enabled",
+ "runtimeMetrics"
],
"default": "false"
}
@@ -1621,6 +1596,14 @@
]
}
],
+ "DD_ROOT_JS_SESSION_ID": [
+ {
+ "implementation": "A",
+ "type": "string",
+ "default": null,
+ "internal": true
+ }
+ ],
"DD_TRACE_EXPERIMENTAL_RUNTIME_ID_ENABLED": [
{
"implementation": "B",
@@ -1641,8 +1624,10 @@
],
"default": null,
"aliases": [
- "DD_SERVICE_NAME"
- ]
+ "DD_SERVICE_NAME",
+ "OTEL_SERVICE_NAME"
+ ],
+ "allowed": ".+"
}
],
"DD_SERVICE_MAPPING": [
@@ -1667,20 +1652,21 @@
],
"DD_SPAN_SAMPLING_RULES": [
{
- "implementation": "C",
- "type": "array",
+ "implementation": "D",
+ "type": "json",
"configurationNames": [
- "spanSamplingRules",
- "sampler.spanSamplingRules"
+ "spanSamplingRules"
],
- "default": null
+ "default": null,
+ "transform": "toCamelCase"
}
],
"DD_SPAN_SAMPLING_RULES_FILE": [
{
- "implementation": "B",
+ "implementation": "A",
"type": "string",
- "default": ""
+ "default": null,
+ "transform": "readFilePath"
}
],
"DD_TAGS": [
@@ -1698,9 +1684,7 @@
"implementation": "A",
"type": "boolean",
"default": "false",
- "configurationNames": [
- "telemetry.debug"
- ]
+ "internalPropertyName": "telemetry.debug"
}
],
"DD_TELEMETRY_DEPENDENCY_COLLECTION_ENABLED": [
@@ -1708,9 +1692,7 @@
"implementation": "A",
"type": "boolean",
"default": "true",
- "configurationNames": [
- "telemetry.dependencyCollection"
- ]
+ "internalPropertyName": "telemetry.dependencyCollection"
}
],
"DD_TELEMETRY_FORWARDER_PATH": [
@@ -1720,14 +1702,20 @@
"default": null
}
],
+ "DD_TELEMETRY_EXTENDED_HEARTBEAT_INTERVAL": [
+ {
+ "implementation": "A",
+ "type": "int",
+ "default": "86400",
+ "internalPropertyName": "telemetry.extendedHeartbeatInterval"
+ }
+ ],
"DD_TELEMETRY_HEARTBEAT_INTERVAL": [
{
"implementation": "B",
"type": "decimal",
"default": "60.0",
- "configurationNames": [
- "telemetry.heartbeatInterval"
- ]
+ "internalPropertyName": "telemetry.heartbeatInterval"
}
],
"DD_TELEMETRY_LOG_COLLECTION_ENABLED": [
@@ -1735,9 +1723,7 @@
"implementation": "A",
"type": "boolean",
"default": "true",
- "configurationNames": [
- "telemetry.logCollection"
- ]
+ "internalPropertyName": "telemetry.logCollection"
}
],
"DD_TELEMETRY_METRICS_ENABLED": [
@@ -1745,9 +1731,7 @@
"implementation": "A",
"type": "boolean",
"default": "true",
- "configurationNames": [
- "telemetry.metrics"
- ]
+ "internalPropertyName": "telemetry.metrics"
}
],
"DD_TEST_FAILED_TEST_REPLAY_ENABLED": [
@@ -1755,9 +1739,7 @@
"implementation": "A",
"type": "boolean",
"default": "true",
- "configurationNames": [
- "isTestDynamicInstrumentationEnabled"
- ]
+ "internalPropertyName": "isTestDynamicInstrumentationEnabled"
}
],
"DD_TEST_FLEET_CONFIG_PATH": [
@@ -1779,9 +1761,7 @@
"implementation": "C",
"type": "int",
"default": "20",
- "configurationNames": [
- "testManagementAttemptToFixRetries"
- ]
+ "internalPropertyName": "testManagementAttemptToFixRetries"
}
],
"DD_TEST_MANAGEMENT_ENABLED": [
@@ -1789,9 +1769,7 @@
"implementation": "A",
"type": "boolean",
"default": "true",
- "configurationNames": [
- "isTestManagementEnabled"
- ]
+ "internalPropertyName": "isTestManagementEnabled"
}
],
"DD_TEST_TIA_KEEP_COV_CONFIG": [
@@ -1799,9 +1777,7 @@
"implementation": "A",
"type": "boolean",
"default": "false",
- "configurationNames": [
- "isKeepingCoverageConfiguration"
- ]
+ "internalPropertyName": "isKeepingCoverageConfiguration"
}
],
"DD_TEST_SESSION_NAME": [
@@ -1809,9 +1785,7 @@
"implementation": "A",
"type": "string",
"default": null,
- "configurationNames": [
- "ciVisibilityTestSessionName"
- ]
+ "internalPropertyName": "ciVisibilityTestSessionName"
}
],
"DD_TRACE_128_BIT_TRACEID_GENERATION_ENABLED": [
@@ -1963,9 +1937,7 @@
"implementation": "A",
"type": "boolean",
"default": "true",
- "configurationNames": [
- "trace.aws.addSpanPointers"
- ]
+ "internalPropertyName": "trace.aws.addSpanPointers"
}
],
"DD_TRACE_AWS_SDK_AWS_BATCH_PROPAGATION_ENABLED": [
@@ -2351,7 +2323,8 @@
"configurationNames": [
"clientIpHeader"
],
- "default": null
+ "default": null,
+ "transform": "toLowerCase"
}
],
"DD_TRACE_CLOUD_PAYLOAD_TAGGING_MAX_DEPTH": [
@@ -2361,27 +2334,30 @@
"configurationNames": [
"cloudPayloadTagging.maxDepth"
],
- "default": "10"
+ "default": "10",
+ "allowed": "\\d+"
}
],
"DD_TRACE_CLOUD_REQUEST_PAYLOAD_TAGGING": [
{
- "implementation": "A",
- "type": "array",
+ "implementation": "B",
+ "type": "string",
"configurationNames": [
"cloudPayloadTagging.request"
],
- "default": null
+ "default": null,
+ "transform": "splitJSONPathRules"
}
],
"DD_TRACE_CLOUD_RESPONSE_PAYLOAD_TAGGING": [
{
- "implementation": "A",
- "type": "array",
+ "implementation": "B",
+ "type": "string",
"configurationNames": [
"cloudPayloadTagging.response"
],
- "default": null
+ "default": null,
+ "transform": "splitJSONPathRules"
}
],
"DD_TRACE_COLLECTIONS_ENABLED": [
@@ -2494,9 +2470,7 @@
"implementation": "A",
"type": "string",
"default": null,
- "configurationNames": [
- "trace.dynamoDb.tablePrimaryKeys"
- ]
+ "internalPropertyName": "trace.dynamoDb.tablePrimaryKeys"
}
],
"DD_TRACE_ELASTICSEARCH_ENABLED": [
@@ -2525,8 +2499,9 @@
"implementation": "A",
"type": "boolean",
"default": "true",
- "configurationNames": [
- "traceEnabled"
+ "internalPropertyName": "tracing",
+ "aliases": [
+ "DD_TRACING_ENABLED"
]
}
],
@@ -2644,10 +2619,7 @@
{
"implementation": "A",
"type": "boolean",
- "default": "true",
- "configurationNames": [
- "isGCPPubSubPushSubscriptionEnabled"
- ]
+ "default": "true"
}
],
"DD_TRACE_GENERIC_POOL_ENABLED": [
@@ -2662,9 +2634,7 @@
"implementation": "A",
"type": "boolean",
"default": "true",
- "configurationNames": [
- "gitMetadataEnabled"
- ]
+ "internalPropertyName": "gitMetadataEnabled"
}
],
"DD_TRACE_GOOGLE_CLOUD_PUBSUB_ENABLED": [
@@ -2707,9 +2677,7 @@
"implementation": "A",
"type": "array",
"default": "",
- "configurationNames": [
- "graphqlErrorExtensions"
- ]
+ "internalPropertyName": "graphqlErrorExtensions"
}
],
"DD_TRACE_GRAPHQL_TAG_ENABLED": [
@@ -2796,7 +2764,8 @@
"default": "",
"configurationNames": [
"headerTags"
- ]
+ ],
+ "transform": "stripColonWhitespace"
}
],
"DD_TRACE_HONO_ENABLED": [
@@ -3064,13 +3033,6 @@
"default": "true"
}
],
- "DD_TRACE_LOG_LEVEL": [
- {
- "implementation": "C",
- "type": "string",
- "default": "debug"
- }
- ],
"DD_TRACE_LOOPBACK_ENABLED": [
{
"implementation": "A",
@@ -3090,9 +3052,7 @@
"implementation": "A",
"type": "boolean",
"default": "false",
- "configurationNames": [
- "memcachedCommandEnabled"
- ]
+ "internalPropertyName": "memcachedCommandEnabled"
}
],
"DD_TRACE_MEMCACHED_ENABLED": [
@@ -3208,9 +3168,7 @@
"implementation": "A",
"type": "boolean",
"default": "false",
- "configurationNames": [
- "trace.nativeSpanEvents"
- ]
+ "internalPropertyName": "trace.nativeSpanEvents"
}
],
"DD_TRACE_NET_ENABLED": [
@@ -3260,9 +3218,7 @@
"implementation": "F",
"type": "string",
"default": "(?:p(?:ass)?w(?:or)?d|pass(?:_?phrase)?|secret|(?:api_?|private_?|public_?|access_?|secret_?)key(?:_?id)?|token|consumer_?(?:id|key|secret)|sign(?:ed|ature)?|auth(?:entication|orization)?)(?:(?:\\s|%20)*(?:=|%3D)[^&]+|(?:\"|%22)(?:\\s|%20)*(?::|%3A)(?:\\s|%20)*(?:\"|%22)(?:%2[^2]|%[^2]|[^\"%])+(?:\"|%22))|bearer(?:\\s|%20)+[a-z0-9\\._\\-]+|token(?::|%3A)[a-z0-9]{13}|gh[opsu]_[0-9a-zA-Z]{36}|ey[I-L](?:[\\w=-]|%3D)+\\.ey[I-L](?:[\\w=-]|%3D)+(?:\\.(?:[\\w.+\\/=-]|%3D|%2F|%2B)+)?|[\\-]{5}BEGIN(?:[a-z\\s]|%20)+PRIVATE(?:\\s|%20)KEY[\\-]{5}[^\\-]+[\\-]{5}END(?:[a-z\\s]|%20)+PRIVATE(?:\\s|%20)KEY|ssh-rsa(?:\\s|%20)*(?:[a-z0-9\\/\\.+]|%2F|%5C|%2B){100,}",
- "configurationNames": [
- "queryStringObfuscation"
- ]
+ "internalPropertyName": "queryStringObfuscation"
}
],
"DD_TRACE_OPENAI_ENABLED": [
@@ -3453,10 +3409,10 @@
{
"implementation": "B",
"type": "string",
+ "allowed": "continue|restart|ignore",
+ "transform": "toLowerCase",
"default": "continue",
- "configurationNames": [
- "tracePropagationBehaviorExtract"
- ]
+ "internalPropertyName": "tracePropagationBehaviorExtract"
}
],
"DD_TRACE_PROPAGATION_EXTRACT_FIRST": [
@@ -3464,19 +3420,18 @@
"implementation": "A",
"type": "boolean",
"default": "false",
- "configurationNames": [
- "tracePropagationExtractFirst"
- ]
+ "internalPropertyName": "tracePropagationExtractFirst"
}
],
"DD_TRACE_PROPAGATION_STYLE": [
{
"implementation": "D",
"type": "array",
- "configurationNames": [
- "tracePropagationStyle"
- ],
- "default": "datadog,tracecontext,baggage"
+ "default": "datadog,tracecontext,baggage",
+ "transform": "validatePropagationStyles",
+ "aliases": [
+ "OTEL_PROPAGATORS"
+ ]
}
],
"DD_TRACE_PROPAGATION_STYLE_EXTRACT": [
@@ -3486,7 +3441,8 @@
"configurationNames": [
"tracePropagationStyle.extract"
],
- "default": "datadog, tracecontext, baggage"
+ "default": "datadog, tracecontext, baggage",
+ "transform": "toLowerCase"
}
],
"DD_TRACE_PROPAGATION_STYLE_INJECT": [
@@ -3496,7 +3452,8 @@
"configurationNames": [
"tracePropagationStyle.inject"
],
- "default": "datadog, tracecontext, baggage"
+ "default": "datadog, tracecontext, baggage",
+ "transform": "toLowerCase"
}
],
"DD_TRACE_PROTOBUFJS_ENABLED": [
@@ -3525,8 +3482,8 @@
"implementation": "A",
"type": "int",
"configurationNames": [
- "ingestion.rateLimit",
- "sampler.rateLimit"
+ "rateLimit",
+ "ingestion.rateLimit"
],
"default": "100"
}
@@ -3591,9 +3548,7 @@
"implementation": "A",
"type": "boolean",
"default": "false",
- "configurationNames": [
- "resourceRenamingEnabled"
- ]
+ "internalPropertyName": "resourceRenamingEnabled"
}
],
"DD_TRACE_RESTIFY_ENABLED": [
@@ -3622,20 +3577,22 @@
"implementation": "B",
"type": "decimal",
"configurationNames": [
+ "sampleRate",
"ingestion.sampleRate"
],
- "default": null
+ "default": null,
+ "transform": "sampleRate"
}
],
"DD_TRACE_SAMPLING_RULES": [
{
- "implementation": "A",
- "type": "array",
+ "implementation": "E",
+ "type": "json",
"configurationNames": [
- "samplingRules",
- "sampler.rules"
+ "samplingRules"
],
- "default": ""
+ "default": "[]",
+ "transform": "toCamelCase"
}
],
"DD_TRACE_SCOPE": [
@@ -3643,9 +3600,7 @@
"implementation": "A",
"type": "string",
"default": null,
- "configurationNames": [
- "scope"
- ]
+ "internalPropertyName": "scope"
}
],
"DD_TRACE_SELENIUM_ENABLED": [
@@ -3687,6 +3642,8 @@
{
"implementation": "B",
"type": "string",
+ "allowed": "v0|v1",
+ "transform": "toLowerCase",
"configurationNames": [
"spanAttributeSchema"
],
@@ -3697,9 +3654,7 @@
{
"implementation": "A",
"type": "int",
- "configurationNames": [
- "spanLeakDebug"
- ],
+ "internalPropertyName": "spanLeakDebug",
"default": "0"
}
],
@@ -3712,20 +3667,21 @@
],
"DD_TRACE_STARTUP_LOGS": [
{
- "implementation": "D",
+ "implementation": "C",
"type": "boolean",
"configurationNames": [
"startupLogs"
],
- "default": "false"
+ "default": "true"
}
],
"DD_TRACE_STATS_COMPUTATION_ENABLED": [
{
"implementation": "A",
"type": "boolean",
+ "internalPropertyName": "stats.enabled",
"configurationNames": [
- "stats.enabled"
+ "stats"
],
"default": "false"
}
@@ -3852,19 +3808,7 @@
"implementation": "A",
"type": "int",
"default": "512",
- "configurationNames": [
- "tagsHeaderMaxLength"
- ]
- }
- ],
- "DD_TRACING_ENABLED": [
- {
- "implementation": "A",
- "type": "boolean",
- "default": "true",
- "configurationNames": [
- "tracing"
- ]
+ "internalPropertyName": "tagsHeaderMaxLength"
}
],
"DD_VERSION": [
@@ -3882,9 +3826,7 @@
"implementation": "A",
"type": "int",
"default": "128",
- "configurationNames": [
- "vertexai.spanCharLimit"
- ]
+ "internalPropertyName": "vertexai.spanCharLimit"
}
],
"DD_VERTEXAI_SPAN_PROMPT_COMPLETION_SAMPLE_RATE": [
@@ -3892,9 +3834,7 @@
"implementation": "A",
"type": "decimal",
"default": "1",
- "configurationNames": [
- "vertexai.spanPromptCompletionSampleRate"
- ]
+ "internalPropertyName": "vertexai.spanPromptCompletionSampleRate"
}
],
"DD_VITEST_WORKER": [
@@ -3908,50 +3848,42 @@
{
"implementation": "A",
"type": "int",
- "configurationNames": [
- "otelMaxExportBatchSize"
- ],
- "default": "512"
+ "internalPropertyName": "otelMaxExportBatchSize",
+ "default": "512",
+ "allowed": "[1-9]\\d*"
}
],
"OTEL_BSP_MAX_QUEUE_SIZE": [
{
"implementation": "A",
"type": "int",
- "configurationNames": [
- "otelMaxQueueSize"
- ],
- "default": "2048"
+ "internalPropertyName": "otelMaxQueueSize",
+ "default": "2048",
+ "allowed": "[1-9]\\d*"
}
],
"OTEL_BSP_SCHEDULE_DELAY": [
{
"implementation": "A",
"type": "int",
- "configurationNames": [
- "otelBatchTimeout"
- ],
- "default": "5000"
+ "internalPropertyName": "otelBatchTimeout",
+ "default": "5000",
+ "allowed": "[1-9]\\d*"
}
],
"OTEL_EXPORTER_OTLP_ENDPOINT": [
{
"implementation": "A",
"type": "string",
- "default": null,
- "configurationNames": [
- "otelUrl"
- ]
+ "default": null
}
],
"OTEL_EXPORTER_OTLP_HEADERS": [
{
- "implementation": "B",
- "type": "map",
+ "implementation": "C",
+ "type": "string",
"default": null,
- "configurationNames": [
- "otelHeaders"
- ]
+ "internalPropertyName": "otelHeaders"
}
],
"OTEL_EXPORTER_OTLP_LOGS_ENDPOINT": [
@@ -3959,18 +3891,20 @@
"implementation": "A",
"type": "string",
"default": null,
- "configurationNames": [
- "otelLogsUrl"
+ "internalPropertyName": "otelLogsUrl",
+ "aliases": [
+ "OTEL_EXPORTER_OTLP_ENDPOINT"
]
}
],
"OTEL_EXPORTER_OTLP_LOGS_HEADERS": [
{
- "implementation": "B",
- "type": "map",
+ "implementation": "A",
+ "type": "string",
"default": null,
- "configurationNames": [
- "otelLogsHeaders"
+ "internalPropertyName": "otelLogsHeaders",
+ "aliases": [
+ "OTEL_EXPORTER_OTLP_HEADERS"
]
}
],
@@ -3979,8 +3913,9 @@
"implementation": "D",
"type": "string",
"default": "http/protobuf",
- "configurationNames": [
- "otelLogsProtocol"
+ "internalPropertyName": "otelLogsProtocol",
+ "aliases": [
+ "OTEL_EXPORTER_OTLP_PROTOCOL"
]
}
],
@@ -3988,10 +3923,12 @@
{
"implementation": "A",
"type": "int",
- "configurationNames": [
- "otelLogsTimeout"
- ],
- "default": "10000"
+ "internalPropertyName": "otelLogsTimeout",
+ "default": "10000",
+ "allowed": "[1-9]\\d*",
+ "aliases": [
+ "OTEL_EXPORTER_OTLP_TIMEOUT"
+ ]
}
],
"OTEL_EXPORTER_OTLP_METRICS_ENDPOINT": [
@@ -3999,18 +3936,20 @@
"implementation": "A",
"type": "string",
"default": null,
- "configurationNames": [
- "otelMetricsUrl"
+ "internalPropertyName": "otelMetricsUrl",
+ "aliases": [
+ "OTEL_EXPORTER_OTLP_ENDPOINT"
]
}
],
"OTEL_EXPORTER_OTLP_METRICS_HEADERS": [
{
- "implementation": "A",
- "type": "map",
+ "implementation": "B",
+ "type": "string",
"default": null,
- "configurationNames": [
- "otelMetricsHeaders"
+ "internalPropertyName": "otelMetricsHeaders",
+ "aliases": [
+ "OTEL_EXPORTER_OTLP_HEADERS"
]
}
],
@@ -4019,8 +3958,9 @@
"implementation": "B",
"type": "string",
"default": "http/protobuf",
- "configurationNames": [
- "otelMetricsProtocol"
+ "internalPropertyName": "otelMetricsProtocol",
+ "aliases": [
+ "OTEL_EXPORTER_OTLP_PROTOCOL"
]
}
],
@@ -4028,9 +3968,9 @@
{
"implementation": "A",
"type": "string",
- "configurationNames": [
- "otelMetricsTemporalityPreference"
- ],
+ "allowed": "Delta|Cumulative|LowMemory",
+ "transform": "toUpperCase",
+ "internalPropertyName": "otelMetricsTemporalityPreference",
"default": "delta"
}
],
@@ -4038,10 +3978,12 @@
{
"implementation": "B",
"type": "int",
- "configurationNames": [
- "otelMetricsTimeout"
- ],
- "default": "10000"
+ "allowed": "[1-9]\\d*",
+ "internalPropertyName": "otelMetricsTimeout",
+ "default": "10000",
+ "aliases": [
+ "OTEL_EXPORTER_OTLP_TIMEOUT"
+ ]
}
],
"OTEL_EXPORTER_OTLP_PROTOCOL": [
@@ -4049,18 +3991,15 @@
"implementation": "A",
"type": "string",
"default": "http/protobuf",
- "configurationNames": [
- "otelProtocol"
- ]
+ "internalPropertyName": "otelProtocol"
}
],
"OTEL_EXPORTER_OTLP_TIMEOUT": [
{
"implementation": "A",
"type": "int",
- "configurationNames": [
- "otelTimeout"
- ],
+ "allowed": "[1-9]\\d*",
+ "internalPropertyName": "otelTimeout",
"default": "10000"
}
],
@@ -4068,30 +4007,26 @@
{
"implementation": "A",
"type": "string",
- "default": null
- }
- ],
- "OTEL_LOG_LEVEL": [
- {
- "implementation": "C",
- "type": "string",
- "default": null
+ "default": null,
+ "allowed": "none|otlp",
+ "transform": "toLowerCase"
}
],
"OTEL_METRICS_EXPORTER": [
{
"implementation": "C",
"type": "string",
- "default": null
+ "default": null,
+ "allowed": "none|otlp",
+ "transform": "toLowerCase"
}
],
"OTEL_METRIC_EXPORT_INTERVAL": [
{
"implementation": "A",
"type": "int",
- "configurationNames": [
- "otelMetricsExportInterval"
- ],
+ "allowed": "[1-9]\\d*",
+ "internalPropertyName": "otelMetricsExportInterval",
"default": "10000"
}
],
@@ -4099,27 +4034,17 @@
{
"implementation": "A",
"type": "int",
- "configurationNames": [
- "otelMetricsExportTimeout"
- ],
+ "allowed": "[1-9]\\d*",
+ "internalPropertyName": "otelMetricsExportTimeout",
"default": "7500"
}
],
- "OTEL_PROPAGATORS": [
- {
- "implementation": "A",
- "type": "array",
- "default": "",
- "configurationNames": [
- "tracePropagationStyle.otelPropagators"
- ]
- }
- ],
"OTEL_RESOURCE_ATTRIBUTES": [
{
"implementation": "B",
"type": "string",
- "default": ""
+ "default": "",
+ "transform": "parseOtelTags"
}
],
"OTEL_SDK_DISABLED": [
@@ -4129,38 +4054,30 @@
"default": "true"
}
],
- "OTEL_SERVICE_NAME": [
- {
- "implementation": "B",
- "type": "string",
- "configurationNames": [
- "service"
- ],
- "default": null
- }
- ],
"OTEL_TRACES_EXPORTER": [
{
"implementation": "F",
"type": "string",
- "default": "otlp"
+ "default": "otlp",
+ "allowed": "none|otlp",
+ "transform": "toLowerCase"
}
],
"OTEL_TRACES_SAMPLER": [
{
"implementation": "E",
"type": "string",
- "default": "parentbased_always_on"
+ "default": "parentbased_always_on",
+ "allowed": "always_on|always_off|traceidratio|parentbased_always_on|parentbased_always_off|parentbased_traceidratio",
+ "transform": "toLowerCase"
}
],
"OTEL_TRACES_SAMPLER_ARG": [
{
"implementation": "D",
"type": "decimal",
- "configurationNames": [
- "sampleRate"
- ],
- "default": null
+ "default": null,
+ "allowed": "\\d+(\\.\\d+)?"
}
]
}
diff --git a/packages/dd-trace/src/crashtracking/crashtracker.js b/packages/dd-trace/src/crashtracking/crashtracker.js
index 1fd2a822fb6..10b02988dc2 100644
--- a/packages/dd-trace/src/crashtracking/crashtracker.js
+++ b/packages/dd-trace/src/crashtracking/crashtracker.js
@@ -23,6 +23,9 @@ class Crashtracker {
}
}
+ /**
+ * @param {import('../config/config-base')} config - Tracer configuration
+ */
start (config) {
if (this.#started) return this.configure(config)
@@ -35,7 +38,7 @@ class Crashtracker {
this.#getMetadata(config)
)
} catch (e) {
- log.error('Error initialising crashtracker', e)
+ log.error('Error initializing crashtracker', e)
}
}
@@ -49,6 +52,9 @@ class Crashtracker {
}
// TODO: Send only configured values when defaults are fixed.
+ /**
+ * @param {import('../config/config-base')} config - Tracer configuration
+ */
#getConfig (config) {
const url = getAgentUrl(config)
diff --git a/packages/dd-trace/src/crashtracking/index.js b/packages/dd-trace/src/crashtracking/index.js
index 5addf3864ef..2ba38e72658 100644
--- a/packages/dd-trace/src/crashtracking/index.js
+++ b/packages/dd-trace/src/crashtracking/index.js
@@ -1,15 +1,9 @@
'use strict'
-const { existsSync } = require('node:fs')
const { isMainThread } = require('worker_threads')
const log = require('../log')
-// libdatadog v29 crashtracker segfaults during init on ARM64 musl (Alpine).
-// The segfault bypasses JS try/catch so we must avoid loading it entirely.
-// See: https://github.com/DataDog/libdatadog-nodejs/issues/114
-const isArm64Musl = process.arch === 'arm64' && existsSync('/etc/alpine-release')
-
-if (isMainThread && !isArm64Musl) {
+if (isMainThread) {
try {
module.exports = require('./crashtracker')
} catch (e) {
diff --git a/packages/dd-trace/src/debugger/index.js b/packages/dd-trace/src/debugger/index.js
index fe70f9dc5c2..9f3e750702e 100644
--- a/packages/dd-trace/src/debugger/index.js
+++ b/packages/dd-trace/src/debugger/index.js
@@ -147,7 +147,7 @@ function start (config, rcInstance) {
* Sends the new configuration to the worker thread via the config channel.
* Does nothing if the worker is not started.
*
- * @param {Config} config - The updated tracer configuration object
+ * @param {import('../config/config-base')} config - The updated tracer configuration object
*/
function configure (config) {
if (configChannel === null) return
diff --git a/packages/dd-trace/src/dogstatsd.js b/packages/dd-trace/src/dogstatsd.js
index b9f1491febc..f3853203cb2 100644
--- a/packages/dd-trace/src/dogstatsd.js
+++ b/packages/dd-trace/src/dogstatsd.js
@@ -1,13 +1,11 @@
'use strict'
-const lookup = require('dns').lookup // cache to avoid instrumentation
const dgram = require('dgram')
const isIP = require('net').isIP
const request = require('./exporters/common/request')
const log = require('./log')
const Histogram = require('./histogram')
-const defaults = require('./config/defaults')
const { getAgentUrl } = require('./agent/url')
const { entityId } = require('./exporters/common/docker')
@@ -23,7 +21,9 @@ const TYPE_HISTOGRAM = 'h'
* @implements {DogStatsD}
*/
class DogStatsDClient {
- constructor (options = {}) {
+ #lookup
+ constructor (options) {
+ this.#lookup = options.lookup
if (options.metricsProxyUrl) {
this._httpOptions = {
method: 'POST',
@@ -32,11 +32,10 @@ class DogStatsDClient {
}
}
- this._host = options.host || defaults['dogstatsd.hostname']
+ this._host = options.host
this._family = isIP(this._host)
- this._port = options.port || defaults['dogstatsd.port']
- this._prefix = options.prefix || ''
- this._tags = options.tags || []
+ this._port = options.port
+ this._tags = options.tags
this._queue = []
this._buffer = ''
this._offset = 0
@@ -99,7 +98,7 @@ class DogStatsDClient {
_sendUdp (queue) {
if (this._family === 0) {
- lookup(this._host, (err, address, family) => {
+ this.#lookup(this._host, (err, address, family) => {
if (err) return log.error('DogStatsDClient: Host not found', err)
this._sendUdpFromQueue(queue, address, family)
})
@@ -118,7 +117,7 @@ class DogStatsDClient {
}
_add (stat, value, type, tags) {
- let message = `${this._prefix + stat}:${value}|${type}`
+ let message = `${stat}:${value}|${type}`
// Don't manipulate this._tags as it is still used
tags = tags ? [...this._tags, ...tags] : this._tags
@@ -164,6 +163,9 @@ class DogStatsDClient {
return socket
}
+ /**
+ * @param {import('./config/config-base')} config - Tracer configuration
+ */
static generateClientConfig (config) {
const tags = []
@@ -183,6 +185,7 @@ class DogStatsDClient {
host: config.dogstatsd.hostname,
port: config.dogstatsd.port,
tags,
+ lookup: config.lookup,
}
if (config.url || config.port) {
diff --git a/packages/dd-trace/src/encode/0.4.js b/packages/dd-trace/src/encode/0.4.js
index 6a21762bf63..9a253325223 100644
--- a/packages/dd-trace/src/encode/0.4.js
+++ b/packages/dd-trace/src/encode/0.4.js
@@ -140,7 +140,7 @@ class AgentEncoder {
this._traceBytes.length = 0
this._stringCount = 0
this._stringBytes.length = 0
- this._stringMap = {}
+ this._stringMap = Object.create(null)
this._cacheString('')
}
diff --git a/packages/dd-trace/src/exporters/agent/writer.js b/packages/dd-trace/src/exporters/agent/writer.js
index a9f5ada40b9..f880b2ba1be 100644
--- a/packages/dd-trace/src/exporters/agent/writer.js
+++ b/packages/dd-trace/src/exporters/agent/writer.js
@@ -1,6 +1,8 @@
'use strict'
const { inspect } = require('node:util')
+const { channel } = require('dc-polyfill')
+
const request = require('../common/request')
const { logIntegrations, logAgentError } = require('../../startup-log')
const runtimeMetrics = require('../../runtime_metrics')
@@ -10,10 +12,14 @@ const BaseWriter = require('../common/writer')
const propagationHash = require('../../propagation-hash')
const METRIC_PREFIX = 'datadog.tracer.node.exporter.agent'
+const firstFlushChannel = channel('dd-trace:exporter:first-flush')
class AgentWriter extends BaseWriter {
constructor (...args) {
- super(...args)
+ super({
+ ...args[0],
+ beforeFirstFlush: () => firstFlushChannel.publish(),
+ })
const { prioritySampler, lookup, protocolVersion, headers, config = {} } = args[0]
const AgentEncoder = getEncoder(protocolVersion)
diff --git a/packages/dd-trace/src/exporters/common/request.js b/packages/dd-trace/src/exporters/common/request.js
index f21daa0de5f..ca2e5ca752d 100644
--- a/packages/dd-trace/src/exporters/common/request.js
+++ b/packages/dd-trace/src/exporters/common/request.js
@@ -18,6 +18,10 @@ const maxActiveBufferSize = 1024 * 1024 * 64
let activeBufferSize = 0
+/**
+ * @param {string|URL|object} urlObjOrString
+ * @returns {object}
+ */
function parseUrl (urlObjOrString) {
if (urlObjOrString !== null && typeof urlObjOrString === 'object') return urlToHttpOptions(urlObjOrString)
@@ -33,6 +37,11 @@ function parseUrl (urlObjOrString) {
return url
}
+/**
+ * @param {Buffer|string|Readable|Array} data
+ * @param {object} options
+ * @param {(error: Error|null, result: string, statusCode: number) => void} callback
+ */
function request (data, options, callback) {
if (!options.headers) {
options.headers = {}
diff --git a/packages/dd-trace/src/exporters/common/writer.js b/packages/dd-trace/src/exporters/common/writer.js
index 20e63ae66af..9b352a703d2 100644
--- a/packages/dd-trace/src/exporters/common/writer.js
+++ b/packages/dd-trace/src/exporters/common/writer.js
@@ -1,14 +1,21 @@
'use strict'
+const { channel } = require('dc-polyfill')
+
const log = require('../../log')
const request = require('./request')
const { safeJSONStringify } = require('./util')
+const firstFlushChannel = channel('dd-trace:exporter:first-flush')
+
class Writer {
- constructor ({ url }) {
+ constructor ({ url, beforeFirstFlush }) {
this._url = url
+ this._beforeFirstFlush = beforeFirstFlush
}
+ #isFirstFlush = true
+
flush (done = () => {}) {
const count = this._encoder.count()
@@ -16,8 +23,11 @@ class Writer {
this._encoder.reset()
done()
} else if (count > 0) {
+ if (this.#isFirstFlush && firstFlushChannel.hasSubscribers && this._beforeFirstFlush) {
+ this.#isFirstFlush = false
+ this._beforeFirstFlush()
+ }
const payload = this._encoder.makePayload()
-
this._sendPayload(payload, count, done)
} else {
done()
diff --git a/packages/dd-trace/src/heap_snapshots.js b/packages/dd-trace/src/heap_snapshots.js
index 35360d892c0..19ef1206a69 100644
--- a/packages/dd-trace/src/heap_snapshots.js
+++ b/packages/dd-trace/src/heap_snapshots.js
@@ -45,6 +45,9 @@ function getName (destination) {
}
module.exports = {
+ /**
+ * @param {import('./config/config-base')} config - Tracer configuration
+ */
async start (config) {
const destination = config.heapSnapshot.destination
diff --git a/packages/dd-trace/src/index.js b/packages/dd-trace/src/index.js
index 582511ab6a9..0366f023e4c 100644
--- a/packages/dd-trace/src/index.js
+++ b/packages/dd-trace/src/index.js
@@ -1,7 +1,7 @@
'use strict'
const { getValueFromEnvSources } = require('./config/helper')
-const { isFalse } = require('./util')
+const { isFalse, isTrue } = require('./util')
// Global `jest` is only present in Jest workers.
const inJestWorker = typeof jest !== 'undefined'
@@ -9,7 +9,10 @@ const inJestWorker = typeof jest !== 'undefined'
const ddTraceDisabled = getValueFromEnvSources('DD_TRACE_ENABLED')
? isFalse(getValueFromEnvSources('DD_TRACE_ENABLED'))
: String(getValueFromEnvSources('OTEL_TRACES_EXPORTER')).toLowerCase() === 'none'
+const shouldUseProxyWhenTracingDisabled =
+ isTrue(getValueFromEnvSources('DD_DYNAMIC_INSTRUMENTATION_ENABLED')) ||
+ isTrue(getValueFromEnvSources('DD_EXPERIMENTAL_APPSEC_STANDALONE_ENABLED'))
-module.exports = ddTraceDisabled || inJestWorker
+module.exports = (ddTraceDisabled && !shouldUseProxyWhenTracingDisabled) || inJestWorker
? require('./noop/proxy')
: require('./proxy')
diff --git a/packages/dd-trace/src/lambda/runtime/ritm.js b/packages/dd-trace/src/lambda/runtime/ritm.js
index 5418a1e7fc7..40ab513f0d9 100644
--- a/packages/dd-trace/src/lambda/runtime/ritm.js
+++ b/packages/dd-trace/src/lambda/runtime/ritm.js
@@ -89,12 +89,12 @@ const registerLambdaHook = () => {
const lambdaFilePaths = _getLambdaFilePaths(lambdaStylePath)
// TODO: Redo this like any other instrumentation.
- Hook(lambdaFilePaths, (moduleExports, name) => {
+ Hook(lambdaFilePaths, (moduleExports, name, _, moduleVersion) => {
require('./patch')
for (const { hook } of instrumentations[name]) {
try {
- moduleExports = hook(moduleExports)
+ moduleExports = hook(moduleExports, moduleVersion) ?? moduleExports
} catch (e) {
log.error('Error executing lambda hook', e)
}
@@ -104,16 +104,16 @@ const registerLambdaHook = () => {
})
} else {
const moduleToPatch = 'datadog-lambda-js'
- Hook([moduleToPatch], (moduleExports, moduleName, _) => {
+ Hook([moduleToPatch], (moduleExports, moduleName, _, moduleVersion) => {
moduleName = moduleName.replace(pathSepExpr, '/')
require('./patch')
- for (const { name, file, hook } of instrumentations[moduleToPatch]) {
- const fullFilename = filename(name, file)
+ for (const { file, hook } of instrumentations[moduleToPatch]) {
+ const fullFilename = filename(moduleToPatch, file)
if (moduleName === fullFilename) {
try {
- moduleExports = hook(moduleExports)
+ moduleExports = hook(moduleExports, moduleVersion) ?? moduleExports
} catch (e) {
log.error('Error executing lambda hook for datadog-lambda-js', e)
}
diff --git a/packages/dd-trace/src/llmobs/index.js b/packages/dd-trace/src/llmobs/index.js
index f3a4972a7a8..1e34a5a71ab 100644
--- a/packages/dd-trace/src/llmobs/index.js
+++ b/packages/dd-trace/src/llmobs/index.js
@@ -43,9 +43,12 @@ let spanWriter
/** @type {LLMObsEvalMetricsWriter | null} */
let evalWriter
-/** @type {import('../config')} */
+/** @type {import('../config/config-base')} */
let globalTracerConfig
+/**
+ * @param {import('../config/config-base')} config
+ */
function enable (config) {
globalTracerConfig = config
diff --git a/packages/dd-trace/src/llmobs/plugins/ai/index.js b/packages/dd-trace/src/llmobs/plugins/ai/index.js
index 55efcb0f929..185659e43e5 100644
--- a/packages/dd-trace/src/llmobs/plugins/ai/index.js
+++ b/packages/dd-trace/src/llmobs/plugins/ai/index.js
@@ -18,6 +18,7 @@ const {
getToolNameFromTags,
getToolCallResultContent,
getLlmObsSpanName,
+ getTelemetryMetadata,
} = require('./util')
/**
@@ -216,6 +217,9 @@ class VercelAILLMObsPlugin extends BaseLLMObsPlugin {
this._tagger.tagEmbeddingIO(span, parsedInputs, output)
+ const metadata = getTelemetryMetadata(tags)
+ this._tagger.tagMetadata(span, metadata)
+
const usage = tags['ai.usage.tokens']
this._tagger.tagMetrics(span, {
inputTokens: usage,
@@ -234,7 +238,7 @@ class VercelAILLMObsPlugin extends BaseLLMObsPlugin {
this._tagger.tagTextIO(span, prompt, output)
- const metadata = getGenerationMetadata(tags) ?? {}
+ const metadata = getGenerationMetadata(tags)
metadata.schema = getJsonStringValue(tags['ai.schema'], {})
this._tagger.tagMetadata(span, metadata)
}
diff --git a/packages/dd-trace/src/llmobs/plugins/ai/util.js b/packages/dd-trace/src/llmobs/plugins/ai/util.js
index d8aee774bd2..9d28567094d 100644
--- a/packages/dd-trace/src/llmobs/plugins/ai/util.js
+++ b/packages/dd-trace/src/llmobs/plugins/ai/util.js
@@ -10,6 +10,10 @@ const MODEL_METADATA_KEYS = new Set([
'stop_sequences',
])
+const VERCEL_AI_TELEMETRY_METADATA_PREFIX = 'ai.telemetry.metadata.'
+const VERCEL_AI_MODEL_METADATA_PREFIX = 'gen_ai.request.'
+const VERCEL_AI_GENERATION_METADATA_PREFIX = 'ai.settings.'
+
/**
* @typedef {import('../../../opentracing/span')} Span
*
@@ -107,17 +111,29 @@ function getJsonStringValue (str, defaultValue) {
/**
* Get the model metadata from the span tags (top_p, top_k, temperature, etc.)
+ * Additionally, set telemetry metadata from manual telemetry tags.
* @param {SpanTags} tags
* @returns {Record | null}
*/
function getModelMetadata (tags) {
/** @type {Record} */
const modelMetadata = {}
- for (const metadata of MODEL_METADATA_KEYS) {
- const metadataTagKey = `gen_ai.request.${metadata}`
- const metadataValue = tags[metadataTagKey]
- if (metadataValue) {
- modelMetadata[metadata] = metadataValue
+ for (const tag of Object.keys(tags)) {
+ const isModelMetadata = tag.startsWith(VERCEL_AI_MODEL_METADATA_PREFIX)
+ if (isModelMetadata) {
+ const lastCommaPosition = tag.lastIndexOf('.')
+ const metadataKey = lastCommaPosition === -1 ? tag : tag.slice(lastCommaPosition + 1)
+ if (metadataKey && MODEL_METADATA_KEYS.has(metadataKey)) {
+ modelMetadata[metadataKey] = tags[tag]
+ }
+ } else {
+ const isTelemetryMetadata = tag.startsWith(VERCEL_AI_TELEMETRY_METADATA_PREFIX)
+ if (isTelemetryMetadata) {
+ const metadataKey = tag.slice(VERCEL_AI_TELEMETRY_METADATA_PREFIX.length)
+ if (metadataKey) {
+ modelMetadata[metadataKey] = tags[tag]
+ }
+ }
}
}
@@ -126,6 +142,7 @@ function getModelMetadata (tags) {
/**
* Get the generation metadata from the span tags (maxSteps, maxRetries, etc.)
+ * Additionally, set telemetry metadata from manual telemetry tags.
* @param {SpanTags} tags
* @returns {Record | null}
*/
@@ -134,14 +151,24 @@ function getGenerationMetadata (tags) {
const metadata = {}
for (const tag of Object.keys(tags)) {
- if (!tag.startsWith('ai.settings')) continue
-
- const settingKey = tag.split('.').pop()
- const transformedKey = settingKey.replaceAll(/[A-Z]/g, letter => '_' + letter.toLowerCase())
- if (MODEL_METADATA_KEYS.has(transformedKey)) continue
+ const isGenerationMetadata = tag.startsWith(VERCEL_AI_GENERATION_METADATA_PREFIX)
+ if (isGenerationMetadata) {
+ const lastCommaPosition = tag.lastIndexOf('.')
+ const settingKey = lastCommaPosition === -1 ? tag : tag.slice(lastCommaPosition + 1)
+ const transformedKey = settingKey.replaceAll(/[A-Z]/g, letter => '_' + letter.toLowerCase())
+ if (MODEL_METADATA_KEYS.has(transformedKey)) continue
- const settingValue = tags[tag]
- metadata[settingKey] = settingValue
+ const settingValue = tags[tag]
+ metadata[settingKey] = settingValue
+ } else {
+ const isTelemetryMetadata = tag.startsWith(VERCEL_AI_TELEMETRY_METADATA_PREFIX)
+ if (isTelemetryMetadata) {
+ const metadataKey = tag.slice(VERCEL_AI_TELEMETRY_METADATA_PREFIX.length)
+ if (metadataKey) {
+ metadata[metadataKey] = tags[tag]
+ }
+ }
+ }
}
return Object.keys(metadata).length ? metadata : null
@@ -205,6 +232,26 @@ function getLlmObsSpanName (operation, functionId) {
return functionId ? `${functionId}.${operation}` : operation
}
+/**
+ * Get custom telemetry metadata from ai.telemetry.metadata.* attributes
+ * @param {Record} tags
+ * @returns {Record | null}
+ */
+function getTelemetryMetadata (tags) {
+ const metadata = {}
+
+ for (const tag of Object.keys(tags)) {
+ if (!tag.startsWith(VERCEL_AI_TELEMETRY_METADATA_PREFIX)) continue
+
+ const metadataKey = tag.slice(VERCEL_AI_TELEMETRY_METADATA_PREFIX.length)
+ if (metadataKey) {
+ metadata[metadataKey] = tags[tag]
+ }
+ }
+
+ return Object.keys(metadata).length ? metadata : null
+}
+
module.exports = {
getSpanTags,
getOperation,
@@ -215,4 +262,5 @@ module.exports = {
getToolNameFromTags,
getToolCallResultContent,
getLlmObsSpanName,
+ getTelemetryMetadata,
}
diff --git a/packages/dd-trace/src/llmobs/plugins/bedrockruntime.js b/packages/dd-trace/src/llmobs/plugins/bedrockruntime.js
index 3747c0dffec..db0249b8a6b 100644
--- a/packages/dd-trace/src/llmobs/plugins/bedrockruntime.js
+++ b/packages/dd-trace/src/llmobs/plugins/bedrockruntime.js
@@ -65,10 +65,12 @@ class BedrockRuntimeLLMObsPlugin extends BaseLLMObsPlugin {
telemetry.incrementLLMObsSpanStartCount({ autoinstrumented: true, integration: 'bedrock' })
const parent = llmobsStore.getStore()?.span
+ // Use full modelId and unified provider for LLMObs (required for backend cost estimation).
+ // Split modelProvider/modelName from parseModelId() are still used below for response parsing.
this._tagger.registerLLMObsSpan(span, {
parent,
- modelName: modelName.toLowerCase(),
- modelProvider: modelProvider.toLowerCase(),
+ modelName: request.params.modelId.toLowerCase(),
+ modelProvider: 'amazon_bedrock',
kind: 'llm',
name: 'bedrock-runtime.command',
integration: 'bedrock',
diff --git a/packages/dd-trace/src/llmobs/sdk.js b/packages/dd-trace/src/llmobs/sdk.js
index 6e06027953c..8149b02ac77 100644
--- a/packages/dd-trace/src/llmobs/sdk.js
+++ b/packages/dd-trace/src/llmobs/sdk.js
@@ -29,16 +29,23 @@ class LLMObs extends NoopLLMObs {
*/
#hasUserSpanProcessor = false
+ /**
+ * @param {import('../tracer')} tracer - Tracer instance
+ * @param {import('./index')} llmobsModule - LLMObs module instance
+ * @param {import('../config/config-base')} config - Tracer configuration
+ */
constructor (tracer, llmobsModule, config) {
super(tracer)
+ /** @type {import('../config/config-base')} */
this._config = config
+
this._llmobsModule = llmobsModule
this._tagger = new LLMObsTagger(config)
}
get enabled () {
- return this._config.llmobs.enabled
+ return this._config.llmobs.enabled ?? false
}
enable (options = {}) {
@@ -56,13 +63,10 @@ class LLMObs extends NoopLLMObs {
return
}
- const llmobs = {
- mlApp: options.mlApp,
- agentlessEnabled: options.agentlessEnabled,
- }
- // TODO: This will update config telemetry with the origin 'code', which is not ideal when `enable()` is called
- // based on `APM_TRACING` RC product updates.
- this._config.updateOptions({ llmobs })
+ // TODO: These configs should be passed through directly at construction time instead.
+ this._config.llmobs.enabled = true
+ this._config.llmobs.mlApp = options.mlApp
+ this._config.llmobs.agentlessEnabled = options.agentlessEnabled
// configure writers and channel subscribers
this._llmobsModule.enable(this._config)
diff --git a/packages/dd-trace/src/llmobs/span_processor.js b/packages/dd-trace/src/llmobs/span_processor.js
index 0e873b6a5d9..9bf7cd3bd80 100644
--- a/packages/dd-trace/src/llmobs/span_processor.js
+++ b/packages/dd-trace/src/llmobs/span_processor.js
@@ -49,7 +49,7 @@ class LLMObservabilitySpan {
}
class LLMObsSpanProcessor {
- /** @type {import('../config')} */
+ /** @type {import('../config/config-base')} */
#config
/** @type {((span: LLMObservabilitySpan) => LLMObservabilitySpan | null) | null} */
diff --git a/packages/dd-trace/src/llmobs/tagger.js b/packages/dd-trace/src/llmobs/tagger.js
index db840693daf..baa303a1d9f 100644
--- a/packages/dd-trace/src/llmobs/tagger.js
+++ b/packages/dd-trace/src/llmobs/tagger.js
@@ -47,8 +47,11 @@ const { storage } = require('./storage')
const registry = new WeakMap()
class LLMObsTagger {
+ /** @type {import('../config/config-base')} */
+ #config
+
constructor (config, softFail = false) {
- this._config = config
+ this.#config = config
this.softFail = softFail
}
@@ -72,15 +75,15 @@ class LLMObsTagger {
integration,
_decorator,
} = {}) {
- if (!this._config.llmobs.enabled) return
+ if (!this.#config.llmobs.enabled) return
if (!kind) return // do not register it in the map if it doesn't have an llmobs span kind
const spanMlApp =
mlApp ||
registry.get(parent)?.[ML_APP] ||
span.context()._trace.tags[PROPAGATED_ML_APP_KEY] ||
- this._config.llmobs.mlApp ||
- this._config.service // this should always have a default
+ this.#config.llmobs.mlApp ||
+ this.#config.service // this should always have a default
if (!spanMlApp) {
throw new Error(
@@ -624,7 +627,7 @@ class LLMObsTagger {
}
_register (span) {
- if (!this._config.llmobs.enabled) return
+ if (!this.#config.llmobs.enabled) return
if (registry.has(span)) {
this.#handleFailure(`LLMObs Span "${span._name}" already registered.`)
return
@@ -634,7 +637,7 @@ class LLMObsTagger {
}
_setTag (span, key, value) {
- if (!this._config.llmobs.enabled) return
+ if (!this.#config.llmobs.enabled) return
if (!registry.has(span)) {
this.#handleFailure(`Span "${span._name}" must be an LLMObs generated span.`)
return
diff --git a/packages/dd-trace/src/llmobs/writers/base.js b/packages/dd-trace/src/llmobs/writers/base.js
index c32903dd886..eb60158ff44 100644
--- a/packages/dd-trace/src/llmobs/writers/base.js
+++ b/packages/dd-trace/src/llmobs/writers/base.js
@@ -45,7 +45,9 @@ class BaseLLMObsWriter {
/** @type {LLMObsBuffer} */
this._buffer = new LLMObsBuffer({ events: [], size: 0, isDefault: true })
+ /** @type {import('../../config/config-base')} */
this._config = config
+
this._endpoint = endpoint
this._baseEndpoint = endpoint // should not be unset
this._intake = intake
diff --git a/packages/dd-trace/src/llmobs/writers/util.js b/packages/dd-trace/src/llmobs/writers/util.js
index b6e6f487069..eb1150e7702 100644
--- a/packages/dd-trace/src/llmobs/writers/util.js
+++ b/packages/dd-trace/src/llmobs/writers/util.js
@@ -6,6 +6,9 @@ const telemetry = require('../telemetry')
const { fetchAgentInfo } = require('../../agent/info')
const { getAgentUrl } = require('../../agent/url')
+/**
+ * @param {import('../../config/config-base')} config
+ */
function setAgentStrategy (config, setWritersAgentlessValue) {
const agentlessEnabled = config.llmobs.agentlessEnabled
diff --git a/packages/dd-trace/src/log/index.js b/packages/dd-trace/src/log/index.js
index 7ff2a82fe99..a237325d899 100644
--- a/packages/dd-trace/src/log/index.js
+++ b/packages/dd-trace/src/log/index.js
@@ -1,5 +1,8 @@
'use strict'
+
const { inspect } = require('util')
+
+const { defaults } = require('../config/defaults')
const { isTrue } = require('../util')
const { getValueFromEnvSources } = require('../config/helper')
const { traceChannel, debugChannel, infoChannel, warnChannel, errorChannel } = require('./channels')
@@ -8,12 +11,17 @@ const { Log, LogConfig, NoTransmitError } = require('./log')
const { memoize } = require('./utils')
const config = {
- enabled: false,
+ enabled: defaults.DD_TRACE_DEBUG,
logger: undefined,
- logLevel: 'debug',
+ logLevel: defaults.logLevel,
}
-// in most places where we know we want to mute a log we use log.error() directly
+const deprecate = memoize((code, message) => {
+ publishFormatted(errorChannel, null, message)
+ return true
+})
+
+// In most places where we know we want to mute a log we use log.error() directly
const NO_TRANSMIT = new LogConfig(false)
const log = {
@@ -21,36 +29,6 @@ const log = {
NO_TRANSMIT,
NoTransmitError,
- /**
- * @returns Read-only version of logging config. To modify config, call `log.use` and `log.toggle`
- */
- getConfig () {
- return { ...config }
- },
-
- use (logger) {
- config.logger = logger
- logWriter.use(logger)
- return log
- },
-
- toggle (enabled, logLevel) {
- config.enabled = enabled
- config.logLevel = logLevel
- logWriter.toggle(enabled, logLevel)
- return log
- },
-
- reset () {
- logWriter.reset()
- log._deprecate = memoize((code, message) => {
- publishFormatted(errorChannel, null, message)
- return true
- })
-
- return log
- },
-
trace (...args) {
if (traceChannel.hasSubscribers) {
const logRecord = {}
@@ -66,6 +44,8 @@ const log = {
publishFormatted(traceChannel, null, stack.join('\n'))
}
+ // TODO: Why do we allow chaining here? This is likely not used anywhere.
+ // If it is used, that seems like a mistake.
return log
},
@@ -103,30 +83,23 @@ const log = {
},
deprecate (code, message) {
- return log._deprecate(code, message)
+ return deprecate(code, message)
},
- isEnabled (fleetStableConfigValue, localStableConfigValue) {
- return isTrue(
- fleetStableConfigValue ??
+ configure (options) {
+ config.logger = options.logger
+ config.logLevel = options.logLevel ??
+ getValueFromEnvSources('DD_TRACE_LOG_LEVEL') ??
+ config.logLevel
+ config.enabled = isTrue(
getValueFromEnvSources('DD_TRACE_DEBUG') ??
- (getValueFromEnvSources('OTEL_LOG_LEVEL') === 'debug' || undefined) ??
- localStableConfigValue ??
- config.enabled
+ // TODO: Handle this by adding a log buffer so that configure may be called with the actual configurations.
+ // eslint-disable-next-line eslint-rules/eslint-process-env
+ (process.env.OTEL_LOG_LEVEL === 'debug' || config.enabled)
)
- },
+ logWriter.configure(config.enabled, config.logLevel, options.logger)
- getLogLevel (
- optionsValue,
- fleetStableConfigValue,
- localStableConfigValue
- ) {
- return optionsValue ??
- fleetStableConfigValue ??
- getValueFromEnvSources('DD_TRACE_LOG_LEVEL') ??
- getValueFromEnvSources('OTEL_LOG_LEVEL') ??
- localStableConfigValue ??
- config.logLevel
+ return config.enabled
},
}
@@ -150,8 +123,6 @@ function getErrorLog (err) {
return err
}
-log.reset()
-
-log.toggle(log.isEnabled(), log.getLogLevel())
+log.configure({})
module.exports = log
diff --git a/packages/dd-trace/src/log/writer.js b/packages/dd-trace/src/log/writer.js
index 13ce84d92d7..358a3b680fe 100644
--- a/packages/dd-trace/src/log/writer.js
+++ b/packages/dd-trace/src/log/writer.js
@@ -2,6 +2,7 @@
const { storage } = require('../../../datadog-core')
const { LogChannel } = require('./channels')
+
const defaultLogger = {
debug: msg => console.debug(msg), /* eslint-disable-line no-console */
info: msg => console.info(msg), /* eslint-disable-line no-console */
@@ -17,12 +18,8 @@ function withNoop (fn) {
storage('legacy').run({ noop: true }, fn)
}
-function unsubscribeAll () {
- logChannel.unsubscribe({ trace, debug, info, warn, error })
-}
-
function toggleSubscription (enable, level) {
- unsubscribeAll()
+ logChannel.unsubscribe({ trace, debug, info, warn, error })
if (enable) {
logChannel = new LogChannel(level)
@@ -30,23 +27,14 @@ function toggleSubscription (enable, level) {
}
}
-function toggle (enable, level) {
+function configure (enable, level, newLogger) {
enabled = enable
+ logger = typeof newLogger?.debug === 'function' && typeof newLogger.error === 'function'
+ ? newLogger
+ : defaultLogger
toggleSubscription(enabled, level)
}
-function use (newLogger) {
- if (typeof newLogger?.debug === 'function' && typeof newLogger.error === 'function') {
- logger = newLogger
- }
-}
-
-function reset () {
- logger = defaultLogger
- enabled = false
- toggleSubscription(false)
-}
-
function error (err) {
withNoop(() => logger.error(err))
}
@@ -69,4 +57,4 @@ function trace (log) {
withNoop(() => logger.debug(log))
}
-module.exports = { use, toggle, reset, error, warn, info, debug, trace }
+module.exports = { configure, error, warn, info, debug, trace }
diff --git a/packages/dd-trace/src/noop/proxy.js b/packages/dd-trace/src/noop/proxy.js
index b223aab3a65..53dd1e709b1 100644
--- a/packages/dd-trace/src/noop/proxy.js
+++ b/packages/dd-trace/src/noop/proxy.js
@@ -13,6 +13,10 @@ const noopDogStatsDClient = new NoopDogStatsDClient()
const noopLLMObs = new NoopLLMObsSDK(noop)
const noopOpenFeatureProvider = new NoopFlaggingProvider()
const noopAIGuard = new NoopAIGuardSDK()
+const noopProfiling = {
+ setCustomLabelKeys () {},
+ runWithLabels (labels, fn) { return fn() },
+}
/** @type {import('../../src/index')} Proxy */
class NoopProxy {
@@ -98,6 +102,10 @@ class NoopProxy {
return this
}
+ get profiling () {
+ return noopProfiling
+ }
+
get TracerProvider () {
return require('../opentelemetry/tracer_provider')
}
diff --git a/packages/dd-trace/src/opentelemetry/logs/index.js b/packages/dd-trace/src/opentelemetry/logs/index.js
index 2d9ec8c71d7..a36446d7dbe 100644
--- a/packages/dd-trace/src/opentelemetry/logs/index.js
+++ b/packages/dd-trace/src/opentelemetry/logs/index.js
@@ -33,7 +33,7 @@ const OtlpHttpLogExporter = require('./otlp_http_log_exporter')
/**
* Initializes OpenTelemetry Logs support
- * @param {Config} config - Tracer configuration instance
+ * @param {import('../../config/config-base')} config - Tracer configuration instance
*/
function initializeOpenTelemetryLogs (config) {
// Build resource attributes
diff --git a/packages/dd-trace/src/opentelemetry/metrics/index.js b/packages/dd-trace/src/opentelemetry/metrics/index.js
index c0d116e2075..914baeee330 100644
--- a/packages/dd-trace/src/opentelemetry/metrics/index.js
+++ b/packages/dd-trace/src/opentelemetry/metrics/index.js
@@ -35,7 +35,7 @@ const OtlpHttpMetricExporter = require('./otlp_http_metric_exporter')
/**
* Initializes OpenTelemetry Metrics support
- * @param {Config} config - Tracer configuration instance
+ * @param {import('../../config/config-base')} config - Tracer configuration instance
*/
function initializeOpenTelemetryMetrics (config) {
const resourceAttributes = {
diff --git a/packages/dd-trace/src/opentracing/propagation/text_map.js b/packages/dd-trace/src/opentracing/propagation/text_map.js
index 3c7b65eefb0..50efb42c9e0 100644
--- a/packages/dd-trace/src/opentracing/propagation/text_map.js
+++ b/packages/dd-trace/src/opentracing/propagation/text_map.js
@@ -6,6 +6,7 @@ const id = require('../../id')
const DatadogSpanContext = require('../span_context')
const log = require('../../log')
const tags = require('../../../../../ext/tags')
+const { getConfiguredEnvName } = require('../../config/helper')
const { setBaggageItem, getAllBaggageItems, removeAllBaggageItems } = require('../../baggage')
const telemetryMetrics = require('../../telemetry/metrics')
@@ -65,8 +66,15 @@ const zeroTraceId = '0000000000000000'
const hex16 = /^[0-9A-Fa-f]{16}$/
class TextMapPropagator {
+ #extractB3Context
+
constructor (config) {
this._config = config
+
+ // TODO: should match "b3 single header" in next major
+ const envName = getConfiguredEnvName('DD_TRACE_PROPAGATION_STYLE')
+ // eslint-disable-next-line eslint-rules/eslint-env-aliases
+ this.#extractB3Context = envName === 'OTEL_PROPAGATORS' ? this._extractB3SingleContext : this._extractB3MultiContext
}
inject (spanContext, carrier) {
@@ -363,10 +371,7 @@ class TextMapPropagator {
extractedContext = this._extractB3SingleContext(carrier)
break
case 'b3':
- extractedContext = this._config.tracePropagationStyle.otelPropagators
- // TODO: should match "b3 single header" in next major
- ? this._extractB3SingleContext(carrier)
- : this._extractB3MultiContext(carrier)
+ extractedContext = this.#extractB3Context(carrier)
break
case 'b3multi':
extractedContext = this._extractB3MultiContext(carrier)
diff --git a/packages/dd-trace/src/payload-tagging/config/index.js b/packages/dd-trace/src/payload-tagging/config/index.js
index 1f91dd9d6e7..c103349ca8b 100644
--- a/packages/dd-trace/src/payload-tagging/config/index.js
+++ b/packages/dd-trace/src/payload-tagging/config/index.js
@@ -3,16 +3,17 @@
const aws = require('./aws.json')
const sdks = { aws }
+/** @typedef {Record<string, { request: string[], response: string[] }>} SDKRules */
/**
* Builds rules per service for a given SDK, appending user-provided rules.
*
- * @param {Record} sdk
+ * @param {SDKRules} sdk
* @param {string[]} requestInput
* @param {string[]} responseInput
- * @returns {Record}
+ * @returns {SDKRules}
*/
function getSDKRules (sdk, requestInput, responseInput) {
- const sdkServiceRules = {}
+ const sdkServiceRules = /** @type {SDKRules} */ ({})
for (const [service, serviceRules] of Object.entries(sdk)) {
sdkServiceRules[service] = {
// Make a copy. Otherwise calling the function multiple times would append
@@ -31,10 +32,10 @@ function getSDKRules (sdk, requestInput, responseInput) {
*
* @param {string[]} [requestInput=[]]
* @param {string[]} [responseInput=[]]
- * @returns {Record>}
+ * @returns {Record<string, SDKRules>}
*/
function appendRules (requestInput = [], responseInput = []) {
- const sdkRules = {}
+  const sdkRules = /** @type {Record<string, SDKRules>} */ ({})
for (const [name, sdk] of Object.entries(sdks)) {
sdkRules[name] = getSDKRules(sdk, requestInput, responseInput)
}
diff --git a/packages/dd-trace/src/plugin_manager.js b/packages/dd-trace/src/plugin_manager.js
index 2bf92b390c6..3a56b0d3a42 100644
--- a/packages/dd-trace/src/plugin_manager.js
+++ b/packages/dd-trace/src/plugin_manager.js
@@ -67,7 +67,6 @@ function getEnabled (Plugin) {
module.exports = class PluginManager {
constructor (tracer) {
this._tracer = tracer
- this._tracerConfig = null
this._pluginsByName = {}
this._configsByName = {}
@@ -104,7 +103,7 @@ module.exports = class PluginManager {
// extracts predetermined configuration from tracer and combines it with plugin-specific config
this._pluginsByName[name].configure({
- ...this._getSharedConfig(name),
+ ...this.#getSharedConfig(name),
...pluginConfig,
})
}
@@ -121,8 +120,11 @@ module.exports = class PluginManager {
this.loadPlugin(name)
}
- // like instrumenter.enable()
- configure (config = {}) {
+ /**
+ * Like instrumenter.enable()
+ * @param {import('./config/config-base')} config - Tracer configuration
+ */
+ configure (config) {
this._tracerConfig = config
this._tracer._nomenclature.configure(config)
@@ -148,7 +150,7 @@ module.exports = class PluginManager {
}
// TODO: figure out a better way to handle this
- _getSharedConfig (name) {
+ #getSharedConfig (name) {
const {
logInjection,
serviceMapping,
@@ -172,7 +174,7 @@ module.exports = class PluginManager {
traceWebsocketMessagesSeparateTraces,
experimental,
resourceRenamingEnabled,
- } = this._tracerConfig
+ } = /** @type {import('./config/config-base')} */ (this._tracerConfig)
const sharedConfig = {
codeOriginForSpans,
diff --git a/packages/dd-trace/src/plugins/ci_plugin.js b/packages/dd-trace/src/plugins/ci_plugin.js
index 9008186c107..d318174bf61 100644
--- a/packages/dd-trace/src/plugins/ci_plugin.js
+++ b/packages/dd-trace/src/plugins/ci_plugin.js
@@ -469,6 +469,10 @@ module.exports = class CiPlugin extends Plugin {
return getSessionRequestErrorTags(this.testSessionSpan)
}
+ /**
+ * @param {import('../config/config-base')} config - Tracer configuration
+ * @param {boolean} shouldGetEnvironmentData - Whether to get environment data
+ */
configure (config, shouldGetEnvironmentData = true) {
super.configure(config)
diff --git a/packages/dd-trace/src/plugins/plugin.js b/packages/dd-trace/src/plugins/plugin.js
index 0f12da1d81c..784b71720dd 100644
--- a/packages/dd-trace/src/plugins/plugin.js
+++ b/packages/dd-trace/src/plugins/plugin.js
@@ -72,7 +72,7 @@ module.exports = class Plugin {
* Create a new plugin instance.
*
* @param {object} tracer Tracer instance or wrapper containing it under `_tracer`.
- * @param {object} tracerConfig Global tracer configuration object.
+ * @param {import('../config/config-base')} tracerConfig Global tracer configuration object.
*/
constructor (tracer, tracerConfig) {
this._subscriptions = []
@@ -80,6 +80,8 @@ module.exports = class Plugin {
this._enabled = false
this._tracer = tracer
this.config = {} // plugin-specific configuration, unset until .configure() is called
+
+ /** @type {import('../config/config-base')} */
this._tracerConfig = tracerConfig // global tracer configuration
}
@@ -163,9 +165,10 @@ module.exports = class Plugin {
/**
* Enable or disable the plugin and (re)apply its configuration.
*
- * @param {boolean|object} config Either a boolean to enable/disable or a configuration object
- * containing at least `{ enabled: boolean }`.
- * @returns {void}
+ * TODO: Remove the overloading with `enabled` and use the config object directly.
+ *
+ * @param {boolean|import('../config/config-base')} config Either a boolean to enable/disable
+ * or a configuration object containing at least `{ enabled: boolean }`.
*/
configure (config) {
if (typeof config === 'boolean') {
diff --git a/packages/dd-trace/src/process-tags/index.js b/packages/dd-trace/src/process-tags/index.js
index 6fe87b848cb..98f7cf3a2aa 100644
--- a/packages/dd-trace/src/process-tags/index.js
+++ b/packages/dd-trace/src/process-tags/index.js
@@ -72,6 +72,9 @@ function buildProcessTags (config) {
// Singleton with constant defaults so pre-init reads don't blow up
const processTags = module.exports = {
+ /**
+ * @param {import('../config/config-base')} config
+ */
initialize (config) {
// check if one of the properties added during build exist and if so return
if (processTags.tags) return
diff --git a/packages/dd-trace/src/profiler.js b/packages/dd-trace/src/profiler.js
index 4990212fe92..d1bacecc31e 100644
--- a/packages/dd-trace/src/profiler.js
+++ b/packages/dd-trace/src/profiler.js
@@ -5,13 +5,38 @@ const { profiler } = require('./profiling')
globalThis[Symbol.for('dd-trace')].beforeExitHandlers.add(() => { profiler.stop() })
module.exports = {
- start: config => {
+ /**
+ * @param {import('./config/config-base')} config - Tracer configuration
+ */
+ start (config) {
// Forward the full tracer config to the profiling layer.
// Profiling code is responsible for deriving the specific options it needs.
return profiler.start(config)
},
- stop: () => {
+ stop () {
profiler.stop()
},
+
+ /**
+ * Declares the set of custom label keys that will be used with
+ * {@link runWithLabels}.
+ *
+   * @param {Iterable<string>} keys - Custom label key names
+ */
+ setCustomLabelKeys: (keys) => {
+ profiler.setCustomLabelKeys(keys)
+ },
+
+ /**
+ * Runs a function with custom profiling labels attached to wall profiler samples.
+ *
+   * @param {Record<string, string>} labels - Custom labels to attach
+ * @param {function(): T} fn - Function to execute with the labels
+ * @returns {T} The return value of fn
+ * @template T
+ */
+ runWithLabels: (labels, fn) => {
+ return profiler.runWithLabels(labels, fn)
+ },
}
diff --git a/packages/dd-trace/src/profiling/config.js b/packages/dd-trace/src/profiling/config.js
index 040618c5b58..f34135284af 100644
--- a/packages/dd-trace/src/profiling/config.js
+++ b/packages/dd-trace/src/profiling/config.js
@@ -6,9 +6,8 @@ const { pathToFileURL } = require('url')
const satisfies = require('../../../../vendor/dist/semifies')
const { GIT_REPOSITORY_URL, GIT_COMMIT_SHA } = require('../plugins/util/tags')
const { getIsAzureFunction } = require('../serverless')
-const { isFalse, isTrue } = require('../util')
const { getAzureTagsFromMetadata, getAzureAppMetadata, getAzureFunctionMetadata } = require('../azure_metadata')
-const { getEnvironmentVariable, getValueFromEnvSources } = require('../config/helper')
+const { getEnvironmentVariable } = require('../config/helper')
const { getAgentUrl } = require('../agent/url')
const { isACFActive } = require('../../../datadog-core/src/storage')
@@ -22,59 +21,22 @@ const { oomExportStrategies, snapshotKinds } = require('./constants')
const { tagger } = require('./tagger')
class Config {
- constructor (options = {}) {
- // TODO: Remove entries that were already resolved in config.
- // For the others, move them over to config.
+ constructor (options) {
const AWS_LAMBDA_FUNCTION_NAME = getEnvironmentVariable('AWS_LAMBDA_FUNCTION_NAME')
- // TODO: Move initialization of these values to packages/dd-trace/src/config/index.js, and just read from config
- const {
- DD_INTERNAL_PROFILING_TIMELINE_SAMPLING_ENABLED,
- DD_PROFILING_ASYNC_CONTEXT_FRAME_ENABLED,
- DD_PROFILING_CODEHOTSPOTS_ENABLED,
- DD_PROFILING_CPU_ENABLED,
- DD_PROFILING_DEBUG_SOURCE_MAPS,
- DD_PROFILING_DEBUG_UPLOAD_COMPRESSION,
- DD_PROFILING_ENDPOINT_COLLECTION_ENABLED,
- DD_PROFILING_EXPERIMENTAL_OOM_EXPORT_STRATEGIES,
- DD_PROFILING_EXPERIMENTAL_OOM_HEAP_LIMIT_EXTENSION_SIZE,
- DD_PROFILING_EXPERIMENTAL_OOM_MAX_HEAP_EXTENSION_COUNT,
- DD_PROFILING_EXPERIMENTAL_OOM_MONITORING_ENABLED,
- DD_PROFILING_HEAP_ENABLED,
- DD_PROFILING_HEAP_SAMPLING_INTERVAL,
- DD_PROFILING_PPROF_PREFIX,
- DD_PROFILING_PROFILERS,
- DD_PROFILING_TIMELINE_ENABLED,
- DD_PROFILING_UPLOAD_PERIOD,
- DD_PROFILING_UPLOAD_TIMEOUT,
- DD_PROFILING_V8_PROFILER_BUG_WORKAROUND,
- DD_PROFILING_WALLTIME_ENABLED,
- DD_TAGS,
- } = getProfilingEnvValues()
-
- // Must be longer than one minute so pad with five seconds
- const flushInterval = options.interval ?? (Number(DD_PROFILING_UPLOAD_PERIOD) * 1000 || 65 * 1000)
- const uploadTimeout = options.uploadTimeout ?? (Number(DD_PROFILING_UPLOAD_TIMEOUT) || 60 * 1000)
- const pprofPrefix = options.pprofPrefix ?? DD_PROFILING_PPROF_PREFIX ?? ''
-
- // TODO: Remove the fallback. Just use the value from the config.
- this.service = options.service || 'node'
+ this.version = options.version
+ this.service = options.service
this.env = options.env
this.functionname = AWS_LAMBDA_FUNCTION_NAME
- this.version = options.version
- this.tags = Object.assign(
- tagger.parse(DD_TAGS),
- tagger.parse(options.tags),
- tagger.parse({
- env: options.env,
+ this.tags = {
+ ...options.tags,
+ ...tagger.parse({
host: options.reportHostname ? require('os').hostname() : undefined,
- service: this.service,
- version: this.version,
functionname: AWS_LAMBDA_FUNCTION_NAME,
}),
- getAzureTagsFromMetadata(getIsAzureFunction() ? getAzureFunctionMetadata() : getAzureAppMetadata())
- )
+ ...getAzureTagsFromMetadata(getIsAzureFunction() ? getAzureFunctionMetadata() : getAzureAppMetadata()),
+ }
// Add source code integration tags if available
if (options.repositoryUrl && options.commitSHA) {
@@ -82,58 +44,35 @@ class Config {
this.tags[GIT_COMMIT_SHA] = options.commitSHA
}
- this.logger = ensureLogger(options.logger)
- // Profiler sampling contexts are not available on Windows, so features
- // depending on those (code hotspots and endpoint collection) need to default
- // to false on Windows.
- const samplingContextsAvailable = process.platform !== 'win32'
- function checkOptionAllowed (option, description, condition) {
- if (option && !condition) {
- // injection hardening: all of these can only happen if user explicitly
- // sets an environment variable to its non-default value on the platform.
- // In practical terms, it'd require someone explicitly turning on OOM
- // monitoring, code hotspots, endpoint profiling, or CPU profiling on
- // Windows, where it is not supported.
- throw new Error(`${description} not supported on ${process.platform}.`)
- }
- }
- function checkOptionWithSamplingContextAllowed (option, description) {
- checkOptionAllowed(option, description, samplingContextsAvailable)
- }
+ // Normalize from seconds to milliseconds. Default must be longer than a minute.
+ this.flushInterval = options.DD_PROFILING_UPLOAD_PERIOD * 1000
+ this.uploadTimeout = options.DD_PROFILING_UPLOAD_TIMEOUT
+ this.sourceMap = options.DD_PROFILING_SOURCE_MAP
+ this.debugSourceMaps = options.DD_PROFILING_DEBUG_SOURCE_MAPS
+ this.endpointCollectionEnabled = options.DD_PROFILING_ENDPOINT_COLLECTION_ENABLED
+ this.pprofPrefix = options.DD_PROFILING_PPROF_PREFIX
+ this.v8ProfilerBugWorkaroundEnabled = options.DD_PROFILING_V8_PROFILER_BUG_WORKAROUND
- this.flushInterval = flushInterval
- this.uploadTimeout = uploadTimeout
- this.sourceMap = options.sourceMap
- this.debugSourceMaps = isTrue(options.debugSourceMaps ?? DD_PROFILING_DEBUG_SOURCE_MAPS)
- this.endpointCollectionEnabled = isTrue(options.endpointCollection ??
- DD_PROFILING_ENDPOINT_COLLECTION_ENABLED ?? samplingContextsAvailable)
- checkOptionWithSamplingContextAllowed(this.endpointCollectionEnabled, 'Endpoint collection')
-
- this.pprofPrefix = pprofPrefix
- this.v8ProfilerBugWorkaroundEnabled = isTrue(options.v8ProfilerBugWorkaround ??
- DD_PROFILING_V8_PROFILER_BUG_WORKAROUND ?? true)
+ this.logger = ensureLogger(options.logger)
this.url = getAgentUrl(options)
- this.libraryInjected = options.libraryInjected
- this.activation = options.activation
- this.exporters = ensureExporters(options.exporters || [
- new AgentExporter(this),
- ], this)
+ this.libraryInjected = !!options.DD_INJECTION_ENABLED
- // OOM monitoring does not work well on Windows, so it is disabled by default.
- const oomMonitoringSupported = process.platform !== 'win32'
+ let activation
+ if (options.profiling.enabled === 'auto') {
+ activation = 'auto'
+ } else if (options.profiling.enabled === 'true') {
+ activation = 'manual'
+ } // else activation = undefined
- const oomMonitoringEnabled = isTrue(options.oomMonitoring ??
- DD_PROFILING_EXPERIMENTAL_OOM_MONITORING_ENABLED ?? oomMonitoringSupported)
- checkOptionAllowed(oomMonitoringEnabled, 'OOM monitoring', oomMonitoringSupported)
+ this.activation = activation
+ this.exporters = ensureExporters(options.DD_PROFILING_EXPORTERS, this)
- const heapLimitExtensionSize = options.oomHeapLimitExtensionSize ??
- (Number(DD_PROFILING_EXPERIMENTAL_OOM_HEAP_LIMIT_EXTENSION_SIZE) || 0)
- const maxHeapExtensionCount = options.oomMaxHeapExtensionCount ??
- (Number(DD_PROFILING_EXPERIMENTAL_OOM_MAX_HEAP_EXTENSION_COUNT) || 0)
+ const oomMonitoringEnabled = options.DD_PROFILING_EXPERIMENTAL_OOM_MONITORING_ENABLED
+ const heapLimitExtensionSize = options.DD_PROFILING_EXPERIMENTAL_OOM_HEAP_LIMIT_EXTENSION_SIZE
+ const maxHeapExtensionCount = options.DD_PROFILING_EXPERIMENTAL_OOM_MAX_HEAP_EXTENSION_COUNT
const exportStrategies = oomMonitoringEnabled
- ? ensureOOMExportStrategies(options.oomExportStrategies ?? DD_PROFILING_EXPERIMENTAL_OOM_EXPORT_STRATEGIES ??
- [oomExportStrategies.PROCESS], this)
+ ? ensureOOMExportStrategies(options.DD_PROFILING_EXPERIMENTAL_OOM_EXPORT_STRATEGIES, this)
: []
const exportCommand = oomMonitoringEnabled ? buildExportCommand(this) : undefined
this.oomMonitoring = {
@@ -144,61 +83,26 @@ class Config {
exportCommand,
}
- const profilers = options.profilers || getProfilers({
- DD_PROFILING_HEAP_ENABLED,
- DD_PROFILING_WALLTIME_ENABLED,
- DD_PROFILING_PROFILERS,
- })
+ const profilers = getProfilers(options)
- this.timelineEnabled = isTrue(
- options.timelineEnabled ?? DD_PROFILING_TIMELINE_ENABLED ?? samplingContextsAvailable
- )
- checkOptionWithSamplingContextAllowed(this.timelineEnabled, 'Timeline view')
- this.timelineSamplingEnabled = isTrue(
- options.timelineSamplingEnabled ?? DD_INTERNAL_PROFILING_TIMELINE_SAMPLING_ENABLED ?? true
- )
+ this.timelineEnabled = options.DD_PROFILING_TIMELINE_ENABLED
+ this.timelineSamplingEnabled = options.DD_INTERNAL_PROFILING_TIMELINE_SAMPLING_ENABLED
+ this.codeHotspotsEnabled = options.DD_PROFILING_CODEHOTSPOTS_ENABLED
+ this.cpuProfilingEnabled = options.DD_PROFILING_CPU_ENABLED
+ this.heapSamplingInterval = options.DD_PROFILING_HEAP_SAMPLING_INTERVAL
- this.codeHotspotsEnabled = isTrue(
- options.codeHotspotsEnabled ?? DD_PROFILING_CODEHOTSPOTS_ENABLED ?? samplingContextsAvailable
- )
- checkOptionWithSamplingContextAllowed(this.codeHotspotsEnabled, 'Code hotspots')
-
- this.cpuProfilingEnabled = isTrue(
- options.cpuProfilingEnabled ?? DD_PROFILING_CPU_ENABLED ?? samplingContextsAvailable
- )
- checkOptionWithSamplingContextAllowed(this.cpuProfilingEnabled, 'CPU profiling')
-
- this.samplingInterval = options.samplingInterval || 1e3 / 99 // 99hz in millis
-
- this.heapSamplingInterval = options.heapSamplingInterval ??
- (Number(DD_PROFILING_HEAP_SAMPLING_INTERVAL) || 512 * 1024)
+ this.samplingInterval = 1e3 / 99 // 99hz in milliseconds
const isAtLeast24 = satisfies(process.versions.node, '>=24.0.0')
- const uploadCompression0 = options.uploadCompression ?? DD_PROFILING_DEBUG_UPLOAD_COMPRESSION ?? 'on'
+ const uploadCompression0 = options.DD_PROFILING_DEBUG_UPLOAD_COMPRESSION
let [uploadCompression, level0] = uploadCompression0.split('-')
- if (!['on', 'off', 'gzip', 'zstd'].includes(uploadCompression)) {
- this.logger.warn(`Invalid profile upload compression method "${uploadCompression0}". Will use "on".`)
- uploadCompression = 'on'
- }
let level = level0 ? Number.parseInt(level0, 10) : undefined
if (level !== undefined) {
- if (['on', 'off'].includes(uploadCompression)) {
- this.logger.warn(`Compression levels are not supported for "${uploadCompression}".`)
- level = undefined
- } else if (Number.isNaN(level)) {
- this.logger.warn(
- `Invalid compression level "${level0}". Will use default level.`)
- level = undefined
- } else if (level < 1) {
- this.logger.warn(`Invalid compression level ${level}. Will use 1.`)
- level = 1
- } else {
- const maxLevel = { gzip: 9, zstd: 22 }[uploadCompression]
- if (level > maxLevel) {
- this.logger.warn(`Invalid compression level ${level}. Will use ${maxLevel}.`)
- level = maxLevel
- }
+ const maxLevel = { gzip: 9, zstd: 22 }[uploadCompression]
+ if (level > maxLevel) {
+ this.logger.warn(`Invalid compression level ${level}. Will use ${maxLevel}.`)
+ level = maxLevel
}
}
@@ -219,13 +123,9 @@ class Config {
that.asyncContextFrameEnabled = false
}
- const canUseAsyncContextFrame = samplingContextsAvailable && isACFActive
-
- this.asyncContextFrameEnabled = isTrue(DD_PROFILING_ASYNC_CONTEXT_FRAME_ENABLED ?? canUseAsyncContextFrame)
- if (this.asyncContextFrameEnabled && !canUseAsyncContextFrame) {
- if (!samplingContextsAvailable) {
- turnOffAsyncContextFrame(`on ${process.platform}`)
- } else if (isAtLeast24) {
+ this.asyncContextFrameEnabled = options.DD_PROFILING_ASYNC_CONTEXT_FRAME_ENABLED ?? isACFActive
+ if (this.asyncContextFrameEnabled && !isACFActive) {
+ if (isAtLeast24) {
turnOffAsyncContextFrame('with --no-async-context-frame')
} else if (satisfies(process.versions.node, '>=22.9.0')) {
turnOffAsyncContextFrame('without --experimental-async-context-frame')
@@ -234,7 +134,7 @@ class Config {
}
}
- this.heartbeatInterval = options.heartbeatInterval || 60 * 1000 // 1 minute
+ this.heartbeatInterval = options.telemetry.heartbeatInterval
this.profilers = ensureProfilers(profilers, this)
}
@@ -248,7 +148,7 @@ class Config {
endpointCollectionEnabled: this.endpointCollectionEnabled,
heapSamplingInterval: this.heapSamplingInterval,
oomMonitoring: { ...this.oomMonitoring },
- profilerTypes: this.profilers.map(p => p.type),
+ profilerTypes: this.profilers.map(profiler => profiler.type),
sourceMap: this.sourceMap,
timelineEnabled: this.timelineEnabled,
timelineSamplingEnabled: this.timelineSamplingEnabled,
@@ -263,7 +163,9 @@ class Config {
module.exports = { Config }
function getProfilers ({
- DD_PROFILING_HEAP_ENABLED, DD_PROFILING_WALLTIME_ENABLED, DD_PROFILING_PROFILERS,
+ DD_PROFILING_HEAP_ENABLED,
+ DD_PROFILING_WALLTIME_ENABLED,
+ DD_PROFILING_PROFILERS,
}) {
// First consider "legacy" DD_PROFILING_PROFILERS env variable, defaulting to space + wall
// Use a Set to avoid duplicates
@@ -272,26 +174,26 @@ function getProfilers ({
// snapshots the space profile won't include memory taken by profiles created
// before it in the sequence. That memory is ultimately transient and will be
// released when all profiles are subsequently encoded.
- const profilers = new Set((DD_PROFILING_PROFILERS ?? 'space,wall').split(','))
+ const profilers = new Set(DD_PROFILING_PROFILERS)
let spaceExplicitlyEnabled = false
// Add/remove space depending on the value of DD_PROFILING_HEAP_ENABLED
- if (DD_PROFILING_HEAP_ENABLED != null) {
- if (isTrue(DD_PROFILING_HEAP_ENABLED)) {
+ if (DD_PROFILING_HEAP_ENABLED !== undefined) {
+ if (DD_PROFILING_HEAP_ENABLED) {
if (!profilers.has('space')) {
profilers.add('space')
spaceExplicitlyEnabled = true
}
- } else if (isFalse(DD_PROFILING_HEAP_ENABLED)) {
+ } else {
profilers.delete('space')
}
}
// Add/remove wall depending on the value of DD_PROFILING_WALLTIME_ENABLED
- if (DD_PROFILING_WALLTIME_ENABLED != null) {
- if (isTrue(DD_PROFILING_WALLTIME_ENABLED)) {
+ if (DD_PROFILING_WALLTIME_ENABLED !== undefined) {
+ if (DD_PROFILING_WALLTIME_ENABLED) {
profilers.add('wall')
- } else if (isFalse(DD_PROFILING_WALLTIME_ENABLED)) {
+ } else {
profilers.delete('wall')
profilers.delete('cpu') // remove alias too
}
@@ -321,22 +223,12 @@ function getExportStrategy (name, options) {
}
function ensureOOMExportStrategies (strategies, options) {
- if (!strategies) {
- return []
+ const set = new Set()
+ for (const strategy of strategies) {
+ set.add(getExportStrategy(strategy, options))
}
- if (typeof strategies === 'string') {
- strategies = strategies.split(',')
- }
-
- for (let i = 0; i < strategies.length; i++) {
- const strategy = strategies[i]
- if (typeof strategy === 'string') {
- strategies[i] = getExportStrategy(strategy, options)
- }
- }
-
- return [...new Set(strategies)]
+ return [...set]
}
function getExporter (name, options) {
@@ -345,22 +237,13 @@ function getExporter (name, options) {
return new AgentExporter(options)
case 'file':
return new FileExporter(options)
+ default:
+ options.logger.error(`Unknown exporter "${name}"`)
}
}
function ensureExporters (exporters, options) {
- if (typeof exporters === 'string') {
- exporters = exporters.split(',')
- }
-
- for (let i = 0; i < exporters.length; i++) {
- const exporter = exporters[i]
- if (typeof exporter === 'string') {
- exporters[i] = getExporter(exporter, options)
- }
- }
-
- return exporters
+ return exporters.map((exporter) => getExporter(exporter, options))
}
function getProfiler (name, options) {
@@ -376,30 +259,26 @@ function getProfiler (name, options) {
}
function ensureProfilers (profilers, options) {
- if (typeof profilers === 'string') {
- profilers = profilers.split(',')
- }
+ const filteredProfilers = []
for (let i = 0; i < profilers.length; i++) {
- const profiler = profilers[i]
- if (typeof profiler === 'string') {
- profilers[i] = getProfiler(profiler, options)
+ const profiler = getProfiler(profilers[i], options)
+ if (profiler !== undefined) {
+ filteredProfilers.push(profiler)
}
}
// Events profiler is a profiler that produces timeline events. It is only
// added if timeline is enabled and there's a wall profiler.
- if (options.timelineEnabled && profilers.some(p => p instanceof WallProfiler)) {
- profilers.push(new EventsProfiler(options))
+ if (options.timelineEnabled && filteredProfilers.some(profiler => profiler instanceof WallProfiler)) {
+ filteredProfilers.push(new EventsProfiler(options))
}
- // Filter out any invalid profilers
- return profilers.filter(Boolean)
+ return filteredProfilers
}
function ensureLogger (logger) {
- if (typeof logger !== 'object' ||
- typeof logger.debug !== 'function' ||
+ if (typeof logger?.debug !== 'function' ||
typeof logger.info !== 'function' ||
typeof logger.warn !== 'function' ||
typeof logger.error !== 'function') {
@@ -424,50 +303,3 @@ function buildExportCommand (options) {
path.join(__dirname, 'exporter_cli.js'),
urls.join(','), tags, 'space']
}
-
-function getProfilingEnvValues () {
- return {
- DD_INTERNAL_PROFILING_TIMELINE_SAMPLING_ENABLED:
- getValueFromEnvSources('DD_INTERNAL_PROFILING_TIMELINE_SAMPLING_ENABLED'),
- DD_PROFILING_ASYNC_CONTEXT_FRAME_ENABLED:
- getValueFromEnvSources('DD_PROFILING_ASYNC_CONTEXT_FRAME_ENABLED'),
- DD_PROFILING_CODEHOTSPOTS_ENABLED:
- getValueFromEnvSources('DD_PROFILING_CODEHOTSPOTS_ENABLED'),
- DD_PROFILING_CPU_ENABLED:
- getValueFromEnvSources('DD_PROFILING_CPU_ENABLED'),
- DD_PROFILING_DEBUG_SOURCE_MAPS:
- getValueFromEnvSources('DD_PROFILING_DEBUG_SOURCE_MAPS'),
- DD_PROFILING_DEBUG_UPLOAD_COMPRESSION:
- getValueFromEnvSources('DD_PROFILING_DEBUG_UPLOAD_COMPRESSION'),
- DD_PROFILING_ENDPOINT_COLLECTION_ENABLED:
- getValueFromEnvSources('DD_PROFILING_ENDPOINT_COLLECTION_ENABLED'),
- DD_PROFILING_EXPERIMENTAL_OOM_EXPORT_STRATEGIES:
- getValueFromEnvSources('DD_PROFILING_EXPERIMENTAL_OOM_EXPORT_STRATEGIES'),
- DD_PROFILING_EXPERIMENTAL_OOM_HEAP_LIMIT_EXTENSION_SIZE:
- getValueFromEnvSources('DD_PROFILING_EXPERIMENTAL_OOM_HEAP_LIMIT_EXTENSION_SIZE'),
- DD_PROFILING_EXPERIMENTAL_OOM_MAX_HEAP_EXTENSION_COUNT:
- getValueFromEnvSources('DD_PROFILING_EXPERIMENTAL_OOM_MAX_HEAP_EXTENSION_COUNT'),
- DD_PROFILING_EXPERIMENTAL_OOM_MONITORING_ENABLED:
- getValueFromEnvSources('DD_PROFILING_EXPERIMENTAL_OOM_MONITORING_ENABLED'),
- DD_PROFILING_HEAP_ENABLED:
- getValueFromEnvSources('DD_PROFILING_HEAP_ENABLED'),
- DD_PROFILING_HEAP_SAMPLING_INTERVAL:
- getValueFromEnvSources('DD_PROFILING_HEAP_SAMPLING_INTERVAL'),
- DD_PROFILING_PPROF_PREFIX:
- getValueFromEnvSources('DD_PROFILING_PPROF_PREFIX'),
- DD_PROFILING_PROFILERS:
- getValueFromEnvSources('DD_PROFILING_PROFILERS'),
- DD_PROFILING_TIMELINE_ENABLED:
- getValueFromEnvSources('DD_PROFILING_TIMELINE_ENABLED'),
- DD_PROFILING_UPLOAD_PERIOD:
- getValueFromEnvSources('DD_PROFILING_UPLOAD_PERIOD'),
- DD_PROFILING_UPLOAD_TIMEOUT:
- getValueFromEnvSources('DD_PROFILING_UPLOAD_TIMEOUT'),
- DD_PROFILING_V8_PROFILER_BUG_WORKAROUND:
- getValueFromEnvSources('DD_PROFILING_V8_PROFILER_BUG_WORKAROUND'),
- DD_PROFILING_WALLTIME_ENABLED:
- getValueFromEnvSources('DD_PROFILING_WALLTIME_ENABLED'),
- DD_TAGS:
- getValueFromEnvSources('DD_TAGS'),
- }
-}
diff --git a/packages/dd-trace/src/profiling/exporter_cli.js b/packages/dd-trace/src/profiling/exporter_cli.js
index cba3d6349b1..a122a334664 100644
--- a/packages/dd-trace/src/profiling/exporter_cli.js
+++ b/packages/dd-trace/src/profiling/exporter_cli.js
@@ -17,9 +17,6 @@ function exporterFromURL (url) {
if (url.protocol === 'file:') {
return new FileExporter({ pprofPrefix: fileURLToPath(url) })
}
- // TODO: Why is DD_INJECTION_ENABLED a comma separated list?
- const injectionEnabled = (getValueFromEnvSources('DD_INJECTION_ENABLED') ?? '').split(',')
- const libraryInjected = injectionEnabled.length > 0
const profilingEnabled = (getValueFromEnvSources('DD_PROFILING_ENABLED') ?? '').toLowerCase()
const activation = ['true', '1'].includes(profilingEnabled)
? 'manual'
@@ -30,7 +27,7 @@ function exporterFromURL (url) {
url,
logger,
uploadTimeout: timeoutMs,
- libraryInjected,
+ libraryInjected: !!getValueFromEnvSources('DD_INJECTION_ENABLED'),
activation,
})
}
diff --git a/packages/dd-trace/src/profiling/exporters/event_serializer.js b/packages/dd-trace/src/profiling/exporters/event_serializer.js
index a7bd652f9e9..5929766709b 100644
--- a/packages/dd-trace/src/profiling/exporters/event_serializer.js
+++ b/packages/dd-trace/src/profiling/exporters/event_serializer.js
@@ -14,7 +14,7 @@ class EventSerializer {
this._host = host
this._service = service
this._appVersion = version
- this._libraryInjected = !!libraryInjected
+ this._libraryInjected = libraryInjected
this._activation = activation || 'unknown'
}
@@ -22,7 +22,7 @@ class EventSerializer {
return `${type}.pprof`
}
- getEventJSON ({ profiles, infos, start, end, tags = {}, endpointCounts }) {
+ getEventJSON ({ profiles, infos, start, end, tags = {}, endpointCounts, customAttributes }) {
const event = {
attachments: Object.keys(profiles).map(t => this.typeToFile(t)),
start: start.toISOString(),
@@ -80,6 +80,10 @@ class EventSerializer {
},
}
+ if (customAttributes) {
+ event.custom_attributes = customAttributes
+ }
+
if (processTags.serialized) {
event[processTags.PROFILING_FIELD_NAME] = processTags.serialized
}
diff --git a/packages/dd-trace/src/profiling/profiler.js b/packages/dd-trace/src/profiling/profiler.js
index c107fc82750..72966640664 100644
--- a/packages/dd-trace/src/profiling/profiler.js
+++ b/packages/dd-trace/src/profiling/profiler.js
@@ -51,6 +51,7 @@ class Profiler extends EventEmitter {
#compressionFnInitialized = false
#compressionOptions
#config
+ #customLabelKeys = new Set()
#enabled = false
#endpointCounts = new Map()
#lastStart
@@ -70,56 +71,22 @@ class Profiler extends EventEmitter {
return this.#config?.flushInterval
}
+ /**
+ * @param {import('../config/config-base')} config - Tracer configuration
+ */
start (config) {
- const {
- service,
- version,
- env,
- url,
- hostname,
- port,
- tags,
- repositoryUrl,
- commitSHA,
- injectionEnabled,
- reportHostname,
- } = config
- const { enabled, sourceMap, exporters } = config.profiling
- const { heartbeatInterval } = config.telemetry
-
// TODO: Unify with main logger and rewrite template strings to use printf formatting.
const logger = {
- debug (message) { log.debug(message) },
- info (message) { log.info(message) },
- warn (message) { log.warn(message) },
- error (...args) { log.error(...args) },
+ debug: log.debug.bind(log),
+ info: log.info.bind(log),
+ warn: log.warn.bind(log),
+ error: log.error.bind(log),
}
- const libraryInjected = injectionEnabled.length > 0
- let activation
- if (enabled === 'auto') {
- activation = 'auto'
- } else if (enabled === 'true') {
- activation = 'manual'
- } // else activation = undefined
-
+ // TODO: Rewrite this to not need to copy the config.
const options = {
- service,
- version,
- env,
+ ...config,
logger,
- sourceMap,
- exporters,
- url,
- hostname,
- port,
- tags,
- repositoryUrl,
- commitSHA,
- libraryInjected,
- activation,
- heartbeatInterval,
- reportHostname,
}
try {
@@ -135,6 +102,45 @@ class Profiler extends EventEmitter {
return this.#enabled
}
+ /**
+ * Declares the set of custom label keys that will be used with
+ * {@link runWithLabels}. This is used for profile upload metadata and
+ * for pprof serialization optimization (low-cardinality deduplication).
+ *
+ * @param {Iterable<string>} keys - Custom label key names
+ */
+ setCustomLabelKeys (keys) {
+ this.#customLabelKeys.clear()
+ for (const key of keys) {
+ this.#customLabelKeys.add(key)
+ }
+ if (this.#config) {
+ for (const profiler of this.#config.profilers) {
+ profiler.setCustomLabelKeys?.(this.#customLabelKeys)
+ }
+ }
+ }
+
+ /**
+ * Runs a function with custom profiling labels attached to wall profiler samples.
+ *
+ * @param {Record<string, string>} labels - Custom labels to attach
+ * @param {function(): T} fn - Function to execute with the labels
+ * @returns {T} The return value of fn
+ * @template T
+ */
+ runWithLabels (labels, fn) {
+ if (!this.#enabled || !this.#config) {
+ return fn()
+ }
+ for (const profiler of this.#config.profilers) {
+ if (profiler.runWithLabels) {
+ return profiler.runWithLabels(labels, fn)
+ }
+ }
+ return fn()
+ }
+
#logError (err) {
logError(this.#logger, err)
}
@@ -182,6 +188,9 @@ class Profiler extends EventEmitter {
return this.#compressionFn
}
+ /**
+ * @param {import('../config/config-base')} options - Tracer configuration
+ */
_start (options) {
if (this.enabled) return true
@@ -410,7 +419,10 @@ class Profiler extends EventEmitter {
tags.snapshot = snapshotKind
tags.profile_seq = this.#profileSeq++
- const exportSpec = { profiles, infos, start, end, tags, endpointCounts }
+ const customAttributes = this.#customLabelKeys.size > 0
+ ? [...this.#customLabelKeys]
+ : undefined
+ const exportSpec = { profiles, infos, start, end, tags, endpointCounts, customAttributes }
const tasks = this.#config.exporters.map(exporter =>
exporter.export(exportSpec).catch(err => {
if (this.#logger) {
diff --git a/packages/dd-trace/src/profiling/profilers/events.js b/packages/dd-trace/src/profiling/profilers/events.js
index c2e5aa02fd9..eddeef7fab1 100644
--- a/packages/dd-trace/src/profiling/profilers/events.js
+++ b/packages/dd-trace/src/profiling/profilers/events.js
@@ -51,8 +51,7 @@ function labelFromStrStr (stringTable, keyStr, valStr) {
}
function getMaxSamples (options) {
- const flushInterval = options.flushInterval || 65 * 1e3 // 65 seconds
- const maxCpuSamples = flushInterval / options.samplingInterval
+ const maxCpuSamples = options.flushInterval / options.samplingInterval
// The lesser of max parallelism and libuv thread pool size, plus one so we can detect
// oversubscription on libuv thread pool, plus another one for GC.
@@ -403,7 +402,7 @@ class EventsProfiler {
get type () { return 'events' }
- constructor (options = {}) {
+ constructor (options) {
this.#maxSamples = getMaxSamples(options)
this.#timelineSamplingEnabled = !!options.timelineSamplingEnabled
this.#eventSerializer = new EventSerializer(this.#maxSamples)
diff --git a/packages/dd-trace/src/profiling/profilers/wall.js b/packages/dd-trace/src/profiling/profilers/wall.js
index 72afd547f72..54559d888cf 100644
--- a/packages/dd-trace/src/profiling/profilers/wall.js
+++ b/packages/dd-trace/src/profiling/profilers/wall.js
@@ -108,6 +108,8 @@ class NativeWallProfiler {
#captureSpanData = false
#codeHotspotsEnabled = false
#cpuProfilingEnabled = false
+ #customLabelsActive = false
+ #customLabelKeys
#endpointCollectionEnabled = false
#flushIntervalMillis = 0
#logger
@@ -245,7 +247,24 @@ class NativeWallProfiler {
// context -- we simply can't tell which one it might've been across all
// possible async context frames.
if (this.#asyncContextFrameEnabled) {
- this.#pprof.time.setContext(sampleContext)
+ if (this.#customLabelsActive) {
+ // Custom labels may be active in this async context. The current CPED
+ // context could be a 2-element array [profilingContext, customLabels].
+ // Replace the profiling context while preserving the custom labels.
+ // This flag is monotonic (once set, stays true) because async
+ // continuations from runWithLabels can fire at any time after the
+ // synchronous runWithLabels call has returned.
+ const current = this.#pprof.time.getContext()
+ if (Array.isArray(current)) {
+ if (current[0] !== sampleContext) {
+ this.#pprof.time.setContext([sampleContext, current[1]])
+ }
+ } else if (current !== sampleContext) {
+ this.#pprof.time.setContext(sampleContext)
+ }
+ } else {
+ this.#pprof.time.setContext(sampleContext)
+ }
} else {
const sampleCount = this._profilerState[kSampleCount]
if (sampleCount !== this._lastSampleCount) {
@@ -344,6 +363,13 @@ class NativeWallProfiler {
const lowCardinalityLabels = Object.keys(getThreadLabels())
lowCardinalityLabels.push(TRACE_ENDPOINT_LABEL)
+ // Custom labels are expected to be low-cardinality (e.g. customer tier, region)
+ if (this.#customLabelKeys) {
+ for (const key of this.#customLabelKeys) {
+ lowCardinalityLabels.push(key)
+ }
+ }
+
const profile = this.#pprof.time.stop(restart, this.#boundGenerateLabels, lowCardinalityLabels)
if (restart) {
@@ -383,7 +409,29 @@ class NativeWallProfiler {
return getThreadLabels()
}
- const labels = { ...getThreadLabels() }
+ // Native profiler doesn't set context.context for some samples, such as idle samples or when
+ // the context was otherwise unavailable when the sample was taken. Note that with ACF, we don't
+ // use the "ref" indirection.
+ let ref
+ let customLabels
+ const cctx = context.context
+ if (this.#asyncContextFrameEnabled) {
+ // When custom labels are active with ACF, context.context is a 2-element array:
+ // [profilingContext, customLabels]. Otherwise it's a plain object.
+ if (Array.isArray(cctx)) {
+ [ref, customLabels] = cctx
+ } else {
+ ref = cctx
+ }
+ } else {
+ ref = cctx?.ref
+ }
+
+ // Custom labels are spread first so that internal labels always take
+ // precedence and overwrite them.
+ const labels = customLabels === undefined
+ ? { ...getThreadLabels() }
+ : { ...customLabels, ...getThreadLabels() }
if (this.#timelineEnabled) {
// Incoming timestamps are in microseconds, we emit nanos.
@@ -395,10 +443,6 @@ class NativeWallProfiler {
labels['async id'] = asyncId
}
- // Native profiler doesn't set context.context for some samples, such as idle samples or when
- // the context was otherwise unavailable when the sample was taken. Note that with async context
- // frame, we don't use the "ref" indirection.
- const ref = this.#asyncContextFrameEnabled ? context.context : context.context?.ref
if (typeof ref !== 'object') {
return labels
}
@@ -421,6 +465,45 @@ class NativeWallProfiler {
return labels
}
+ /**
+ * Sets the custom label keys used for pprof low-cardinality deduplication.
+ * Called once by the top-level Profiler when keys are declared.
+ *
+ * @param {Iterable<string>} keys
+ */
+ setCustomLabelKeys (keys) {
+ this.#customLabelKeys = keys
+ }
+
+ /**
+ * Runs a function with custom profiling labels attached to all wall profiler
+ * samples taken during its execution. Labels are key-value pairs that appear
+ * in the pprof output and can be used to filter flame graphs in the Datadog UI.
+ *
+ * Requires AsyncContextFrame (ACF) to be enabled. Supports nesting: inner
+ * calls merge labels with outer calls, with inner values taking precedence.
+ *
+ * @param {Record<string, string>} labels - Custom labels to attach
+ * @param {function(): T} fn - Function to execute with the labels
+ * @returns {T} The return value of fn
+ * @template T
+ */
+ runWithLabels (labels, fn) {
+ if (!this.#asyncContextFrameEnabled || !this.#withContexts) {
+ return fn()
+ }
+
+ // Read current context; merge custom labels if already in a runWithLabels scope
+ const current = this.#pprof.time.getContext()
+ const isCurrentArray = Array.isArray(current)
+ const customLabels = isCurrentArray ? { ...current[1], ...labels } : labels
+
+ const profilingContext = (isCurrentArray ? current[0] : current) ?? {}
+
+ this.#customLabelsActive = true
+ return this.#pprof.time.runWithContext([profilingContext, customLabels], fn)
+ }
+
profile (restart) {
return this.#stop(restart)
}
diff --git a/packages/dd-trace/src/profiling/ssi-heuristics.js b/packages/dd-trace/src/profiling/ssi-heuristics.js
index 994cf7d6a46..e83ba71b18f 100644
--- a/packages/dd-trace/src/profiling/ssi-heuristics.js
+++ b/packages/dd-trace/src/profiling/ssi-heuristics.js
@@ -1,6 +1,6 @@
'use strict'
-const dc = require('dc-polyfill')
+const dc = /** @type {typeof import('diagnostics_channel')} */ (require('dc-polyfill'))
const log = require('../log')
// If the process lives for at least 30 seconds, it's considered long-lived
@@ -10,6 +10,9 @@ const DEFAULT_LONG_LIVED_THRESHOLD = 30_000
* This class embodies the SSI profiler-triggering heuristics under SSI.
*/
class SSIHeuristics {
+ /**
+ * @param {import('../config/config-base')} config - Tracer configuration
+ */
constructor (config) {
const longLivedThreshold = config.profiling.longLivedThreshold || DEFAULT_LONG_LIVED_THRESHOLD
if (typeof longLivedThreshold !== 'number' || longLivedThreshold <= 0) {
diff --git a/packages/dd-trace/src/propagation-hash/index.js b/packages/dd-trace/src/propagation-hash/index.js
index 74d61f3938b..29cb6069809 100644
--- a/packages/dd-trace/src/propagation-hash/index.js
+++ b/packages/dd-trace/src/propagation-hash/index.js
@@ -17,11 +17,12 @@ class PropagationHashManager {
_cachedHash = null
_cachedHashString = null
_cachedHashBase64 = null
+ /** @type {import('../config/config-base') | null} */
_config = null
/**
* Configure the propagation hash manager with tracer config
- * @param {object} config - Tracer configuration
+ * @param {import('../config/config-base')} config - Tracer configuration
*/
configure (config) {
this._config = config
diff --git a/packages/dd-trace/src/proxy.js b/packages/dd-trace/src/proxy.js
index d7a4cedf7b9..0ad8a0416e8 100644
--- a/packages/dd-trace/src/proxy.js
+++ b/packages/dd-trace/src/proxy.js
@@ -27,9 +27,12 @@ class LazyModule {
this.provider = provider
}
- enable (...args) {
+ /**
+ * @param {import('./config/config-base')} config - Tracer configuration
+ */
+ enable (config, ...args) {
this.module = this.provider()
- this.module.enable(...args)
+ this.module.enable(config, ...args)
}
disable () {
@@ -238,12 +241,16 @@ class Tracer extends NoopProxy {
getDynamicInstrumentationClient(config)
}
} catch (e) {
- log.error('Error initialising tracer', e)
+ log.error('Error initializing tracer', e)
+ // TODO: Should we stop everything started so far?
}
return this
}
+ /**
+ * @param {import('./config/config-base')} config - Tracer configuration
+ */
_startProfiler (config) {
// do not stop tracer initialization if the profiler fails to be imported
try {
@@ -257,6 +264,9 @@ class Tracer extends NoopProxy {
}
}
+ /**
+ * @param {import('./config/config-base')} config - Tracer configuration
+ */
#updateTracing (config) {
if (config.tracing !== false) {
if (config.appsec.enabled) {
@@ -330,6 +340,25 @@ class Tracer extends NoopProxy {
}
}
+ /**
+ * @override
+ */
+ get profiling () {
+ // Lazily require the profiler module and cache the result. If profiling
+ // is not enabled, runWithLabels still works as a passthrough (just calls fn()).
+ const profilerModule = require('./profiler')
+ const profiling = {
+ setCustomLabelKeys (keys) {
+ profilerModule.setCustomLabelKeys(keys)
+ },
+ runWithLabels (labels, fn) {
+ return profilerModule.runWithLabels(labels, fn)
+ },
+ }
+ Reflect.defineProperty(this, 'profiling', { value: profiling, configurable: true, enumerable: true })
+ return profiling
+ }
+
/**
* @override
*/
diff --git a/packages/dd-trace/src/remote_config/index.js b/packages/dd-trace/src/remote_config/index.js
index d4451234938..83a3b016e15 100644
--- a/packages/dd-trace/src/remote_config/index.js
+++ b/packages/dd-trace/src/remote_config/index.js
@@ -25,6 +25,9 @@ class RemoteConfig {
#products = new Set()
#batchHandlers = new Map()
+ /**
+ * @param {import('../config/config-base')} config - Tracer configuration
+ */
constructor (config) {
const pollInterval = Math.floor(config.remoteConfig.pollInterval * 1000)
diff --git a/packages/dd-trace/src/require-package-json.js b/packages/dd-trace/src/require-package-json.js
index 3f685993fdf..5800155c2c5 100644
--- a/packages/dd-trace/src/require-package-json.js
+++ b/packages/dd-trace/src/require-package-json.js
@@ -21,10 +21,14 @@ function requirePackageJson (name, module) {
}
for (const modulePath of module.paths) {
const candidate = path.join(modulePath, name, 'package.json')
- try {
- return JSON.parse(fs.readFileSync(candidate, 'utf8'))
- } catch {
- continue
+ // fs.existsSync is faster than fs.readFileSync due to not throwing an error if the file does not exist.
+ // The race condition should also not matter here as the time window is very small.
+ if (fs.existsSync(candidate)) {
+ try {
+ return JSON.parse(fs.readFileSync(candidate, 'utf8'))
+ } catch {
+ continue
+ }
}
}
throw new Error(`could not find ${name}/package.json`)
diff --git a/packages/dd-trace/src/ritm.js b/packages/dd-trace/src/ritm.js
index 29b4d09260e..6038c99c83c 100644
--- a/packages/dd-trace/src/ritm.js
+++ b/packages/dd-trace/src/ritm.js
@@ -22,19 +22,44 @@ let patchedRequire = null
const moduleLoadStartChannel = dc.channel('dd-trace:moduleLoadStart')
const moduleLoadEndChannel = dc.channel('dd-trace:moduleLoadEnd')
+function stripNodePrefix (name) {
+ if (typeof name !== 'string') return name
+ return name.startsWith('node:') ? name.slice(5) : name
+}
+
+const builtinModules = new Set(Module.builtinModules.map(stripNodePrefix))
+
+function isBuiltinModuleName (name) {
+ if (typeof name !== 'string') return false
+ return builtinModules.has(stripNodePrefix(name))
+}
+
+function normalizeModuleName (name) {
+ if (typeof name !== 'string') return name
+ const stripped = stripNodePrefix(name)
+ return builtinModules.has(stripped) ? stripped : name
+}
+
+/**
+ * @overload
+ * @param {string[]} modules list of modules to hook into
+ * @param {object} options hook options
+ * @param {Function} onrequire callback to be executed upon encountering module
+ */
+/**
+ * @overload
+ * @param {string[]} modules list of modules to hook into
+ * @param {Function} onrequire callback to be executed upon encountering module
+ */
function Hook (modules, options, onrequire) {
if (!(this instanceof Hook)) return new Hook(modules, options, onrequire)
- if (typeof modules === 'function') {
- onrequire = modules
- modules = null
- options = {}
- } else if (typeof options === 'function') {
+ if (typeof options === 'function') {
onrequire = options
options = {}
}
- modules = modules || []
- options = options || {}
+ modules ??= []
+ options ??= {}
this.modules = modules
this.options = options
@@ -63,32 +88,34 @@ function Hook (modules, options, onrequire) {
*/
let filename
try {
- // @ts-expect-error Module._resolveFilename is not typed
+ // @ts-expect-error - Module._resolveFilename is not typed
filename = Module._resolveFilename(request, this)
} catch {
return _origRequire.apply(this, arguments)
}
- const core = !filename.includes(path.sep)
+
+ const builtin = isBuiltinModuleName(filename)
+ const moduleId = builtin ? normalizeModuleName(filename) : filename
let name, basedir, hooks
// return known patched modules immediately
- if (cache[filename]) {
- const externalCacheEntry = require.cache[filename]
+ if (cache[moduleId]) {
// require.cache was potentially altered externally
- if (externalCacheEntry && externalCacheEntry.exports !== cache[filename].original) {
- return externalCacheEntry.exports
+ const cacheEntry = require.cache[filename]
+ if (cacheEntry && cacheEntry.exports !== cache[filename].original) {
+ return cacheEntry.exports
}
- return cache[filename].exports
+ return cache[moduleId].exports
}
// Check if this module has a patcher in-progress already.
// Otherwise, mark this module as patching in-progress.
- const patched = patching[filename]
+ const patched = patching[moduleId]
if (patched) {
// If it's already patched, just return it as-is.
return origRequire.apply(this, arguments)
}
- patching[filename] = true
+ patching[moduleId] = true
const payload = {
filename,
@@ -107,12 +134,12 @@ function Hook (modules, options, onrequire) {
// The module has already been loaded,
// so the patching mark can be cleaned up.
- delete patching[filename]
+ delete patching[moduleId]
- if (core) {
- hooks = moduleHooks[filename]
+ if (builtin) {
+ hooks = moduleHooks[moduleId]
if (!hooks) return exports // abort if module name isn't on whitelist
- name = filename
+ name = moduleId
} else {
const inAWSLambda = getEnvironmentVariable('AWS_LAMBDA_FUNCTION_NAME') !== undefined
const hasLambdaHandler = getValueFromEnvSources('DD_LAMBDA_HANDLER') !== undefined
@@ -129,7 +156,8 @@ function Hook (modules, options, onrequire) {
hooks = moduleHooks[name]
if (!hooks) return exports // abort if module name isn't on whitelist
- // @ts-expect-error Module._resolveLookupPaths is not typed
+ // figure out if this is the main module file, or a file inside the module
+ // @ts-expect-error - Module._resolveLookupPaths is meant to be internal and is not typed
const paths = Module._resolveLookupPaths(name, this, true)
if (!paths) {
// abort if _resolveLookupPaths return null
@@ -138,7 +166,7 @@ function Hook (modules, options, onrequire) {
let res
try {
- // @ts-expect-error Module._findPath is not typed
+ // @ts-expect-error - Module._findPath is meant to be internal and is not typed
res = Module._findPath(name, [basedir, ...paths])
} catch {
// case where the file specified in package.json "main" doesn't exist
@@ -163,17 +191,21 @@ function Hook (modules, options, onrequire) {
// ensure that the cache entry is assigned a value before calling
// onrequire, in case calling onrequire requires the same module.
- cache[filename] = { exports }
- cache[filename].original = exports
+ cache[moduleId] = { exports }
+ cache[moduleId].original = exports
for (const hook of hooks) {
- cache[filename].exports = hook(cache[filename].exports, name, basedir)
+ cache[moduleId].exports = hook(cache[moduleId].exports, name, basedir)
}
- return cache[filename].exports
+ return cache[moduleId].exports
}
}
+/**
+ * Reset the Ritm hook. This is used to reset the hook after a test.
+ * TODO: Remove this and instead use proxyquire to reset the hook.
+ */
Hook.reset = function () {
Module.prototype.require = origRequire
patchedRequire = null
diff --git a/packages/dd-trace/src/runtime_metrics/index.js b/packages/dd-trace/src/runtime_metrics/index.js
index 9b2602844e7..72f51dae1fb 100644
--- a/packages/dd-trace/src/runtime_metrics/index.js
+++ b/packages/dd-trace/src/runtime_metrics/index.js
@@ -14,6 +14,9 @@ const noop = runtimeMetrics = {
}
module.exports = {
+ /**
+ * @param {import('../config/config-base')} config - Tracer configuration
+ */
start (config) {
if (!config?.runtimeMetrics.enabled) return
diff --git a/packages/dd-trace/src/runtime_metrics/runtime_metrics.js b/packages/dd-trace/src/runtime_metrics/runtime_metrics.js
index 5e042b8484b..7fd0ccdd7c1 100644
--- a/packages/dd-trace/src/runtime_metrics/runtime_metrics.js
+++ b/packages/dd-trace/src/runtime_metrics/runtime_metrics.js
@@ -35,6 +35,9 @@ let eventLoopDelayObserver = null
// https://github.com/DataDog/dogweb/blob/prod/integration/node/node_metadata.csv
module.exports = {
+ /**
+ * @param {import('../config/config-base')} config - Tracer configuration
+ */
start (config) {
this.stop()
const clientConfig = DogStatsDClient.generateClientConfig(config)
diff --git a/packages/dd-trace/src/sampler.js b/packages/dd-trace/src/sampler.js
index b023c55b6de..df9eadb1dec 100644
--- a/packages/dd-trace/src/sampler.js
+++ b/packages/dd-trace/src/sampler.js
@@ -42,7 +42,7 @@ class Sampler {
/**
* Determines whether a trace/span should be sampled based on the configured sampling rate.
*
- * @param {Span|SpanContext} span - The span or span context to evaluate.
+ * @param {import("../../..").Span|import("../../..").SpanContext} span - The span or span context to evaluate.
* @returns {boolean} `true` if the trace/span should be sampled, otherwise `false`.
*/
isSampled (span) {
diff --git a/packages/dd-trace/src/standalone/index.js b/packages/dd-trace/src/standalone/index.js
index eb43ee87d4d..699e48c220c 100644
--- a/packages/dd-trace/src/standalone/index.js
+++ b/packages/dd-trace/src/standalone/index.js
@@ -11,6 +11,9 @@ const startCh = channel('dd-trace:span:start')
const injectCh = channel('dd-trace:span:inject')
const extractCh = channel('dd-trace:span:extract')
+/**
+ * @param {import('../config/config-base')} config - Tracer configuration
+ */
function configure (config) {
if (startCh.hasSubscribers) startCh.unsubscribe(onSpanStart)
if (injectCh.hasSubscribers) injectCh.unsubscribe(onSpanInject)
diff --git a/packages/dd-trace/src/telemetry/index.js b/packages/dd-trace/src/telemetry/index.js
index 05a7ec8b96b..43e884b026f 100644
--- a/packages/dd-trace/src/telemetry/index.js
+++ b/packages/dd-trace/src/telemetry/index.js
@@ -5,15 +5,14 @@ let telemetry
// Lazy load the telemetry module to avoid the performance impact of loading it unconditionally
module.exports = {
start (config, ...args) {
+ if (!config.telemetry.enabled) return
telemetry ??= require('./telemetry')
telemetry.start(config, ...args)
},
- stop () {
- telemetry?.stop()
- },
// This might be called before `start` so we have to trigger loading the
// underlying module here as well.
updateConfig (changes, config, ...args) {
+ if (!config.telemetry.enabled) return
telemetry ??= require('./telemetry')
telemetry.updateConfig(changes, config, ...args)
},
diff --git a/packages/dd-trace/src/telemetry/send-data.js b/packages/dd-trace/src/telemetry/send-data.js
index fb7af48e64d..ef0d86634df 100644
--- a/packages/dd-trace/src/telemetry/send-data.js
+++ b/packages/dd-trace/src/telemetry/send-data.js
@@ -62,19 +62,6 @@ const { getValueFromEnvSources } = require('../config/helper')
* kernel_name?: string
* } & Record<string, unknown>} TelemetryHost
*/
-/**
- * @typedef {{
- * hostname?: string,
- * port?: string | number,
- * url?: string | URL,
- * site?: string,
- * apiKey?: string,
- * isCiVisibility?: boolean,
- * spanAttributeSchema?: string,
- * tags: Record<string, string>,
- * telemetry?: { debug?: boolean }
- * }} TelemetryConfig
- */
/**
* @callback SendDataCallback
* @param {Error | null | undefined} error
@@ -85,23 +72,22 @@ const { getValueFromEnvSources } = require('../config/helper')
let agentTelemetry = true
/**
- * @param {TelemetryConfig} config
+ * @param {import('../config/config-base')} config
* @param {TelemetryApplication} application
* @param {TelemetryRequestType} reqType
* @returns {Record<string, string>}
*/
function getHeaders (config, application, reqType) {
- const sessionId = config.tags['runtime-id']
const headers = {
'content-type': 'application/json',
'dd-telemetry-api-version': 'v2',
'dd-telemetry-request-type': reqType,
'dd-client-library-language': application.language_name,
'dd-client-library-version': application.tracer_version,
- 'dd-session-id': sessionId,
+ 'dd-session-id': config.tags['runtime-id'],
}
- if (config.rootSessionId && config.rootSessionId !== sessionId) {
- headers['dd-root-session-id'] = config.rootSessionId
+ if (config.DD_ROOT_JS_SESSION_ID) {
+ headers['dd-root-session-id'] = config.DD_ROOT_JS_SESSION_ID
}
const debug = config.telemetry && config.telemetry.debug
if (debug) {
@@ -141,7 +127,7 @@ function getPayload (payload) {
// TODO(BridgeAR): Simplify this code. A lot does not need to be recalculated on every call.
/**
- * @param {TelemetryConfig} config
+ * @param {import('../config/config-base')} config
* @param {TelemetryApplication} application
* @param {TelemetryHost} host
* @param {TelemetryRequestType} reqType
diff --git a/packages/dd-trace/src/telemetry/session-propagation.js b/packages/dd-trace/src/telemetry/session-propagation.js
index 0af4968db52..7f191f02d7a 100644
--- a/packages/dd-trace/src/telemetry/session-propagation.js
+++ b/packages/dd-trace/src/telemetry/session-propagation.js
@@ -1,53 +1,37 @@
'use strict'
-const dc = require('dc-polyfill')
-
+const dc = /** @type {typeof import('diagnostics_channel')} */ (require('dc-polyfill'))
const childProcessChannel = dc.tracingChannel('datadog:child_process:execution')
let subscribed = false
-let rootSessionId
let runtimeId
-function injectSessionEnv (existingEnv) {
- // eslint-disable-next-line eslint-rules/eslint-process-env -- not in supported-configurations.json
- const base = existingEnv == null ? process.env : existingEnv
- return {
- ...base,
- DD_ROOT_JS_SESSION_ID: rootSessionId,
- DD_PARENT_JS_SESSION_ID: runtimeId,
- }
+function isOptionsObject (value) {
+ return value != null && typeof value === 'object' && !Array.isArray(value) && value
}
-function findOptionsIndex (args, shell) {
- if (Array.isArray(args[1])) {
- return { index: 2, exists: args[2] != null && typeof args[2] === 'object' }
- }
- if (args[1] != null && typeof args[1] === 'object') {
- return { index: 1, exists: true }
- }
- if (!shell && args[2] != null && typeof args[2] === 'object') {
- return { index: 2, exists: true }
- }
- return { index: shell ? 1 : 2, exists: false }
+function getEnvWithRuntimeId (env) {
+ // eslint-disable-next-line eslint-rules/eslint-process-env
+ return { ...(env ?? process.env), DD_ROOT_JS_SESSION_ID: runtimeId }
}
function onChildProcessStart (context) {
- if (!context.callArgs) return
-
const args = context.callArgs
- const { index, exists } = findOptionsIndex(args, context.shell)
+ if (!args) return
- if (exists) {
- args[index] = { ...args[index], env: injectSessionEnv(args[index].env) }
+ const index = Array.isArray(args[1]) || (!context.shell && !isOptionsObject(args[1])) ? 2 : 1
+ const options = isOptionsObject(args[index]) ? args[index] : undefined
+
+ if (options) {
+ args[index] = { ...options, env: getEnvWithRuntimeId(options.env) }
return
}
- const opts = { env: injectSessionEnv(null) }
-
- if (!context.shell && !Array.isArray(args[1])) {
+ if (index === 2 && !Array.isArray(args[1])) {
args.splice(1, 0, [])
}
+ const opts = { env: getEnvWithRuntimeId() }
if (typeof args[index] === 'function') {
args.splice(index, 0, opts)
} else {
@@ -55,24 +39,15 @@ function onChildProcessStart (context) {
}
}
-const handler = { start: onChildProcessStart }
-
function start (config) {
if (!config.telemetry?.enabled || subscribed) return
subscribed = true
- rootSessionId = config.rootSessionId
- runtimeId = config.tags['runtime-id']
-
- childProcessChannel.subscribe(handler)
-}
+ runtimeId = config.DD_ROOT_JS_SESSION_ID || config.tags['runtime-id']
-function stop () {
- if (!subscribed) return
- childProcessChannel.unsubscribe(handler)
- subscribed = false
- rootSessionId = undefined
- runtimeId = undefined
+ childProcessChannel.subscribe(
+ /** @type {import('diagnostics_channel').TracingChannelSubscribers