diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index f950389e079..6b409dccfc2 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -111,7 +111,21 @@ /integration-tests/config-jest-multiproject.js @DataDog/ci-app-libraries /integration-tests/config-jest.js @DataDog/ci-app-libraries /integration-tests/cypress-config.json @DataDog/ci-app-libraries +/integration-tests/cypress-custom-after-hooks.config.js @DataDog/ci-app-libraries +/integration-tests/cypress-custom-after-hooks.config.mjs @DataDog/ci-app-libraries +/integration-tests/cypress-auto-esm.config.mjs @DataDog/ci-app-libraries +/integration-tests/cypress-double-run.js @DataDog/ci-app-libraries +/integration-tests/cypress-double-run.mjs @DataDog/ci-app-libraries /integration-tests/cypress-esm-config.mjs @DataDog/ci-app-libraries +/integration-tests/cypress-legacy-plugin.config.js @DataDog/ci-app-libraries +/integration-tests/cypress-legacy-plugin.config.mjs @DataDog/ci-app-libraries +/integration-tests/cypress-plain-object-auto.config.js @DataDog/ci-app-libraries +/integration-tests/cypress-plain-object-auto.config.mjs @DataDog/ci-app-libraries +/integration-tests/cypress-plain-object-manual.config.js @DataDog/ci-app-libraries +/integration-tests/cypress-plain-object-manual.config.mjs @DataDog/ci-app-libraries +/integration-tests/cypress-return-config.config.js @DataDog/ci-app-libraries +/integration-tests/cypress-return-config.config.mjs @DataDog/ci-app-libraries +/integration-tests/cypress-typescript.config.ts @DataDog/ci-app-libraries /integration-tests/cypress.config.js @DataDog/ci-app-libraries /integration-tests/my-nyc.config.js @DataDog/ci-app-libraries /integration-tests/playwright.config.js @DataDog/ci-app-libraries @@ -143,6 +157,8 @@ /packages/**/*.dsm.spec.js @DataDog/data-streams-monitoring # API SDK Capabilities +/eslint-rules/ @DataDog/apm-sdk-capabilities-js + /integration-tests/log_injection.spec.js @DataDog/apm-sdk-capabilities-js /integration-tests/opentelemetry/ 
@DataDog/apm-sdk-capabilities-js /integration-tests/opentelemetry-logs.spec.js @DataDog/apm-sdk-capabilities-js @@ -217,6 +233,7 @@ /integration-tests/bun/ @DataDog/lang-platform-js /integration-tests/init.spec.js @DataDog/lang-platform-js /integration-tests/package-guardrails.spec.js @DataDog/lang-platform-js +/integration-tests/package-guardrails/flush.js @DataDog/lang-platform-js /integration-tests/startup.spec.js @DataDog/lang-platform-js /packages/datadog-core @DataDog/lang-platform-js diff --git a/.github/chainguard/self.github.release.push-tags.sts.yaml b/.github/chainguard/self.github.release.push-tags.sts.yaml new file mode 100644 index 00000000000..c69957e33c7 --- /dev/null +++ b/.github/chainguard/self.github.release.push-tags.sts.yaml @@ -0,0 +1,12 @@ +issuer: https://token.actions.githubusercontent.com + +subject: repo:DataDog/dd-trace-js:environment:npm + +claim_pattern: + event_name: (push|workflow_dispatch) + ref: refs/heads/(v[345]\.x|master) + repository: DataDog/dd-trace-js + job_workflow_ref: DataDog/dd-trace-js/\.github/workflows/release\.yml@refs/heads/(v[345]\.x|master) + +permissions: + contents: write diff --git a/.github/workflows/flakiness.yml b/.github/workflows/flakiness.yml index 9a069eecbba..02dec302075 100644 --- a/.github/workflows/flakiness.yml +++ b/.github/workflows/flakiness.yml @@ -36,6 +36,7 @@ jobs: with: method: chat.postMessage token: ${{ secrets.SLACK_BOT_TOKEN }} + errors: true payload: | channel: ${{ secrets.SLACK_CHANNEL_ID }} blocks: diff --git a/.github/workflows/project.yml b/.github/workflows/project.yml index a9801e5d064..6df7e4e985e 100644 --- a/.github/workflows/project.yml +++ b/.github/workflows/project.yml @@ -52,6 +52,14 @@ jobs: - uses: ./.github/actions/install - run: npm run verify-exercised-tests + generated-config-types: + runs-on: ubuntu-latest + name: Generated config types + steps: + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + - uses: ./.github/actions/node/latest + - 
run: npm run verify:config:types + workflow-job-names: runs-on: ubuntu-latest name: Workflow job names (unique) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 36f3525a84b..6f11f755020 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -21,11 +21,14 @@ jobs: url: https://npmjs.com/package/dd-trace permissions: id-token: write - contents: write - pull-requests: read env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} steps: + - uses: DataDog/dd-octo-sts-action@acaa02eee7e3bb0839e4272dacb37b8f3b58ba80 # v1.0.3 + id: octo-sts + with: + scope: DataDog/dd-trace-js + policy: self.github.release.push-tags - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - uses: ./.github/actions/node - run: npm publish --tag latest-node14 @@ -35,7 +38,7 @@ jobs: echo "json=$content" >> $GITHUB_OUTPUT - run: | git tag v${{ fromJson(steps.pkg.outputs.json).version }} - git push origin v${{ fromJson(steps.pkg.outputs.json).version }} + git push https://x-access-token:${{ steps.octo-sts.outputs.token }}@github.com/${{ github.repository }}.git v${{ fromJson(steps.pkg.outputs.json).version }} - run: node scripts/release/notes publish-v4: @@ -46,11 +49,14 @@ jobs: url: https://npmjs.com/package/dd-trace permissions: id-token: write - contents: write - pull-requests: read env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} steps: + - uses: DataDog/dd-octo-sts-action@acaa02eee7e3bb0839e4272dacb37b8f3b58ba80 # v1.0.3 + id: octo-sts + with: + scope: DataDog/dd-trace-js + policy: self.github.release.push-tags - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - uses: ./.github/actions/node - run: npm publish --tag latest-node16 @@ -60,7 +66,7 @@ jobs: echo "json=$content" >> $GITHUB_OUTPUT - run: | git tag v${{ fromJson(steps.pkg.outputs.json).version }} - git push origin v${{ fromJson(steps.pkg.outputs.json).version }} + git push https://x-access-token:${{ steps.octo-sts.outputs.token 
}}@github.com/${{ github.repository }}.git v${{ fromJson(steps.pkg.outputs.json).version }} - run: node scripts/release/notes publish-latest: @@ -71,12 +77,17 @@ jobs: url: https://npmjs.com/package/dd-trace permissions: id-token: write - contents: write - pull-requests: read env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} steps: + - uses: DataDog/dd-octo-sts-action@acaa02eee7e3bb0839e4272dacb37b8f3b58ba80 # v1.0.3 + id: octo-sts + with: + scope: DataDog/dd-trace-js + policy: self.github.release.push-tags - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + with: + persist-credentials: false - uses: ./.github/actions/node - run: npm publish - id: pkg @@ -85,7 +96,7 @@ jobs: echo "json=$content" >> $GITHUB_OUTPUT - run: | git tag v${{ fromJson(steps.pkg.outputs.json).version }} - git push origin v${{ fromJson(steps.pkg.outputs.json).version }} + git push https://x-access-token:${{ steps.octo-sts.outputs.token }}@github.com/${{ github.repository }}.git v${{ fromJson(steps.pkg.outputs.json).version }} - run: node scripts/release/notes --latest docs: @@ -130,9 +141,15 @@ jobs: url: https://npmjs.com/package/dd-trace permissions: id-token: write - contents: write steps: + - uses: DataDog/dd-octo-sts-action@acaa02eee7e3bb0839e4272dacb37b8f3b58ba80 # v1.0.3 + id: octo-sts + with: + scope: DataDog/dd-trace-js + policy: self.github.release.push-tags - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + with: + persist-credentials: false - uses: ./.github/actions/node - uses: ./.github/actions/install - id: pkg @@ -143,5 +160,22 @@ jobs: - run: npm publish --tag dev - run: | git tag --force dev - git push origin :refs/tags/dev - git push origin --tags + git push https://x-access-token:${{ steps.octo-sts.outputs.token }}@github.com/${{ github.repository }}.git :refs/tags/dev + git push https://x-access-token:${{ steps.octo-sts.outputs.token }}@github.com/${{ github.repository }}.git --tags + + status: + needs: ["publish-v3", 
"publish-v4", "publish-latest"] + if: always() && contains(needs.*.result, 'success') + runs-on: ubuntu-latest + permissions: + id-token: write + contents: read + pull-requests: read + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + steps: + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + with: + sparse-checkout: scripts/release/status.js + - uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v6.2.0 + - run: node scripts/release/status.js diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 182a99016f0..9c18fb3f318 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -466,3 +466,244 @@ $ yarn bench ``` [1]: https://docs.datadoghq.com/help + +## Working with Configurations + +`packages/dd-trace/src/config/supported-configurations.json` is the source of truth for tracer configuration metadata. + +When you add a new configuration here, the config system can usually derive: + +- default values +- env var parsing +- `tracer.init({...})` option mapping +- generated config types +- config telemetry + +## What A Developer Needs To Know + +Each entry defines: + +- the canonical env var name +- the runtime type +- the default value +- the programmatic option path +- optional aliases, validation, and transforms + +Minimal example: + +```json +"DD_AGENT_HOST": [{ + "implementation": "E", + "type": "string", + "configurationNames": ["hostname"], + "default": "127.0.0.1", + "aliases": ["DD_TRACE_AGENT_HOSTNAME"] +}] +``` + +Important fields: + +- `type`: parser to use for environment variables. Common values are `string`, `boolean`, `int`, `decimal`, `array`, `map`, `json`. +- `default`: parsed into the runtime type. `null` means `undefined` at runtime. +- `configurationNames`: programmatic option names. The first entry becomes the main internal property path. +- `internalPropertyName`: use this instead of `configurationNames` when the runtime property path should differ from the public option name. 
+- `transform`: extra conversion after parsing. This applies to both env vars and programmatic options. +- `allowed`: whitelist of accepted values. +- `aliases`: old or alternate env var names. +- `deprecated`: emits a deprecation warning when used. +- `description`: developer-facing note in the JSON. +- `implementation`: metadata only in the current flow. + +## Runtime Flow + +```mermaid +flowchart LR + A["supported-configurations.json"] --> B["defaults.js
build defaults + lookup tables"] + A --> C["helper.js
aliases + deprecations"] + A --> D["generate-config-types.js
generated-config-types.d.ts"] + B --> E["config/index.js"] + C --> E + E --> F["Config singleton"] + E --> G["Config telemetry"] + H["remote_config.js"] --> E +``` + +Load order in `config/index.js`: + +1. defaults +2. local stable config +3. env vars +4. fleet stable config +5. `tracer.init({...})` options +6. calculated values + +## Examples That Matter + +### Simple boolean + +```json +"DD_RUNTIME_METRICS_ENABLED": [{ + "type": "boolean", + "configurationNames": ["runtimeMetrics.enabled", "runtimeMetrics"], + "default": "false" +}] +``` + +Both of these work: + +```js +tracer.init({ runtimeMetrics: true }) +``` + +```js +tracer.init({ + runtimeMetrics: { + enabled: true + } +}) +``` + +Result: + +```js +config.runtimeMetrics.enabled === true +``` + +### Decimal with transform + +```json +"DD_TRACE_SAMPLE_RATE": [{ + "type": "decimal", + "configurationNames": ["sampleRate", "ingestion.sampleRate"], + "default": null, + "transform": "sampleRate" +}] +``` + +The `sampleRate` transform validates and clamps the value to the supported `0..1` range. 
+ +### Array with transform + +```json +"DD_TRACE_HEADER_TAGS": [{ + "type": "array", + "configurationNames": ["headerTags"], + "default": "", + "transform": "stripColonWhitespace" +}] +``` + +This matters because the transform is reused for both input styles: + +```bash +DD_TRACE_HEADER_TAGS="x-user-id : user.id, x-team : team" +``` + +```js +tracer.init({ + headerTags: ['x-user-id : user.id', 'x-team : team'] +}) +``` + +Both become: + +```js +config.headerTags +// ['x-user-id:user.id', 'x-team:team'] +``` + +### JSON with nested output + +```json +"DD_TRACE_SAMPLING_RULES": [{ + "type": "json", + "configurationNames": ["samplingRules"], + "default": "[]", + "transform": "toCamelCase" +}] +``` + +```bash +DD_TRACE_SAMPLING_RULES='[{"sample_rate":0.5,"service":"api"}]' +``` + +Result: + +```js +config.samplingRules +// [{ sampleRate: 0.5, service: 'api' }] +``` + +### Internal property path + +```json +"DD_API_KEY": [{ + "type": "string", + "default": null, + "internalPropertyName": "apiKey" +}] +``` + +Result: + +```js +config.apiKey +``` + +## Nested Properties + +Dot notation creates nested objects on the config singleton. + +```json +"DD_API_SECURITY_ENABLED": [{ + "type": "boolean", + "configurationNames": [ + "appsec.apiSecurity.enabled", + "experimental.appsec.apiSecurity.enabled" + ], + "default": "true" +}] +``` + +```js +tracer.init({ + appsec: { + apiSecurity: { + enabled: true + } + } +}) +``` + +Result: + +```js +config.appsec.apiSecurity.enabled === true +``` + +## Telemetry And Remote Config + +Config telemetry is handled automatically by the standard config flow. + +If your config is defined in `supported-configurations.json` and goes through the normal parsing/application path, telemetry usually works without extra code. Telemetry records the canonical name, the normalized value, and the origin such as `default`, `env_var`, `code`, `remote_config`, or `calculated`. + +Remote config is not a separate system. 
`packages/dd-trace/src/config/remote_config.js` translates remote field names into local option names and then applies them through `config.setRemoteConfig(...)`. After that, the normal pipeline runs again: apply options, recompute calculated values, and update telemetry. + +## Adding A New Configuration + +Use this checklist: + +1. Add the new entry to `packages/dd-trace/src/config/supported-configurations.json`. +2. Pick the correct `type` and `default`. +3. Add `configurationNames` if the setting should be exposed via `tracer.init({...})`. Add the documentation to `index.d.ts`. +4. Use `internalPropertyName` if the runtime property path should differ. +5. Add `transform` or `allowed` only if the raw parsed value is not enough. +6. Add `aliases` or `deprecated` only for compatibility. +7. Regenerate types if needed. +8. Add tests for env vars, programmatic options, and edge cases. + +## Mental Model + +Think of `supported-configurations.json` as the schema for one config singleton. + +You describe the input shape once, and the runtime uses that to build defaults, parse env vars, map programmatic options, generate types, and emit telemetry. 
diff --git a/benchmark/sirun/exporting-pipeline/index.js b/benchmark/sirun/exporting-pipeline/index.js index b8588c62973..f3395667f00 100644 --- a/benchmark/sirun/exporting-pipeline/index.js +++ b/benchmark/sirun/exporting-pipeline/index.js @@ -7,7 +7,7 @@ const SpanProcessor = require('../../../packages/dd-trace/src/span_processor') const Exporter = require('../../../packages/dd-trace/src/exporters/agent/index') const PrioritySampler = require('../../../packages/dd-trace/src/priority_sampler') const id = require('../../../packages/dd-trace/src/id') -const defaults = require('../../../packages/dd-trace/src/config/defaults') +const { defaults } = require('../../../packages/dd-trace/src/config/defaults') const config = { url: `http://${defaults.hostname}:${defaults.port}`, diff --git a/benchmark/sirun/statsd.js b/benchmark/sirun/statsd.js index 462889874f1..dc71e6d71a3 100644 --- a/benchmark/sirun/statsd.js +++ b/benchmark/sirun/statsd.js @@ -1,7 +1,7 @@ 'use strict' const dgram = require('dgram') -const defaults = require('../../packages/dd-trace/src/config/defaults') +const { defaults } = require('../../packages/dd-trace/src/config/defaults') const port = process.env.SIRUN_STATSD_PORT || defaults['dogstatsd.port'] class StatsD { diff --git a/docs/test.ts b/docs/test.ts index c6418d5780d..979c84abfaf 100644 --- a/docs/test.ts +++ b/docs/test.ts @@ -522,6 +522,14 @@ const res = {} as OutgoingMessage resBlockRequest = tracer.appsec.blockRequest(req, res) tracer.appsec.setUser(user) +// Profiling custom labels +tracer.profiling.setCustomLabelKeys(['customer', 'region']) +tracer.profiling.setCustomLabelKeys(new Set(['customer', 'region'])) +const labelResult: number = tracer.profiling.runWithLabels({ customer: 'acme', region: 'us-east' }, () => 42) +tracer.profiling.runWithLabels({ tier: 'premium' }, () => { + tracer.profiling.runWithLabels({ region: 'eu-west' }, () => {}) +}) + // OTel TracerProvider registers and provides a tracer const provider: 
opentelemetry.TracerProvider = new tracer.TracerProvider(); provider.register(); diff --git a/eslint-rules/eslint-config-names-sync.mjs b/eslint-rules/eslint-config-names-sync.mjs new file mode 100644 index 00000000000..ed716f68893 --- /dev/null +++ b/eslint-rules/eslint-config-names-sync.mjs @@ -0,0 +1,578 @@ +import fs from 'node:fs' +import path from 'node:path' + +import ts from 'typescript' + +const IGNORED_CONFIGURATION_NAMES = new Set([ + 'tracePropagationStyle', + 'tracing', +]) +const UNSUPPORTED_CONFIGURATION_ROOTS = new Set([ + 'isCiVisibility', + 'logger', + 'lookup', + 'plugins', +]) + +/** + * @typedef {{ + * node: import('typescript').InterfaceDeclaration | import('typescript').TypeAliasDeclaration + * namespaceKey: string + * key: string + * }} DeclarationEntry + */ + +/** + * @typedef {{ + * hasEnvDescendant: boolean + * hasBooleanBranch: boolean + * hasObjectBranch: boolean + * }} TypeInspectionResult + */ + +/** + * @typedef {{ + * names: Set + * primaryEnvTargets: Map> + * knownAliasEnvNames: Set + * }} SupportedConfigurationInfo + */ + +/** + * @typedef {{ + * declarations: Map + * primaryEnvTargets: Map> + * knownAliasEnvNames: Set + * names: Set + * visitedDeclarations: Set + * envTagNamesCache: WeakMap> + * interfacePropertiesCache: WeakMap< + * import('typescript').InterfaceDeclaration, + * Map + * > + * }} InspectionState + */ + +/** @type {InspectionState} */ +let currentInspectionState + +/** + * @param {Partial} [overrides] + * @returns {TypeInspectionResult} + */ +function createInspectionResult (overrides) { + return { + hasEnvDescendant: false, + hasBooleanBranch: false, + hasObjectBranch: false, + ...overrides, + } +} + +/** + * @param {string} filePath + * @returns {SupportedConfigurationInfo} + */ +function getSupportedConfigurationInfo (filePath) { + const parsed = JSON.parse(fs.readFileSync(filePath, 'utf8')) + const supportedConfigurations = parsed?.supportedConfigurations + + const names = new Set() + const primaryEnvTargets = 
new Map() + const knownAliasEnvNames = new Set() + + /** + * @param {string} envName + * @param {Set} targets + */ + function addPrimaryEnvTargets (envName, targets) { + let existingTargets = primaryEnvTargets.get(envName) + if (!existingTargets) { + existingTargets = new Set() + primaryEnvTargets.set(envName, existingTargets) + } + + for (const target of targets) { + existingTargets.add(target) + } + } + + for (const [envName, entries] of Object.entries(supportedConfigurations)) { + /** @type {Set} */ + const targets = new Set() + + for (const entry of entries) { + if (typeof entry.internalPropertyName === 'string') { + targets.add(entry.internalPropertyName) + } + + for (const alias of entry.aliases ?? []) { + if (typeof alias === 'string') { + knownAliasEnvNames.add(alias) + } + } + + for (const name of entry.configurationNames ?? []) { + if (typeof name === 'string' && !IGNORED_CONFIGURATION_NAMES.has(name)) { + names.add(name) + targets.add(name) + } + } + } + + addPrimaryEnvTargets(envName, targets) + } + + return { names, primaryEnvTargets, knownAliasEnvNames } +} + +/** + * @param {import('typescript').EntityName} entityName + * @returns {string} + */ +function getEntityNameText (entityName) { + if (ts.isIdentifier(entityName)) { + return entityName.text + } + + return `${getEntityNameText(entityName.left)}.${entityName.right.text}` +} + +/** + * @param {import('typescript').SourceFile} sourceFile + * @returns {Map} + */ +function getDeclarationRegistry (sourceFile) { + const declarations = new Map() + + /** + * @param {readonly import('typescript').Statement[]} statements + * @param {string} namespaceKey + */ + function visitStatements (statements, namespaceKey) { + for (const statement of statements) { + if (ts.isModuleDeclaration(statement)) { + visitModuleDeclaration(statement, namespaceKey) + continue + } + + if (!ts.isInterfaceDeclaration(statement) && !ts.isTypeAliasDeclaration(statement)) continue + + const key = namespaceKey ? 
`${namespaceKey}.${statement.name.text}` : statement.name.text + declarations.set(key, { + node: statement, + namespaceKey, + key, + }) + } + } + + /** + * @param {import('typescript').ModuleDeclaration} declaration + * @param {string} namespaceKey + */ + function visitModuleDeclaration (declaration, namespaceKey) { + const nextNamespaceKey = namespaceKey ? `${namespaceKey}.${declaration.name.text}` : declaration.name.text + + if (!declaration.body) return + + if (ts.isModuleBlock(declaration.body)) { + visitStatements(declaration.body.statements, nextNamespaceKey) + return + } + + visitModuleDeclaration( + /** @type {import('typescript').ModuleDeclaration} */ (declaration.body), + nextNamespaceKey + ) + } + + visitStatements(sourceFile.statements, '') + + return declarations +} + +/** + * @param {Map} declarations + * @param {import('typescript').EntityName} identifier + * @param {string} namespaceKey + * @returns {DeclarationEntry | undefined} + */ +function resolveDeclaration (declarations, identifier, namespaceKey) { + const typeName = getEntityNameText(identifier) + let currentNamespaceKey = namespaceKey + + while (true) { + const key = currentNamespaceKey ? `${currentNamespaceKey}.${typeName}` : typeName + const declaration = declarations.get(key) + if (declaration) { + return declaration + } + + if (!currentNamespaceKey) { + return undefined + } + + const lastSeparatorIndex = currentNamespaceKey.lastIndexOf('.') + currentNamespaceKey = lastSeparatorIndex === -1 + ? 
'' + : currentNamespaceKey.slice(0, lastSeparatorIndex) + } +} + +/** + * @param {import('typescript').PropertyName} propertyName + * @returns {string | undefined} + */ +function getPropertyName (propertyName) { + if (ts.isIdentifier(propertyName) || ts.isStringLiteral(propertyName)) { + return propertyName.text + } +} + +/** + * @param {import('typescript').Node} node + * @returns {Set} + */ +function getEnvTagNames (node) { + const { envTagNamesCache } = currentInspectionState + const cachedNames = envTagNamesCache.get(node) + if (cachedNames) return cachedNames + + const envTagNames = new Set() + for (const tag of ts.getJSDocTags(node)) { + if (tag.tagName.text !== 'env' || typeof tag.comment !== 'string') continue + + for (const match of tag.comment.matchAll(/\b(?:DD|OTEL)_[A-Z0-9_]+\b/g)) { + envTagNames.add(match[0]) + } + } + + envTagNamesCache.set(node, envTagNames) + return envTagNames +} + +/** + * @param {import('typescript').InterfaceDeclaration} declaration + * @param {string} propertyName + * @returns {import('typescript').PropertySignature | undefined} + */ +function getInterfaceProperty (declaration, propertyName) { + const { interfacePropertiesCache } = currentInspectionState + let properties = interfacePropertiesCache.get(declaration) + + if (!properties) { + properties = new Map() + + for (const member of declaration.members) { + if (!ts.isPropertySignature(member) || !member.type) continue + + const memberName = getPropertyName(member.name) + if (memberName) { + properties.set(memberName, member) + } + } + + interfacePropertiesCache.set(declaration, properties) + } + + return properties.get(propertyName) +} + +/** + * @param {string} fullPath + * @param {Set} envTagNames + * @returns {boolean} + */ +function hasSupportedDirectEnvTag (fullPath, envTagNames) { + const { primaryEnvTargets, knownAliasEnvNames } = currentInspectionState + + for (const envName of envTagNames) { + const targets = primaryEnvTargets.get(envName) + if 
(targets?.has(fullPath) || (!targets && !knownAliasEnvNames.has(envName))) { + return true + } + } + + return false +} + +/** + * @param {readonly import('typescript').TypeElement[]} members + * @param {string} namespaceKey + * @param {string} pathPrefix + * @returns {TypeInspectionResult} + */ +function inspectMembers (members, namespaceKey, pathPrefix) { + const result = createInspectionResult({ hasObjectBranch: true }) + + for (const member of members) { + if (!ts.isPropertySignature(member) || !member.type) continue + + const propertyName = getPropertyName(member.name) + if (!propertyName) continue + + const propertyResult = inspectProperty( + member, + namespaceKey, + pathPrefix ? `${pathPrefix}.${propertyName}` : propertyName + ) + result.hasEnvDescendant ||= propertyResult.hasEnvDescendant + } + + return result +} + +/** + * @param {import('typescript').PropertySignature} property + * @param {string} namespaceKey + * @param {string} fullPath + * @returns {TypeInspectionResult} + */ +function inspectProperty (property, namespaceKey, fullPath) { + if (UNSUPPORTED_CONFIGURATION_ROOTS.has(fullPath.split('.', 1)[0])) { + return createInspectionResult() + } + + const result = inspectTypeNode(property.type, namespaceKey, fullPath) + const envTagNames = getEnvTagNames(property) + const isLeafConfiguration = !result.hasObjectBranch + const isBooleanAlias = + result.hasBooleanBranch && + result.hasObjectBranch && + result.hasEnvDescendant + const hasSupportedOwnEnvTag = hasSupportedDirectEnvTag(fullPath, envTagNames) + + if (hasSupportedOwnEnvTag || isLeafConfiguration || isBooleanAlias) { + currentInspectionState.names.add(fullPath) + } + + result.hasEnvDescendant ||= hasSupportedOwnEnvTag + + return result +} + +/** + * @param {DeclarationEntry} declaration + * @param {string} fullPath + * @returns {TypeInspectionResult} + */ +function inspectDeclaration (declaration, fullPath) { + const state = currentInspectionState + + if 
(state.visitedDeclarations.has(declaration.key)) { + return createInspectionResult({ hasObjectBranch: true }) + } + + state.visitedDeclarations.add(declaration.key) + + try { + return ts.isInterfaceDeclaration(declaration.node) + ? inspectMembers(declaration.node.members, declaration.namespaceKey, fullPath) + : inspectTypeNode(declaration.node.type, declaration.namespaceKey, fullPath) + } finally { + state.visitedDeclarations.delete(declaration.key) + } +} + +/** + * @param {import('typescript').TypeNode | undefined} typeNode + * @param {string} namespaceKey + * @param {string} fullPath + * @returns {TypeInspectionResult} + */ +function inspectTypeNode (typeNode, namespaceKey, fullPath) { + const { declarations } = currentInspectionState + + if (!typeNode) { + return createInspectionResult() + } + + if (ts.isParenthesizedTypeNode(typeNode)) { + return inspectTypeNode(typeNode.type, namespaceKey, fullPath) + } + + if (typeNode.kind === ts.SyntaxKind.BooleanKeyword) { + return createInspectionResult({ hasBooleanBranch: true }) + } + + if (ts.isTypeLiteralNode(typeNode)) { + return inspectMembers(typeNode.members, namespaceKey, fullPath) + } + + if (ts.isUnionTypeNode(typeNode) || ts.isIntersectionTypeNode(typeNode)) { + const result = createInspectionResult() + + for (const part of typeNode.types) { + const partResult = inspectTypeNode(part, namespaceKey, fullPath) + result.hasEnvDescendant ||= partResult.hasEnvDescendant + result.hasBooleanBranch ||= partResult.hasBooleanBranch + result.hasObjectBranch ||= partResult.hasObjectBranch + } + + return result + } + + if (ts.isTypeReferenceNode(typeNode)) { + const declaration = resolveDeclaration(declarations, typeNode.typeName, namespaceKey) + return declaration ? 
inspectDeclaration(declaration, fullPath) : createInspectionResult() + } + + if ( + ts.isIndexedAccessTypeNode(typeNode) && + ts.isLiteralTypeNode(typeNode.indexType) && + ts.isStringLiteral(typeNode.indexType.literal) && + ts.isTypeReferenceNode(typeNode.objectType) + ) { + const declaration = resolveDeclaration(declarations, typeNode.objectType.typeName, namespaceKey) + + if (!declaration || !ts.isInterfaceDeclaration(declaration.node)) { + return createInspectionResult() + } + + const property = getInterfaceProperty(declaration.node, typeNode.indexType.literal.text) + return property ? inspectProperty(property, declaration.namespaceKey, fullPath) : createInspectionResult() + } + + return createInspectionResult() +} + +/** + * @param {string} filePath + * @param {SupportedConfigurationInfo} supportedConfigurationInfo + * @returns {Set} + */ +function getIndexDtsConfigurationNames (filePath, supportedConfigurationInfo) { + const sourceFile = ts.createSourceFile( + filePath, + fs.readFileSync(filePath, 'utf8'), + ts.ScriptTarget.Latest, + true, + ts.ScriptKind.TS + ) + const declarations = getDeclarationRegistry(sourceFile) + const tracerOptions = declarations.get('tracer.TracerOptions') + + if (!tracerOptions || !ts.isInterfaceDeclaration(tracerOptions.node)) { + throw new Error('Could not resolve tracer.TracerOptions.') + } + + const names = new Set() + currentInspectionState = { + declarations, + primaryEnvTargets: supportedConfigurationInfo.primaryEnvTargets, + knownAliasEnvNames: supportedConfigurationInfo.knownAliasEnvNames, + names, + visitedDeclarations: new Set(), + envTagNamesCache: new WeakMap(), + interfacePropertiesCache: new WeakMap(), + } + + inspectMembers(tracerOptions.node.members, tracerOptions.namespaceKey, '') + + for (const ignoredConfigurationName of IGNORED_CONFIGURATION_NAMES) { + names.delete(ignoredConfigurationName) + } + + return names +} + +/** + * @param {import('eslint').Rule.RuleContext} context + * @param {import('estree').Program} 
node + * @param {Set} sourceNames + * @param {Set} targetNames + * @param {string} messageId + * @returns {void} + */ +function reportMissingConfigurations (context, node, sourceNames, targetNames, messageId) { + const missing = [] + + for (const name of sourceNames) { + if (!targetNames.has(name)) { + missing.push(name) + } + } + + for (const configurationName of missing.sort()) { + context.report({ + node, + messageId, + data: { configurationName }, + }) + } +} + +/** @type {import('eslint').Rule.RuleModule} */ +export default { + meta: { + type: 'problem', + docs: { + description: 'Ensure supported configuration names stay in sync with index.d.ts', + }, + schema: [{ + type: 'object', + properties: { + indexDtsPath: { + type: 'string', + }, + supportedConfigurationsPath: { + type: 'string', + }, + }, + additionalProperties: false, + }], + messages: { + configurationMissingInIndexDts: + "Configuration name '{{configurationName}}' exists in supported-configurations.json but not in index.d.ts.", + configurationMissingInSupportedConfigurations: + "Configuration name '{{configurationName}}' exists in index.d.ts but not in supported-configurations.json.", + readFailure: + 'Unable to compare supported configuration names: {{reason}}', + }, + }, + create (context) { + const options = context.options[0] || {} + const indexDtsPath = path.resolve(context.cwd, options.indexDtsPath || 'index.d.ts') + const supportedConfigurationsPath = path.resolve( + context.cwd, + options.supportedConfigurationsPath || 'packages/dd-trace/src/config/supported-configurations.json' + ) + + return { + Program (node) { + let indexDtsNames + let supportedConfigurationInfo + + try { + supportedConfigurationInfo = getSupportedConfigurationInfo(supportedConfigurationsPath) + indexDtsNames = getIndexDtsConfigurationNames(indexDtsPath, supportedConfigurationInfo) + } catch (error) { + context.report({ + node, + messageId: 'readFailure', + data: { + reason: error instanceof Error ? 
error.message : String(error), + }, + }) + return + } + + reportMissingConfigurations( + context, + node, + supportedConfigurationInfo.names, + indexDtsNames, + 'configurationMissingInIndexDts' + ) + reportMissingConfigurations( + context, + node, + indexDtsNames, + supportedConfigurationInfo.names, + 'configurationMissingInSupportedConfigurations' + ) + }, + } + }, +} diff --git a/eslint-rules/eslint-config-names-sync.test.mjs b/eslint-rules/eslint-config-names-sync.test.mjs new file mode 100644 index 00000000000..46d0913fa0d --- /dev/null +++ b/eslint-rules/eslint-config-names-sync.test.mjs @@ -0,0 +1,93 @@ +import path from 'node:path' + +import { RuleTester } from 'eslint' + +import rule from './eslint-config-names-sync.mjs' + +const ruleTester = new RuleTester({ + languageOptions: { + ecmaVersion: 2022, + sourceType: 'script', + }, +}) + +const fixturesDirectory = path.join(process.cwd(), 'eslint-rules/fixtures/config-names-sync') + +/** + * @param {string} fixtureName + * @returns {{ indexDtsPath: string, supportedConfigurationsPath: string }} + */ +function getFixtureOptions (fixtureName) { + const fixtureDirectory = path.join(fixturesDirectory, fixtureName) + + return { + indexDtsPath: path.relative(process.cwd(), path.join(fixtureDirectory, 'index.d.ts')), + supportedConfigurationsPath: path.relative( + process.cwd(), + path.join(fixtureDirectory, 'supported-configurations.json') + ), + } +} + +ruleTester.run('eslint-config-names-sync', rule, { + valid: [ + { + filename: path.join(fixturesDirectory, 'valid', 'lint-anchor.js'), + code: '', + options: [getFixtureOptions('valid')], + }, + { + filename: path.join(fixturesDirectory, 'trace-propagation-style-exception', 'lint-anchor.js'), + code: '', + options: [getFixtureOptions('trace-propagation-style-exception')], + }, + { + filename: path.join(fixturesDirectory, 'internal-env-and-ignored-names', 'lint-anchor.js'), + code: '', + options: [getFixtureOptions('internal-env-and-ignored-names')], + }, + ], + 
invalid: [ + { + filename: path.join(fixturesDirectory, 'missing-in-index-dts', 'lint-anchor.js'), + code: '', + options: [getFixtureOptions('missing-in-index-dts')], + errors: [ + { + messageId: 'configurationMissingInIndexDts', + data: { + configurationName: 'missingFromTypes', + }, + }, + { + messageId: 'configurationMissingInIndexDts', + data: { + configurationName: 'telemetry', + }, + }, + ], + }, + { + filename: path.join(fixturesDirectory, 'missing-in-supported-configurations', 'lint-anchor.js'), + code: '', + options: [getFixtureOptions('missing-in-supported-configurations')], + errors: [{ + messageId: 'configurationMissingInSupportedConfigurations', + data: { + configurationName: 'missingFromJson', + }, + }], + }, + { + filename: path.join(fixturesDirectory, 'missing-nested-leaf-in-supported-configurations', 'lint-anchor.js'), + code: '', + options: [getFixtureOptions('missing-nested-leaf-in-supported-configurations')], + errors: [{ + messageId: 'configurationMissingInSupportedConfigurations', + data: { + configurationName: 'llmobs.agentlessEnabledasd', + }, + }], + }, + ], +}) diff --git a/eslint-rules/fixtures/config-names-sync/internal-env-and-ignored-names/index.d.ts b/eslint-rules/fixtures/config-names-sync/internal-env-and-ignored-names/index.d.ts new file mode 100644 index 00000000000..e46ff0b872c --- /dev/null +++ b/eslint-rules/fixtures/config-names-sync/internal-env-and-ignored-names/index.d.ts @@ -0,0 +1,14 @@ +declare namespace tracer { + export interface TracerOptions { + /** + * @env DD_LLMOBS_ENABLED + * The environment variable listed above takes precedence over programmatic configuration. 
+ */ + llmobs?: { + /** + * @env DD_LLMOBS_ML_APP + */ + mlApp?: string + } + } +} diff --git a/eslint-rules/fixtures/config-names-sync/internal-env-and-ignored-names/supported-configurations.json b/eslint-rules/fixtures/config-names-sync/internal-env-and-ignored-names/supported-configurations.json new file mode 100644 index 00000000000..5e989725d9c --- /dev/null +++ b/eslint-rules/fixtures/config-names-sync/internal-env-and-ignored-names/supported-configurations.json @@ -0,0 +1,23 @@ +{ + "supportedConfigurations": { + "DD_LLMOBS_ENABLED": [ + { + "internalPropertyName": "llmobs.enabled" + } + ], + "DD_LLMOBS_ML_APP": [ + { + "configurationNames": [ + "llmobs.mlApp" + ] + } + ], + "DD_TRACE_ENABLED": [ + { + "configurationNames": [ + "tracing" + ] + } + ] + } +} diff --git a/eslint-rules/fixtures/config-names-sync/missing-in-index-dts/index.d.ts b/eslint-rules/fixtures/config-names-sync/missing-in-index-dts/index.d.ts new file mode 100644 index 00000000000..e6db5eb3358 --- /dev/null +++ b/eslint-rules/fixtures/config-names-sync/missing-in-index-dts/index.d.ts @@ -0,0 +1,20 @@ +declare namespace tracer { + export interface TracerOptions { + /** + * @env DD_SIMPLE + */ + simple?: string + + /** + * @env DD_TRACE_TELEMETRY_ENABLED + */ + telemetry?: { + exporter?: { + /** + * @env DD_TELEMETRY_EXPORTER_URL + */ + url?: string + } + } + } +} diff --git a/eslint-rules/fixtures/config-names-sync/missing-in-index-dts/supported-configurations.json b/eslint-rules/fixtures/config-names-sync/missing-in-index-dts/supported-configurations.json new file mode 100644 index 00000000000..df3d4c17dd0 --- /dev/null +++ b/eslint-rules/fixtures/config-names-sync/missing-in-index-dts/supported-configurations.json @@ -0,0 +1,35 @@ +{ + "supportedConfigurations": { + "DD_SIMPLE": [ + { + "configurationNames": [ + "simple" + ] + } + ], + "DD_MISSING_FROM_TYPES": [ + { + "configurationNames": [ + "missingFromTypes" + ] + } + ], + "DD_INSTRUMENTATION_TELEMETRY_ENABLED": [ + { + 
"configurationNames": [ + "telemetry" + ], + "aliases": [ + "DD_TRACE_TELEMETRY_ENABLED" + ] + } + ], + "DD_TELEMETRY_EXPORTER_URL": [ + { + "configurationNames": [ + "telemetry.exporter.url" + ] + } + ] + } +} diff --git a/eslint-rules/fixtures/config-names-sync/missing-in-supported-configurations/index.d.ts b/eslint-rules/fixtures/config-names-sync/missing-in-supported-configurations/index.d.ts new file mode 100644 index 00000000000..491702f278b --- /dev/null +++ b/eslint-rules/fixtures/config-names-sync/missing-in-supported-configurations/index.d.ts @@ -0,0 +1,13 @@ +declare namespace tracer { + export interface TracerOptions { + /** + * @env DD_SIMPLE + */ + simple?: string + + /** + * @env DD_MISSING_FROM_JSON + */ + missingFromJson?: boolean + } +} diff --git a/eslint-rules/fixtures/config-names-sync/missing-in-supported-configurations/supported-configurations.json b/eslint-rules/fixtures/config-names-sync/missing-in-supported-configurations/supported-configurations.json new file mode 100644 index 00000000000..90553c533af --- /dev/null +++ b/eslint-rules/fixtures/config-names-sync/missing-in-supported-configurations/supported-configurations.json @@ -0,0 +1,11 @@ +{ + "supportedConfigurations": { + "DD_SIMPLE": [ + { + "configurationNames": [ + "simple" + ] + } + ] + } +} diff --git a/eslint-rules/fixtures/config-names-sync/missing-nested-leaf-in-supported-configurations/index.d.ts b/eslint-rules/fixtures/config-names-sync/missing-nested-leaf-in-supported-configurations/index.d.ts new file mode 100644 index 00000000000..597e4028452 --- /dev/null +++ b/eslint-rules/fixtures/config-names-sync/missing-nested-leaf-in-supported-configurations/index.d.ts @@ -0,0 +1,16 @@ +declare namespace tracer { + export interface TracerOptions { + /** + * @env DD_LLMOBS_ENABLED + * The environment variable listed above takes precedence over programmatic configuration. 
+ */ + llmobs?: { + /** + * @env DD_LLMOBS_ML_APP + */ + mlApp?: string + + agentlessEnabledasd?: string + } + } +} diff --git a/eslint-rules/fixtures/config-names-sync/missing-nested-leaf-in-supported-configurations/supported-configurations.json b/eslint-rules/fixtures/config-names-sync/missing-nested-leaf-in-supported-configurations/supported-configurations.json new file mode 100644 index 00000000000..8cbeba651ac --- /dev/null +++ b/eslint-rules/fixtures/config-names-sync/missing-nested-leaf-in-supported-configurations/supported-configurations.json @@ -0,0 +1,16 @@ +{ + "supportedConfigurations": { + "DD_LLMOBS_ENABLED": [ + { + "internalPropertyName": "llmobs.enabled" + } + ], + "DD_LLMOBS_ML_APP": [ + { + "configurationNames": [ + "llmobs.mlApp" + ] + } + ] + } +} diff --git a/eslint-rules/fixtures/config-names-sync/trace-propagation-style-exception/index.d.ts b/eslint-rules/fixtures/config-names-sync/trace-propagation-style-exception/index.d.ts new file mode 100644 index 00000000000..87aa771ca5d --- /dev/null +++ b/eslint-rules/fixtures/config-names-sync/trace-propagation-style-exception/index.d.ts @@ -0,0 +1,20 @@ +declare namespace tracer { + export interface PropagationStyle { + /** + * @env DD_TRACE_PROPAGATION_STYLE, DD_TRACE_PROPAGATION_STYLE_INJECT + */ + inject: string[] + + /** + * @env DD_TRACE_PROPAGATION_STYLE, DD_TRACE_PROPAGATION_STYLE_EXTRACT + */ + extract: string[] + } + + export interface TracerOptions { + /** + * @env DD_TRACE_PROPAGATION_STYLE, DD_TRACE_PROPAGATION_STYLE_INJECT, DD_TRACE_PROPAGATION_STYLE_EXTRACT + */ + tracePropagationStyle?: string[] | PropagationStyle + } +} diff --git a/eslint-rules/fixtures/config-names-sync/trace-propagation-style-exception/supported-configurations.json b/eslint-rules/fixtures/config-names-sync/trace-propagation-style-exception/supported-configurations.json new file mode 100644 index 00000000000..5816e540058 --- /dev/null +++ 
b/eslint-rules/fixtures/config-names-sync/trace-propagation-style-exception/supported-configurations.json @@ -0,0 +1,23 @@ +{ + "supportedConfigurations": { + "DD_TRACE_PROPAGATION_STYLE": [ + { + "type": "array" + } + ], + "DD_TRACE_PROPAGATION_STYLE_INJECT": [ + { + "configurationNames": [ + "tracePropagationStyle.inject" + ] + } + ], + "DD_TRACE_PROPAGATION_STYLE_EXTRACT": [ + { + "configurationNames": [ + "tracePropagationStyle.extract" + ] + } + ] + } +} diff --git a/eslint-rules/fixtures/config-names-sync/valid/index.d.ts b/eslint-rules/fixtures/config-names-sync/valid/index.d.ts new file mode 100644 index 00000000000..1a3b57c3edf --- /dev/null +++ b/eslint-rules/fixtures/config-names-sync/valid/index.d.ts @@ -0,0 +1,35 @@ +declare namespace tracer { + export interface TracerOptions { + /** + * @env DD_SIMPLE + */ + simple?: string + + objectOnly?: { + /** + * @env DD_OBJECT_ONLY_ENABLED + */ + enabled?: boolean + } + + appsec?: boolean | { + /** + * @env DD_APPSEC_ENABLED + */ + enabled?: boolean + } + + experimental?: { + appsec?: boolean | TracerOptions['appsec'] + + iast?: boolean | IastOptions + } + } + + interface IastOptions { + /** + * @env DD_IAST_ENABLED + */ + enabled?: boolean + } +} diff --git a/eslint-rules/fixtures/config-names-sync/valid/supported-configurations.json b/eslint-rules/fixtures/config-names-sync/valid/supported-configurations.json new file mode 100644 index 00000000000..17238484371 --- /dev/null +++ b/eslint-rules/fixtures/config-names-sync/valid/supported-configurations.json @@ -0,0 +1,36 @@ +{ + "supportedConfigurations": { + "DD_SIMPLE": [ + { + "configurationNames": [ + "simple" + ] + } + ], + "DD_OBJECT_ONLY_ENABLED": [ + { + "configurationNames": [ + "objectOnly.enabled" + ] + } + ], + "DD_APPSEC_ENABLED": [ + { + "configurationNames": [ + "appsec.enabled", + "appsec", + "experimental.appsec.enabled", + "experimental.appsec" + ] + } + ], + "DD_IAST_ENABLED": [ + { + "configurationNames": [ + "experimental.iast.enabled", + 
"experimental.iast" + ] + } + ] + } +} diff --git a/eslint.config.mjs b/eslint.config.mjs index 3a39c81ebaa..bb79b34eafe 100644 --- a/eslint.config.mjs +++ b/eslint.config.mjs @@ -14,11 +14,12 @@ import eslintPluginPromise from 'eslint-plugin-promise' import eslintPluginUnicorn from 'eslint-plugin-unicorn' import globals from 'globals' -import eslintProcessEnv from './eslint-rules/eslint-process-env.mjs' +import eslintConfigNamesSync from './eslint-rules/eslint-config-names-sync.mjs' import eslintEnvAliases from './eslint-rules/eslint-env-aliases.mjs' -import eslintSafeTypeOfObject from './eslint-rules/eslint-safe-typeof-object.mjs' import eslintLogPrintfStyle from './eslint-rules/eslint-log-printf-style.mjs' +import eslintProcessEnv from './eslint-rules/eslint-process-env.mjs' import eslintRequireExportExists from './eslint-rules/eslint-require-export-exists.mjs' +import eslintSafeTypeOfObject from './eslint-rules/eslint-safe-typeof-object.mjs' const { dependencies } = JSON.parse(readFileSync('./vendor/package.json', 'utf8')) @@ -269,6 +270,7 @@ export default [ }], 'import/no-useless-path-segments': 'error', 'import/no-webpack-loader-syntax': 'error', + 'jsdoc/check-param-names': ['error', { disableMissingParamChecks: true }], 'jsdoc/check-tag-names': ['error', { definedTags: ['datadog'] }], // TODO: Enable the rules that we want to use. // no-defaults: This should be activated, since the defaults will not be picked up in a description. 
@@ -375,6 +377,7 @@ export default [ rules: { 'eslint-process-env': eslintProcessEnv, 'eslint-env-aliases': eslintEnvAliases, + 'eslint-config-names-sync': eslintConfigNamesSync, 'eslint-safe-typeof-object': eslintSafeTypeOfObject, 'eslint-log-printf-style': eslintLogPrintfStyle, 'eslint-require-export-exists': eslintRequireExportExists, @@ -514,6 +517,15 @@ export default [ 'unicorn/switch-case-braces': 'off', // Questionable benefit }, }, + { + name: 'dd-trace/config-sync', + files: [ + 'eslint.config.mjs', + ], + rules: { + 'eslint-rules/eslint-config-names-sync': 'error', + }, + }, { name: 'dd-trace/scripts', files: [ diff --git a/index.d.ts b/index.d.ts index 3d567238fd3..099a8afdddd 100644 --- a/index.d.ts +++ b/index.d.ts @@ -130,6 +130,11 @@ interface Tracer extends opentracing.Tracer { appsec: tracer.Appsec; + /** + * Profiling API for attaching custom labels to profiler samples. + */ + profiling: tracer.Profiling; + TracerProvider: tracer.opentelemetry.TracerProvider; dogstatsd: tracer.DogStatsD; @@ -1570,6 +1575,35 @@ declare namespace tracer { trackUserLoginFailure(login: string, metadata?: any): void; } + export interface Profiling { + /** + * Declares the set of custom label keys that will be used with + * {@link runWithLabels}. This is used for profile upload metadata + * (so the Datadog UI knows which keys to index for filtering) and + * for pprof serialization optimization. + * + * @param keys Custom label key names. + */ + setCustomLabelKeys(keys: Iterable<string>): void; + + /** + * Runs a function with custom profiling labels attached to all wall profiler + * samples taken during its execution. Labels are key-value pairs that appear + * in the pprof output and can be used to filter flame graphs in the Datadog UI. + * + * Requires AsyncContextFrame (ACF) to be enabled. Supports nesting: inner + * calls merge labels with outer calls, with inner values taking precedence. 
+ * + * When profiling is not enabled or ACF is not active, the function is still + * called but labels are silently dropped. + * + * @param labels Custom labels to attach to profiler samples. + * @param fn Function to execute with the labels. + * @returns The return value of fn. + */ + runWithLabels<T>(labels: Record<string, string>, fn: () => T): T; + } + + export interface Appsec { /** * Links a successful login event to the current trace. Will link the passed user to the current trace with Appsec.setUser() internally. diff --git a/integration-tests/aiguard/index.spec.js b/integration-tests/aiguard/index.spec.js index a9e325f55a9..61e5d613dbc 100644 --- a/integration-tests/aiguard/index.spec.js +++ b/integration-tests/aiguard/index.spec.js @@ -39,8 +39,8 @@ describe('AIGuard SDK integration tests', () => { env: { DD_SERVICE: 'ai_guard_integration_test', DD_ENV: 'test', - DD_TRACING_ENABLED: 'true', - DD_TRACE_AGENT_PORT: agent.port, + DD_TRACE_ENABLED: 'true', + DD_TRACE_AGENT_PORT: String(agent.port), DD_AI_GUARD_ENABLED: 'true', DD_AI_GUARD_BLOCK: 'true', DD_AI_GUARD_ENDPOINT: `http://localhost:${api.address().port}`, diff --git a/integration-tests/ci-visibility/dynamic-instrumentation/fake-timers-test-hit-breakpoint.js b/integration-tests/ci-visibility/dynamic-instrumentation/fake-timers-test-hit-breakpoint.js new file mode 100644 index 00000000000..f924cea0f5f --- /dev/null +++ b/integration-tests/ci-visibility/dynamic-instrumentation/fake-timers-test-hit-breakpoint.js @@ -0,0 +1,22 @@ +'use strict' + +const assert = require('assert') +const sinon = require('sinon') + +const sum = require('./dependency') + +describe('dynamic-instrumentation-fake-timers', () => { + let clock + + beforeEach(function () { + clock = sinon.useFakeTimers() + }) + + afterEach(function () { + clock.restore() + }) + + it('retries with DI and fake timers', function () { + assert.strictEqual(sum(11, 3), 14) + }) +}) diff --git a/integration-tests/ci-visibility/features-di-fake-timers/support/steps.js 
b/integration-tests/ci-visibility/features-di-fake-timers/support/steps.js new file mode 100644 index 00000000000..6f9754b0a19 --- /dev/null +++ b/integration-tests/ci-visibility/features-di-fake-timers/support/steps.js @@ -0,0 +1,25 @@ +'use strict' + +const assert = require('assert') +const { When, Then, BeforeAll, AfterAll } = require('@cucumber/cucumber') +const sinon = require('sinon') +const sum = require('../../features-di/support/sum') + +let clock + +BeforeAll(function () { + clock = sinon.useFakeTimers() +}) + +AfterAll(function () { + clock.restore() +}) + +When('the greeter says hello', function () { + this.whatIHeard = 'hello' +}) + +Then('I should have heard {string}', function (expectedResponse) { + sum(11, 3) + assert.equal(this.whatIHeard, expectedResponse) +}) diff --git a/integration-tests/ci-visibility/features-di-fake-timers/test-hit-breakpoint.feature b/integration-tests/ci-visibility/features-di-fake-timers/test-hit-breakpoint.feature new file mode 100644 index 00000000000..7ec60251966 --- /dev/null +++ b/integration-tests/ci-visibility/features-di-fake-timers/test-hit-breakpoint.feature @@ -0,0 +1,6 @@ + +Feature: Greeting with fake timers + + Scenario: Say hello with fake timers + When the greeter says hello + Then I should have heard "hello" diff --git a/integration-tests/ci-visibility/jest-flaky/fake-timers-flaky-fails.js b/integration-tests/ci-visibility/jest-flaky/fake-timers-flaky-fails.js new file mode 100644 index 00000000000..a860280abd6 --- /dev/null +++ b/integration-tests/ci-visibility/jest-flaky/fake-timers-flaky-fails.js @@ -0,0 +1,26 @@ +'use strict' + +const assert = require('assert') + +describe('test-fake-timers', () => { + beforeAll(() => { + jest.useFakeTimers() + }) + + afterEach(() => { + // This pattern (from @testing-library/react's enableFakeTimers helper) + // clears all pending timers after each test but BEFORE test_done fires. 
+ // If dd-trace scheduled a setTimeout in test_done, clearAllTimers + // destroys it, orphaning the promise and deadlocking the process. + jest.runOnlyPendingTimers() + jest.clearAllTimers() + }) + + afterAll(() => { + jest.useRealTimers() + }) + + it('can retry failed tests with fake timers', () => { + assert.deepStrictEqual(1, 2) + }) +}) diff --git a/integration-tests/ci-visibility/subproject/cypress.config.js b/integration-tests/ci-visibility/subproject/cypress.config.js index 7d9c2df8db4..3544598d6c0 100644 --- a/integration-tests/ci-visibility/subproject/cypress.config.js +++ b/integration-tests/ci-visibility/subproject/cypress.config.js @@ -1,13 +1,12 @@ 'use strict' -module.exports = { +const { defineConfig } = require('cypress') + +module.exports = defineConfig({ defaultCommandTimeout: 1000, e2e: { - setupNodeEvents (on, config) { - return require('dd-trace/ci/cypress/plugin')(on, config) - }, specPattern: process.env.SPEC_PATTERN || 'cypress/e2e/**/*.cy.js', }, video: false, screenshotOnRunFailure: false, -} +}) diff --git a/integration-tests/ci-visibility/vitest-tests/fake-timers-di.mjs b/integration-tests/ci-visibility/vitest-tests/fake-timers-di.mjs new file mode 100644 index 00000000000..c5319a4f846 --- /dev/null +++ b/integration-tests/ci-visibility/vitest-tests/fake-timers-di.mjs @@ -0,0 +1,18 @@ +import { describe, test, expect, beforeAll, afterAll, vi } from 'vitest' +import { sum } from './bad-sum' + +describe('dynamic instrumentation fake timers', () => { + // Install fake timers in beforeAll — they persist through test finish hooks, + // which is the pattern that triggers the deadlock with DI's setTimeout. 
+ beforeAll(() => { + vi.useFakeTimers() + }) + + afterAll(() => { + vi.useRealTimers() + }) + + test('can sum with fake timers', () => { + expect(sum(11, 2)).to.equal(13) + }) +}) diff --git a/integration-tests/cucumber/cucumber.spec.js b/integration-tests/cucumber/cucumber.spec.js index 6058d512c99..5fbd5b6bc21 100644 --- a/integration-tests/cucumber/cucumber.spec.js +++ b/integration-tests/cucumber/cucumber.spec.js @@ -97,7 +97,7 @@ describe(`cucumber@${version} commonJS`, () => { let cwd, receiver, childProcess, testOutput - useSandbox([`@cucumber/cucumber@${version}`, 'assert', 'nyc'], true) + useSandbox([`@cucumber/cucumber@${version}`, 'assert', 'nyc', 'sinon'], true) before(function () { cwd = sandboxCwd() @@ -2160,6 +2160,36 @@ describe(`cucumber@${version} commonJS`, () => { }) }) + onlyLatestIt('does not hang when tests use fake timers and Failed Test Replay is enabled', async () => { + receiver.setSettings({ + flaky_test_retries_enabled: true, + di_enabled: true, + }) + + const eventsPromise = receiver + .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), (payloads) => { + const events = payloads.flatMap(({ payload }) => payload.events) + const tests = events.filter(event => event.type === 'test').map(event => event.content) + assert.strictEqual(tests.length, 2) + const retriedTests = tests.filter( + t => t.meta[TEST_RETRY_REASON] === TEST_RETRY_REASON_TYPES.atr + ) + assert.strictEqual(retriedTests.length, 1) + }) + + const featurePath = 'ci-visibility/features-di-fake-timers/test-hit-breakpoint.feature' + childProcess = exec( + `./node_modules/.bin/cucumber-js ${featurePath} --retry 1`, + { + cwd, + env: envVars, + } + ) + + const [[exitCode]] = await Promise.all([once(childProcess, 'exit'), eventsPromise]) + assert.strictEqual(exitCode, 1) + }) + onlyLatestIt('does not crash if the retry does not hit the breakpoint', (done) => { receiver.setSettings({ flaky_test_retries_enabled: true, @@ -2596,6 +2626,54 @@ 
describe(`cucumber@${version} commonJS`, () => { }) }) + it('does not tag known attempt to fix tests as new', async () => { + receiver.setKnownTests({ + cucumber: { + 'ci-visibility/features-test-management/attempt-to-fix.feature': [ + 'Say attempt to fix', + ], + }, + }) + receiver.setSettings({ + test_management: { enabled: true, attempt_to_fix_retries: 2 }, + early_flake_detection: { + enabled: true, + slow_test_retries: { '5s': 2 }, + faulty_session_threshold: 100, + }, + known_tests_enabled: true, + }) + + const eventsPromise = receiver + .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), (payloads) => { + const events = payloads.flatMap(({ payload }) => payload.events) + const tests = events.filter(event => event.type === 'test').map(event => event.content) + const atfTests = tests.filter( + t => t.meta[TEST_MANAGEMENT_IS_ATTEMPT_TO_FIX] === 'true' + ) + assert.ok(atfTests.length > 0) + for (const test of atfTests) { + assert.ok( + !(TEST_IS_NEW in test.meta), + 'ATF test that is in known tests should not be tagged as new' + ) + } + }) + + childProcess = exec( + './node_modules/.bin/cucumber-js ci-visibility/features-test-management/attempt-to-fix.feature', + { + cwd, + env: getCiVisAgentlessConfig(receiver.port), + } + ) + + await Promise.all([ + once(childProcess, 'exit'), + eventsPromise, + ]) + }) + it('does not fail retry if a test is quarantined', (done) => { receiver.setSettings({ test_management: { enabled: true, attempt_to_fix_retries: 3 } }) receiver.setTestManagementTests({ diff --git a/integration-tests/cypress-auto-esm.config.mjs b/integration-tests/cypress-auto-esm.config.mjs new file mode 100644 index 00000000000..ad0f92f07e8 --- /dev/null +++ b/integration-tests/cypress-auto-esm.config.mjs @@ -0,0 +1,11 @@ +import { defineConfig } from 'cypress' + +export default defineConfig({ + defaultCommandTimeout: 1000, + e2e: { + testIsolation: process.env.CYPRESS_TEST_ISOLATION !== 'false', + specPattern: 
process.env.SPEC_PATTERN || 'cypress/e2e/**/*.cy.js', + }, + video: false, + screenshotOnRunFailure: false, +}) diff --git a/integration-tests/cypress-custom-after-hooks.config.js b/integration-tests/cypress-custom-after-hooks.config.js new file mode 100644 index 00000000000..78d271b7b9f --- /dev/null +++ b/integration-tests/cypress-custom-after-hooks.config.js @@ -0,0 +1,36 @@ +'use strict' + +const { defineConfig } = require('cypress') + +module.exports = defineConfig({ + defaultCommandTimeout: 1000, + e2e: { + setupNodeEvents (on, config) { + on('after:spec', (spec, results) => { + // eslint-disable-next-line no-console + console.log('[custom:after:spec]', spec.relative, results.stats.passes) + return new Promise((resolve) => { + setTimeout(() => { + // eslint-disable-next-line no-console + console.log('[custom:after:spec:resolved]') + resolve() + }, 50) + }) + }) + on('after:run', (results) => { + // eslint-disable-next-line no-console + console.log('[custom:after:run]', results.totalPassed) + return new Promise((resolve) => { + setTimeout(() => { + // eslint-disable-next-line no-console + console.log('[custom:after:run:resolved]') + resolve() + }, 50) + }) + }) + }, + specPattern: process.env.SPEC_PATTERN || 'cypress/e2e/**/*.cy.js', + }, + video: false, + screenshotOnRunFailure: false, +}) diff --git a/integration-tests/cypress-custom-after-hooks.config.mjs b/integration-tests/cypress-custom-after-hooks.config.mjs new file mode 100644 index 00000000000..a4cb02b6e3c --- /dev/null +++ b/integration-tests/cypress-custom-after-hooks.config.mjs @@ -0,0 +1,34 @@ +import { defineConfig } from 'cypress' + +export default defineConfig({ + defaultCommandTimeout: 1000, + e2e: { + setupNodeEvents (on, config) { + on('after:spec', (spec, results) => { + // eslint-disable-next-line no-console + console.log('[custom:after:spec]', spec.relative, results.stats.passes) + return new Promise((resolve) => { + setTimeout(() => { + // eslint-disable-next-line no-console + 
console.log('[custom:after:spec:resolved]') + resolve() + }, 50) + }) + }) + on('after:run', (results) => { + // eslint-disable-next-line no-console + console.log('[custom:after:run]', results.totalPassed) + return new Promise((resolve) => { + setTimeout(() => { + // eslint-disable-next-line no-console + console.log('[custom:after:run:resolved]') + resolve() + }, 50) + }) + }) + }, + specPattern: process.env.SPEC_PATTERN || 'cypress/e2e/**/*.cy.js', + }, + video: false, + screenshotOnRunFailure: false, +}) diff --git a/integration-tests/cypress-double-run.js b/integration-tests/cypress-double-run.js new file mode 100644 index 00000000000..acd5ace5ea3 --- /dev/null +++ b/integration-tests/cypress-double-run.js @@ -0,0 +1,35 @@ +'use strict' + +// Tests that cypress.run() works twice in the same process (resetRunState). +// Instrumentation works via the default cypress.config.js in the project +// (which uses defineConfig), NOT via the inline config below — Cypress +// does not call setupNodeEvents from inline config objects. 
+const cypress = require('cypress') + +const runOptions = { + config: { + defaultCommandTimeout: 1000, + e2e: { + supportFile: 'cypress/support/e2e.js', + testIsolation: process.env.CYPRESS_TEST_ISOLATION !== 'false', + specPattern: process.env.SPEC_PATTERN || 'cypress/e2e/**/*.cy.js', + }, + video: false, + screenshotOnRunFailure: false, + }, +} + +async function runCypressTwice () { + for (let runNumber = 0; runNumber < 2; runNumber++) { + const results = await cypress.run(runOptions) + if (results.totalFailed !== 0) { + process.exit(1) + } + } +} + +runCypressTwice().catch((error) => { + // eslint-disable-next-line no-console + console.error(error) + process.exit(1) +}) diff --git a/integration-tests/cypress-double-run.mjs b/integration-tests/cypress-double-run.mjs new file mode 100644 index 00000000000..a4e6d2a87d3 --- /dev/null +++ b/integration-tests/cypress-double-run.mjs @@ -0,0 +1,25 @@ +// Tests that cypress.run() works twice in the same process (resetRunState). +// Instrumentation works via the default cypress.config.js in the project +// (which uses defineConfig), NOT via the inline config below — Cypress +// does not call setupNodeEvents from inline config objects. 
+import cypress from 'cypress' + +const runOptions = { + config: { + defaultCommandTimeout: 1000, + e2e: { + supportFile: 'cypress/support/e2e.js', + testIsolation: process.env.CYPRESS_TEST_ISOLATION !== 'false', + specPattern: process.env.SPEC_PATTERN || 'cypress/e2e/**/*.cy.js', + }, + video: false, + screenshotOnRunFailure: false, + }, +} + +for (let runNumber = 0; runNumber < 2; runNumber++) { + const results = await cypress.run(runOptions) + if (results.totalFailed !== 0) { + process.exit(1) + } +} diff --git a/integration-tests/cypress-esm-config.mjs b/integration-tests/cypress-esm-config.mjs index 4e36b444ae0..e835d1636a2 100644 --- a/integration-tests/cypress-esm-config.mjs +++ b/integration-tests/cypress-esm-config.mjs @@ -1,3 +1,7 @@ +// Programmatic ESM entry point for the 'esm' module type tests. +// Instrumentation works via the default cypress.config.js in the project +// (which uses defineConfig), NOT via the inline setupNodeEvents below — +// Cypress does not call setupNodeEvents from inline config objects. 
import cypress from 'cypress' async function runCypress () { @@ -8,31 +12,10 @@ async function runCypress () { testIsolation: process.env.CYPRESS_TEST_ISOLATION !== 'false', setupNodeEvents (on, config) { if (process.env.CYPRESS_ENABLE_INCOMPATIBLE_PLUGIN) { - import('cypress-fail-fast/plugin').then(module => { + return import('cypress-fail-fast/plugin').then(module => { module.default(on, config) }) } - if (process.env.CYPRESS_ENABLE_AFTER_RUN_CUSTOM) { - on('after:run', (...args) => { - // do custom stuff - // and call after-run at the end - return import('dd-trace/ci/cypress/after-run').then(module => { - module.default(...args) - }) - }) - } - if (process.env.CYPRESS_ENABLE_AFTER_SPEC_CUSTOM) { - on('after:spec', (...args) => { - // do custom stuff - // and call after-spec at the end - return import('dd-trace/ci/cypress/after-spec').then(module => { - module.default(...args) - }) - }) - } - return import('dd-trace/ci/cypress/plugin').then(module => { - return module.default(on, config) - }) }, specPattern: process.env.SPEC_PATTERN || 'cypress/e2e/**/*.cy.js', }, @@ -40,6 +23,7 @@ async function runCypress () { screenshotOnRunFailure: false, }, }) + if (results.totalFailed !== 0) { process.exit(1) } diff --git a/integration-tests/cypress-legacy-plugin.config.js b/integration-tests/cypress-legacy-plugin.config.js new file mode 100644 index 00000000000..016ae6ee76c --- /dev/null +++ b/integration-tests/cypress-legacy-plugin.config.js @@ -0,0 +1,20 @@ +'use strict' + +// Backwards compatibility config: uses defineConfig AND the old manual plugin. +// When NODE_OPTIONS is set, the instrumentation wraps defineConfig and injects +// setupNodeEvents. The manual plugin call sets cypressPlugin._isInit = true, +// so the instrumentation skips its own registration to avoid double hooks. 
+const { defineConfig } = require('cypress') +const ddTracePlugin = require('dd-trace/ci/cypress/plugin') + +module.exports = defineConfig({ + defaultCommandTimeout: 1000, + e2e: { + setupNodeEvents (on, config) { + return ddTracePlugin(on, config) + }, + specPattern: process.env.SPEC_PATTERN || 'cypress/e2e/**/*.cy.js', + }, + video: false, + screenshotOnRunFailure: false, +}) diff --git a/integration-tests/cypress-legacy-plugin.config.mjs b/integration-tests/cypress-legacy-plugin.config.mjs new file mode 100644 index 00000000000..23d8791a826 --- /dev/null +++ b/integration-tests/cypress-legacy-plugin.config.mjs @@ -0,0 +1,14 @@ +import { defineConfig } from 'cypress' +import ddTracePlugin from 'dd-trace/ci/cypress/plugin.js' + +export default defineConfig({ + defaultCommandTimeout: 1000, + e2e: { + setupNodeEvents (on, config) { + return ddTracePlugin(on, config) + }, + specPattern: process.env.SPEC_PATTERN || 'cypress/e2e/**/*.cy.js', + }, + video: false, + screenshotOnRunFailure: false, +}) diff --git a/integration-tests/cypress-plain-object-auto.config.js b/integration-tests/cypress-plain-object-auto.config.js new file mode 100644 index 00000000000..4c5bf96a93b --- /dev/null +++ b/integration-tests/cypress-plain-object-auto.config.js @@ -0,0 +1,13 @@ +'use strict' + +// Plain object config without defineConfig and without manual plugin. +// Relies solely on the CLI wrapper to inject setupNodeEvents. +module.exports = { + defaultCommandTimeout: 1000, + e2e: { + specPattern: process.env.SPEC_PATTERN || 'cypress/e2e/**/*.cy.js', + supportFile: 'cypress/support/e2e.js', + }, + video: false, + screenshotOnRunFailure: false, +} diff --git a/integration-tests/cypress-plain-object-auto.config.mjs b/integration-tests/cypress-plain-object-auto.config.mjs new file mode 100644 index 00000000000..e43e70e2026 --- /dev/null +++ b/integration-tests/cypress-plain-object-auto.config.mjs @@ -0,0 +1,11 @@ +// Plain object config without defineConfig and without manual plugin. 
+// Relies solely on the CLI wrapper to inject setupNodeEvents. +export default { + defaultCommandTimeout: 1000, + e2e: { + specPattern: process.env.SPEC_PATTERN || 'cypress/e2e/**/*.cy.js', + supportFile: 'cypress/support/e2e.js', + }, + video: false, + screenshotOnRunFailure: false, +} diff --git a/integration-tests/cypress-plain-object-manual.config.js b/integration-tests/cypress-plain-object-manual.config.js new file mode 100644 index 00000000000..c8e76f42994 --- /dev/null +++ b/integration-tests/cypress-plain-object-manual.config.js @@ -0,0 +1,16 @@ +'use strict' + +const ddTracePlugin = require('dd-trace/ci/cypress/plugin') + +module.exports = { + defaultCommandTimeout: 1000, + e2e: { + setupNodeEvents (on, config) { + return ddTracePlugin(on, config) + }, + specPattern: process.env.SPEC_PATTERN || 'cypress/e2e/**/*.cy.js', + supportFile: 'cypress/support/e2e.js', + }, + video: false, + screenshotOnRunFailure: false, +} diff --git a/integration-tests/cypress-plain-object-manual.config.mjs b/integration-tests/cypress-plain-object-manual.config.mjs new file mode 100644 index 00000000000..c4925f3c3ca --- /dev/null +++ b/integration-tests/cypress-plain-object-manual.config.mjs @@ -0,0 +1,14 @@ +import ddTracePlugin from 'dd-trace/ci/cypress/plugin.js' + +export default { + defaultCommandTimeout: 1000, + e2e: { + setupNodeEvents (on, config) { + return ddTracePlugin(on, config) + }, + specPattern: process.env.SPEC_PATTERN || 'cypress/e2e/**/*.cy.js', + supportFile: 'cypress/support/e2e.js', + }, + video: false, + screenshotOnRunFailure: false, +} diff --git a/integration-tests/cypress-return-config.config.js b/integration-tests/cypress-return-config.config.js new file mode 100644 index 00000000000..fde4398e7a0 --- /dev/null +++ b/integration-tests/cypress-return-config.config.js @@ -0,0 +1,21 @@ +'use strict' + +const { defineConfig } = require('cypress') + +module.exports = defineConfig({ + defaultCommandTimeout: 1000, + e2e: { + async setupNodeEvents () { + 
await new Promise((resolve) => setTimeout(resolve, 50)) + return { + env: { + RETURNED_CONFIG_FLAG: 'true', + }, + specPattern: 'cypress/e2e/returned-config.cy.js', + } + }, + specPattern: 'cypress/e2e/basic-fail.js', + }, + video: false, + screenshotOnRunFailure: false, +}) diff --git a/integration-tests/cypress-return-config.config.mjs b/integration-tests/cypress-return-config.config.mjs new file mode 100644 index 00000000000..25ab995728a --- /dev/null +++ b/integration-tests/cypress-return-config.config.mjs @@ -0,0 +1,19 @@ +import { defineConfig } from 'cypress' + +export default defineConfig({ + defaultCommandTimeout: 1000, + e2e: { + async setupNodeEvents () { + await new Promise((resolve) => setTimeout(resolve, 50)) + return { + env: { + RETURNED_CONFIG_FLAG: 'true', + }, + specPattern: 'cypress/e2e/returned-config.cy.js', + } + }, + specPattern: 'cypress/e2e/basic-fail.js', + }, + video: false, + screenshotOnRunFailure: false, +}) diff --git a/integration-tests/cypress-typescript.config.ts b/integration-tests/cypress-typescript.config.ts new file mode 100644 index 00000000000..35111b34608 --- /dev/null +++ b/integration-tests/cypress-typescript.config.ts @@ -0,0 +1,11 @@ +import { defineConfig } from 'cypress' + +export default defineConfig({ + defaultCommandTimeout: 1000, + e2e: { + specPattern: process.env.SPEC_PATTERN || 'cypress/e2e/**/*.cy.js', + supportFile: 'cypress/support/e2e.js', + }, + video: false, + screenshotOnRunFailure: false, +}) diff --git a/integration-tests/cypress.config.js b/integration-tests/cypress.config.js index 091320304c9..3c665d78524 100644 --- a/integration-tests/cypress.config.js +++ b/integration-tests/cypress.config.js @@ -1,36 +1,33 @@ 'use strict' -const ddAfterRun = require('dd-trace/ci/cypress/after-run') -const ddAfterSpec = require('dd-trace/ci/cypress/after-spec') -const cypressFailFast = require('cypress-fail-fast/plugin') -const ddTracePlugin = require('dd-trace/ci/cypress/plugin') +const { defineConfig } = 
require('cypress') -module.exports = { +module.exports = defineConfig({ defaultCommandTimeout: 1000, e2e: { testIsolation: process.env.CYPRESS_TEST_ISOLATION !== 'false', setupNodeEvents (on, config) { if (process.env.CYPRESS_ENABLE_INCOMPATIBLE_PLUGIN) { - cypressFailFast(on, config) + require('cypress-fail-fast/plugin')(on, config) } if (process.env.CYPRESS_ENABLE_AFTER_RUN_CUSTOM) { + const ddAfterRun = require('dd-trace/ci/cypress/after-run') on('after:run', (...args) => { - // do custom stuff - // and call after-run at the end return ddAfterRun(...args) }) } if (process.env.CYPRESS_ENABLE_AFTER_SPEC_CUSTOM) { + const ddAfterSpec = require('dd-trace/ci/cypress/after-spec') on('after:spec', (...args) => { - // do custom stuff - // and call after-spec at the end return ddAfterSpec(...args) }) } - return ddTracePlugin(on, config) + if (process.env.CYPRESS_ENABLE_MANUAL_PLUGIN) { + return require('dd-trace/ci/cypress/plugin')(on, config) + } }, specPattern: process.env.SPEC_PATTERN || 'cypress/e2e/**/*.cy.js', }, video: false, screenshotOnRunFailure: false, -} +}) diff --git a/integration-tests/cypress/cypress.spec.js b/integration-tests/cypress/cypress.spec.js index f6642405c4e..b78ad582d57 100644 --- a/integration-tests/cypress/cypress.spec.js +++ b/integration-tests/cypress/cypress.spec.js @@ -76,7 +76,6 @@ const version = process.env.CYPRESS_VERSION const hookFile = 'dd-trace/loader-hook.mjs' const NUM_RETRIES_EFD = 3 const CYPRESS_PRECOMPILED_SPEC_DIST_DIR = 'cypress/e2e/dist' - const over12It = (version === 'latest' || semver.gte(version, '12.0.0')) ? it : it.skip const moduleTypes = [ @@ -151,7 +150,10 @@ moduleTypes.forEach(({ // cypress-fail-fast is required as an incompatible plugin. // typescript is required to compile .cy.ts spec files in the pre-compiled JS tests. 
- useSandbox([`cypress@${version}`, 'cypress-fail-fast@7.1.0', 'typescript'], true) + // typescript@5 is pinned because typescript@6 emits "use strict" on line 1 for + // non-module files, shifting compiled line numbers and breaking source map resolution. + // TODO: Update test files accordingly and test with different TS versions + useSandbox([`cypress@${version}`, 'cypress-fail-fast@7.1.0', 'typescript@5'], true) before(async function () { // Note: Cypress binary is already installed during useSandbox() via the postinstall script @@ -288,26 +290,568 @@ moduleTypes.forEach(({ assert.ok(!('addTagsAfterFailure' in failedTestSpan.meta)) }, 60000) - const { - NODE_OPTIONS, - ...restEnvVars - } = getCiVisEvpProxyConfig(receiver.port) + const envVars = getCiVisEvpProxyConfig(receiver.port) const specToRun = 'cypress/e2e/basic-*.js' - // For Cypress 6.7.0, we need to override the --spec flag that's hardcoded in testCommand - const command = version === '6.7.0' - ? `./node_modules/.bin/cypress run --config-file cypress-config.json --spec "${specToRun}"` - : testCommand + // For Cypress 6.7.0, we need to override the --spec flag that's hardcoded in testCommand + const command = version === '6.7.0' + ? 
`./node_modules/.bin/cypress run --config-file cypress-config.json --spec "${specToRun}"` + : testCommand + + childProcess = exec( + command, + { + cwd, + env: { + ...envVars, + CYPRESS_BASE_URL: `http://localhost:${webAppPort}`, + SPEC_PATTERN: specToRun, + }, + } + ) + + await Promise.all([ + once(childProcess, 'exit'), + receiverPromise, + ]) + }) + + if (version === '6.7.0') { + it('logs a warning if using a deprecated version of cypress', async () => { + let stdout = '' + const { + NODE_OPTIONS, + ...restEnvVars + } = getCiVisEvpProxyConfig(receiver.port) + + childProcess = exec( + `${testCommand} --spec cypress/e2e/spec.cy.js`, + { + cwd, + env: { + ...restEnvVars, + CYPRESS_BASE_URL: `http://localhost:${webAppPort}`, + }, + } + ) + + childProcess.stdout?.on('data', (chunk) => { + stdout += chunk.toString() + }) + + await Promise.all([ + once(childProcess, 'exit'), + once(childProcess.stdout, 'end'), + ]) + assert.match( + stdout, + /WARNING: dd-trace support for Cypress<10.2.0 is deprecated/ + ) + }) + } + + // These tests require Cypress >=10 features (defineConfig, setupNodeEvents) + const over10It = (version !== '6.7.0') ? it : it.skip + over10It('is backwards compatible with the old manual plugin approach', async () => { + receiver.setInfoResponse({ endpoints: [] }) + + const receiverPromise = receiver + .gatherPayloadsMaxTimeout(({ url }) => url === '/v0.4/traces', (payloads) => { + const testSpans = payloads.flatMap(({ payload }) => payload.flatMap(trace => trace)) + + const passedTestSpan = testSpans.find(span => + span.resource === 'cypress/e2e/basic-pass.js.basic pass suite can pass' + ) + + assertObjectContains(passedTestSpan, { + name: 'cypress.test', + type: 'test', + meta: { + [TEST_STATUS]: 'pass', + [TEST_FRAMEWORK]: 'cypress', + }, + }) + }, 60000) + + const envVars = getCiVisEvpProxyConfig(receiver.port) + + const legacyConfigFile = type === 'esm' + ? 
'cypress-legacy-plugin.config.mjs' + : 'cypress-legacy-plugin.config.js' + + childProcess = exec( + `./node_modules/.bin/cypress run --config-file ${legacyConfigFile}`, + { + cwd, + env: { + ...envVars, + CYPRESS_BASE_URL: `http://localhost:${webAppPort}`, + SPEC_PATTERN: 'cypress/e2e/basic-pass.js', + }, + } + ) + + await Promise.all([ + once(childProcess, 'exit'), + receiverPromise, + ]) + }) + + over10It('reports tests when using cypress.config.mjs with NODE_OPTIONS', async () => { + const receiverPromise = receiver + .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), (payloads) => { + const events = payloads + .flatMap(({ payload }) => payload.events) + .filter(event => event.type === 'test') + const passedTest = events.find(event => + event.content.resource === 'cypress/e2e/basic-pass.js.basic pass suite can pass' + ) + + assertObjectContains(passedTest?.content, { + meta: { + [TEST_STATUS]: 'pass', + [TEST_FRAMEWORK]: 'cypress', + }, + }) + }, 20000) + + let testOutput = '' + const envVars = getCiVisAgentlessConfig(receiver.port) + + childProcess = exec( + './node_modules/.bin/cypress run --config-file cypress-auto-esm.config.mjs', + { + cwd, + env: { + ...envVars, + NODE_OPTIONS: '-r dd-trace/ci/init', + CYPRESS_BASE_URL: `http://localhost:${webAppPort}`, + SPEC_PATTERN: 'cypress/e2e/basic-pass.js', + }, + } + ) + childProcess.stdout?.on('data', (d) => { testOutput += d }) + childProcess.stderr?.on('data', (d) => { testOutput += d }) + + const [[exitCode]] = await Promise.all([ + once(childProcess, 'exit'), + receiverPromise, + ]) + + assert.strictEqual(exitCode, 0, `cypress process should exit successfully\n${testOutput}`) + }) + + over10It('reports tests when cypress.run is called twice (multi-run state reset)', async () => { + const receiverPromise = receiver + .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), (payloads) => { + const passedTests = payloads + .flatMap(({ payload }) => payload.events) + 
.filter(event => event.type === 'test') + .filter(event => event.content.resource === 'cypress/e2e/basic-pass.js.basic pass suite can pass') + + assert.strictEqual(passedTests.length, 2) + passedTests.forEach((passedTest) => { + assertObjectContains(passedTest.content, { + meta: { + [TEST_STATUS]: 'pass', + [TEST_FRAMEWORK]: 'cypress', + }, + }) + }) + }, 60000) + + const envVars = getCiVisAgentlessConfig(receiver.port) + + const doubleRunScript = type === 'esm' + ? 'node ./cypress-double-run.mjs' + : 'node ./cypress-double-run.js' + + childProcess = exec( + doubleRunScript, + { + cwd, + env: { + ...envVars, + CYPRESS_BASE_URL: `http://localhost:${webAppPort}`, + SPEC_PATTERN: 'cypress/e2e/basic-pass.js', + }, + } + ) + + const [[exitCode]] = await Promise.all([ + once(childProcess, 'exit'), + receiverPromise, + ]) + + assert.strictEqual(exitCode, 0, 'cypress process should exit successfully') + }) + + over10It( + 'reports tests with a plain-object config when dd-trace is manually configured', + async () => { + const receiverPromise = receiver + .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), (payloads) => { + const events = payloads + .flatMap(({ payload }) => payload.events) + .filter(event => event.type === 'test') + const passedTest = events.find(event => + event.content.resource === 'cypress/e2e/basic-pass.js.basic pass suite can pass' + ) + + assertObjectContains(passedTest?.content, { + meta: { + [TEST_STATUS]: 'pass', + [TEST_FRAMEWORK]: 'cypress', + }, + }) + }, 60000) + + const envVars = getCiVisAgentlessConfig(receiver.port) + + const plainObjectConfigFile = type === 'esm' + ? 
'cypress-plain-object-manual.config.mjs' + : 'cypress-plain-object-manual.config.js' + + childProcess = exec( + `./node_modules/.bin/cypress run --config-file ${plainObjectConfigFile}`, + { + cwd, + env: { + ...envVars, + CYPRESS_BASE_URL: `http://localhost:${webAppPort}`, + SPEC_PATTERN: 'cypress/e2e/basic-pass.js', + }, + } + ) + + const [[exitCode]] = await Promise.all([ + once(childProcess, 'exit'), + receiverPromise, + ]) + + assert.strictEqual(exitCode, 0, 'cypress process should exit successfully') + } + ) + + over10It( + 'auto-instruments a plain-object config without defineConfig or manual plugin', + async () => { + const receiverPromise = receiver + .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), (payloads) => { + const events = payloads + .flatMap(({ payload }) => payload.events) + .filter(event => event.type === 'test') + const passedTest = events.find(event => + event.content.resource === 'cypress/e2e/basic-pass.js.basic pass suite can pass' + ) + + assertObjectContains(passedTest?.content, { + meta: { + [TEST_STATUS]: 'pass', + [TEST_FRAMEWORK]: 'cypress', + }, + }) + }, 20000) + + const envVars = getCiVisAgentlessConfig(receiver.port) + + const plainObjectAutoConfigFile = type === 'esm' + ? 
'cypress-plain-object-auto.config.mjs' + : 'cypress-plain-object-auto.config.js' + + childProcess = exec( + `./node_modules/.bin/cypress run --config-file ${plainObjectAutoConfigFile}`, + { + cwd, + env: { + ...envVars, + CYPRESS_BASE_URL: `http://localhost:${webAppPort}`, + SPEC_PATTERN: 'cypress/e2e/basic-pass.js', + }, + } + ) + + const [[exitCode]] = await Promise.all([ + once(childProcess, 'exit'), + receiverPromise, + ]) + + assert.strictEqual(exitCode, 0, 'cypress process should exit successfully') + } + ) + + over10It( + 'auto-instruments a plain-object default config (no --config-file)', + async () => { + const originalConfig = path.join(cwd, 'cypress.config.js') + const backupConfig = path.join(cwd, 'cypress.config.js.bak') + const plainObjectConfig = path.join(cwd, 'cypress-plain-object-auto.config.js') + + // Replace default cypress.config.js with the plain-object config + fs.renameSync(originalConfig, backupConfig) + fs.copyFileSync(plainObjectConfig, originalConfig) + + try { + const receiverPromise = receiver + .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), (payloads) => { + const events = payloads + .flatMap(({ payload }) => payload.events) + .filter(event => event.type === 'test') + const passedTest = events.find(event => + event.content.resource === 'cypress/e2e/basic-pass.js.basic pass suite can pass' + ) + + assertObjectContains(passedTest?.content, { + meta: { + [TEST_STATUS]: 'pass', + [TEST_FRAMEWORK]: 'cypress', + }, + }) + }, 20000) + + const envVars = getCiVisAgentlessConfig(receiver.port) + + childProcess = exec( + './node_modules/.bin/cypress run', + { + cwd, + env: { + ...envVars, + CYPRESS_BASE_URL: `http://localhost:${webAppPort}`, + SPEC_PATTERN: 'cypress/e2e/basic-pass.js', + }, + } + ) + + const [[exitCode]] = await Promise.all([ + once(childProcess, 'exit'), + receiverPromise, + ]) + + assert.strictEqual(exitCode, 0, 'cypress process should exit successfully') + } finally { + 
fs.renameSync(backupConfig, originalConfig) + } + } + ) + + over10It('reports tests with a TypeScript config file', async () => { + const receiverPromise = receiver + .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), (payloads) => { + const events = payloads + .flatMap(({ payload }) => payload.events) + .filter(event => event.type === 'test') + const passedTest = events.find(event => + event.content.resource === 'cypress/e2e/basic-pass.js.basic pass suite can pass' + ) + + assertObjectContains(passedTest?.content, { + meta: { + [TEST_STATUS]: 'pass', + [TEST_FRAMEWORK]: 'cypress', + }, + }) + }, 20000) + + const envVars = getCiVisAgentlessConfig(receiver.port) + + childProcess = exec( + './node_modules/.bin/cypress run --config-file cypress-typescript.config.ts', + { + cwd, + env: { + ...envVars, + CYPRESS_BASE_URL: `http://localhost:${webAppPort}`, + SPEC_PATTERN: 'cypress/e2e/basic-pass.js', + }, + } + ) + + const [[exitCode]] = await Promise.all([ + once(childProcess, 'exit'), + receiverPromise, + ]) + + assert.strictEqual(exitCode, 0, 'cypress process should exit successfully') + }) + + over10It('does not modify the user support file and cleans up the injected wrapper', async () => { + const supportFilePath = path.join(cwd, 'cypress/support/e2e.js') + const originalSupportContent = fs.readFileSync(supportFilePath, 'utf8') + const supportContentWithoutDdTrace = originalSupportContent + .split('\n') + .filter(line => !line.includes("require('dd-trace/ci/cypress/support')")) + .join('\n') + + const getSupportWrappers = () => fs.readdirSync(os.tmpdir()) + .filter(filename => filename.startsWith('dd-cypress-support-')) + .sort() + + fs.writeFileSync(supportFilePath, supportContentWithoutDdTrace) + + const receiverPromise = receiver + .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), (payloads) => { + const events = payloads + .flatMap(({ payload }) => payload.events) + .filter(event => event.type === 'test') + 
const passedTest = events.find(event => + event.content.resource === 'cypress/e2e/basic-pass.js.basic pass suite can pass' + ) + + assertObjectContains(passedTest?.content, { + meta: { + [TEST_STATUS]: 'pass', + [TEST_FRAMEWORK]: 'cypress', + }, + }) + }, 60000) + + const envVars = getCiVisAgentlessConfig(receiver.port) + const wrapperFilesBefore = getSupportWrappers() + + try { + childProcess = exec(testCommand, { + cwd, + env: { + ...envVars, + CYPRESS_BASE_URL: `http://localhost:${webAppPort}`, + SPEC_PATTERN: 'cypress/e2e/basic-pass.js', + }, + }) + + const [[exitCode]] = await Promise.all([ + once(childProcess, 'exit'), + receiverPromise, + ]) + + assert.strictEqual(exitCode, 0, 'cypress process should exit successfully') + assert.strictEqual(fs.readFileSync(supportFilePath, 'utf8'), supportContentWithoutDdTrace) + assert.doesNotMatch(fs.readFileSync(supportFilePath, 'utf8'), /dd-trace\/ci\/cypress\/support/) + assert.deepStrictEqual(getSupportWrappers(), wrapperFilesBefore) + } finally { + fs.writeFileSync(supportFilePath, originalSupportContent) + } + }) + + over10It('preserves config returned from setupNodeEvents', async () => { + const receiverPromise = receiver + .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), (payloads) => { + const events = payloads + .flatMap(({ payload }) => payload.events) + .filter(event => event.type === 'test') + const passedTest = events.find(event => + event.content.resource === + 'cypress/e2e/returned-config.cy.js.returned config uses env from setupNodeEvents return value' + ) + + assertObjectContains(passedTest?.content, { + meta: { + [TEST_STATUS]: 'pass', + [TEST_FRAMEWORK]: 'cypress', + }, + }) + }, 60000) + + const envVars = getCiVisAgentlessConfig(receiver.port) + + const returnConfigFile = type === 'esm' + ? 
'cypress-return-config.config.mjs' + : 'cypress-return-config.config.js' + + childProcess = exec( + `./node_modules/.bin/cypress run --config-file ${returnConfigFile}`, + { + cwd, + env: envVars, + } + ) + + const [[exitCode]] = await Promise.all([ + once(childProcess, 'exit'), + receiverPromise, + ]) + + assert.strictEqual(exitCode, 0, 'cypress process should exit successfully') + }) + + over10It('custom after:spec and after:run handlers are chained with dd-trace instrumentation', async () => { + const receiverPromise = receiver + .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), (payloads) => { + const events = payloads + .flatMap(({ payload }) => payload.events) + .filter(event => event.type === 'test') + const passedTest = events.find(event => + event.content.resource === 'cypress/e2e/basic-pass.js.basic pass suite can pass' + ) + assertObjectContains(passedTest?.content, { + meta: { + [TEST_STATUS]: 'pass', + [TEST_FRAMEWORK]: 'cypress', + }, + }) + }, 60000) + + const envVars = getCiVisAgentlessConfig(receiver.port) + + let testOutput = '' + const customHooksConfigFile = type === 'esm' + ? 
'cypress-custom-after-hooks.config.mjs' + : 'cypress-custom-after-hooks.config.js' + + childProcess = exec( + `./node_modules/.bin/cypress run --config-file ${customHooksConfigFile}`, + { + cwd, + env: { + ...envVars, + CYPRESS_BASE_URL: `http://localhost:${webAppPort}`, + SPEC_PATTERN: 'cypress/e2e/basic-pass.js', + }, + } + ) + childProcess.stdout?.on('data', (d) => { testOutput += d }) + childProcess.stderr?.on('data', (d) => { testOutput += d }) + + await Promise.all([ + once(childProcess, 'exit'), + once(childProcess.stdout, 'end'), + once(childProcess.stderr, 'end'), + receiverPromise, + ]) + + // Verify both dd-trace spans AND the custom handlers ran (including their async resolutions) + assert.match(testOutput, /\[custom:after:spec\]/) + assert.match(testOutput, /\[custom:after:spec:resolved\]/) + assert.match(testOutput, /\[custom:after:run\]/) + assert.match(testOutput, /\[custom:after:run:resolved\]/) + }) + + // Tests the old manual API: dd-trace/ci/cypress/after-run and after-spec + // used alongside the manual plugin, without NODE_OPTIONS auto-instrumentation. 
+ over10It('works if after:run and after:spec are explicitly used with the manual plugin', async () => { + const receiverPromise = receiver + .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), (payloads) => { + const events = payloads.flatMap(({ payload }) => payload.events) + const testSessionEvent = events.find(event => event.type === 'test_session_end') + assert.ok(testSessionEvent) + const testEvents = events.filter(event => event.type === 'test') + assert.ok(testEvents.length > 0) + }, 30000) + + const envVars = getCiVisAgentlessConfig(receiver.port) childProcess = exec( - command, + testCommand, { cwd, env: { - ...restEnvVars, + ...envVars, CYPRESS_BASE_URL: `http://localhost:${webAppPort}`, - SPEC_PATTERN: specToRun, + CYPRESS_ENABLE_AFTER_RUN_CUSTOM: '1', + CYPRESS_ENABLE_AFTER_SPEC_CUSTOM: '1', + CYPRESS_ENABLE_MANUAL_PLUGIN: '1', + SPEC_PATTERN: 'cypress/e2e/basic-pass.js', }, } ) @@ -319,16 +863,14 @@ moduleTypes.forEach(({ }) over12It('reports correct source file and line for pre-compiled typescript test files', async function () { - const { NODE_OPTIONS, ...restEnvVars } = getCiVisAgentlessConfig(receiver.port) + const envVars = getCiVisAgentlessConfig(receiver.port) try { cleanupPrecompiledSourceLineDist(cwd) // Compile the TypeScript spec to JS + source map so the plugin can resolve // the original TypeScript source file and line via the adjacent .js.map file. - // We intentionally run with NODE_OPTIONS removed because sandboxed CWDs may not - // have local preload paths (e.g. -r ./ci/init) set by outer test environments. 
- compilePrecompiledTypeScriptSpecs(cwd, restEnvVars) + compilePrecompiledTypeScriptSpecs(cwd, envVars) const receiverPromise = receiver .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), (payloads) => { @@ -379,7 +921,7 @@ moduleTypes.forEach(({ childProcess = exec(testCommand, { cwd, env: { - ...restEnvVars, + ...envVars, CYPRESS_BASE_URL: `http://localhost:${webAppPort}`, SPEC_PATTERN: 'cypress/e2e/dist/spec-source-line.cy.js', }, @@ -434,12 +976,12 @@ moduleTypes.forEach(({ }) over12It('uses declaration scanning fallback when invocationDetails line is invalid', async function () { - const { NODE_OPTIONS, ...restEnvVars } = getCiVisAgentlessConfig(receiver.port) + const envVars = getCiVisAgentlessConfig(receiver.port) try { cleanupPrecompiledSourceLineDist(cwd) - compilePrecompiledTypeScriptSpecs(cwd, restEnvVars) + compilePrecompiledTypeScriptSpecs(cwd, envVars) const receiverPromise = receiver .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), (payloads) => { @@ -465,7 +1007,7 @@ moduleTypes.forEach(({ childProcess = exec(testCommand, { cwd, env: { - ...restEnvVars, + ...envVars, CYPRESS_BASE_URL: `http://localhost:${webAppPort}`, SPEC_PATTERN: 'cypress/e2e/dist/spec-source-line-fallback.cy.js', }, @@ -480,12 +1022,12 @@ moduleTypes.forEach(({ over12It('keeps original invocationDetails line when no declaration match is found', async function () { this.timeout(140000) - const { NODE_OPTIONS, ...restEnvVars } = getCiVisAgentlessConfig(receiver.port) + const envVars = getCiVisAgentlessConfig(receiver.port) try { cleanupPrecompiledSourceLineDist(cwd) - compilePrecompiledTypeScriptSpecs(cwd, restEnvVars) + compilePrecompiledTypeScriptSpecs(cwd, envVars) const receiverPromise = receiver .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), (payloads) => { @@ -513,7 +1055,7 @@ moduleTypes.forEach(({ childProcess = exec(testCommand, { cwd, env: { - ...restEnvVars, + ...envVars, CYPRESS_BASE_URL: 
`http://localhost:${webAppPort}`, SPEC_PATTERN: 'cypress/e2e/dist/spec-source-line-no-match.cy.js', }, @@ -527,7 +1069,7 @@ moduleTypes.forEach(({ }) over12It('uses invocationDetails line directly for plain javascript specs without source maps', async function () { - const { NODE_OPTIONS, ...restEnvVars } = getCiVisAgentlessConfig(receiver.port) + const envVars = getCiVisAgentlessConfig(receiver.port) const receiverPromise = receiver .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), (payloads) => { @@ -555,7 +1097,7 @@ moduleTypes.forEach(({ childProcess = exec(testCommand, { cwd, env: { - ...restEnvVars, + ...envVars, CYPRESS_BASE_URL: `http://localhost:${webAppPort}`, SPEC_PATTERN: 'cypress/e2e/spec-source-line-invocation.cy.js', }, @@ -609,14 +1151,14 @@ moduleTypes.forEach(({ ) }, 60000) - const { NODE_OPTIONS, ...restEnvVars } = getCiVisAgentlessConfig(receiver.port) + const envVars = getCiVisAgentlessConfig(receiver.port) // Run Cypress directly with the TypeScript spec file — no manual compilation step. // Cypress compiles .cy.ts files on the fly via its own preprocessor/bundler. 
childProcess = exec(testCommand, { cwd, env: { - ...restEnvVars, + ...envVars, CYPRESS_BASE_URL: `http://localhost:${webAppPort}`, SPEC_PATTERN: 'cypress/e2e/spec-source-line.cy.ts', }, @@ -626,46 +1168,8 @@ moduleTypes.forEach(({ assert.strictEqual(exitCode, 0, 'cypress process should exit successfully') }) - if (version === '6.7.0') { - // to be removed when we drop support for cypress@6.7.0 - it('logs a warning if using a deprecated version of cypress', async () => { - let stdout = '' - const { - NODE_OPTIONS, - ...restEnvVars - } = getCiVisEvpProxyConfig(receiver.port) - - childProcess = exec( - `${testCommand} --spec cypress/e2e/spec.cy.js`, - { - cwd, - env: { - ...restEnvVars, - CYPRESS_BASE_URL: `http://localhost:${webAppPort}`, - }, - } - ) - - childProcess.stdout?.on('data', (chunk) => { - stdout += chunk.toString() - }) - - await Promise.all([ - once(childProcess, 'exit'), - once(childProcess.stdout, 'end'), - ]) - assert.match( - stdout, - /WARNING: dd-trace support for Cypress<10.2.0 is deprecated and will not be supported in future versions of dd-trace./ - ) - }) - } - it('tags session and children with _dd.ci.library_configuration_error when settings fails 4xx', async () => { - const { - NODE_OPTIONS, // NODE_OPTIONS dd-trace config does not work with cypress - ...restEnvVars - } = getCiVisAgentlessConfig(receiver.port) + const envVars = getCiVisAgentlessConfig(receiver.port) receiver.setSettingsResponseCode(404) const eventsPromise = receiver @@ -685,7 +1189,7 @@ moduleTypes.forEach(({ { cwd, env: { - ...restEnvVars, + ...envVars, CYPRESS_BASE_URL: `http://localhost:${webAppPort}`, SPEC_PATTERN: 'cypress/e2e/spec.cy.js', }, @@ -697,9 +1201,8 @@ moduleTypes.forEach(({ it('does not crash if badly init', async () => { const { - NODE_OPTIONS, // NODE_OPTIONS dd-trace config does not work with cypress DD_CIVISIBILITY_AGENTLESS_URL, - ...restEnvVars + ...envVars } = getCiVisAgentlessConfig(receiver.port) let hasReceivedEvents = false @@ -715,7 +1218,7 @@ 
moduleTypes.forEach(({ { cwd, env: { - ...restEnvVars, + ...envVars, CYPRESS_BASE_URL: `http://localhost:${webAppPort}`, DD_SITE: '= invalid = url', SPEC_PATTERN: 'cypress/e2e/spec.cy.js', @@ -755,14 +1258,15 @@ moduleTypes.forEach(({ it('can run and report tests', async () => { const receiverPromise = receiver .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), payloads => { - const metadataDicts = payloads.flatMap(({ payload }) => payload.metadata) + const ciVisPayloads = payloads.filter(({ payload }) => payload.metadata?.test) + const ciVisMetadataDicts = ciVisPayloads.flatMap(({ payload }) => payload.metadata) - metadataDicts.forEach(metadata => { + ciVisMetadataDicts.forEach(metadata => { for (const testLevel of TEST_LEVEL_EVENT_TYPES) { assert.strictEqual(metadata[testLevel][TEST_SESSION_NAME], 'my-test-session') } }) - const events = payloads.flatMap(({ payload }) => payload.events) + const events = ciVisPayloads.flatMap(({ payload }) => payload.events) const testSessionEvent = events.find(event => event.type === 'test_session_end') const testModuleEvent = events.find(event => event.type === 'test_module_end') @@ -896,17 +1400,14 @@ moduleTypes.forEach(({ assert.match(describeHookSuite.content.meta[ERROR_MESSAGE], /error in after hook/) }, 25000) - const { - NODE_OPTIONS, // NODE_OPTIONS dd-trace config does not work with cypress - ...restEnvVars - } = getCiVisEvpProxyConfig(receiver.port) + const envVars = getCiVisEvpProxyConfig(receiver.port) childProcess = exec( testCommand, { cwd, env: { - ...restEnvVars, + ...envVars, CYPRESS_BASE_URL: `http://localhost:${webAppPort}`, DD_TAGS: 'test.customtag:customvalue,test.customtag2:customvalue2', DD_TEST_SESSION_NAME: 'my-test-session', @@ -923,10 +1424,7 @@ moduleTypes.forEach(({ }) it('can report code coverage if it is available', async () => { - const { - NODE_OPTIONS, // NODE_OPTIONS dd-trace config does not work with cypress - ...restEnvVars - } = getCiVisAgentlessConfig(receiver.port) 
+ const envVars = getCiVisAgentlessConfig(receiver.port) const receiverPromise = receiver.gatherPayloadsMaxTimeout(({ url }) => url === '/api/v2/citestcov', payloads => { const [{ payload: coveragePayloads }] = payloads @@ -953,7 +1451,7 @@ moduleTypes.forEach(({ { cwd, env: { - ...restEnvVars, + ...envVars, CYPRESS_BASE_URL: `http://localhost:${webAppPort}`, SPEC_PATTERN: 'cypress/e2e/spec.cy.js', }, @@ -975,17 +1473,14 @@ moduleTypes.forEach(({ const packfileRequestPromise = receiver .payloadReceived(({ url }) => url.endsWith('/api/v2/git/repository/packfile'), 25000) - const { - NODE_OPTIONS, // NODE_OPTIONS dd-trace config does not work with cypress - ...restEnvVars - } = getCiVisAgentlessConfig(receiver.port) + const envVars = getCiVisAgentlessConfig(receiver.port) childProcess = exec( testCommand, { cwd, env: { - ...restEnvVars, + ...envVars, CYPRESS_BASE_URL: `http://localhost:${webAppPort}`, SPEC_PATTERN: 'cypress/e2e/spec.cy.js', }, @@ -1023,17 +1518,14 @@ moduleTypes.forEach(({ assertObjectContains(eventTypes, ['test', 'test_session_end', 'test_module_end', 'test_suite_end']) }, 25000) - const { - NODE_OPTIONS, - ...restEnvVars - } = getCiVisAgentlessConfig(receiver.port) + const envVars = getCiVisAgentlessConfig(receiver.port) childProcess = exec( testCommand, { cwd, env: { - ...restEnvVars, + ...envVars, CYPRESS_BASE_URL: `http://localhost:${webAppPort}`, SPEC_PATTERN: 'cypress/e2e/spec.cy.js', }, @@ -1094,17 +1586,14 @@ moduleTypes.forEach(({ assert.strictEqual(skippableRequest.headers['dd-api-key'], '1') }) - const { - NODE_OPTIONS, - ...restEnvVars - } = getCiVisAgentlessConfig(receiver.port) + const envVars = getCiVisAgentlessConfig(receiver.port) childProcess = exec( testCommand, { cwd, env: { - ...restEnvVars, + ...envVars, CYPRESS_BASE_URL: `http://localhost:${webAppPort}`, SPEC_PATTERN: 'cypress/e2e/{other,spec}.cy.js', }, @@ -1152,17 +1641,14 @@ moduleTypes.forEach(({ assert.strictEqual(notSkippedTest.content.meta[TEST_STATUS], 'pass') }, 
25000) - const { - NODE_OPTIONS, - ...restEnvVars - } = getCiVisAgentlessConfig(receiver.port) + const envVars = getCiVisAgentlessConfig(receiver.port) childProcess = exec( testCommand, { cwd, env: { - ...restEnvVars, + ...envVars, CYPRESS_BASE_URL: `http://localhost:${webAppPort}`, SPEC_PATTERN: 'cypress/e2e/other.cy.js', }, @@ -1230,17 +1716,14 @@ moduleTypes.forEach(({ assert.ok(!(TEST_ITR_FORCED_RUN in unskippableFailedTest.content.meta)) }, 25000) - const { - NODE_OPTIONS, - ...restEnvVars - } = getCiVisAgentlessConfig(receiver.port) + const envVars = getCiVisAgentlessConfig(receiver.port) childProcess = exec( testCommand, { cwd, env: { - ...restEnvVars, + ...envVars, CYPRESS_BASE_URL: `http://localhost:${webAppPort}`, SPEC_PATTERN: 'cypress/e2e/{other,spec}.cy.js', }, @@ -1302,17 +1785,14 @@ moduleTypes.forEach(({ assert.ok(!(TEST_ITR_FORCED_RUN in unskippableFailedTest.content.meta)) }, 25000) - const { - NODE_OPTIONS, - ...restEnvVars - } = getCiVisAgentlessConfig(receiver.port) + const envVars = getCiVisAgentlessConfig(receiver.port) childProcess = exec( testCommand, { cwd, env: { - ...restEnvVars, + ...envVars, CYPRESS_BASE_URL: `http://localhost:${webAppPort}`, SPEC_PATTERN: 'cypress/e2e/{other,spec}.cy.js', }, @@ -1358,17 +1838,14 @@ moduleTypes.forEach(({ assert.strictEqual(skippableRequest.headers['dd-api-key'], '1') }) - const { - NODE_OPTIONS, - ...restEnvVars - } = getCiVisAgentlessConfig(receiver.port) + const envVars = getCiVisAgentlessConfig(receiver.port) childProcess = exec( testCommand, { cwd, env: { - ...restEnvVars, + ...envVars, CYPRESS_BASE_URL: `http://localhost:${webAppPort}`, SPEC_PATTERN: 'cypress/e2e/spec.cy.js', }, @@ -1398,17 +1875,14 @@ moduleTypes.forEach(({ }) }, 25000) - const { - NODE_OPTIONS, - ...restEnvVars - } = getCiVisAgentlessConfig(receiver.port) + const envVars = getCiVisAgentlessConfig(receiver.port) childProcess = exec( testCommand, { cwd, env: { - ...restEnvVars, + ...envVars, CYPRESS_BASE_URL: 
`http://localhost:${webAppPort}`, SPEC_PATTERN: 'cypress/e2e/spec.cy.js', }, @@ -1438,10 +1912,7 @@ moduleTypes.forEach(({ command = `node --loader=${hookFile} ../../cypress-esm-config.mjs` } - const { - NODE_OPTIONS, // NODE_OPTIONS dd-trace config does not work with cypress - ...restEnvVars - } = getCiVisAgentlessConfig(receiver.port) + const envVars = getCiVisAgentlessConfig(receiver.port) const eventsPromise = receiver .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcov'), (payloads) => { @@ -1464,7 +1935,7 @@ moduleTypes.forEach(({ { cwd: `${cwd}/ci-visibility/subproject`, env: { - ...restEnvVars, + ...envVars, CYPRESS_BASE_URL: `http://localhost:${webAppPort}`, }, } @@ -1482,10 +1953,7 @@ moduleTypes.forEach(({ }) it('still reports correct format if there is a plugin incompatibility', async () => { - const { - NODE_OPTIONS, // NODE_OPTIONS dd-trace config does not work with cypress - ...restEnvVars - } = getCiVisEvpProxyConfig(receiver.port) + const envVars = getCiVisEvpProxyConfig(receiver.port) const receiverPromise = receiver .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), payloads => { @@ -1507,7 +1975,7 @@ moduleTypes.forEach(({ { cwd, env: { - ...restEnvVars, + ...envVars, CYPRESS_BASE_URL: `http://localhost:${webAppPort}`, CYPRESS_ENABLE_INCOMPATIBLE_PLUGIN: '1', SPEC_PATTERN: 'cypress/e2e/spec.cy.js', @@ -1521,45 +1989,6 @@ moduleTypes.forEach(({ ]) }) - it('works if after:run and after:spec are explicitly used', async () => { - const receiverPromise = receiver - .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), payloads => { - const events = payloads.flatMap(({ payload }) => payload.events) - const testSessionEvent = events.find(event => event.type === 'test_session_end') - assert.ok(testSessionEvent) - const testModuleEvent = events.find(event => event.type === 'test_module_end') - assert.ok(testModuleEvent) - const testSuiteEvents = events.filter(event => event.type === 
'test_suite_end') - assert.strictEqual(testSuiteEvents.length, 4) - const testEvents = events.filter(event => event.type === 'test') - assert.strictEqual(testEvents.length, 9) - }, 30000) - - const { - NODE_OPTIONS, // NODE_OPTIONS dd-trace config does not work with cypress - ...restEnvVars - } = getCiVisEvpProxyConfig(receiver.port) - - childProcess = exec( - testCommand, - { - cwd, - env: { - ...restEnvVars, - CYPRESS_BASE_URL: `http://localhost:${webAppPort}`, - CYPRESS_ENABLE_AFTER_RUN_CUSTOM: '1', - CYPRESS_ENABLE_AFTER_SPEC_CUSTOM: '1', - SPEC_PATTERN: 'cypress/e2e/{spec,other,hook-describe-error,hook-test-error}.cy.js', - }, - } - ) - - await Promise.all([ - once(childProcess, 'exit'), - receiverPromise, - ]) - }) - context('early flake detection', () => { it('retries new tests', async () => { receiver.setSettings({ @@ -1609,10 +2038,7 @@ moduleTypes.forEach(({ assert.strictEqual(testSession.meta[TEST_EARLY_FLAKE_ENABLED], 'true') }, 25000) - const { - NODE_OPTIONS, // NODE_OPTIONS dd-trace config does not work with cypress - ...restEnvVars - } = getCiVisEvpProxyConfig(receiver.port) + const envVars = getCiVisEvpProxyConfig(receiver.port) const specToRun = 'cypress/e2e/spec.cy.js' @@ -1621,7 +2047,7 @@ moduleTypes.forEach(({ { cwd, env: { - ...restEnvVars, + ...envVars, CYPRESS_BASE_URL: `http://localhost:${webAppPort}`, SPEC_PATTERN: specToRun, }, @@ -1654,10 +2080,7 @@ moduleTypes.forEach(({ }, }) - const { - NODE_OPTIONS, // NODE_OPTIONS dd-trace config does not work with cypress - ...restEnvVars - } = getCiVisEvpProxyConfig(receiver.port) + const envVars = getCiVisEvpProxyConfig(receiver.port) const receiverPromise = receiver .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), payloads => { @@ -1682,7 +2105,7 @@ moduleTypes.forEach(({ { cwd, env: { - ...restEnvVars, + ...envVars, CYPRESS_BASE_URL: `http://localhost:${webAppPort}`, SPEC_PATTERN: specToRun, DD_CIVISIBILITY_EARLY_FLAKE_DETECTION_ENABLED: 'false', @@ -1711,10 +2134,7 
@@ moduleTypes.forEach(({ cypress: {}, }) - const { - NODE_OPTIONS, // NODE_OPTIONS dd-trace config does not work with cypress - ...restEnvVars - } = getCiVisEvpProxyConfig(receiver.port) + const envVars = getCiVisEvpProxyConfig(receiver.port) const receiverPromise = receiver .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), payloads => { @@ -1739,7 +2159,7 @@ moduleTypes.forEach(({ { cwd, env: { - ...restEnvVars, + ...envVars, CYPRESS_BASE_URL: `http://localhost:${webAppPort}`, SPEC_PATTERN: 'cypress/e2e/skipped-test.js', }, @@ -1768,10 +2188,7 @@ moduleTypes.forEach(({ cypress: {}, }) - const { - NODE_OPTIONS, // NODE_OPTIONS dd-trace config does not work with cypress - ...restEnvVars - } = getCiVisEvpProxyConfig(receiver.port) + const envVars = getCiVisEvpProxyConfig(receiver.port) // Request module waits before retrying; browser runs are slow — need longer gather timeout const receiverPromise = receiver @@ -1796,7 +2213,7 @@ moduleTypes.forEach(({ { cwd, env: { - ...restEnvVars, + ...envVars, CYPRESS_BASE_URL: `http://localhost:${webAppPort}`, SPEC_PATTERN: specToRun, }, @@ -1829,10 +2246,7 @@ moduleTypes.forEach(({ }, }) - const { - NODE_OPTIONS, // NODE_OPTIONS dd-trace config does not work with cypress - ...restEnvVars - } = getCiVisEvpProxyConfig(receiver.port) + const envVars = getCiVisEvpProxyConfig(receiver.port) const receiverPromise = receiver .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), payloads => { @@ -1857,7 +2271,7 @@ moduleTypes.forEach(({ { cwd, env: { - ...restEnvVars, + ...envVars, CYPRESS_BASE_URL: `http://localhost:${webAppPort}`, SPEC_PATTERN: specToRun, DD_CIVISIBILITY_EARLY_FLAKE_DETECTION_ENABLED: 'false', @@ -1890,10 +2304,7 @@ moduleTypes.forEach(({ }, }) - const { - NODE_OPTIONS, // NODE_OPTIONS dd-trace config does not work with cypress - ...restEnvVars - } = getCiVisEvpProxyConfig(receiver.port) + const envVars = getCiVisEvpProxyConfig(receiver.port) const receiverPromise = 
receiver .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), payloads => { @@ -1919,7 +2330,7 @@ moduleTypes.forEach(({ { cwd, env: { - ...restEnvVars, + ...envVars, CYPRESS_BASE_URL: `http://localhost:${webAppPort}`, SPEC_PATTERN: specToRun, }, @@ -1976,10 +2387,7 @@ moduleTypes.forEach(({ }) }, 25000) - const { - NODE_OPTIONS, // NODE_OPTIONS dd-trace config does not work with cypress - ...restEnvVars - } = getCiVisEvpProxyConfig(receiver.port) + const envVars = getCiVisEvpProxyConfig(receiver.port) const specToRun = 'cypress/e2e/spec.cy.js' @@ -1988,7 +2396,7 @@ moduleTypes.forEach(({ { cwd, env: { - ...restEnvVars, + ...envVars, CYPRESS_BASE_URL: `http://localhost:${webAppPort}`, SPEC_PATTERN: specToRun, CYPRESS_TEST_ISOLATION: 'false', @@ -2076,10 +2484,7 @@ moduleTypes.forEach(({ }) }, 25000) - const { - NODE_OPTIONS, - ...restEnvVars - } = getCiVisEvpProxyConfig(receiver.port) + const envVars = getCiVisEvpProxyConfig(receiver.port) const specToRun = 'cypress/e2e/spec.cy.js' @@ -2088,7 +2493,7 @@ moduleTypes.forEach(({ { cwd, env: { - ...restEnvVars, + ...envVars, CYPRESS_BASE_URL: `http://localhost:${webAppPort}`, SPEC_PATTERN: specToRun, }, @@ -2195,10 +2600,7 @@ moduleTypes.forEach(({ assert.equal(testExecutionOrder[9].isRetry, false) }, 30000) - const { - NODE_OPTIONS, // NODE_OPTIONS dd-trace config does not work with cypress - ...restEnvVars - } = getCiVisEvpProxyConfig(receiver.port) + const envVars = getCiVisEvpProxyConfig(receiver.port) const specToRun = 'cypress/e2e/flaky-test-retries.js' @@ -2207,7 +2609,7 @@ moduleTypes.forEach(({ { cwd, env: { - ...restEnvVars, + ...envVars, CYPRESS_BASE_URL: `http://localhost:${webAppPort}`, SPEC_PATTERN: specToRun, }, @@ -2253,10 +2655,7 @@ moduleTypes.forEach(({ assert.ok(!tests.some(test => test.meta[TEST_RETRY_REASON] === TEST_RETRY_REASON_TYPES.atr)) }, 25000) - const { - NODE_OPTIONS, // NODE_OPTIONS dd-trace config does not work with cypress - ...restEnvVars - } = 
getCiVisEvpProxyConfig(receiver.port) + const envVars = getCiVisEvpProxyConfig(receiver.port) const specToRun = 'cypress/e2e/flaky-test-retries.js' @@ -2265,7 +2664,7 @@ moduleTypes.forEach(({ { cwd, env: { - ...restEnvVars, + ...envVars, CYPRESS_BASE_URL: `http://localhost:${webAppPort}`, DD_CIVISIBILITY_FLAKY_RETRY_ENABLED: 'false', SPEC_PATTERN: specToRun, @@ -2314,10 +2713,7 @@ moduleTypes.forEach(({ ) }, 25000) - const { - NODE_OPTIONS, // NODE_OPTIONS dd-trace config does not work with cypress - ...restEnvVars - } = getCiVisEvpProxyConfig(receiver.port) + const envVars = getCiVisEvpProxyConfig(receiver.port) const specToRun = 'cypress/e2e/flaky-test-retries.js' @@ -2326,7 +2722,7 @@ moduleTypes.forEach(({ { cwd, env: { - ...restEnvVars, + ...envVars, CYPRESS_BASE_URL: `http://localhost:${webAppPort}`, DD_CIVISIBILITY_FLAKY_RETRY_COUNT: '1', SPEC_PATTERN: specToRun, @@ -2368,10 +2764,7 @@ moduleTypes.forEach(({ assert.strictEqual(lastFailed.meta[TEST_RETRY_REASON], TEST_RETRY_REASON_TYPES.atr) }, 25000) - const { - NODE_OPTIONS, - ...restEnvVars - } = getCiVisEvpProxyConfig(receiver.port) + const envVars = getCiVisEvpProxyConfig(receiver.port) const specToRun = 'cypress/e2e/flaky-test-retries.js' @@ -2380,7 +2773,7 @@ moduleTypes.forEach(({ { cwd, env: { - ...restEnvVars, + ...envVars, CYPRESS_BASE_URL: `http://localhost:${webAppPort}`, DD_CIVISIBILITY_FLAKY_RETRY_COUNT: '1', SPEC_PATTERN: specToRun, @@ -2418,10 +2811,7 @@ moduleTypes.forEach(({ assert.equal(tests.filter(test => test.meta[TEST_RETRY_REASON] === TEST_RETRY_REASON_TYPES.atr).length, 0) }, 30000) - const { - NODE_OPTIONS, // NODE_OPTIONS dd-trace config does not work with cypress - ...restEnvVars - } = getCiVisEvpProxyConfig(receiver.port) + const envVars = getCiVisEvpProxyConfig(receiver.port) const specToRun = 'cypress/e2e/flaky-test-retries.js' @@ -2430,7 +2820,7 @@ moduleTypes.forEach(({ { cwd, env: { - ...restEnvVars, + ...envVars, CYPRESS_BASE_URL: `http://localhost:${webAppPort}`, 
SPEC_PATTERN: specToRun, CYPRESS_TEST_ISOLATION: 'false', @@ -2457,10 +2847,7 @@ moduleTypes.forEach(({ command = `node --loader=${hookFile} ../../cypress-esm-config.mjs` } - const { - NODE_OPTIONS, // NODE_OPTIONS dd-trace config does not work with cypress - ...restEnvVars - } = getCiVisAgentlessConfig(receiver.port) + const envVars = getCiVisAgentlessConfig(receiver.port) const eventsPromise = receiver .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), (payloads) => { @@ -2479,7 +2866,7 @@ moduleTypes.forEach(({ { cwd: `${cwd}/ci-visibility/subproject`, env: { - ...restEnvVars, + ...envVars, CYPRESS_BASE_URL: `http://localhost:${webAppPort}`, }, } @@ -2530,11 +2917,6 @@ moduleTypes.forEach(({ 25000 ) - const { - NODE_OPTIONS, // NODE_OPTIONS dd-trace config does not work with cypress - ...restEnvVars - } = getCiVisEvpProxyConfig(receiver.port) - const specToRun = 'cypress/e2e/dynamic-name-test.cy.js' childProcess = exec( @@ -2542,7 +2924,7 @@ moduleTypes.forEach(({ { cwd, env: { - ...restEnvVars, + ...getCiVisEvpProxyConfig(receiver.port), CYPRESS_BASE_URL: `http://localhost:${webAppPort}`, SPEC_PATTERN: specToRun, }, @@ -2573,10 +2955,7 @@ moduleTypes.forEach(({ }, }) - const { - NODE_OPTIONS, // NODE_OPTIONS dd-trace config does not work with cypress - ...restEnvVars - } = getCiVisEvpProxyConfig(receiver.port) + const envVars = getCiVisEvpProxyConfig(receiver.port) const receiverPromise = receiver .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), payloads => { @@ -2601,7 +2980,7 @@ moduleTypes.forEach(({ { cwd, env: { - ...restEnvVars, + ...envVars, CYPRESS_BASE_URL: `http://localhost:${webAppPort}`, SPEC_PATTERN: specToRun, DD_CIVISIBILITY_EARLY_FLAKE_DETECTION_ENABLED: 'false', @@ -2619,14 +2998,12 @@ moduleTypes.forEach(({ // cy.origin is not available in old versions of Cypress if (version === 'latest') { it('does not crash for multi origin tests', async () => { - const { - NODE_OPTIONS, // NODE_OPTIONS 
dd-trace config does not work with cypress - ...restEnvVars - } = getCiVisEvpProxyConfig(receiver.port) + const envVars = getCiVisEvpProxyConfig(receiver.port) const receiverPromise = receiver .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), payloads => { const events = payloads.flatMap(({ payload }) => payload.events) + .filter(event => event.type !== 'span') assert.strictEqual(events.length, 4) const test = events.find(event => event.type === 'test').content @@ -2656,7 +3033,7 @@ moduleTypes.forEach(({ { cwd, env: { - ...restEnvVars, + ...envVars, CYPRESS_BASE_URL: `http://localhost:${webAppPort}`, CYPRESS_BASE_URL_SECOND: `http://localhost:${secondWebAppPort}`, SPEC_PATTERN: specToRun, @@ -2686,17 +3063,14 @@ moduleTypes.forEach(({ }) }, 25000) - const { - NODE_OPTIONS, // NODE_OPTIONS dd-trace config does not work with cypress - ...restEnvVars - } = getCiVisEvpProxyConfig(receiver.port) + const envVars = getCiVisEvpProxyConfig(receiver.port) childProcess = exec( testCommand, { cwd, env: { - ...restEnvVars, + ...envVars, CYPRESS_BASE_URL: `http://localhost:${webAppPort}`, DD_SERVICE: 'my-service', SPEC_PATTERN: 'cypress/e2e/spec.cy.js', @@ -2835,10 +3209,7 @@ moduleTypes.forEach(({ isDisabled, }) - const { - NODE_OPTIONS, - ...restEnvVars - } = getCiVisEvpProxyConfig(receiver.port) + const envVars = getCiVisEvpProxyConfig(receiver.port) const specToRun = 'cypress/e2e/attempt-to-fix.js' @@ -2847,7 +3218,7 @@ moduleTypes.forEach(({ { cwd, env: { - ...restEnvVars, + ...envVars, CYPRESS_BASE_URL: `http://localhost:${webAppPort}`, SPEC_PATTERN: specToRun, ...extraEnvVars, @@ -2903,6 +3274,62 @@ moduleTypes.forEach(({ await runAttemptToFixTest({ extraEnvVars: { DD_TEST_MANAGEMENT_ENABLED: '0' } }) }) + it('does not tag known attempt to fix tests as new', async () => { + receiver.setKnownTests({ + cypress: { + 'cypress/e2e/attempt-to-fix.js': [ + 'attempt to fix is attempt to fix', + ], + }, + }) + receiver.setSettings({ + test_management: { 
enabled: true, attempt_to_fix_retries: 2 }, + early_flake_detection: { + enabled: true, + slow_test_retries: { '5s': 2 }, + faulty_session_threshold: 100, + }, + known_tests_enabled: true, + }) + + const eventsPromise = receiver + .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), (payloads) => { + const events = payloads.flatMap(({ payload }) => payload.events) + const tests = events.filter(event => event.type === 'test').map(event => event.content) + const atfTests = tests.filter( + t => t.meta[TEST_MANAGEMENT_IS_ATTEMPT_TO_FIX] === 'true' + ) + assert.ok(atfTests.length > 0) + for (const test of atfTests) { + assert.ok( + !(TEST_IS_NEW in test.meta), + 'ATF test that is in known tests should not be tagged as new' + ) + } + }, 25000) + + const envVars = getCiVisEvpProxyConfig(receiver.port) + const specToRun = 'cypress/e2e/attempt-to-fix.js' + + childProcess = exec( + version === 'latest' ? testCommand : `${testCommand} --spec ${specToRun}`, + { + cwd, + env: { + ...envVars, + CYPRESS_BASE_URL: `http://localhost:${webAppPort}`, + SPEC_PATTERN: specToRun, + CYPRESS_SHOULD_ALWAYS_PASS: '1', + }, + } + ) + + await Promise.all([ + once(childProcess, 'exit'), + eventsPromise, + ]) + }) + /** * TODO: * The spec says that quarantined tests that are not attempted to fix should be run and their result ignored. 
@@ -3004,10 +3431,7 @@ moduleTypes.forEach(({ const runDisableTest = async (isDisabling, extraEnvVars = {}) => { const testAssertionsPromise = getTestAssertions(isDisabling) - const { - NODE_OPTIONS, - ...restEnvVars - } = getCiVisEvpProxyConfig(receiver.port) + const envVars = getCiVisEvpProxyConfig(receiver.port) const specToRun = 'cypress/e2e/disable.js' @@ -3016,7 +3440,7 @@ moduleTypes.forEach(({ { cwd, env: { - ...restEnvVars, + ...envVars, CYPRESS_BASE_URL: `http://localhost:${webAppPort}`, SPEC_PATTERN: specToRun, ...extraEnvVars, @@ -3107,10 +3531,7 @@ moduleTypes.forEach(({ const runQuarantineTest = async (isQuarantining, extraEnvVars = {}) => { const testAssertionsPromise = getTestAssertions(isQuarantining) - const { - NODE_OPTIONS, - ...restEnvVars - } = getCiVisEvpProxyConfig(receiver.port) + const envVars = getCiVisEvpProxyConfig(receiver.port) const specToRun = 'cypress/e2e/quarantine.js' @@ -3119,7 +3540,7 @@ moduleTypes.forEach(({ { cwd, env: { - ...restEnvVars, + ...envVars, CYPRESS_BASE_URL: `http://localhost:${webAppPort}`, SPEC_PATTERN: specToRun, ...extraEnvVars, @@ -3178,10 +3599,7 @@ moduleTypes.forEach(({ assert.strictEqual(tests.length, 1) }, 60000) - const { - NODE_OPTIONS, - ...restEnvVars - } = getCiVisEvpProxyConfig(receiver.port) + const envVars = getCiVisEvpProxyConfig(receiver.port) const specToRun = 'cypress/e2e/attempt-to-fix.js' @@ -3190,7 +3608,7 @@ moduleTypes.forEach(({ { cwd, env: { - ...restEnvVars, + ...envVars, CYPRESS_BASE_URL: `http://localhost:${webAppPort}`, SPEC_PATTERN: specToRun, DD_TRACE_DEBUG: '1', @@ -3250,10 +3668,7 @@ moduleTypes.forEach(({ }) }, 25000) - const { - NODE_OPTIONS, - ...restEnvVars - } = getCiVisEvpProxyConfig(receiver.port) + const envVars = getCiVisEvpProxyConfig(receiver.port) const specToRun = 'cypress/e2e/attempt-to-fix.js' @@ -3262,7 +3677,7 @@ moduleTypes.forEach(({ { cwd, env: { - ...restEnvVars, + ...envVars, CYPRESS_BASE_URL: `http://localhost:${webAppPort}`, SPEC_PATTERN: specToRun, 
CYPRESS_SHOULD_ALWAYS_PASS: '1', @@ -3341,10 +3756,7 @@ moduleTypes.forEach(({ }) }, 25000) - const { - NODE_OPTIONS, - ...restEnvVars - } = getCiVisEvpProxyConfig(receiver.port) + const envVars = getCiVisEvpProxyConfig(receiver.port) const specToRun = 'cypress/e2e/attempt-to-fix-order.js' @@ -3353,7 +3765,7 @@ moduleTypes.forEach(({ { cwd, env: { - ...restEnvVars, + ...envVars, CYPRESS_BASE_URL: `http://localhost:${webAppPort}`, SPEC_PATTERN: specToRun, }, @@ -3382,7 +3794,9 @@ moduleTypes.forEach(({ it('adds capabilities to tests', async () => { const receiverPromise = receiver .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), payloads => { - const metadataDicts = payloads.flatMap(({ payload }) => payload.metadata) + const metadataDicts = payloads + .filter(({ payload }) => payload.metadata?.test) + .flatMap(({ payload }) => payload.metadata) assert.ok(metadataDicts.length > 0) metadataDicts.forEach(metadata => { @@ -3399,10 +3813,7 @@ moduleTypes.forEach(({ }) }, 25000) - const { - NODE_OPTIONS, - ...restEnvVars - } = getCiVisEvpProxyConfig(receiver.port) + const envVars = getCiVisEvpProxyConfig(receiver.port) const specToRun = 'cypress/e2e/spec.cy.js' @@ -3411,7 +3822,7 @@ moduleTypes.forEach(({ { cwd, env: { - ...restEnvVars, + ...envVars, CYPRESS_BASE_URL: `http://localhost:${webAppPort}`, DD_TEST_SESSION_NAME: 'my-test-session-name', SPEC_PATTERN: specToRun, @@ -3542,10 +3953,7 @@ moduleTypes.forEach(({ ) => { const testAssertionsPromise = getTestAssertions({ isModified, isEfd, isNew }) - const { - NODE_OPTIONS, - ...restEnvVars - } = getCiVisEvpProxyConfig(receiver.port) + const envVars = getCiVisEvpProxyConfig(receiver.port) const specToRun = 'cypress/e2e/impacted-test.js' @@ -3554,7 +3962,7 @@ moduleTypes.forEach(({ { cwd, env: { - ...restEnvVars, + ...envVars, CYPRESS_BASE_URL: `http://localhost:${webAppPort}`, SPEC_PATTERN: specToRun, GITHUB_BASE_REF: '', @@ -3661,10 +4069,7 @@ moduleTypes.forEach(({ 
assert.equal(retriedTests.length, 0) }, 25000) - const { - NODE_OPTIONS, - ...restEnvVars - } = getCiVisEvpProxyConfig(receiver.port) + const envVars = getCiVisEvpProxyConfig(receiver.port) const specToRun = 'cypress/e2e/impacted-test.js' @@ -3673,7 +4078,7 @@ moduleTypes.forEach(({ { cwd, env: { - ...restEnvVars, + ...envVars, CYPRESS_BASE_URL: `http://localhost:${webAppPort}`, SPEC_PATTERN: specToRun, GITHUB_BASE_REF: '', @@ -3754,10 +4159,7 @@ moduleTypes.forEach(({ }) }, 25000) - const { - NODE_OPTIONS, - ...restEnvVars - } = getCiVisEvpProxyConfig(receiver.port) + const envVars = getCiVisEvpProxyConfig(receiver.port) const specToRun = 'cypress/e2e/impacted-test-order.js' @@ -3766,7 +4168,7 @@ moduleTypes.forEach(({ { cwd, env: { - ...restEnvVars, + ...envVars, CYPRESS_BASE_URL: `http://localhost:${webAppPort}`, SPEC_PATTERN: specToRun, GITHUB_BASE_REF: '', diff --git a/integration-tests/cypress/e2e/returned-config.cy.js b/integration-tests/cypress/e2e/returned-config.cy.js new file mode 100644 index 00000000000..b8206ac74ba --- /dev/null +++ b/integration-tests/cypress/e2e/returned-config.cy.js @@ -0,0 +1,6 @@ +/* eslint-disable */ +describe('returned config', () => { + it('uses env from setupNodeEvents return value', () => { + expect(Cypress.env('RETURNED_CONFIG_FLAG')).to.equal('true') + }) +}) diff --git a/integration-tests/debugger/tracing-integration.spec.js b/integration-tests/debugger/tracing-integration.spec.js index c654c88f382..048ba645b76 100644 --- a/integration-tests/debugger/tracing-integration.spec.js +++ b/integration-tests/debugger/tracing-integration.spec.js @@ -4,6 +4,45 @@ const assert = require('assert') const { setup, testBasicInput, testBasicInputWithoutDD } = require('./utils') describe('Dynamic Instrumentation', function () { + describe('DD_TRACE_ENABLED=true, DD_TRACE_128_BIT_TRACEID_GENERATION_ENABLED=true', function () { + const t = setup({ + testApp: 'target-app/basic.js', + env: { DD_TRACE_ENABLED: 'true', 
DD_TRACE_128_BIT_TRACEID_GENERATION_ENABLED: true }, + dependencies: ['fastify'], + }) + + describe('input messages', function () { + it('should capture and send expected payload when a log line probe is triggered', testBasicInput.bind(null, t)) + }) + }) + + describe('DD_TRACE_ENABLED=true, DD_TRACE_128_BIT_TRACEID_GENERATION_ENABLED=false', function () { + const t = setup({ + testApp: 'target-app/basic.js', + env: { DD_TRACE_ENABLED: 'true', DD_TRACE_128_BIT_TRACEID_GENERATION_ENABLED: false }, + dependencies: ['fastify'], + }) + + describe('input messages', function () { + it('should capture and send expected payload when a log line probe is triggered', testBasicInput.bind(null, t)) + }) + }) + + describe('DD_TRACE_ENABLED=false', function () { + const t = setup({ + testApp: 'target-app/basic.js', + env: { DD_TRACE_ENABLED: 'false' }, + dependencies: ['fastify'], + }) + + describe('input messages', function () { + it( + 'should capture and send expected payload when a log line probe is triggered', + testBasicInputWithoutDD.bind(null, t) + ) + }) + }) + describe('DD_TRACING_ENABLED=true, DD_TRACE_128_BIT_TRACEID_GENERATION_ENABLED=true', function () { const t = setup({ testApp: 'target-app/basic.js', diff --git a/integration-tests/debugger/utils.js b/integration-tests/debugger/utils.js index 0074f88943d..1af0b399653 100644 --- a/integration-tests/debugger/utils.js +++ b/integration-tests/debugger/utils.js @@ -304,14 +304,15 @@ function setupAssertionListeners (t, done, probe) { let traceId, spanId, dd const messageListener = ({ payload }) => { - const span = payload.find((arr) => arr[0].name === 'fastify.request')?.[0] + const span = payload + .flat() + .find((span) => span.name === 'fastify.request' && (!dd || span.span_id.toString() === dd.span_id)) + if (!span) return traceId = span.trace_id.toString() spanId = span.span_id.toString() - t.agent.removeListener('message', messageListener) - assertDD() } @@ -336,6 +337,7 @@ function setupAssertionListeners (t, 
done, probe) { if (!traceId || !spanId || !dd) return assert.strictEqual(dd.trace_id, traceId) assert.strictEqual(dd.span_id, spanId) + t.agent.removeListener('message', messageListener) done() } } diff --git a/integration-tests/init.spec.js b/integration-tests/init.spec.js index 30674a338d8..79340120571 100644 --- a/integration-tests/init.spec.js +++ b/integration-tests/init.spec.js @@ -273,7 +273,9 @@ describe('init.js', () => { // or on 18.0.0 in particular. if (semver.satisfies(process.versions.node, '>=14.13.1')) { describe('initialize.mjs', () => { - setShouldKill(false) + // Node 20.0.0 can leave short-lived loader-based children alive after they + // print the expected output, so terminate them after a short grace period. + setShouldKill(process.versions.node === '20.0.0') useSandbox() stubTracerIfNeeded() diff --git a/integration-tests/jest/jest.spec.js b/integration-tests/jest/jest.spec.js index bd69529db43..fbb3260ad7d 100644 --- a/integration-tests/jest/jest.spec.js +++ b/integration-tests/jest/jest.spec.js @@ -996,6 +996,36 @@ describe(`jest@${JEST_VERSION} commonJS`, () => { }).catch(done) }) }) + + onlyLatestIt('does not hang when tests use fake timers and Failed Test Replay is enabled', async () => { + receiver.setSettings({ + flaky_test_retries_enabled: true, + di_enabled: true, + }) + + const eventsPromise = receiver + .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), (payloads) => { + const events = payloads.flatMap(({ payload }) => payload.events) + const tests = events.filter(event => event.type === 'test').map(event => event.content) + // Must have 2 tests: 1 original + 1 ATR retry + assert.strictEqual(tests.length, 2) + const retriedTests = tests.filter(t => t.meta[TEST_IS_RETRY] === 'true') + assert.strictEqual(retriedTests.length, 1) + }) + + childProcess = exec(runTestsCommand, { + cwd, + env: { + ...getCiVisAgentlessConfig(receiver.port), + TESTS_TO_RUN: 'jest-flaky/fake-timers-flaky-fails', + 
DD_CIVISIBILITY_FLAKY_RETRY_COUNT: '1', + SHOULD_CHECK_RESULTS: '1', + }, + }) + + const [[exitCode]] = await Promise.all([once(childProcess, 'exit'), eventsPromise]) + assert.strictEqual(exitCode, 1) + }) }) context('when jest is using worker threads', () => { @@ -5560,6 +5590,74 @@ describe(`jest@${JEST_VERSION} commonJS`, () => { ]) }) + it('does not tag known attempt to fix tests as new', async () => { + receiver.setKnownTests({ + jest: { + 'ci-visibility/jest-flaky/flaky-fails.js': [ + 'test-flaky-test-retries can retry failed tests', + ], + }, + }) + receiver.setSettings({ + test_management: { enabled: true, attempt_to_fix_retries: 2 }, + early_flake_detection: { + enabled: true, + slow_test_retries: { + '5s': 2, + }, + faulty_session_threshold: 100, + }, + known_tests_enabled: true, + }) + + receiver.setTestManagementTests({ + jest: { + suites: { + 'ci-visibility/jest-flaky/flaky-fails.js': { + tests: { + 'test-flaky-test-retries can retry failed tests': { + properties: { + attempt_to_fix: true, + }, + }, + }, + }, + }, + }, + }) + const eventsPromise = receiver + .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), (payloads) => { + const events = payloads.flatMap(({ payload }) => payload.events) + const tests = events.filter(event => event.type === 'test').map(event => event.content) + const atfTests = tests.filter( + t => t.meta[TEST_MANAGEMENT_IS_ATTEMPT_TO_FIX] === 'true' + ) + assert.ok(atfTests.length > 0) + for (const test of atfTests) { + assert.ok( + !(TEST_IS_NEW in test.meta), + 'ATF test that is in known tests should not be tagged as new' + ) + } + }) + + childProcess = exec( + runTestsCommand, + { + cwd, + env: { + ...getCiVisAgentlessConfig(receiver.port), + TESTS_TO_RUN: 'jest-flaky/flaky-fails.js', + }, + } + ) + + await Promise.all([ + once(childProcess, 'exit'), + eventsPromise, + ]) + }) + it('resets mock state between attempt to fix retries', async () => { const NUM_RETRIES = 3 receiver.setSettings({ 
test_management: { enabled: true, attempt_to_fix_retries: NUM_RETRIES } }) diff --git a/integration-tests/mocha/mocha.spec.js b/integration-tests/mocha/mocha.spec.js index 68c6a93f176..9ed82aa46d4 100644 --- a/integration-tests/mocha/mocha.spec.js +++ b/integration-tests/mocha/mocha.spec.js @@ -105,6 +105,7 @@ describe(`mocha@${MOCHA_VERSION}`, function () { 'nyc', 'mocha-each', 'workerpool', + 'sinon', ], true ) @@ -3811,6 +3812,41 @@ describe(`mocha@${MOCHA_VERSION}`, function () { }) }) + onlyLatestIt('does not hang when tests use fake timers and Failed Test Replay is enabled', async () => { + receiver.setSettings({ + flaky_test_retries_enabled: true, + di_enabled: true, + }) + + const eventsPromise = receiver + .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), (payloads) => { + const events = payloads.flatMap(({ payload }) => payload.events) + const tests = events.filter(event => event.type === 'test').map(event => event.content) + assert.strictEqual(tests.length, 2) + const retriedTests = tests.filter( + t => t.meta[TEST_IS_RETRY] === 'true' + ) + assert.strictEqual(retriedTests.length, 1) + }) + + childProcess = exec( + runTestsCommand, + { + cwd, + env: { + ...getCiVisAgentlessConfig(receiver.port), + TESTS_TO_RUN: JSON.stringify([ + './dynamic-instrumentation/fake-timers-test-hit-breakpoint', + ]), + DD_CIVISIBILITY_FLAKY_RETRY_COUNT: '1', + }, + } + ) + + const [[exitCode]] = await Promise.all([once(childProcess, 'exit'), eventsPromise]) + assert.strictEqual(exitCode, 0) + }) + it('tags new tests with dynamic names and logs a warning', async () => { receiver.setKnownTests({ mocha: {} }) receiver.setSettings({ @@ -4153,6 +4189,59 @@ describe(`mocha@${MOCHA_VERSION}`, function () { runAttemptToFixTest(done, { extraEnvVars: { DD_TEST_MANAGEMENT_ENABLED: '0' } }) }) + onlyLatestIt('does not tag known attempt to fix tests as new', async () => { + receiver.setKnownTests({ + mocha: { + 
'ci-visibility/test-management/test-attempt-to-fix-1.js': [ + 'attempt to fix tests can attempt to fix a test', + ], + }, + }) + receiver.setSettings({ + test_management: { enabled: true, attempt_to_fix_retries: 2 }, + early_flake_detection: { + enabled: true, + slow_test_retries: { '5s': 2 }, + faulty_session_threshold: 100, + }, + known_tests_enabled: true, + }) + + const eventsPromise = receiver + .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), (payloads) => { + const events = payloads.flatMap(({ payload }) => payload.events) + const tests = events.filter(event => event.type === 'test').map(event => event.content) + const atfTests = tests.filter( + t => t.meta[TEST_MANAGEMENT_IS_ATTEMPT_TO_FIX] === 'true' + ) + assert.ok(atfTests.length > 0) + for (const test of atfTests) { + assert.ok( + !(TEST_IS_NEW in test.meta), + 'ATF test that is in known tests should not be tagged as new' + ) + } + }) + + childProcess = exec( + runTestsCommand, + { + cwd, + env: { + ...getCiVisAgentlessConfig(receiver.port), + TESTS_TO_RUN: JSON.stringify([ + './test-management/test-attempt-to-fix-1.js', + ]), + }, + } + ) + + await Promise.all([ + once(childProcess, 'exit'), + eventsPromise, + ]) + }) + onlyLatestIt('does not fail retry if a test is quarantined', (done) => { receiver.setSettings({ test_management: { enabled: true, attempt_to_fix_retries: 3 } }) receiver.setTestManagementTests({ diff --git a/integration-tests/opentelemetry.spec.js b/integration-tests/opentelemetry.spec.js index 2e4e93e1f84..b0feff6328e 100644 --- a/integration-tests/opentelemetry.spec.js +++ b/integration-tests/opentelemetry.spec.js @@ -50,10 +50,12 @@ function nearNow (ts, now = Date.now(), range = 1000) { return delta < range && delta >= 0 } -describe('opentelemetry', () => { - let agent +describe('opentelemetry', function () { + this.timeout(20000) + + let agent = /** @type {FakeAgent | null} */ (null) let proc - let cwd + let cwd = /** @type {string} */ ('') const timeout 
= 5000 const dependencies = [ '@opentelemetry/api@1.8.0', @@ -75,14 +77,14 @@ describe('opentelemetry', () => { after(async () => { await stopProc(proc) - await agent.stop() + await agent?.stop() }) it("should not capture telemetry DD and OTEL vars don't conflict", async () => { proc = fork(join(cwd, 'opentelemetry/basic.js'), { cwd, env: { - DD_TRACE_AGENT_PORT: agent.port, + DD_TRACE_AGENT_PORT: agent?.port, DD_TRACE_OTEL_ENABLED: '1', DD_TELEMETRY_HEARTBEAT_INTERVAL: '1', TIMEOUT: '1500', @@ -114,7 +116,7 @@ describe('opentelemetry', () => { proc = fork(join(cwd, 'opentelemetry/basic.js'), { cwd, env: { - DD_TRACE_AGENT_PORT: agent.port, + DD_TRACE_AGENT_PORT: agent?.port, DD_TRACE_OTEL_ENABLED: '1', DD_TELEMETRY_HEARTBEAT_INTERVAL: '1', TIMEOUT: '1500', @@ -147,42 +149,20 @@ describe('opentelemetry', () => { const otelHiding = metrics.series.filter(({ metric }) => metric === 'otel.env.hiding') const otelInvalid = metrics.series.filter(({ metric }) => metric === 'otel.env.invalid') - assert.strictEqual(otelHiding.length, 9) - assert.strictEqual(otelInvalid.length, 0) - - assert.deepStrictEqual(otelHiding[0].tags, [ - 'config_datadog:dd_trace_log_level', 'config_opentelemetry:otel_log_level', - ]) - assert.deepStrictEqual(otelHiding[1].tags, [ - 'config_datadog:dd_trace_propagation_style', 'config_opentelemetry:otel_propagators', - ]) - assert.deepStrictEqual(otelHiding[2].tags, [ - 'config_datadog:dd_service', 'config_opentelemetry:otel_service_name', - ]) - - assert.deepStrictEqual(otelHiding[3].tags, [ - 'config_datadog:dd_trace_sample_rate', 'config_opentelemetry:otel_traces_sampler', - ]) - - assert.deepStrictEqual(otelHiding[4].tags, [ - 'config_datadog:dd_trace_sample_rate', 'config_opentelemetry:otel_traces_sampler_arg', - ]) - - assert.deepStrictEqual(otelHiding[5].tags, [ - 'config_datadog:dd_trace_enabled', 'config_opentelemetry:otel_traces_exporter', - ]) - - assert.deepStrictEqual(otelHiding[6].tags, [ - 'config_datadog:dd_runtime_metrics_enabled', 
'config_opentelemetry:otel_metrics_exporter', - ]) - assert.deepStrictEqual(otelHiding[7].tags, [ - 'config_datadog:dd_tags', 'config_opentelemetry:otel_resource_attributes', - ]) + assert.deepStrictEqual(sortMetricTags(otelHiding), sortMetricTags([ + ['config_datadog:dd_trace_log_level', 'config_opentelemetry:otel_log_level'], + ['config_datadog:dd_trace_propagation_style', 'config_opentelemetry:otel_propagators'], + ['config_datadog:dd_service', 'config_opentelemetry:otel_service_name'], + ['config_datadog:dd_trace_sample_rate', 'config_opentelemetry:otel_traces_sampler'], + ['config_datadog:dd_trace_sample_rate', 'config_opentelemetry:otel_traces_sampler_arg'], + ['config_datadog:dd_trace_enabled', 'config_opentelemetry:otel_traces_exporter'], + ['config_datadog:dd_runtime_metrics_enabled', 'config_opentelemetry:otel_metrics_exporter'], + ['config_datadog:dd_tags', 'config_opentelemetry:otel_resource_attributes'], + ['config_datadog:dd_trace_otel_enabled', 'config_opentelemetry:otel_sdk_disabled'], + ])) - assert.deepStrictEqual(otelHiding[8].tags, [ - 'config_datadog:dd_trace_otel_enabled', 'config_opentelemetry:otel_sdk_disabled', - ]) + assert.deepStrictEqual(sortMetricTags(otelInvalid), []) for (const metric of otelHiding) { assert.strictEqual(metric.points[0][1], 1) @@ -194,7 +174,7 @@ describe('opentelemetry', () => { proc = fork(join(cwd, 'opentelemetry/basic.js'), { cwd, env: { - DD_TRACE_AGENT_PORT: agent.port, + DD_TRACE_AGENT_PORT: agent?.port, DD_TRACE_OTEL_ENABLED: '1', DD_TELEMETRY_HEARTBEAT_INTERVAL: '1', TIMEOUT: '1500', @@ -221,47 +201,20 @@ describe('opentelemetry', () => { const otelHiding = metrics.series.filter(({ metric }) => metric === 'otel.env.hiding') const otelInvalid = metrics.series.filter(({ metric }) => metric === 'otel.env.invalid') - assert.strictEqual(otelHiding.length, 1) - assert.strictEqual(otelInvalid.length, 8) - - assert.deepStrictEqual(otelHiding[0].tags, [ - 'config_datadog:dd_trace_otel_enabled', 
'config_opentelemetry:otel_sdk_disabled', - ]) - - assert.deepStrictEqual(otelInvalid[0].tags, [ - 'config_datadog:dd_trace_log_level', 'config_opentelemetry:otel_log_level', - ]) - - assert.deepStrictEqual(otelInvalid[1].tags, [ - 'config_datadog:dd_trace_sample_rate', - 'config_opentelemetry:otel_traces_sampler', - ]) - - assert.deepStrictEqual(otelInvalid[2].tags, [ - 'config_datadog:dd_trace_sample_rate', - 'config_opentelemetry:otel_traces_sampler_arg', - ]) - assert.deepStrictEqual(otelInvalid[3].tags, [ - 'config_datadog:dd_trace_enabled', 'config_opentelemetry:otel_traces_exporter', - ]) - - assert.deepStrictEqual(otelInvalid[4].tags, [ - 'config_datadog:dd_runtime_metrics_enabled', - 'config_opentelemetry:otel_metrics_exporter', - ]) - - assert.deepStrictEqual(otelInvalid[5].tags, [ - 'config_datadog:dd_trace_otel_enabled', 'config_opentelemetry:otel_sdk_disabled', - ]) - - assert.deepStrictEqual(otelInvalid[6].tags, [ - 'config_opentelemetry:otel_logs_exporter', - ]) - - assert.deepStrictEqual(otelInvalid[7].tags, [ - 'config_datadog:dd_trace_propagation_style', - 'config_opentelemetry:otel_propagators', - ]) + assert.deepStrictEqual(sortMetricTags(otelHiding), sortMetricTags([ + ['config_datadog:dd_trace_otel_enabled', 'config_opentelemetry:otel_sdk_disabled'], + ])) + + assert.deepStrictEqual(sortMetricTags(otelInvalid), sortMetricTags([ + ['config_datadog:dd_trace_log_level', 'config_opentelemetry:otel_log_level'], + ['config_datadog:dd_trace_propagation_style', 'config_opentelemetry:otel_propagators'], + ['config_opentelemetry:otel_logs_exporter'], + ['config_datadog:dd_trace_sample_rate', 'config_opentelemetry:otel_traces_sampler'], + ['config_datadog:dd_trace_sample_rate', 'config_opentelemetry:otel_traces_sampler_arg'], + ['config_datadog:dd_trace_enabled', 'config_opentelemetry:otel_traces_exporter'], + ['config_datadog:dd_runtime_metrics_enabled', 'config_opentelemetry:otel_metrics_exporter'], + ['config_datadog:dd_trace_otel_enabled', 
'config_opentelemetry:otel_sdk_disabled'], + ])) for (const metric of otelInvalid) { assert.strictEqual(metric.points[0][1], 1) @@ -273,7 +226,7 @@ describe('opentelemetry', () => { proc = fork(join(cwd, 'opentelemetry/basic.js'), { cwd, env: { - DD_TRACE_AGENT_PORT: agent.port, + DD_TRACE_AGENT_PORT: agent?.port, }, }) await check(agent, proc, timeout, ({ payload }) => { @@ -292,7 +245,7 @@ describe('opentelemetry', () => { proc = fork(join(cwd, 'opentelemetry/basic.js'), { cwd, env: { - DD_TRACE_AGENT_PORT: agent.port, + DD_TRACE_AGENT_PORT: agent?.port, DD_TRACE_OTEL_ENABLED: '1', DD_TELEMETRY_HEARTBEAT_INTERVAL: '1', TIMEOUT: '1500', @@ -334,7 +287,7 @@ describe('opentelemetry', () => { proc = fork(join(cwd, 'opentelemetry/auto-instrumentation.js'), { cwd, env: { - DD_TRACE_AGENT_PORT: agent.port, + DD_TRACE_AGENT_PORT: agent?.port, DD_TRACE_OTEL_ENABLED: '1', SERVER_PORT, DD_TRACE_DISABLED_INSTRUMENTATIONS: 'http,dns,express,net', @@ -378,7 +331,7 @@ describe('opentelemetry', () => { proc = fork(join(cwd, 'opentelemetry/server.js'), { cwd, env: { - DD_TRACE_AGENT_PORT: agent.port, + DD_TRACE_AGENT_PORT: agent?.port, }, }) await check(agent, proc, timeout, ({ payload }) => { @@ -407,7 +360,7 @@ describe('opentelemetry', () => { proc = fork(join(cwd, 'opentelemetry/auto-instrumentation.js'), { cwd, env: { - DD_TRACE_AGENT_PORT: agent.port, + DD_TRACE_AGENT_PORT: agent?.port, DD_TRACE_OTEL_ENABLED: '1', SERVER_PORT, DD_TRACE_DISABLED_INSTRUMENTATIONS: 'http,dns,express,net', @@ -456,18 +409,12 @@ describe('opentelemetry', () => { proc = fork(join(cwd, 'opentelemetry/env-var.js'), { cwd, env: { - DD_TRACE_AGENT_PORT: agent.port, + DD_TRACE_AGENT_PORT: agent?.port, }, }) await check(agent, proc, timeout, ({ payload }) => { - // Should have a single trace with a single span - assert.strictEqual(payload.length, 1) - const [trace] = payload - assert.strictEqual(trace.length, 1) - const [span] = trace - - // Should be the expected otel span - 
assert.strictEqual(span.name, 'otel-sub') + const trace = payload.find(trace => trace.length === 1 && trace[0].name === 'otel-sub') + assert.ok(trace) }) }) }) @@ -477,3 +424,9 @@ function isChildOf (childSpan, parentSpan) { assert.notStrictEqual(childSpan.span_id.toString(), parentSpan.span_id.toString()) assert.strictEqual(childSpan.parent_id.toString(), parentSpan.span_id.toString()) } + +function sortMetricTags (metrics) { + return metrics + .map(metric => Array.isArray(metric) ? metric : metric.tags) + .sort((a, b) => a.join(',').localeCompare(b.join(','))) +} diff --git a/integration-tests/package-guardrails.spec.js b/integration-tests/package-guardrails.spec.js index 7b8ec191930..563a5beab41 100644 --- a/integration-tests/package-guardrails.spec.js +++ b/integration-tests/package-guardrails.spec.js @@ -13,7 +13,8 @@ const { const NODE_OPTIONS = '--require dd-trace/init.js' const DD_TRACE_DEBUG = 'true' const DD_INJECTION_ENABLED = 'tracing' -const DD_LOG_LEVEL = 'error' +const DD_LOG_LEVEL = 'info' +const DD_TRACE_FLUSH_INTERVAL = '0' const NODE_MAJOR = Number(process.versions.node.split('.')[0]) const FASTIFY_DEP = NODE_MAJOR < 20 ? 
'fastify@4' : 'fastify' @@ -41,6 +42,17 @@ describe('package guardrails', () => { )) }) + context('when flushing and DD_INJECTION_ENABLED', () => { + useEnv({ DD_INJECTION_ENABLED, DD_TRACE_FLUSH_INTERVAL }) + + it('should send abort.integration on first flush via diagnostic channel', () => + testFile('package-guardrails/flush.js', 'false\n', + ['complete', 'injection_forced:false', + 'abort.integration', 'integration:bluebird,integration_version:1.0.0', + ] + )) + }) + context('with logging disabled', () => { it('should not instrument the package', () => runTest('false\n', [])) }) @@ -50,8 +62,9 @@ describe('package guardrails', () => { it('should not instrument the package', () => runTest(`Application instrumentation bootstrapping complete -Found incompatible integration version: bluebird@1.0.0 false +instrumentation source: manual +Found incompatible integration version: bluebird@1.0.0 `, [])) }) }) diff --git a/integration-tests/package-guardrails/flush.js b/integration-tests/package-guardrails/flush.js new file mode 100644 index 00000000000..d7943e7a4c8 --- /dev/null +++ b/integration-tests/package-guardrails/flush.js @@ -0,0 +1,19 @@ +'use strict' + +// Remove only the register.js beforeExit handler so this test verifies +// that abort.integration comes from the first flush diagnostic channel. 
+const beforeExitHandlers = globalThis[Symbol.for('dd-trace')].beforeExitHandlers +for (const handler of beforeExitHandlers) { + if (handler.name === 'logAbortedIntegrations') { + beforeExitHandlers.delete(handler) + } +} + +const tracer = require('dd-trace') +const P = require('bluebird') + +const isWrapped = P.prototype._then.toString().includes('AsyncResource') +tracer.trace('first.flush.guardrails', () => {}) + +// eslint-disable-next-line no-console +console.log(isWrapped) diff --git a/integration-tests/playwright/playwright.spec.js b/integration-tests/playwright/playwright.spec.js index 030ce61b34e..b1c7010c7e3 100644 --- a/integration-tests/playwright/playwright.spec.js +++ b/integration-tests/playwright/playwright.spec.js @@ -84,7 +84,8 @@ versions.forEach((version) => { this.retries(2) this.timeout(80000) - useSandbox([`@playwright/test@${version}`, '@types/node', 'typescript'], true) + // TODO: Update tests files accordingly and test with different TS versions + useSandbox([`@playwright/test@${version}`, '@types/node', 'typescript@5'], true) before(function (done) { // Increase timeout for this hook specifically to account for slow chromium installation in CI @@ -1659,6 +1660,59 @@ versions.forEach((version) => { await runAttemptToFixTest({ extraEnvVars: { DD_TEST_MANAGEMENT_ENABLED: '0' } }) }) + it('does not tag known attempt to fix tests as new', async () => { + receiver.setKnownTests({ + playwright: { + 'attempt-to-fix-test.js': [ + 'attempt to fix should attempt to fix failed test', + 'attempt to fix should attempt to fix passed test', + ], + }, + }) + receiver.setSettings({ + test_management: { enabled: true, attempt_to_fix_retries: 2 }, + early_flake_detection: { + enabled: true, + slow_test_retries: { '5s': 2 }, + faulty_session_threshold: 100, + }, + known_tests_enabled: true, + }) + + const eventsPromise = receiver + .gatherPayloadsMaxTimeout(({ url }) => url === '/api/v2/citestcycle', (payloads) => { + const events = payloads.flatMap(({ payload 
}) => payload.events) + const tests = events.filter(event => event.type === 'test').map(event => event.content) + const atfTests = tests.filter( + t => t.meta[TEST_MANAGEMENT_IS_ATTEMPT_TO_FIX] === 'true' + ) + assert.ok(atfTests.length > 0) + for (const test of atfTests) { + assert.ok( + !(TEST_IS_NEW in test.meta), + 'ATF test that is in known tests should not be tagged as new' + ) + } + }) + + childProcess = exec( + './node_modules/.bin/playwright test -c playwright.config.js attempt-to-fix-test.js', + { + cwd, + env: { + ...getCiVisAgentlessConfig(receiver.port), + PW_BASE_URL: `http://localhost:${webAppPort}`, + TEST_DIR: './ci-visibility/playwright-tests-test-management', + }, + } + ) + + await Promise.all([ + once(childProcess, 'exit'), + eventsPromise, + ]) + }) + it('does not fail retry if a test is quarantined', async () => { receiver.setSettings({ test_management: { enabled: true, attempt_to_fix_retries: ATTEMPT_TO_FIX_NUM_RETRIES }, diff --git a/integration-tests/telemetry.spec.js b/integration-tests/telemetry.spec.js index 462b36bc777..617d45c3486 100644 --- a/integration-tests/telemetry.spec.js +++ b/integration-tests/telemetry.spec.js @@ -26,7 +26,7 @@ describe('telemetry', () => { proc = await spawnProc(startupTestFile, { cwd, env: { - AGENT_PORT: agent.port, + AGENT_PORT: String(agent.port), DD_LOGS_INJECTION: 'true', }, }) @@ -66,9 +66,9 @@ describe('telemetry', () => { await agent.assertTelemetryReceived(msg => { const { configuration } = msg.payload.payload assertObjectContains(configuration, [ - { name: 'DD_LOG_INJECTION', value: true, origin: 'default' }, - { name: 'DD_LOG_INJECTION', value: true, origin: 'env_var' }, - { name: 'DD_LOG_INJECTION', value: false, origin: 'code' }, + { name: 'DD_LOGS_INJECTION', value: true, origin: 'default' }, + { name: 'DD_LOGS_INJECTION', value: true, origin: 'env_var' }, + { name: 'DD_LOGS_INJECTION', value: false, origin: 'code' }, ]) }, 'app-started', 5_000, 1) }) diff --git 
a/integration-tests/vitest/vitest.spec.js b/integration-tests/vitest/vitest.spec.js index 105f67b46a6..20f37a296e0 100644 --- a/integration-tests/vitest/vitest.spec.js +++ b/integration-tests/vitest/vitest.spec.js @@ -1639,6 +1639,37 @@ versions.forEach((version) => { }).catch(done) }) }) + + it('does not hang when tests use fake timers and Failed Test Replay is enabled', async () => { + receiver.setSettings({ + flaky_test_retries_enabled: true, + di_enabled: true, + }) + + const eventsPromise = receiver + .gatherPayloadsMaxTimeout(({ url }) => url === '/api/v2/citestcycle', (payloads) => { + const events = payloads.flatMap(({ payload }) => payload.events) + const tests = events.filter(event => event.type === 'test').map(event => event.content) + assert.strictEqual(tests.length, 2) + const retriedTests = tests.filter(t => t.meta[TEST_IS_RETRY] === 'true') + assert.strictEqual(retriedTests.length, 1) + }) + + childProcess = exec( + './node_modules/.bin/vitest run --retry=1', + { + cwd, + env: { + ...getCiVisAgentlessConfig(receiver.port), + TEST_DIR: 'ci-visibility/vitest-tests/fake-timers-di*', + NODE_OPTIONS: '--import dd-trace/register.js -r dd-trace/ci/init', + }, + } + ) + + const [[exitCode]] = await Promise.all([once(childProcess, 'exit'), eventsPromise]) + assert.strictEqual(exitCode, 1) + }) }) } @@ -1931,6 +1962,58 @@ versions.forEach((version) => { runAttemptToFixTest(done, { extraEnvVars: { DD_TEST_MANAGEMENT_ENABLED: '0' } }) }) + it('does not tag known attempt to fix tests as new', async () => { + receiver.setKnownTests({ + vitest: { + 'ci-visibility/vitest-tests/test-attempt-to-fix.mjs': [ + 'attempt to fix tests can attempt to fix a test', + ], + }, + }) + receiver.setSettings({ + test_management: { enabled: true, attempt_to_fix_retries: 2 }, + early_flake_detection: { + enabled: true, + slow_test_retries: { '5s': 2 }, + faulty_session_threshold: 100, + }, + known_tests_enabled: true, + }) + + const eventsPromise = receiver + 
.gatherPayloadsMaxTimeout(({ url }) => url === '/api/v2/citestcycle', (payloads) => { + const events = payloads.flatMap(({ payload }) => payload.events) + const tests = events.filter(event => event.type === 'test').map(event => event.content) + const atfTests = tests.filter( + t => t.meta[TEST_MANAGEMENT_IS_ATTEMPT_TO_FIX] === 'true' + ) + assert.ok(atfTests.length > 0) + for (const test of atfTests) { + assert.ok( + !(TEST_IS_NEW in test.meta), + 'ATF test that is in known tests should not be tagged as new' + ) + } + }) + + childProcess = exec( + './node_modules/.bin/vitest run', + { + cwd, + env: { + ...getCiVisAgentlessConfig(receiver.port), + TEST_DIR: 'ci-visibility/vitest-tests/test-attempt-to-fix*', + NODE_OPTIONS: '--import dd-trace/register.js -r dd-trace/ci/init --no-warnings', + }, + } + ) + + await Promise.all([ + once(childProcess, 'exit'), + eventsPromise, + ]) + }) + it('does not fail retry if a test is quarantined', (done) => { receiver.setSettings({ test_management: { enabled: true, attempt_to_fix_retries: 3 } }) receiver.setTestManagementTests({ diff --git a/integration-tests/webpack/package.json b/integration-tests/webpack/package.json index f0f0c768f2a..4ee7e9e0196 100644 --- a/integration-tests/webpack/package.json +++ b/integration-tests/webpack/package.json @@ -15,7 +15,7 @@ "author": "Thomas Hunter II ", "license": "ISC", "dependencies": { - "axios": "1.13.5", + "axios": "1.15.0", "express": "4.22.1", "knex": "3.1.0" } diff --git a/package.json b/package.json index 73a378a25cc..88f1b374514 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "dd-trace", - "version": "5.96.0", + "version": "5.97.0", "description": "Datadog APM tracing client for JavaScript", "main": "index.js", "typings": "index.d.ts", @@ -11,6 +11,8 @@ "bench": "node benchmark/index.js", "bench:e2e:test-optimization": "node benchmark/e2e-test-optimization/benchmark-run.js", "dependencies:dedupe": "yarn-deduplicate yarn.lock", + "generate:config:types": 
"node scripts/generate-config-types.js", + "verify:config:types": "node scripts/generate-config-types.js --check", "type:check": "tsc --noEmit -p tsconfig.dev.json", "type:doc:build": "cd docs && yarn && yarn build", "type:doc:test": "cd docs && yarn && yarn test", @@ -142,7 +144,7 @@ "import-in-the-middle": "^3.0.1" }, "optionalDependencies": { - "@datadog/libdatadog": "0.9.2", + "@datadog/libdatadog": "0.9.3", "@datadog/native-appsec": "11.0.1", "@datadog/native-iast-taint-tracking": "4.1.0", "@datadog/native-metrics": "3.1.1", @@ -166,19 +168,19 @@ "@types/mocha": "^10.0.10", "@types/node": "^18.19.106", "@types/sinon": "^21.0.0", - "axios": "^1.13.4", + "axios": "^1.15.0", "benchmark": "^2.1.4", "body-parser": "^2.2.2", "bun": "1.3.11", "codeowners-audit": "^2.9.0", "eslint": "^9.39.2", - "eslint-plugin-cypress": "^6.2.1", + "eslint-plugin-cypress": "^6.2.2", "eslint-plugin-import": "^2.32.0", - "eslint-plugin-jsdoc": "^62.8.1", + "eslint-plugin-jsdoc": "^62.9.0", "eslint-plugin-mocha": "^11.2.0", "eslint-plugin-n": "^17.23.2", "eslint-plugin-promise": "^7.2.1", - "eslint-plugin-unicorn": "^63.0.0", + "eslint-plugin-unicorn": "^64.0.0", "express": "^5.1.0", "glob": "^10.4.5", "globals": "^17.2.0", @@ -200,7 +202,7 @@ "semver": "^7.7.2", "sinon": "^21.0.3", "tiktoken": "^1.0.21", - "typescript": "^5.9.2", + "typescript": "^6.0.2", "workerpool": "^10.0.0", "yaml": "^2.8.3", "yarn-deduplicate": "^6.0.2" diff --git a/packages/datadog-esbuild/index.js b/packages/datadog-esbuild/index.js index 8e42a7fbf46..21f6600f3f1 100644 --- a/packages/datadog-esbuild/index.js +++ b/packages/datadog-esbuild/index.js @@ -2,7 +2,6 @@ const { execSync } = require('node:child_process') const fs = require('node:fs') -const RAW_BUILTINS = require('node:module').builtinModules const path = require('node:path') const { pathToFileURL, fileURLToPath } = require('node:url') @@ -25,15 +24,27 @@ for (const hook of Object.values(hooks)) { } } +function moduleOfInterestKey (name, file) { + 
return file ? `${name}/${file}` : name +} + +const builtinModules = new Set(require('module').builtinModules) + +function addModuleOfInterest (name, file) { + if (!name) return + + modulesOfInterest.add(moduleOfInterestKey(name, file)) + + if (builtinModules.has(name)) { + modulesOfInterest.add(moduleOfInterestKey(`node:${name}`, file)) + } +} + const modulesOfInterest = new Set() -for (const instrumentation of Object.values(instrumentations)) { +for (const [name, instrumentation] of Object.entries(instrumentations)) { for (const entry of instrumentation) { - if (entry.file) { - modulesOfInterest.add(`${entry.name}/${entry.file}`) // e.g. "redis/my/file.js" - } else { - modulesOfInterest.add(entry.name) // e.g. "redis" - } + addModuleOfInterest(name, entry.file) } } @@ -41,7 +52,7 @@ const CHANNEL = 'dd-trace:bundler:load' const builtins = new Set() -for (const builtin of RAW_BUILTINS) { +for (const builtin of builtinModules) { builtins.add(builtin) builtins.add(`node:${builtin}`) } @@ -247,7 +258,7 @@ ${build.initialOptions.banner.js}` } try { - const packageJson = JSON.parse(fs.readFileSync(/** @type {string} */ (pathToPackageJson)).toString()) + const packageJson = JSON.parse(fs.readFileSync(/** @type {string} */(pathToPackageJson)).toString()) const isESM = isESMFile(fullPathToModule, pathToPackageJson, packageJson) if (isESM && !interceptedESMModules.has(fullPathToModule)) { diff --git a/packages/datadog-instrumentations/src/child_process.js b/packages/datadog-instrumentations/src/child_process.js index dd58deae249..d91a5a0ab29 100644 --- a/packages/datadog-instrumentations/src/child_process.js +++ b/packages/datadog-instrumentations/src/child_process.js @@ -14,11 +14,6 @@ const childProcessChannel = dc.tracingChannel('datadog:child_process:execution') // ignored exec method because it calls to execFile directly const execAsyncMethods = ['execFile', 'spawn', 'fork'] -const names = ['child_process', 'node:child_process'] - -// child_process and 
node:child_process returns the same object instance, we only want to add hooks once -let patched = false - function throwSyncError (error) { throw error } @@ -37,19 +32,14 @@ function returnSpawnSyncError (error, context) { return context.result } -for (const name of names) { - addHook({ name }, childProcess => { - if (!patched) { - patched = true - shimmer.massWrap(childProcess, execAsyncMethods, wrapChildProcessAsyncMethod(childProcess.ChildProcess)) - shimmer.wrap(childProcess, 'execSync', wrapChildProcessSyncMethod(throwSyncError, true)) - shimmer.wrap(childProcess, 'execFileSync', wrapChildProcessSyncMethod(throwSyncError)) - shimmer.wrap(childProcess, 'spawnSync', wrapChildProcessSyncMethod(returnSpawnSyncError)) - } +addHook({ name: 'child_process' }, childProcess => { + shimmer.massWrap(childProcess, execAsyncMethods, wrapChildProcessAsyncMethod(childProcess.ChildProcess)) + shimmer.wrap(childProcess, 'execSync', wrapChildProcessSyncMethod(throwSyncError, true)) + shimmer.wrap(childProcess, 'execFileSync', wrapChildProcessSyncMethod(throwSyncError)) + shimmer.wrap(childProcess, 'spawnSync', wrapChildProcessSyncMethod(returnSpawnSyncError)) - return childProcess - }) -} + return childProcess +}) function normalizeArgs (args, shell) { const childProcessInfo = { diff --git a/packages/datadog-instrumentations/src/crypto.js b/packages/datadog-instrumentations/src/crypto.js index a0ca705434e..69955b08e8b 100644 --- a/packages/datadog-instrumentations/src/crypto.js +++ b/packages/datadog-instrumentations/src/crypto.js @@ -11,9 +11,8 @@ const cryptoCipherCh = channel('datadog:crypto:cipher:start') const hashMethods = ['createHash', 'createHmac', 'createSign', 'createVerify', 'sign', 'verify'] const cipherMethods = ['createCipheriv', 'createDecipheriv'] -const names = ['crypto', 'node:crypto'] -addHook({ name: names }, crypto => { +addHook({ name: 'crypto' }, crypto => { shimmer.massWrap(crypto, hashMethods, wrapCryptoMethod(cryptoHashCh)) shimmer.massWrap(crypto, 
cipherMethods, wrapCryptoMethod(cryptoCipherCh)) return crypto diff --git a/packages/datadog-instrumentations/src/cucumber.js b/packages/datadog-instrumentations/src/cucumber.js index 36054ac579b..95320bb9a06 100644 --- a/packages/datadog-instrumentations/src/cucumber.js +++ b/packages/datadog-instrumentations/src/cucumber.js @@ -61,6 +61,8 @@ const numRetriesByPickleId = new Map() const numAttemptToCtx = new Map() const newTestsByTestFullname = new Map() const modifiedTestsByPickleId = new Map() +// Pickle IDs for tests that are genuinely new (not in known tests list). +const newTestPickleIds = new Set() let eventDataCollector = null let pickleByFile = {} @@ -359,7 +361,7 @@ function wrapRun (pl, isLatestVersion, version) { } if (isKnownTestsEnabled && status !== 'skip') { - isNew = numRetries !== undefined + isNew = newTestPickleIds.has(this.pickle.id) } if (isNew || isModified) { @@ -714,6 +716,7 @@ function getWrappedRunTestCase (runTestCaseFunction, isNewerCucumberVersion = fa if (isKnownTestsEnabled && !isAttemptToFix) { isNew = isNewTest(testSuitePath, pickle.name) if (isNew) { + newTestPickleIds.add(pickle.id) numRetriesByPickleId.set(pickle.id, 0) } } diff --git a/packages/datadog-instrumentations/src/cypress-config.js b/packages/datadog-instrumentations/src/cypress-config.js new file mode 100644 index 00000000000..ce479d08c66 --- /dev/null +++ b/packages/datadog-instrumentations/src/cypress-config.js @@ -0,0 +1,324 @@ +'use strict' + +const fs = require('fs') +const os = require('os') +const path = require('path') +const { pathToFileURL } = require('url') + +const DD_CONFIG_WRAPPED = Symbol('dd-trace.cypress.config.wrapped') + +const noopTask = { + 'dd:testSuiteStart': () => null, + 'dd:beforeEach': () => ({}), + 'dd:afterEach': () => null, + 'dd:addTags': () => null, + 'dd:log': () => null, +} + +/** + * @param {unknown} value + * @returns {boolean} + */ +function isPlainObject (value) { + if (!value || typeof value !== 'object') return false + const 
prototype = Object.getPrototypeOf(value) + return prototype === Object.prototype || prototype === null +} + +/** + * Cypress allows setupNodeEvents to return partial config fragments that it + * diffs and merges into the resolved config. Preserve that behavior here so + * the wrapper does not drop user-provided config updates. + * + * @param {object} config Cypress resolved config object + * @param {unknown} updatedConfig value returned from setupNodeEvents + * @returns {object} resolved config with returned overrides applied + */ +function mergeReturnedConfig (config, updatedConfig) { + if (!isPlainObject(updatedConfig) || updatedConfig === config) { + return config + } + + const mergedConfig = { ...config } + + for (const [key, value] of Object.entries(updatedConfig)) { + mergedConfig[key] = isPlainObject(value) && isPlainObject(mergedConfig[key]) + ? mergeReturnedConfig(mergedConfig[key], value) + : value + } + + return mergedConfig +} + +/** + * Creates a temporary wrapper support file under os.tmpdir() that loads + * dd-trace's browser-side hooks before the user's original support file. + * Returns the wrapper path (for cleanup) or undefined if injection was skipped. + * + * @param {object} config Cypress resolved config object + * @returns {string|undefined} wrapper file path, or undefined if skipped + */ +function injectSupportFile (config) { + const originalSupportFile = config.supportFile + if (!originalSupportFile || originalSupportFile === false) return + + try { + const content = fs.readFileSync(originalSupportFile, 'utf8') + // Naive check: skip lines starting with // or * to avoid matching commented-out imports. 
+ const hasActiveDdTraceImport = content.split('\n').some(line => { + const trimmed = line.trim() + return trimmed.includes('dd-trace/ci/cypress/support') && + !trimmed.startsWith('//') && !trimmed.startsWith('*') + }) + if (hasActiveDdTraceImport) return + } catch { + return + } + + const ddSupportFile = require.resolve('../../../ci/cypress/support') + const wrapperFile = path.join(os.tmpdir(), `dd-cypress-support-${process.pid}.mjs`) + + // Always use ESM: it can import both CJS and ESM support files. + const wrapperContent = + `import ${JSON.stringify(ddSupportFile)}\nimport ${JSON.stringify(originalSupportFile)}\n` + + try { + fs.writeFileSync(wrapperFile, wrapperContent) + config.supportFile = wrapperFile + return wrapperFile + } catch { + // Can't write wrapper - skip injection + } +} + +/** + * Registers dd-trace's Cypress hooks (before:run, after:spec, after:run, tasks) + * and injects the support file. Handles chaining with user-registered handlers + * for after:spec/after:run so both the user's code and dd-trace's run in sequence. 
/**
 * Registers dd-trace's Cypress hooks (before:run, after:spec, after:run, tasks)
 * and injects the support file. Handles chaining with user-registered handlers
 * for after:spec/after:run so both the user's code and dd-trace's run in sequence.
 * Falls back to no-op registrations (still honoring the user's handlers and the
 * wrapper-file cleanup) when the tracer is absent, uninitialized, or a NoopTracer.
 *
 * @param {Function} on Cypress event registration function
 * @param {object} config Cypress resolved config object
 * @param {Function[]} userAfterSpecHandlers user's after:spec handlers collected from wrappedOn
 * @param {Function[]} userAfterRunHandlers user's after:run handlers collected from wrappedOn
 * @returns {object|Promise<object>} the config object (possibly modified); a promise
 *   when the cypress plugin still needs to be initialized
 */
function registerDdTraceHooks (on, config, userAfterSpecHandlers, userAfterRunHandlers) {
  const wrapperFile = injectSupportFile(config)

  // Remove the temp support wrapper once the run is over (best effort).
  const cleanupWrapper = () => {
    if (wrapperFile) {
      try { fs.unlinkSync(wrapperFile) } catch { /* best effort */ }
    }
  }

  const tracer = global._ddtrace

  // after:run always runs the user's handlers sequentially, then cleans up the
  // wrapper file regardless of handler failures (finally).
  const registerAfterRunWithCleanup = () => {
    on('after:run', (results) => {
      const chain = userAfterRunHandlers.reduce(
        (p, h) => p.then(() => h(results)),
        Promise.resolve()
      )
      return chain.finally(cleanupWrapper)
    })
  }

  // When dd-trace is not active, user handlers must still be registered
  // (they were intercepted by wrappedOn) and tasks need a no-op handler.
  const registerNoopHandlers = () => {
    for (const h of userAfterSpecHandlers) on('after:spec', h)
    registerAfterRunWithCleanup()
    on('task', noopTask)
  }

  if (!tracer || !tracer._initialized) {
    registerNoopHandlers()
    return config
  }

  const NoopTracer = require('../../../packages/dd-trace/src/noop/tracer')

  if (tracer._tracer instanceof NoopTracer) {
    registerNoopHandlers()
    return config
  }

  const cypressPlugin = require('../../../packages/datadog-plugin-cypress/src/cypress-plugin')

  // Plugin already initialized elsewhere: only re-register the user's
  // handlers; dd-trace's own hooks are assumed to be in place.
  if (cypressPlugin._isInit) {
    for (const h of userAfterSpecHandlers) on('after:spec', h)
    registerAfterRunWithCleanup()
    return config
  }

  on('before:run', cypressPlugin.beforeRun.bind(cypressPlugin))

  // User handlers run first (in registration order), then dd-trace's.
  on('after:spec', (spec, results) => {
    const chain = userAfterSpecHandlers.reduce(
      (p, h) => p.then(() => h(spec, results)),
      Promise.resolve()
    )
    return chain.then(() => cypressPlugin.afterSpec(spec, results))
  })

  on('after:run', (results) => {
    const chain = userAfterRunHandlers.reduce(
      (p, h) => p.then(() => h(results)),
      Promise.resolve()
    )
    return chain
      .then(() => cypressPlugin.afterRun(results))
      .finally(cleanupWrapper)
  })

  on('task', cypressPlugin.getTasks())

  // init may be async; resolve to the config either way.
  return Promise.resolve(cypressPlugin.init(tracer, config)).then(() => config)
}

/**
 * Wraps a user's setupNodeEvents so dd-trace can intercept after:spec/after:run
 * registrations and merge any partial config the user returns.
 *
 * @param {Function|undefined} originalSetupNodeEvents
 * @returns {Function}
 */
function wrapSetupNodeEvents (originalSetupNodeEvents) {
  return function ddSetupNodeEvents (on, config) {
    const userAfterSpecHandlers = []
    const userAfterRunHandlers = []

    // Intercept only the two events dd-trace needs to chain with; everything
    // else is forwarded to Cypress untouched.
    const wrappedOn = (event, handler) => {
      if (event === 'after:spec') {
        userAfterSpecHandlers.push(handler)
      } else if (event === 'after:run') {
        userAfterRunHandlers.push(handler)
      } else {
        on(event, handler)
      }
    }

    const maybePromise = originalSetupNodeEvents
      ? originalSetupNodeEvents.call(this, wrappedOn, config)
      : undefined

    // setupNodeEvents may be async; merge its (possibly partial) result into
    // the resolved config before registering dd-trace's hooks.
    if (maybePromise && typeof maybePromise.then === 'function') {
      return maybePromise.then((result) => {
        return registerDdTraceHooks(
          on,
          mergeReturnedConfig(config, result),
          userAfterSpecHandlers,
          userAfterRunHandlers
        )
      })
    }

    return registerDdTraceHooks(
      on,
      mergeReturnedConfig(config, maybePromise),
      userAfterSpecHandlers,
      userAfterRunHandlers
    )
  }
}

/**
 * Wraps setupNodeEvents for both e2e and component testing types.
 * Idempotent: a config already wrapped (marker symbol/flag set) is returned as-is.
 *
 * @param {object} config
 * @returns {object}
 */
function wrapConfig (config) {
  if (!config || config[DD_CONFIG_WRAPPED]) return config
  config[DD_CONFIG_WRAPPED] = true

  if (config.e2e) {
    config.e2e.setupNodeEvents = wrapSetupNodeEvents(config.e2e.setupNodeEvents)
  }
  if (config.component) {
    config.component.setupNodeEvents = wrapSetupNodeEvents(config.component.setupNodeEvents)
  }

  return config
}

/**
 * Writes a temp ESM config wrapper next to the original config file that
 * imports the original config and passes it through wrapConfig.
 *
 * @param {string} originalConfigFile absolute path to the original config file
 * @returns {string} path to the generated wrapper file
 */
function createConfigWrapper (originalConfigFile) {
  const wrapperFile = path.join(
    path.dirname(originalConfigFile),
    `.dd-cypress-config-${process.pid}.mjs`
  )

  const cypressConfigPath = require.resolve('./cypress-config')

  // Always use ESM: it can import both CJS and ESM configs, so it works
  // regardless of the original file's extension or "type": "module" in package.json.
  // Import cypress-config.js directly (CJS default = module.exports object).
  fs.writeFileSync(wrapperFile, [
    `import originalConfig from ${JSON.stringify(pathToFileURL(originalConfigFile).href)}`,
    `import cypressConfig from ${JSON.stringify(pathToFileURL(cypressConfigPath).href)}`,
    '',
    'export default cypressConfig.wrapConfig(originalConfig)',
    '',
  ].join('\n'))

  return wrapperFile
}

/**
 * Wraps the Cypress config file for a CLI start() call. When an explicit
 * configFile is provided, creates a temp wrapper that imports the original
 * and passes it through wrapConfig. This handles ESM configs (.mjs) and
 * plain-object configs (without defineConfig) that can't be intercepted
 * via the defineConfig shimmer.
 *
 * @param {object|undefined} options
 * @returns {{ options: object|undefined, cleanup: Function }} possibly-replaced
 *   options plus a cleanup callback that deletes the temp wrapper (no-op when
 *   nothing was wrapped)
 */
function wrapCliConfigFileOptions (options) {
  const noop = { options, cleanup: () => {} }

  if (!options) return noop

  const projectRoot = typeof options.project === 'string' ? options.project : process.cwd()
  let configFilePath

  if (options.configFile === false) {
    // configFile: false means "no config file" — respect Cypress's semantics
    return noop
  } else if (typeof options.configFile === 'string') {
    configFilePath = path.isAbsolute(options.configFile)
      ? options.configFile
      : path.resolve(projectRoot, options.configFile)
  } else {
    // No explicit --config-file: resolve the default cypress.config.{js,ts,cjs,mjs}
    for (const ext of ['.js', '.ts', '.cjs', '.mjs']) {
      const candidate = path.join(projectRoot, `cypress.config${ext}`)
      if (fs.existsSync(candidate)) {
        configFilePath = candidate
        break
      }
    }
  }

  // Skip .ts files — Cypress transpiles them internally via its own loader.
  // The ESM wrapper can't import .ts directly. The defineConfig shimmer
  // handles .ts configs since they're transpiled to CJS by Cypress.
  if (!configFilePath || !fs.existsSync(configFilePath) || path.extname(configFilePath) === '.ts') return noop

  try {
    const wrapperFile = createConfigWrapper(configFilePath)

    return {
      options: { ...options, configFile: wrapperFile },
      cleanup: () => {
        try { fs.unlinkSync(wrapperFile) } catch { /* best effort */ }
      },
    }
  } catch {
    // Config directory may be read-only — fall back to no wrapping.
    // The defineConfig shimmer will still handle configs that use defineConfig.
    return noop
  }
}

module.exports = {
  wrapCliConfigFileOptions,
  wrapConfig,
}
// Wrap defineConfig() so configs are instrumented when loaded in Cypress's
// config child process. This covers both CLI and programmatic usage with CJS configs.
addHook({
  name: 'cypress',
  versions: ['>=10.2.0'],
}, (cypress) => {
  if (typeof cypress.defineConfig === 'function') {
    shimmer.wrap(cypress, 'defineConfig', (defineConfig) => function (config) {
      // Wrap first so setupNodeEvents is intercepted, then defer to Cypress.
      wrapConfig(config)
      return defineConfig(config)
    })
  }
  return cypress
})

// Wrap the CLI entry points (cypress run / cypress open) to handle config files
// that can't be intercepted via the defineConfig shimmer: ESM configs (.mjs)
// and plain-object configs (without defineConfig).
function getCliStartWrapper (start) {
  return function ddTraceCliStart (options) {
    const { options: wrappedOptions, cleanup } = wrapCliConfigFileOptions(options)
    const result = start.call(this, wrappedOptions)

    // start() may be sync or return a promise; delete the temp wrapper
    // config only after the run completes either way.
    if (result && typeof result.then === 'function') {
      return result.finally(cleanup)
    }

    cleanup()
    return result
  }
}

/**
 * Wraps `start` on an object (or its `.default`) if present.
 *
 * @param {object} mod module exports
 * @returns {object} mod
 */
function wrapStartOnModule (mod) {
  const target = mod.default || mod
  if (typeof target.start === 'function') {
    shimmer.wrap(target, 'start', getCliStartWrapper)
  }
  return mod
}

// Hook the CLI entry points where Cypress resolves and executes `run`/`open`.
// Cypress 10-14: lib/exec/{run,open}.js as separate files.
// Cypress 15-15.10: dist/exec/{run,open}.js as separate files.
// Cypress >=15.11: bundled into dist/cli-<hash>.js exporting runModule/openModule.
for (const file of ['lib/exec/run.js', 'lib/exec/open.js', 'dist/exec/run.js', 'dist/exec/open.js']) {
  addHook({
    name: 'cypress',
    versions: ['>=10.2.0'],
    file,
  }, wrapStartOnModule)
}

// Cypress >=15.11 bundles run/open into a single CLI chunk (dist/cli-<hash>.js).
// The chunk exports runModule and openModule, each with a start() method.
addHook({
  name: 'cypress',
  versions: ['>=10.2.0'],
  filePattern: 'dist/cli.*',
}, (cliChunk) => {
  if (cliChunk.runModule?.start) {
    shimmer.wrap(cliChunk.runModule, 'start', getCliStartWrapper)
  }
  if (cliChunk.openModule?.start) {
    shimmer.wrap(cliChunk.openModule, 'start', getCliStartWrapper)
  }
  return cliChunk
})

// Cypress <10 uses the old pluginsFile approach. No auto-instrumentation;
// users must use the manual dd-trace/ci/cypress/plugin setup.
// This hook is kept so the plugin system registers Cypress for version tracking.
if (DD_MAJOR < 6) {
  addHook({
    name: 'cypress',
    versions: ['>=6.7.0 <10.2.0'],
  }, lib => lib)
}
// The request's position among process_params arguments differs across
// Express versions (see wrapProcessParamsMethod's parameter), hence the
// two version-scoped hooks below.
addHook({ name: 'express', versions: ['>=4.0.0 <4.3.0'], file: 'lib/express.js' }, express => {
  shimmer.wrap(express.Router, 'process_params', wrapProcessParamsMethod(1))
  return express
})

addHook({ name: 'express', versions: ['>=4.3.0 <5.0.0'], file: 'lib/express.js' }, express => {
  shimmer.wrap(express.Router, 'process_params', wrapProcessParamsMethod(2))
  return express
})

// Published when the wrapped Express 5 `query` getter finishes computing its value.
const queryReadCh = channel('datadog:express:query:finish')

// Single unprefixed hook: the register layer now instruments the `node:`
// prefixed variant automatically (see helpers/hooks.js), so one hook covers
// both `fs` and `node:fs`.
addHook({ name: 'fs' }, fs => {
  // Each async method in paramsByMethod also has a `<name>Sync` counterpart.
  const asyncMethods = Object.keys(paramsByMethod)
  const syncMethods = asyncMethods.map(name => `${name}Sync`)

  massWrap(fs, asyncMethods, createWrapFunction())
  massWrap(fs, syncMethods, createWrapFunction())
  massWrap(fs.promises, asyncMethods, createWrapFunction('promises.'))

  // realpath exposes a distinct `.native` variant on all three surfaces.
  wrap(fs.realpath, 'native', createWrapFunction('', 'realpath.native'))
  wrap(fs.realpathSync, 'native', createWrapFunction('', 'realpath.native'))
  wrap(fs.promises.realpath, 'native', createWrapFunction('', 'realpath.native'))

  wrap(fs, 'createReadStream', wrapCreateStream)
  wrap(fs, 'createWriteStream', wrapCreateStream)
  // fs.Dir may be absent on older Node versions, hence the guard.
  if (fs.Dir) {
    wrap(fs.Dir.prototype, 'close', createWrapFunction('dir.'))
    wrap(fs.Dir.prototype, 'closeSync', createWrapFunction('dir.'))
    wrap(fs.Dir.prototype, 'read', createWrapFunction('dir.'))
    wrap(fs.Dir.prototype, 'readSync', createWrapFunction('dir.'))
    wrap(fs.Dir.prototype, Symbol.asyncIterator, createWrapDirAsyncIterator())
  }

  wrap(fs, 'unwatchFile', createWatchWrapFunction())
  wrap(fs, 'watch', createWatchWrapFunction())
  wrap(fs, 'watchFile', createWatchWrapFunction())

  return fs
})

// True only for the first call to `open` observed before kHandle has been
// captured (used to detect the FileHandle-returning promises.open).
function isFirstMethodReturningFileHandle (original) {
  return !kHandle && original.name === 'open'
}
// dc-polyfill may lack unsubscribe on old runtimes; mirror the native API,
// which returns whether a subscriber was actually removed.
if (!dc.unsubscribe) {
  dc.unsubscribe = (channel, cb) => {
    if (dc.channel(channel).hasSubscribers) {
      dc.channel(channel).unsubscribe(cb)
      return true
    }
    return false
  }
}

/**
 * Executes the registered hook for a module name, trying the unprefixed
 * name first and falling back to the `node:` prefixed entry. Failures are
 * logged, never thrown — instrumentation must not break the bundle.
 *
 * @param {string} name
 */
function doHook (name) {
  const hook = hooks[name] ?? hooks[`node:${name}`]
  if (!hook) {
    log.error('esbuild-wrapped %s missing in list of hooks', name)
    return
  }

  // Hooks are either plain functions or { fn } descriptor objects.
  const hookFn = hook.fn ?? hook
  if (typeof hookFn !== 'function') {
    log.error('esbuild-wrapped hook %s is not a function', name)
    return
  }

  try {
    hookFn()
  } catch {
    log.error('esbuild-wrapped %s hook failed', name)
  }
}

/** @type {Set<string>} */
const instrumentedNodeModules = new Set()

/** @typedef {{ package: string, module: unknown, version: string, path: string }} Payload */
dc.subscribe(CHANNEL, (message) => {
  const payload = /** @type {Payload} */ (message)
  const name = payload.package

  const isPrefixedWithNode = name.startsWith('node:')

  // Anything without a registered hook under its plain name is treated as a
  // Node builtin candidate — assumption inferred from the hooks table layout;
  // verify against helpers/hooks.js.
  const isNodeModule = isPrefixedWithNode || !hooks[name]

  if (isNodeModule) {
    const nodeName = isPrefixedWithNode ? name.slice(5) : name
    // Used for node: prefixed modules to prevent double instrumentation.
    if (instrumentedNodeModules.has(nodeName)) {
      return
    }
    instrumentedNodeModules.add(nodeName)
  }

  doHook(name)

  const instrumentation = instrumentations[name] ?? instrumentations[`node:${name}`]

  if (!instrumentation) {
    log.error('esbuild-wrapped %s missing in list of instrumentations', name)
    return
  }

  for (const { file, versions, hook } of instrumentation) {
    // Both the exact file and the version range must match before patching.
    if (payload.path !== filename(name, file) || !matchVersion(payload.version, versions)) {
      continue
    }
    try {
      loadChannel.publish({ name, version: payload.version, file })
      // A hook may patch in place (returning nothing) or return a replacement.
      payload.module = hook(payload.module, payload.version) ?? payload.module
    } catch (e) {
      log.error('Error executing bundler hook', e)
    }
  }
})

// ---- helpers/hook.js ----

/**
 * Resolves the version to report for a hooked module: the package.json
 * version when a module base dir exists (real package), otherwise the Node
 * version (builtin modules).
 *
 * @param {string} moduleBaseDir
 * @returns {string|undefined}
 */
function getVersion (moduleBaseDir) {
  if (moduleBaseDir) {
    return requirePackageJson(moduleBaseDir, /** @type {import('module').Module} */ (module)).version
  }

  return process.version
}
if (!(this instanceof Hook)) return new Hook(modules, hookOptions, onrequire) if (typeof hookOptions === 'function') { @@ -42,6 +66,13 @@ function Hook (modules, hookOptions, onrequire) { return result } + try { + moduleVersion ||= getVersion(moduleBaseDir) + } catch (error) { + log.error('Error getting version for "%s": %s', moduleName, error.message, error) + return + } + if ( isIitm && moduleExports.default && @@ -66,10 +97,4 @@ function Hook (modules, hookOptions, onrequire) { }) } -Hook.prototype.unhook = function () { - this._ritmHook.unhook() - this._iitmHook.unhook() - this._patched = Object.create(null) -} - module.exports = Hook diff --git a/packages/datadog-instrumentations/src/helpers/hooks.js b/packages/datadog-instrumentations/src/helpers/hooks.js index 3d83b13e406..3d648a03ffa 100644 --- a/packages/datadog-instrumentations/src/helpers/hooks.js +++ b/packages/datadog-instrumentations/src/helpers/hooks.js @@ -1,6 +1,18 @@ 'use strict' module.exports = { + // Only list unprefixed node modules. They will automatically be instrumented as prefixed and unprefixed. 
+ child_process: () => require('../child_process'), + crypto: () => require('../crypto'), + dns: () => require('../dns'), + fs: { serverless: false, fn: () => require('../fs') }, + http: () => require('../http'), + http2: () => require('../http2'), + https: () => require('../http'), + net: () => require('../net'), + url: () => require('../url'), + vm: () => require('../vm'), + // Non Node.js modules '@anthropic-ai/sdk': { esmFirst: true, fn: () => require('../anthropic') }, '@apollo/server': () => require('../apollo-server'), '@apollo/gateway': () => require('../apollo'), @@ -47,31 +59,24 @@ module.exports = { bullmq: () => require('../bullmq'), bunyan: () => require('../bunyan'), 'cassandra-driver': () => require('../cassandra-driver'), - child_process: () => require('../child_process'), connect: () => require('../connect'), cookie: () => require('../cookie'), 'cookie-parser': () => require('../cookie-parser'), couchbase: () => require('../couchbase'), - crypto: () => require('../crypto'), cypress: () => require('../cypress'), 'dd-trace-api': () => require('../dd-trace-api'), - dns: () => require('../dns'), elasticsearch: () => require('../elasticsearch'), express: () => require('../express'), 'express-mongo-sanitize': () => require('../express-mongo-sanitize'), 'express-session': () => require('../express-session'), fastify: () => require('../fastify'), 'find-my-way': () => require('../find-my-way'), - fs: { serverless: false, fn: () => require('../fs') }, 'generic-pool': () => require('../generic-pool'), graphql: () => require('../graphql'), grpc: () => require('../grpc'), handlebars: () => require('../handlebars'), hapi: () => require('../hapi'), hono: { esmFirst: true, fn: () => require('../hono') }, - http: () => require('../http'), - http2: () => require('../http2'), - https: () => require('../http'), ioredis: () => require('../ioredis'), iovalkey: () => require('../iovalkey'), 'jest-circus': () => require('../jest'), @@ -103,18 +108,8 @@ module.exports = { 
multer: () => require('../multer'), mysql: () => require('../mysql'), mysql2: () => require('../mysql2'), - net: () => require('../net'), next: () => require('../next'), 'node-serialize': () => require('../node-serialize'), - 'node:child_process': () => require('../child_process'), - 'node:crypto': () => require('../crypto'), - 'node:dns': () => require('../dns'), - 'node:http': () => require('../http'), - 'node:http2': () => require('../http2'), - 'node:https': () => require('../http'), - 'node:net': () => require('../net'), - 'node:url': () => require('../url'), - 'node:vm': () => require('../vm'), nyc: () => require('../nyc'), oracledb: () => require('../oracledb'), openai: { esmFirst: true, fn: () => require('../openai') }, @@ -142,9 +137,7 @@ module.exports = { tedious: () => require('../tedious'), tinypool: { esmFirst: true, fn: () => require('../vitest') }, undici: () => require('../undici'), - url: () => require('../url'), vitest: { esmFirst: true, fn: () => require('../vitest') }, - vm: () => require('../vm'), when: () => require('../when'), winston: () => require('../winston'), workerpool: () => require('../mocha'), diff --git a/packages/datadog-instrumentations/src/helpers/instrument.js b/packages/datadog-instrumentations/src/helpers/instrument.js index 6c7c493233e..2695807757c 100644 --- a/packages/datadog-instrumentations/src/helpers/instrument.js +++ b/packages/datadog-instrumentations/src/helpers/instrument.js @@ -1,11 +1,23 @@ 'use strict' const { AsyncResource } = require('async_hooks') -const dc = require('dc-polyfill') +const dc = /** @type {typeof import('node:diagnostics_channel')} */ (require('dc-polyfill')) const instrumentations = require('./instrumentations') const rewriterInstrumentations = require('./rewriter/instrumentations') +/** + * @typedef {import('node:diagnostics_channel').Channel} Channel + * @typedef {import('node:diagnostics_channel').TracingChannel} TracingChannel + */ + +/** + * @type {Record} + */ const channelMap = {} +/** 
/**
 * Registers an instrumentation entry for a single module name. Entries are
 * consumed later by helpers/register.js when the module is actually loaded.
 *
 * @param {object} args
 * @param {string} args.name module name
 * @param {string[]} [args.versions] array of semver range strings
 * @param {string} [args.file='index.js'] path to file within package to instrument
 * @param {string} [args.filePattern] pattern to match files within package to instrument
 * @param {boolean} [args.patchDefault] whether to patch the default export
 *   (no default: when omitted, neither the unwrap nor the skip branch applies)
 * @param {(moduleExports: unknown, version: string, isIitm?: boolean) => unknown} [hook] Patches module exports
 */
exports.addHook = function addHook ({ name, versions, file, filePattern, patchDefault }, hook) {
  if (!instrumentations[name]) {
    instrumentations[name] = []
  }

  instrumentations[name].push({ versions, file, filePattern, hook, patchDefault })
}

exports.AsyncResource = AsyncResource
a/packages/datadog-instrumentations/src/helpers/register.js +++ b/packages/datadog-instrumentations/src/helpers/register.js @@ -1,9 +1,9 @@ 'use strict' +const { builtinModules } = require('module') const path = require('path') const { channel } = require('dc-polyfill') const satisfies = require('../../../../vendor/dist/semifies') -const requirePackageJson = require('../../../dd-trace/src/require-package-json') const log = require('../../../dd-trace/src/log') const telemetry = require('../../../dd-trace/src/guardrails/telemetry') const { IS_SERVERLESS } = require('../../../dd-trace/src/serverless') @@ -36,27 +36,47 @@ if (!disabledInstrumentations.has('process')) { require('../process') } -const HOOK_SYMBOL = Symbol('hookExportsSet') - if (DD_TRACE_DEBUG && DD_TRACE_DEBUG.toLowerCase() !== 'false') { checkRequireCache.checkForRequiredModules() setImmediate(checkRequireCache.checkForPotentialConflicts) } -const seenCombo = new Set() -const allInstrumentations = {} - for (const inst of disabledInstrumentations) { rewriter.disable(inst) } -// TODO: make this more efficient -for (const packageName of names) { - if (disabledInstrumentations.has(packageName)) continue +/** @type {Map} */ +const instrumentedNodeModules = new Map() +/** @type {Map} */ +const instrumentedIntegrationsSuccess = new Map() +/** @type {Set} */ +const alreadyLoggedIncompatibleIntegrations = new Set() + +// Always disable prefixed and unprefixed node modules if one is disabled. 
+if (disabledInstrumentations.size) { + const builtinsSet = new Set(builtinModules) + for (const name of disabledInstrumentations) { + const hasPrefix = name.startsWith('node:') + if (hasPrefix || builtinsSet.has(name)) { + if (hasPrefix) { + const unprefixedName = name.slice(5) + if (!disabledInstrumentations.has(unprefixedName)) { + disabledInstrumentations.add(unprefixedName) + } + } else if (!disabledInstrumentations.has(`node:${name}`)) { + disabledInstrumentations.add(`node:${name}`) + } + } + } + builtinsSet.clear() +} + +for (const name of names) { + if (disabledInstrumentations.has(name)) continue const hookOptions = {} - let hook = hooks[packageName] + let hook = hooks[name] if (hook !== null && typeof hook === 'object') { if (hook.serverless === false && IS_SERVERLESS) continue @@ -65,173 +85,114 @@ for (const packageName of names) { hook = hook.fn } - // get the instrumentation file name to save all hooked versions - const instrumentationFileName = parseHookInstrumentationFileName(packageName) - - Hook([packageName], hookOptions, (moduleExports, moduleName, moduleBaseDir, moduleVersion, isIitm) => { - moduleName = moduleName.replace(pathSepExpr, '/') + Hook([name], hookOptions, (moduleExports, moduleName, moduleBaseDir, moduleVersion, isIitm) => { + // All loaded versions are first expected to fail instrumentation. 
+ if (!instrumentedIntegrationsSuccess.has(`${name}@${moduleVersion}`)) { + instrumentedIntegrationsSuccess.set(`${name}@${moduleVersion}`, false) + } // This executes the integration file thus adding its entries to `instrumentations` hook() - if (!instrumentations[packageName]) { + if (!instrumentations[name] || moduleExports === instrumentedNodeModules.get(name)) { return moduleExports } - const namesAndSuccesses = {} - for (const { name, file, versions, hook, filePattern, patchDefault } of instrumentations[packageName]) { - if (patchDefault === false && !moduleExports.default && isIitm) { - return moduleExports - } else if (patchDefault === true && moduleExports.default && isIitm) { - moduleExports = moduleExports.default + // Used for node: prefixed modules to prevent double instrumentation. + if (moduleBaseDir) { + moduleName = moduleName.replace(pathSepExpr, '/') + } else { + instrumentedNodeModules.set(name, moduleExports) + } + + for (const { file, versions, hook, filePattern, patchDefault } of instrumentations[name]) { + if (isIitm && patchDefault === !!moduleExports.default) { + if (patchDefault) { + moduleExports = moduleExports.default + } else { + return moduleExports + } } - let fullFilePattern = filePattern const fullFilename = filename(name, file) - if (fullFilePattern) { - fullFilePattern = filename(name, fullFilePattern) - } - // Create a WeakSet associated with the hook function so that patches on the same moduleExport only happens once - // for example by instrumenting both dns and node:dns double the spans would be created - // since they both patch the same moduleExport, this WeakSet is used to mitigate that - // TODO(BridgeAR): Instead of using a WeakSet here, why not just use aliases for the hook in register? - // That way it would also not be duplicated. The actual name being used has to be identified else wise. - // Maybe it is also not important to know what name was actually used? 
- hook[HOOK_SYMBOL] ??= new WeakSet() let matchesFile = moduleName === fullFilename if (!matchesFile && isRelativeRequire(name)) matchesFile = true + const fullFilePattern = filePattern && filename(name, filePattern) if (fullFilePattern) { // Some libraries include a hash in their filenames when installed, // so our instrumentation has to include a '.*' to match them for more than a single version. - matchesFile = matchesFile || new RegExp(fullFilePattern).test(moduleName) + matchesFile ||= new RegExp(fullFilePattern).test(moduleName) } - if (matchesFile) { - let version = moduleVersion + if (matchesFile && matchVersion(moduleVersion, versions)) { + // Do not log in case of an error to prevent duplicate telemetry for the same integration version. + instrumentedIntegrationsSuccess.set(`${name}@${moduleVersion}`, true) try { - version = version || getVersion(moduleBaseDir) - allInstrumentations[instrumentationFileName] = allInstrumentations[instrumentationFileName] || false - } catch (e) { - log.error('Error getting version for "%s": %s', name, e.message, e) - continue - } - if (namesAndSuccesses[`${name}@${version}`] === undefined && !file) { - // TODO If `file` is present, we might elsewhere instrument the result of the module - // for a version range that actually matches, so we can't assume that we're _not_ - // going to instrument that. However, the way the data model around instrumentation - // works, we can't know either way just yet, so to avoid false positives, we'll just - // ignore this if there is a `file` in the hook. The thing to do here is rework - // everything so that we can be sure that there are _no_ instrumentations that it - // could match. 
- namesAndSuccesses[`${name}@${version}`] = false - } - - if (matchVersion(version, versions)) { - allInstrumentations[instrumentationFileName] = true - - // Check if the hook already has a set moduleExport - if (hook[HOOK_SYMBOL].has(moduleExports)) { - namesAndSuccesses[`${name}@${version}`] = true - return moduleExports - } - - try { - loadChannel.publish({ name, version, file }) - // Send the name and version of the module back to the callback because now addHook - // takes in an array of names so by passing the name the callback will know which module name is being used - // TODO(BridgeAR): This is only true in case the name is identical - // in all loads. If they deviate, the deviating name would not be - // picked up due to the unification. Check what modules actually use the name. - // TODO(BridgeAR): Only replace moduleExports if the hook returns a new value. - // This allows to reduce the instrumentation code (no return needed). - - moduleExports = hook(moduleExports, version, name, isIitm) ?? moduleExports - // Set the moduleExports in the hooks WeakSet - hook[HOOK_SYMBOL].add(moduleExports) - } catch (e) { - log.info('Error during ddtrace instrumentation of application, aborting.', e) - telemetry('error', [ - `error_type:${e.constructor.name}`, - `integration:${name}`, - `integration_version:${version}`, - ], { - result: 'error', - result_class: 'internal_error', - result_reason: `Error during instrumentation of ${name}@${version}: ${e.message}`, - }) - } - namesAndSuccesses[`${name}@${version}`] = true + loadChannel.publish({ name }) + + moduleExports = hook(moduleExports, moduleVersion, isIitm) ?? 
moduleExports + } catch (error) { + log.info('Error during ddtrace instrumentation of application, aborting.', error) + telemetry('error', [ + `error_type:${error.constructor.name}`, + `integration:${name}`, + `integration_version:${moduleVersion}`, + ], { + result: 'error', + result_class: 'internal_error', + result_reason: `Error during instrumentation of ${name}@${moduleVersion}: ${error.message}`, + }) } } } - for (const nameVersion of Object.keys(namesAndSuccesses)) { - const [name, version] = nameVersion.split('@') - const success = namesAndSuccesses[nameVersion] - // we check allVersions to see if any version of the integration was successfully instrumented - if (!success && !seenCombo.has(nameVersion) && !allInstrumentations[instrumentationFileName]) { - telemetry('abort.integration', [ - `integration:${name}`, - `integration_version:${version}`, - ], { - result: 'abort', - result_class: 'incompatible_library', - result_reason: `Incompatible integration version: ${name}@${version}`, - }) - log.info('Found incompatible integration version: %s', nameVersion) - seenCombo.add(nameVersion) - } - } return moduleExports }) } -function matchVersion (version, ranges) { - return !version || !ranges || ranges.some(range => satisfies(version, range)) -} +globalThis[Symbol.for('dd-trace')]?.beforeExitHandlers.add(logAbortedIntegrations) +// TODO: check if we want to stop using channels for single subscriber tasks +channel('dd-trace:exporter:first-flush').subscribe(logAbortedIntegrations) -function getVersion (moduleBaseDir) { - if (moduleBaseDir) { - return requirePackageJson(moduleBaseDir, module).version +function logAbortedIntegrations () { + for (const [nameVersion, success] of instrumentedIntegrationsSuccess) { + // Only ever log a single version of an integration, even if it is loaded later. 
+ if (!success && !alreadyLoggedIncompatibleIntegrations.has(nameVersion)) { + const [name, version] = nameVersion.split('@') + telemetry('abort.integration', [ + `integration:${name}`, + `integration_version:${version}`, + ], { + result: 'abort', + result_class: 'incompatible_library', + result_reason: `Incompatible integration version: ${name}@${version}`, + }) + log.info('Found incompatible integration version: %s', nameVersion) + alreadyLoggedIncompatibleIntegrations.add(nameVersion) + } } + // Clear the map to avoid reporting the same integration version again. + instrumentedIntegrationsSuccess.clear() } -function filename (name, file) { - return [name, file].filter(Boolean).join('/') +/** + * @param {string|undefined} version + * @param {string[]|undefined} ranges + */ +function matchVersion (version, ranges) { + return !version || !ranges || ranges.some(range => satisfies(version, range)) } -// This function captures the instrumentation file name for a given package by parsing the hook require -// function given the module name. It is used to ensure that instrumentations such as redis -// that have several different modules being hooked, ie: 'redis' main package, and @redis/client submodule -// return a consistent instrumentation name. This is used later to ensure that at least some portion of -// the integration was successfully instrumented. Prevents incorrect `Found incompatible integration version: ` messages -// Example: -// redis -> "() => require('../redis')" -> redis -// @redis/client -> "() => require('../redis')" -> redis -// -function parseHookInstrumentationFileName (packageName) { - let hook = hooks[packageName] - if (hook.fn) { - hook = hook.fn - } - const hookString = hook.toString() - const regex = /require\('([^']*)'\)/ - const match = hookString.match(regex) - - // try to capture the hook require file location. 
- if (match && match[1]) { - let moduleName = match[1] - // Remove leading '../' if present - if (moduleName.startsWith('../')) { - moduleName = moduleName.slice(3) - } - return moduleName - } - - return null +/** + * @param {string} name + * @param {string} [file] + * @returns {string} + */ +function filename (name, file) { + return file ? `${name}/${file}` : name } module.exports = { diff --git a/packages/datadog-instrumentations/src/http/client.js b/packages/datadog-instrumentations/src/http/client.js index 3625f384de4..15c5693efef 100644 --- a/packages/datadog-instrumentations/src/http/client.js +++ b/packages/datadog-instrumentations/src/http/client.js @@ -16,9 +16,8 @@ const asyncStartChannel = channel('apm:http:client:request:asyncStart') const errorChannel = channel('apm:http:client:request:error') const responseFinishChannel = channel('apm:http:client:response:finish') -const names = ['http', 'https', 'node:http', 'node:https'] - -addHook({ name: names }, hookFn) +addHook({ name: 'http' }, hookFn) +addHook({ name: 'https' }, hookFn) function hookFn (http) { patch(http, 'request') diff --git a/packages/datadog-instrumentations/src/http/server.js b/packages/datadog-instrumentations/src/http/server.js index adf61ab86da..51a488c3a85 100644 --- a/packages/datadog-instrumentations/src/http/server.js +++ b/packages/datadog-instrumentations/src/http/server.js @@ -16,10 +16,7 @@ const startSetHeaderCh = channel('datadog:http:server:response:set-header:start' const requestFinishedSet = new WeakSet() -const httpNames = ['http', 'node:http'] -const httpsNames = ['https', 'node:https'] - -addHook({ name: httpNames }, http => { +addHook({ name: 'http' }, http => { shimmer.wrap(http.ServerResponse.prototype, 'emit', wrapResponseEmit) shimmer.wrap(http.Server.prototype, 'emit', wrapEmit) shimmer.wrap(http.ServerResponse.prototype, 'writeHead', wrapWriteHead) @@ -34,7 +31,7 @@ addHook({ name: httpNames }, http => { return http }) -addHook({ name: httpsNames }, http => { 
+addHook({ name: 'https' }, http => { // http.ServerResponse not present on https shimmer.wrap(http.Server.prototype, 'emit', wrapEmit) return http diff --git a/packages/datadog-instrumentations/src/http2/client.js b/packages/datadog-instrumentations/src/http2/client.js index b335df7c4cd..d38837fb2d9 100644 --- a/packages/datadog-instrumentations/src/http2/client.js +++ b/packages/datadog-instrumentations/src/http2/client.js @@ -10,8 +10,6 @@ const asyncStartChannel = channel('apm:http2:client:request:asyncStart') const asyncEndChannel = channel('apm:http2:client:request:asyncEnd') const errorChannel = channel('apm:http2:client:request:error') -const names = ['http2', 'node:http2'] - function createWrapEmit (ctx) { return function wrapEmit (emit) { return function (event, arg1) { @@ -68,7 +66,7 @@ function wrapConnect (connect) { } } -addHook({ name: names }, http2 => { +addHook({ name: 'http2' }, http2 => { shimmer.wrap(http2, 'connect', wrapConnect) if (http2.default) http2.default.connect = http2.connect diff --git a/packages/datadog-instrumentations/src/http2/server.js b/packages/datadog-instrumentations/src/http2/server.js index f5e7d961f84..878b9fa7f22 100644 --- a/packages/datadog-instrumentations/src/http2/server.js +++ b/packages/datadog-instrumentations/src/http2/server.js @@ -13,9 +13,7 @@ const startServerCh = channel('apm:http2:server:request:start') const errorServerCh = channel('apm:http2:server:request:error') const emitCh = channel('apm:http2:server:response:emit') -const names = ['http2', 'node:http2'] - -addHook({ name: names }, http2 => { +addHook({ name: 'http2' }, http2 => { shimmer.wrap(http2, 'createSecureServer', wrapCreateServer) shimmer.wrap(http2, 'createServer', wrapCreateServer) }) diff --git a/packages/datadog-instrumentations/src/jest.js b/packages/datadog-instrumentations/src/jest.js index 3c575100bad..794d5525ba1 100644 --- a/packages/datadog-instrumentations/src/jest.js +++ b/packages/datadog-instrumentations/src/jest.js @@ -1,5 
+1,8 @@ 'use strict' +// Capture real timers at module load time, before any test can install fake timers. +const realSetTimeout = setTimeout + const path = require('path') const shimmer = require('../../datadog-shimmer') const log = require('../../dd-trace/src/log') @@ -111,6 +114,8 @@ const efdDeterminedRetries = new Map() const efdSlowAbortedTests = new Set() // Tests added as EFD new-test candidates (not ATF, not impacted). const efdNewTestCandidates = new Set() +// Tests that are genuinely new (not in known tests list). +const newTests = new Set() const testSuiteAbsolutePathsWithFastCheck = new Set() const testSuiteJestObjects = new Map() @@ -485,7 +490,7 @@ function getWrappedEnvironment (BaseEnvironment, jestVersion) { } if (this.isKnownTestsEnabled) { - isNewTest = retriedTestsToNumAttempts.has(testName) + isNewTest = newTests.has(testName) } const willRunEfd = this.isEarlyFlakeDetectionEnabled && (isNewTest || isModified) @@ -605,6 +610,9 @@ function getWrappedEnvironment (BaseEnvironment, jestVersion) { } if (!isAttemptToFix && this.isKnownTestsEnabled) { const isNew = !this.knownTestsForThisSuite.includes(testFullName) + if (isNew && !isSkipped) { + newTests.add(testFullName) + } if (isNew && !isSkipped && !retriedTestsToNumAttempts.has(testFullName)) { if (DYNAMIC_NAME_RE.test(testFullName)) { // Populated directly for runInBand; for parallel workers the main process @@ -715,7 +723,7 @@ function getWrappedEnvironment (BaseEnvironment, jestVersion) { let isEfdRetry = false // We'll store the test statuses of the retries if (this.isKnownTestsEnabled) { - const isNewTest = retriedTestsToNumAttempts.has(testName) + const isNewTest = newTests.has(testName) if (isNewTest) { if (newTestsTestStatuses.has(testName)) { newTestsTestStatuses.get(testName).push(status) @@ -776,7 +784,7 @@ function getWrappedEnvironment (BaseEnvironment, jestVersion) { // This means that tests retried with DI are BREAKPOINT_HIT_GRACE_PERIOD_MS slower at least. 
if (status === 'fail' && mightHitBreakpoint) { await new Promise(resolve => { - setTimeout(() => { + realSetTimeout(() => { resolve() }, BREAKPOINT_HIT_GRACE_PERIOD_MS) }) @@ -811,6 +819,7 @@ function getWrappedEnvironment (BaseEnvironment, jestVersion) { efdDeterminedRetries.clear() efdSlowAbortedTests.clear() efdNewTestCandidates.clear() + newTests.clear() retriedTestsToNumAttempts.clear() attemptToFixRetriedTestsStatuses.clear() testsToBeRetried.clear() @@ -1345,7 +1354,7 @@ function getCliWrapper (isNewJestVersion) { }) const timeoutPromise = new Promise((resolve) => { - timeoutId = setTimeout(() => { + timeoutId = realSetTimeout(() => { resolve('timeout') }, FLUSH_TIMEOUT).unref() }) diff --git a/packages/datadog-instrumentations/src/limitd-client.js b/packages/datadog-instrumentations/src/limitd-client.js index 2b519a29bf5..c843345d6da 100644 --- a/packages/datadog-instrumentations/src/limitd-client.js +++ b/packages/datadog-instrumentations/src/limitd-client.js @@ -14,7 +14,7 @@ function wrapRequest (original) { addHook({ name: 'limitd-client', versions: ['>=2.8'], - file: ['client.js'], + file: 'client.js', }, LimitdClient => { shimmer.wrap(LimitdClient.prototype, '_directRequest', wrapRequest) shimmer.wrap(LimitdClient.prototype, '_retriedRequest', wrapRequest) diff --git a/packages/datadog-instrumentations/src/mocha/utils.js b/packages/datadog-instrumentations/src/mocha/utils.js index c3afc807b3f..4a91b6754c5 100644 --- a/packages/datadog-instrumentations/src/mocha/utils.js +++ b/packages/datadog-instrumentations/src/mocha/utils.js @@ -1,5 +1,8 @@ 'use strict' +// Capture real timers at module load time, before any test can install fake timers. 
+const realSetTimeout = setTimeout + const { getTestSuitePath, DYNAMIC_NAME_RE } = require('../../../dd-trace/src/plugins/util/test') const { channel } = require('../helpers/instrument') const shimmer = require('../../../datadog-shimmer') @@ -293,7 +296,7 @@ function getOnTestEndHandler (config) { // This means that tests retried with DI are BREAKPOINT_HIT_GRACE_PERIOD_MS slower at least. if (test._ddShouldWaitForHitProbe || test._retriedTest?._ddShouldWaitForHitProbe) { await new Promise((resolve) => { - setTimeout(() => { + realSetTimeout(() => { resolve() }, BREAKPOINT_HIT_GRACE_PERIOD_MS) }) diff --git a/packages/datadog-instrumentations/src/net.js b/packages/datadog-instrumentations/src/net.js index 977dc61eb30..255ebe7f607 100644 --- a/packages/datadog-instrumentations/src/net.js +++ b/packages/datadog-instrumentations/src/net.js @@ -16,16 +16,10 @@ const errorTCPCh = channel('apm:net:tcp:error') const readyCh = channel('apm:net:tcp:ready') const connectionCh = channel('apm:net:tcp:connection') -const names = ['net', 'node:net'] - -addHook({ name: names }, (net, version, name) => { +addHook({ name: 'net' }, (net) => { // explicitly require dns so that net gets an instrumented instance // so that we don't miss the dns calls - if (name === 'net') { - require('dns') - } else { - require('node:dns') - } + require('node:dns') shimmer.wrap(net.Socket.prototype, 'connect', connect => function () { if (!startICPCh.hasSubscribers || !startTCPCh.hasSubscribers) { diff --git a/packages/datadog-instrumentations/src/pino.js b/packages/datadog-instrumentations/src/pino.js index 043d97ac9d4..3d44cb7a181 100644 --- a/packages/datadog-instrumentations/src/pino.js +++ b/packages/datadog-instrumentations/src/pino.js @@ -97,7 +97,7 @@ addHook({ name: 'pino', versions: ['>=5.14.0 <6.8.0'] }, (pino) => { return wrapped }) -addHook({ name: 'pino', versions: ['>=6.8.0'], patchDefault: false }, (pino, _1, _2, isIitm) => { +addHook({ name: 'pino', versions: ['>=6.8.0'], patchDefault: 
false }, (pino) => { const mixinSym = pino.symbols.mixinSym const wrapped = shimmer.wrapFunction(pino, pino => wrapPino(mixinSym, wrapMixin, pino)) diff --git a/packages/datadog-instrumentations/src/playwright.js b/packages/datadog-instrumentations/src/playwright.js index 5a98b56dfb2..31b04eee3a6 100644 --- a/packages/datadog-instrumentations/src/playwright.js +++ b/packages/datadog-instrumentations/src/playwright.js @@ -1,5 +1,8 @@ 'use strict' +// Capture real timers at module load time, before any test can install fake timers. +const realSetTimeout = setTimeout + const satisfies = require('../../../vendor/dist/semifies') const shimmer = require('../../datadog-shimmer') @@ -1216,7 +1219,7 @@ addHook({ if (isRumActive) { // Give some time RUM to flush data, similar to what we do in selenium - await new Promise(resolve => setTimeout(resolve, RUM_FLUSH_WAIT_TIME)) + await new Promise(resolve => realSetTimeout(resolve, RUM_FLUSH_WAIT_TIME)) const url = page.url() if (url) { const domain = new URL(url).hostname diff --git a/packages/datadog-instrumentations/src/prisma.js b/packages/datadog-instrumentations/src/prisma.js index 6dd40aac3c0..fcbf0916ee3 100644 --- a/packages/datadog-instrumentations/src/prisma.js +++ b/packages/datadog-instrumentations/src/prisma.js @@ -136,11 +136,10 @@ function resolveClientDbConfig (clientConfig, datasourceName, runtimeDbConfig) { /** * @param {unknown} runtime * @param {string} versions - * @param {string} [name] * @param {boolean} [isIitm] * @returns {object} */ -const prismaHook = (runtime, versions, name, isIitm) => { +const prismaHook = (runtime, versions, isIitm) => { /** * @typedef {{ getPrismaClient?: (config: PrismaRuntimeConfig, ...args: unknown[]) => Function }} PrismaRuntime */ diff --git a/packages/datadog-instrumentations/src/selenium.js b/packages/datadog-instrumentations/src/selenium.js index 88f54c9debf..0046edd7e3e 100644 --- a/packages/datadog-instrumentations/src/selenium.js +++ 
b/packages/datadog-instrumentations/src/selenium.js @@ -1,5 +1,8 @@ 'use strict' +// Capture real timers at module load time, before any test can install fake timers. +const realSetTimeout = setTimeout + const shimmer = require('../../datadog-shimmer') const { getValueFromEnvSources } = require('../../dd-trace/src/config/helper') const { addHook, channel } = require('./helpers/instrument') @@ -66,7 +69,7 @@ addHook({ if (isRumActive) { // We'll have time for RUM to flush the events (there's no callback to know when it's done) await new Promise(resolve => { - setTimeout(() => { + realSetTimeout(() => { resolve() }, DD_CIVISIBILITY_RUM_FLUSH_WAIT_MILLIS) }) diff --git a/packages/datadog-instrumentations/src/sequelize.js b/packages/datadog-instrumentations/src/sequelize.js index 29b9ccdfd77..74fa264f52d 100644 --- a/packages/datadog-instrumentations/src/sequelize.js +++ b/packages/datadog-instrumentations/src/sequelize.js @@ -6,7 +6,7 @@ const { addHook, } = require('./helpers/instrument') -addHook({ name: 'sequelize', versions: ['>=4'], file: ['lib/sequelize.js'] }, Sequelize => { +addHook({ name: 'sequelize', versions: ['>=4'], file: 'lib/sequelize.js' }, Sequelize => { const startCh = channel('datadog:sequelize:query:start') const finishCh = channel('datadog:sequelize:query:finish') diff --git a/packages/datadog-instrumentations/src/url.js b/packages/datadog-instrumentations/src/url.js index 654898a826d..6c19cb9060a 100644 --- a/packages/datadog-instrumentations/src/url.js +++ b/packages/datadog-instrumentations/src/url.js @@ -2,13 +2,11 @@ const shimmer = require('../../datadog-shimmer') const { addHook, channel } = require('./helpers/instrument') -const names = ['url', 'node:url'] - const parseFinishedChannel = channel('datadog:url:parse:finish') const urlGetterChannel = channel('datadog:url:getter:finish') const instrumentedGetters = ['host', 'origin', 'hostname'] -addHook({ name: names }, function (url) { +addHook({ name: 'url' }, function (url) { 
shimmer.wrap(url, 'parse', (parse) => { return function wrappedParse (input) { const parsedValue = parse.apply(this, arguments) diff --git a/packages/datadog-instrumentations/src/vitest.js b/packages/datadog-instrumentations/src/vitest.js index 4b19bb8ee02..a83093973f3 100644 --- a/packages/datadog-instrumentations/src/vitest.js +++ b/packages/datadog-instrumentations/src/vitest.js @@ -1,4 +1,8 @@ 'use strict' + +// Capture real timers at module load time, before any test can install fake timers. +const realSetTimeout = setTimeout + const path = require('node:path') const shimmer = require('../../datadog-shimmer') @@ -83,7 +87,7 @@ function getTestCommand () { function waitForHitProbe () { return new Promise(resolve => { - setTimeout(() => { + realSetTimeout(() => { resolve() }, BREAKPOINT_HIT_GRACE_PERIOD_MS) }) diff --git a/packages/datadog-instrumentations/src/vm.js b/packages/datadog-instrumentations/src/vm.js index 545e797085f..46159c855f5 100644 --- a/packages/datadog-instrumentations/src/vm.js +++ b/packages/datadog-instrumentations/src/vm.js @@ -2,12 +2,10 @@ const shimmer = require('../../datadog-shimmer') const { channel, addHook } = require('./helpers/instrument') -const names = ['vm', 'node:vm'] - const runScriptStartChannel = channel('datadog:vm:run-script:start') const sourceTextModuleStartChannel = channel('datadog:vm:source-text-module:start') -addHook({ name: names }, function (vm) { +addHook({ name: 'vm' }, function (vm) { vm.Script = class extends vm.Script { constructor (code) { super(...arguments) diff --git a/packages/datadog-plugin-aws-sdk/src/base.js b/packages/datadog-plugin-aws-sdk/src/base.js index da8e40a56f1..b349ba99a80 100644 --- a/packages/datadog-plugin-aws-sdk/src/base.js +++ b/packages/datadog-plugin-aws-sdk/src/base.js @@ -23,12 +23,13 @@ class BaseAwsSdkPlugin extends ClientPlugin { return id } + /** @type {import('../../dd-trace/src/config/config-types').ConfigProperties['cloudPayloadTagging']} */ get cloudTaggingConfig () { 
return this._tracerConfig.cloudPayloadTagging } get payloadTaggingRules () { - return this.cloudTaggingConfig.rules.aws?.[this.constructor.id] + return this.cloudTaggingConfig.rules?.aws?.[this.constructor.id] } constructor (...args) { @@ -78,7 +79,7 @@ class BaseAwsSdkPlugin extends ClientPlugin { this.requestInject(span, request) }) - if (this.constructor.isPayloadReporter && this.cloudTaggingConfig.requestsEnabled) { + if (this.constructor.isPayloadReporter && this.cloudTaggingConfig.request) { const maxDepth = this.cloudTaggingConfig.maxDepth const requestTags = tagsFromRequest(this.payloadTaggingRules, request.params, { maxDepth }) span.addTags(requestTags) @@ -215,7 +216,7 @@ class BaseAwsSdkPlugin extends ClientPlugin { span.addTags(tags) - if (this.constructor.isPayloadReporter && this.cloudTaggingConfig.responsesEnabled) { + if (this.constructor.isPayloadReporter && this.cloudTaggingConfig.response) { const maxDepth = this.cloudTaggingConfig.maxDepth const responseBody = this.extractResponseBody(response) const responseTags = tagsFromResponse(this.payloadTaggingRules, responseBody, { maxDepth }) diff --git a/packages/datadog-plugin-confluentinc-kafka-javascript/test/index.spec.js b/packages/datadog-plugin-confluentinc-kafka-javascript/test/index.spec.js index b1454be7359..011fd1aaaf9 100644 --- a/packages/datadog-plugin-confluentinc-kafka-javascript/test/index.spec.js +++ b/packages/datadog-plugin-confluentinc-kafka-javascript/test/index.spec.js @@ -252,6 +252,85 @@ describe('Plugin', () => { return expectedSpanPromise }) }) + + describe('consumer (eachBatch)', () => { + let consumer + let batchMessages + + beforeEach(async () => { + batchMessages = [{ key: 'key1', value: 'test2' }, { key: 'key2', value: 'test3' }] + consumer = kafka.consumer({ + kafkaJS: { groupId, fromBeginning: true, autoCommit: false }, + }) + await consumer.connect() + await consumer.subscribe({ topic: testTopic }) + }) + + afterEach(async () => { + await consumer.disconnect() + }) + 
+ it('should be instrumented', async () => { + const expectedSpanPromise = expectSpanWithDefaults({ + name: expectedSchema.receive.opName, + service: expectedSchema.receive.serviceName, + meta: { + 'span.kind': 'consumer', + component: 'confluentinc-kafka-javascript', + 'kafka.topic': testTopic, + 'messaging.destination.name': testTopic, + 'messaging.system': 'kafka', + }, + resource: testTopic, + error: 0, + type: 'worker', + }) + + await consumer.run({ eachBatch: () => {} }) + return Promise.all([sendMessages(kafka, testTopic, batchMessages), expectedSpanPromise]) + }) + + it('should run the consumer in the context of the consumer span', done => { + const firstSpan = tracer.scope().active() + let eachBatch = async ({ batch }) => { + const currentSpan = tracer.scope().active() + + try { + assert.notEqual(currentSpan, firstSpan) + assert.strictEqual(currentSpan.context()._name, expectedSchema.receive.opName) + done() + } catch (e) { + done(e) + } finally { + eachBatch = () => {} // avoid being called for each message + } + } + + consumer.run({ eachBatch: (...args) => eachBatch(...args) }) + .then(() => sendMessages(kafka, testTopic, batchMessages)) + .catch(done) + }) + + it('should propagate context via span links', async () => { + const expectedSpanPromise = agent.assertSomeTraces(traces => { + const span = traces[0][0] + const links = span.meta['_dd.span_links'] ? JSON.parse(span.meta['_dd.span_links']) : [] + + assertObjectContains(span, { + name: expectedSchema.receive.opName, + service: expectedSchema.receive.serviceName, + resource: testTopic, + }) + + // librdkafka may deliver messages across multiple batches, + // so each batch span will have links for the messages it received. 
+ assert.ok(links.length >= 1, `expected at least 1 span link, got ${links.length}`) + }) + + await consumer.run({ eachBatch: () => {} }) + await Promise.all([sendMessages(kafka, testTopic, batchMessages), expectedSpanPromise]) + }) + }) }) // Adding tests for the native API diff --git a/packages/datadog-plugin-cucumber/src/index.js b/packages/datadog-plugin-cucumber/src/index.js index 3687fc90a09..e49befa4396 100644 --- a/packages/datadog-plugin-cucumber/src/index.js +++ b/packages/datadog-plugin-cucumber/src/index.js @@ -1,5 +1,9 @@ 'use strict' +// Capture real timers at module load time, before any test can install fake timers. +const realDateNow = Date.now.bind(Date) +const realSetTimeout = setTimeout + const CiPlugin = require('../../dd-trace/src/plugins/ci_plugin') const { storage } = require('../../datadog-core') const { getEnvironmentVariable, getValueFromEnvSources } = require('../../dd-trace/src/config/helper') @@ -229,7 +233,7 @@ class CucumberPlugin extends CiPlugin { // Time we give the breakpoint to be hit if (promises && this.runningTestProbe) { promises.hitBreakpointPromise = new Promise((resolve) => { - setTimeout(resolve, BREAKPOINT_HIT_GRACE_PERIOD_MS) + realSetTimeout(resolve, BREAKPOINT_HIT_GRACE_PERIOD_MS) }) } @@ -252,8 +256,8 @@ class CucumberPlugin extends CiPlugin { const { file, line, stackIndex } = probeInformation this.runningTestProbe = { file, line } this.testErrorStackIndex = stackIndex - const waitUntil = Date.now() + BREAKPOINT_SET_GRACE_PERIOD_MS - while (Date.now() < waitUntil) { + const waitUntil = realDateNow() + BREAKPOINT_SET_GRACE_PERIOD_MS + while (realDateNow() < waitUntil) { // TODO: To avoid a race condition, we should wait until `probeInformation.setProbePromise` has resolved. // However, Cucumber doesn't have a mechanism for waiting asyncrounously here, so for now, we'll have to // fall back to a fixed syncronous delay. 
diff --git a/packages/datadog-plugin-cypress/src/cypress-plugin.js b/packages/datadog-plugin-cypress/src/cypress-plugin.js index 015a6307024..52bb099345e 100644 --- a/packages/datadog-plugin-cypress/src/cypress-plugin.js +++ b/packages/datadog-plugin-cypress/src/cypress-plugin.js @@ -307,10 +307,55 @@ class CypressPlugin { } } + /** + * Resets state that is scoped to a single Cypress run so the singleton plugin + * can be reused safely across multiple programmatic cypress.run() calls. + * + * @returns {void} + */ + resetRunState () { + this._isInit = false + this.finishedTestsByFile = {} + this.testStatuses = {} + this.isTestsSkipped = false + this.isSuitesSkippingEnabled = false + this.isCodeCoverageEnabled = false + this.isFlakyTestRetriesEnabled = false + this.flakyTestRetriesCount = 0 + this.isEarlyFlakeDetectionEnabled = false + this.isKnownTestsEnabled = false + this.earlyFlakeDetectionNumRetries = 0 + this.testsToSkip = [] + this.skippedTests = [] + this.hasForcedToRunSuites = false + this.hasUnskippableSuites = false + this.unskippableSuites = [] + this.knownTests = [] + this.knownTestsByTestSuite = undefined + this.isTestManagementTestsEnabled = false + this.testManagementAttemptToFixRetries = 0 + this.testManagementTests = undefined + this.isImpactedTestsEnabled = false + this.modifiedFiles = [] + this.activeTestSpan = null + this.testSuiteSpan = null + this.testModuleSpan = null + this.testSessionSpan = null + this.command = undefined + this.frameworkVersion = undefined + this.rootDir = undefined + this.itrCorrelationId = undefined + this.isTestIsolationEnabled = undefined + this.rumFlushWaitMillis = undefined + this._pendingRequestErrorTags = [] + this.libraryConfigurationPromise = undefined + } + // Init function returns a promise that resolves with the Cypress configuration // Depending on the received configuration, the Cypress configuration can be modified: // for example, to enable retries for failed tests. 
init (tracer, cypressConfig) { + this.resetRunState() this._isInit = true this.tracer = tracer this.cypressConfig = cypressConfig @@ -694,20 +739,27 @@ class CypressPlugin { } return new Promise(resolve => { + const finishAfterRun = () => { + this._isInit = false + appClosingTelemetry() + resolve(null) + } + const exporter = this.tracer._tracer._exporter if (!exporter) { - return resolve(null) + finishAfterRun() + return } if (exporter.flush) { exporter.flush(() => { - appClosingTelemetry() - resolve(null) + finishAfterRun() }) } else if (exporter._writer) { exporter._writer.flush(() => { - appClosingTelemetry() - resolve(null) + finishAfterRun() }) + } else { + finishAfterRun() } }) } diff --git a/packages/datadog-plugin-graphql/src/resolve.js b/packages/datadog-plugin-graphql/src/resolve.js index c92b828f422..3597f2002ae 100644 --- a/packages/datadog-plugin-graphql/src/resolve.js +++ b/packages/datadog-plugin-graphql/src/resolve.js @@ -28,7 +28,7 @@ class GraphQLResolvePlugin extends TracingPlugin { if (rootCtx.fields[computedPathString]) return if (!rootCtx[collapsedPathSym]) { - rootCtx[collapsedPathSym] = {} + rootCtx[collapsedPathSym] = Object.create(null) } else if (rootCtx[collapsedPathSym][computedPathString]) { return } diff --git a/packages/datadog-plugin-graphql/test/index.spec.js b/packages/datadog-plugin-graphql/test/index.spec.js index 73385f89f7d..9d943372a45 100644 --- a/packages/datadog-plugin-graphql/test/index.spec.js +++ b/packages/datadog-plugin-graphql/test/index.spec.js @@ -446,6 +446,36 @@ describe('Plugin', () => { graphql.graphql({ schema, source }).catch(done) }) + it('should trace aliased __proto__ fields with default collapsing', async () => { + const source = '{ hello(name: "world") __proto__: hello(name: "alias") }' + + const [, result] = await Promise.all([ + agent.assertSomeTraces(traces => { + const spans = sort(traces[0]) + const resolveSpans = spans.filter(span => span.name === 'graphql.resolve') + + 
assert.strictEqual(resolveSpans.length, 2) + + const paths = resolveSpans + .map(span => span.meta['graphql.field.path']) + .sort() + + assert.deepStrictEqual(paths, ['__proto__', 'hello']) + + for (const span of resolveSpans) { + assert.strictEqual(span.error, 0) + assert.strictEqual(span.resource, 'hello:String') + } + }), + graphql.graphql({ schema, source }), + ]) + + assert.ok(!result.errors || result.errors.length === 0) + assert.strictEqual(result.data.hello, 'world') + // eslint-disable-next-line no-proto + assert.strictEqual(result.data.__proto__, 'alias') + }) + it('should instrument each field resolver duration independently', done => { const source = ` { @@ -1667,6 +1697,31 @@ describe('Plugin', () => { graphql.graphql({ schema, source }).catch(done) }) + + it('should trace aliased __proto__ fields when collapsing is disabled', async () => { + const source = '{ __proto__: hello(name: "alias") }' + + const [, result] = await Promise.all([ + agent.assertSomeTraces(traces => { + const spans = sort(traces[0]) + const resolveSpans = spans.filter(span => span.name === 'graphql.resolve') + + assert.strictEqual(resolveSpans.length, 1) + assertObjectContains(resolveSpans[0], { + resource: 'hello:String', + error: 0, + meta: { + 'graphql.field.path': '__proto__', + }, + }) + }), + graphql.graphql({ schema, source }), + ]) + + assert.ok(!result.errors || result.errors.length === 0) + // eslint-disable-next-line no-proto + assert.strictEqual(result.data.__proto__, 'alias') + }) }) describe('with signature calculation disabled', () => { diff --git a/packages/datadog-plugin-grpc/test/client.spec.js b/packages/datadog-plugin-grpc/test/client.spec.js index 6c1eef00667..bad1443d9f4 100644 --- a/packages/datadog-plugin-grpc/test/client.spec.js +++ b/packages/datadog-plugin-grpc/test/client.spec.js @@ -12,7 +12,7 @@ const loader = require('../../../versions/@grpc/proto-loader').get() const { withNamingSchema, withPeerService, withVersions } = 
require('../../dd-trace/test/setup/mocha') const agent = require('../../dd-trace/test/plugins/agent') const { ERROR_MESSAGE, ERROR_TYPE, ERROR_STACK } = require('../../dd-trace/src/constants') -const defaults = require('../../dd-trace/src/config/defaults') +const { defaults } = require('../../dd-trace/src/config/defaults') const { NODE_MAJOR } = require('../../../version') const getService = require('./service') diff --git a/packages/datadog-plugin-grpc/test/server.spec.js b/packages/datadog-plugin-grpc/test/server.spec.js index 1c75183879d..f7c638d22fb 100644 --- a/packages/datadog-plugin-grpc/test/server.spec.js +++ b/packages/datadog-plugin-grpc/test/server.spec.js @@ -11,7 +11,7 @@ const { assertObjectContains } = require('../../../integration-tests/helpers') const { withNamingSchema, withVersions } = require('../../dd-trace/test/setup/mocha') const agent = require('../../dd-trace/test/plugins/agent') const { ERROR_MESSAGE, ERROR_TYPE, ERROR_STACK } = require('../../dd-trace/src/constants') -const defaults = require('../../dd-trace/src/config/defaults') +const { defaults } = require('../../dd-trace/src/config/defaults') const { NODE_MAJOR } = require('../../../version') const GRPC_SERVER_ERROR_STATUSES = defaults['grpc.server.error.statuses'] diff --git a/packages/datadog-plugin-jest/src/index.js b/packages/datadog-plugin-jest/src/index.js index 45cfb7d2ec5..e73767b6a33 100644 --- a/packages/datadog-plugin-jest/src/index.js +++ b/packages/datadog-plugin-jest/src/index.js @@ -1,5 +1,8 @@ 'use strict' +// Capture real timers at module load time, before any test can install fake timers. 
+const realSetTimeout = setTimeout + const CiPlugin = require('../../dd-trace/src/plugins/ci_plugin') const { storage } = require('../../datadog-core') const { getEnvironmentVariable, getValueFromEnvSources } = require('../../dd-trace/src/config/helper') @@ -60,8 +63,7 @@ const CHILD_MESSAGE_END = 2 function withTimeout (promise, timeoutMs) { return new Promise(resolve => { - // Set a timeout to resolve after 1s - setTimeout(resolve, timeoutMs) + realSetTimeout(resolve, timeoutMs) // Also resolve if the original promise resolves promise.then(resolve) diff --git a/packages/datadog-plugin-kafkajs/src/batch-consumer.js b/packages/datadog-plugin-kafkajs/src/batch-consumer.js index 7cc07bbac12..0c89d0e4147 100644 --- a/packages/datadog-plugin-kafkajs/src/batch-consumer.js +++ b/packages/datadog-plugin-kafkajs/src/batch-consumer.js @@ -8,20 +8,47 @@ class KafkajsBatchConsumerPlugin extends ConsumerPlugin { static id = 'kafkajs' static operation = 'consume-batch' - start (ctx) { - const { topic, messages, groupId, clusterId } = ctx.extractedArgs || ctx + bindStart (ctx) { + const { topic, partition, messages, groupId, clusterId } = ctx.extractedArgs || ctx + + const span = this.startSpan({ + resource: topic, + type: 'worker', + meta: { + component: this.constructor.id, + 'kafka.topic': topic, + 'kafka.cluster_id': clusterId, + 'messaging.destination.name': topic, + 'messaging.system': 'kafka', + }, + metrics: { + 'kafka.partition': partition, + 'messaging.batch.message_count': messages.length, + }, + }, ctx) - if (!this.config.dsmEnabled) return for (const message of messages) { if (!message || !message.headers) continue + + const headers = convertToTextMap(message.headers) + if (headers) { + const childOf = this.tracer.extract('text_map', headers) + if (childOf) { + span.addLink(childOf) + } + } + + if (!this.config.dsmEnabled) continue const payloadSize = getMessageSize(message) - this.tracer.decodeDataStreamsContext(convertToTextMap(message.headers)) + 
this.tracer.decodeDataStreamsContext(headers) const edgeTags = ['direction:in', `group:${groupId}`, `topic:${topic}`, 'type:kafka'] if (clusterId) { edgeTags.push(`kafka_cluster_id:${clusterId}`) } this.tracer.setCheckpoint(edgeTags, null, payloadSize) } + + return ctx.currentStore } } diff --git a/packages/datadog-plugin-kafkajs/test/index.spec.js b/packages/datadog-plugin-kafkajs/test/index.spec.js index 3bd95902c56..596482937ce 100644 --- a/packages/datadog-plugin-kafkajs/test/index.spec.js +++ b/packages/datadog-plugin-kafkajs/test/index.spec.js @@ -404,6 +404,123 @@ describe('Plugin', () => { rawExpectedSchema.receive ) }) + + describe('consumer (eachBatch)', () => { + let consumer + const batchMessages = [{ key: 'key1', value: 'test2' }, { key: 'key2', value: 'test3' }] + + beforeEach(async () => { + consumer = kafka.consumer({ groupId: 'test-group' }) + await consumer.connect() + await consumer.subscribe({ topic: testTopic, fromBeginning: true }) + }) + + afterEach(async () => { + await consumer.disconnect() + }) + + it('should be instrumented', async () => { + const meta = { + 'span.kind': 'consumer', + component: 'kafkajs', + 'kafka.topic': testTopic, + 'messaging.destination.name': testTopic, + 'messaging.system': 'kafka', + } + if (clusterIdAvailable) meta['kafka.cluster_id'] = testKafkaClusterId + + const expectedSpanPromise = expectSpanWithDefaults({ + name: expectedSchema.receive.opName, + service: expectedSchema.receive.serviceName, + meta, + metrics: { + 'messaging.batch.message_count': batchMessages.length, + }, + resource: testTopic, + error: 0, + type: 'worker', + }) + + await consumer.run({ + eachBatch: () => {}, + }) + return Promise.all([sendMessages(kafka, testTopic, batchMessages), expectedSpanPromise]) + }) + + it('should run the consumer in the context of the consumer span', done => { + const firstSpan = tracer.scope().active() + + let eachBatch = async ({ batch }) => { + const currentSpan = tracer.scope().active() + + try { + 
assert.notEqual(currentSpan, firstSpan) + assert.strictEqual(currentSpan.context()._name, expectedSchema.receive.opName) + done() + } catch (e) { + done(e) + } finally { + eachBatch = () => {} // avoid being called for each message + } + } + + consumer.run({ eachBatch: (...args) => eachBatch(...args) }) + .then(() => sendMessages(kafka, testTopic, batchMessages)) + .catch(done) + }) + + it('should propagate context via span links', async () => { + const expectedSpanPromise = agent.assertSomeTraces(traces => { + const span = traces[0][0] + const links = span.meta['_dd.span_links'] ? JSON.parse(span.meta['_dd.span_links']) : [] + + assertObjectContains(span, { + name: expectedSchema.receive.opName, + service: expectedSchema.receive.serviceName, + resource: testTopic, + }) + + assert.strictEqual(links.length, batchMessages.length) + }) + + await consumer.run({ eachBatch: () => {} }) + await Promise.all([sendMessages(kafka, testTopic, batchMessages), expectedSpanPromise]) + }) + + it('should not fail when messages have headers without trace context', async () => { + const messagesWithHeaders = [ + { key: 'key1', value: 'test1', headers: { 'x-custom-header': 'value' } }, + ] + const meta = { + 'span.kind': 'consumer', + component: 'kafkajs', + 'kafka.topic': testTopic, + 'messaging.destination.name': testTopic, + 'messaging.system': 'kafka', + } + if (clusterIdAvailable) meta['kafka.cluster_id'] = testKafkaClusterId + + const expectedSpanPromise = expectSpanWithDefaults({ + name: expectedSchema.receive.opName, + service: expectedSchema.receive.serviceName, + meta, + resource: testTopic, + error: 0, + type: 'worker', + }) + + await consumer.run({ eachBatch: () => {} }) + return Promise.all([sendMessages(kafka, testTopic, messagesWithHeaders), expectedSpanPromise]) + }) + + withNamingSchema( + async () => { + await consumer.run({ eachBatch: () => {} }) + await sendMessages(kafka, testTopic, batchMessages) + }, + rawExpectedSchema.receive + ) + }) }) }) }) diff --git 
a/packages/datadog-plugin-mocha/src/index.js b/packages/datadog-plugin-mocha/src/index.js index b108e5c2476..4a108ecdd29 100644 --- a/packages/datadog-plugin-mocha/src/index.js +++ b/packages/datadog-plugin-mocha/src/index.js @@ -1,5 +1,8 @@ 'use strict' +// Capture real Date.now at module load time, before any test can install fake timers. +const realDateNow = Date.now.bind(Date) + const CiPlugin = require('../../dd-trace/src/plugins/ci_plugin') const { storage } = require('../../datadog-core') const { getValueFromEnvSources } = require('../../dd-trace/src/config/helper') @@ -323,8 +326,8 @@ class MochaPlugin extends CiPlugin { this.runningTestProbe = { file, line } this.testErrorStackIndex = stackIndex test._ddShouldWaitForHitProbe = true - const waitUntil = Date.now() + BREAKPOINT_SET_GRACE_PERIOD_MS - while (Date.now() < waitUntil) { + const waitUntil = realDateNow() + BREAKPOINT_SET_GRACE_PERIOD_MS + while (realDateNow() < waitUntil) { // TODO: To avoid a race condition, we should wait until `probeInformation.setProbePromise` has resolved. // However, Mocha doesn't have a mechanism for waiting asyncrounously here, so for now, we'll have to // fall back to a fixed syncronous delay. 
diff --git a/packages/datadog-plugin-next/src/index.js b/packages/datadog-plugin-next/src/index.js index fa4216cdb7b..04aea364971 100644 --- a/packages/datadog-plugin-next/src/index.js +++ b/packages/datadog-plugin-next/src/index.js @@ -10,7 +10,6 @@ const errorPages = new Set(['/404', '/500', '/_error', '/_not-found', '/_not-fou class NextPlugin extends ServerPlugin { static id = 'next' - #requestsBySpanId = new WeakMap() constructor (...args) { super(...args) @@ -35,11 +34,7 @@ class NextPlugin extends ServerPlugin { analyticsSampler.sample(span, this.config.measured, true) - // Store request by span ID to handle cases where child spans are activated - const spanId = span.context()._spanId - this.#requestsBySpanId.set(spanId, req) - - return { ...store, span } + return { ...store, span, req } } error ({ span, error }) { @@ -90,14 +85,7 @@ class NextPlugin extends ServerPlugin { if (!store) return - const span = store.span - - const spanId = span.context()._spanId - const parentSpanId = span.context()._parentId - - // Try current span first, then parent span. - // This handles cases where pageLoad runs in a child span context - const req = this.#requestsBySpanId.get(spanId) ?? 
this.#requestsBySpanId.get(parentSpanId) + const { span, req } = store // safeguard against missing req in complicated timeout scenarios if (!req) return diff --git a/packages/datadog-plugin-openai/src/services.js b/packages/datadog-plugin-openai/src/services.js index f3ca56911af..917aa260544 100644 --- a/packages/datadog-plugin-openai/src/services.js +++ b/packages/datadog-plugin-openai/src/services.js @@ -20,6 +20,7 @@ module.exports.init = function (tracerConfig) { `env:${tracerConfig.tags.env}`, `version:${tracerConfig.tags.version}`, ], + lookup: tracerConfig.lookup, }) : new NoopDogStatsDClient() diff --git a/packages/datadog-plugin-openai/test/services.spec.js b/packages/datadog-plugin-openai/test/services.spec.js index 99673240f49..cd984d320e9 100644 --- a/packages/datadog-plugin-openai/test/services.spec.js +++ b/packages/datadog-plugin-openai/test/services.spec.js @@ -1,15 +1,60 @@ 'use strict' +const sinon = require('sinon') +const proxyquire = require('proxyquire') + const services = require('../src/services') const { getConfigFresh } = require('../../dd-trace/test/helpers/config') describe('Plugin', () => { describe('openai services', () => { - describe('when unconfigured', () => { - afterEach(() => { - services.shutdown() + afterEach(() => { + services.shutdown() + }) + + it('should initialize DogStatsDClient with explicit config values', () => { + const flush = sinon.stub() + const DogStatsDClient = sinon.stub().returns({ + flush, + }) + const ExternalLogger = sinon.stub().returns({ + log: sinon.stub(), + }) + const NoopDogStatsDClient = sinon.stub() + const NoopExternalLogger = sinon.stub() + const proxiedServices = proxyquire('../src/services', { + '../../dd-trace/src/dogstatsd': { DogStatsDClient }, + '../../dd-trace/src/noop/dogstatsd': NoopDogStatsDClient, + '../../dd-trace/src/external-logger/src': { + ExternalLogger, + NoopExternalLogger, + }, + }) + const config = getConfigFresh({ + env: 'prod', + hostname: 'foo', + service: 'bar', + version: 
'1.2.3', }) + proxiedServices.init(config) + + sinon.assert.calledOnceWithExactly(DogStatsDClient, { + host: config.dogstatsd.hostname, + lookup: config.lookup, + port: config.dogstatsd.port, + tags: [ + 'service:bar', + 'env:prod', + 'version:1.2.3', + ], + }) + sinon.assert.notCalled(NoopDogStatsDClient) + + proxiedServices.shutdown() + }) + + describe('when unconfigured', () => { it('dogstatsd does not throw when missing .dogstatsd', () => { const service = services.init(getConfigFresh({ hostname: 'foo', diff --git a/packages/datadog-plugin-prisma/test/index.spec.js b/packages/datadog-plugin-prisma/test/index.spec.js index 6cd6ec503fc..89a054cab46 100644 --- a/packages/datadog-plugin-prisma/test/index.spec.js +++ b/packages/datadog-plugin-prisma/test/index.spec.js @@ -326,16 +326,23 @@ describe('Plugin', () => { supportedRange = '>=6.16.0 <7.0.0' } withVersions('prisma', ['@prisma/client'], supportedRange, async (range, _moduleName_, version) => { + // Run prisma generate once per (config, version) pair instead of once per describe block. + // All three describe blocks below use the same schema + version, so the output is identical. 
+ before(async function () { + this.timeout(10000) + clearPrismaEnv() + setPrismaEnv(config) + const cwd = await copySchemaToVersionDir(config.schema, range) + execPrismaGenerate(config, cwd) + }) + describe(`without configuration ${config.schema}`, () => { before(async function () { this.timeout(10000) clearPrismaEnv() setPrismaEnv(config) - const cwd = await copySchemaToVersionDir(config.schema, range) - await agent.load(['prisma', 'pg']) - execPrismaGenerate(config, cwd) prisma = loadPrismaModule(config, range) prismaClient = createPrismaClient(prisma, config) @@ -514,10 +521,6 @@ describe('Plugin', () => { clearPrismaEnv() setPrismaEnv(config) - const cwd = await copySchemaToVersionDir(config.schema, range) - - execPrismaGenerate(config, cwd) - require('../../dd-trace') prisma = loadPrismaModule(config, range) @@ -537,10 +540,6 @@ describe('Plugin', () => { clearPrismaEnv() setPrismaEnv(config) - const cwd = await copySchemaToVersionDir(config.schema, range) - - execPrismaGenerate(config, cwd) - const pluginConfig = { service: 'custom', } diff --git a/packages/datadog-plugin-prisma/test/integration-test/client.spec.js b/packages/datadog-plugin-prisma/test/integration-test/client.spec.js index 063ab2bd6b2..0ba0625656c 100644 --- a/packages/datadog-plugin-prisma/test/integration-test/client.spec.js +++ b/packages/datadog-plugin-prisma/test/integration-test/client.spec.js @@ -207,7 +207,6 @@ describe('esm', () => { let agent let proc prismaClientConfigs.forEach(config => { - // if (!config.name.includes('prisma-generator v7 mssql adapter (url)')) return describe(config.name, () => { const isNodeSupported = semifies(semver.clean(process.version), '>=20.19.0') const isPrismaV7 = config.configFile @@ -272,7 +271,7 @@ describe('esm', () => { ' --target ES2023' + ' --module ESNext' + ' --strict true' + - ' --moduleResolution node' + + ' --moduleResolution bundler' + ' --esModuleInterop true' ) } diff --git 
a/packages/datadog-plugin-prisma/test/integration-test/server-output.mjs b/packages/datadog-plugin-prisma/test/integration-test/server-output.mjs index 762e29b4843..0b1d135011e 100644 --- a/packages/datadog-plugin-prisma/test/integration-test/server-output.mjs +++ b/packages/datadog-plugin-prisma/test/integration-test/server-output.mjs @@ -14,3 +14,5 @@ await prismaClient.user.findUnique({ id: user.id, }, }) + +await prismaClient.$disconnect() diff --git a/packages/datadog-plugin-prisma/test/integration-test/server-ts-v6.mjs b/packages/datadog-plugin-prisma/test/integration-test/server-ts-v6.mjs index f51427a7282..3342a206731 100644 --- a/packages/datadog-plugin-prisma/test/integration-test/server-ts-v6.mjs +++ b/packages/datadog-plugin-prisma/test/integration-test/server-ts-v6.mjs @@ -15,3 +15,5 @@ await prismaClient.user.findUnique({ id: user.id, }, }) + +await prismaClient.$disconnect() diff --git a/packages/datadog-plugin-prisma/test/integration-test/server-ts-v7-mssql.mjs b/packages/datadog-plugin-prisma/test/integration-test/server-ts-v7-mssql.mjs index 52875851ed5..5366072409e 100644 --- a/packages/datadog-plugin-prisma/test/integration-test/server-ts-v7-mssql.mjs +++ b/packages/datadog-plugin-prisma/test/integration-test/server-ts-v7-mssql.mjs @@ -31,3 +31,5 @@ await prismaClient.user.findUnique({ id: user.id, }, }) + +await prismaClient.$disconnect() diff --git a/packages/datadog-plugin-prisma/test/integration-test/server-ts-v7.mjs b/packages/datadog-plugin-prisma/test/integration-test/server-ts-v7.mjs index e2ff203e663..b39dd37785b 100644 --- a/packages/datadog-plugin-prisma/test/integration-test/server-ts-v7.mjs +++ b/packages/datadog-plugin-prisma/test/integration-test/server-ts-v7.mjs @@ -25,3 +25,5 @@ await prismaClient.user.findUnique({ id: user.id, }, }) + +await prismaClient.$disconnect() diff --git a/packages/datadog-plugin-prisma/test/integration-test/server.mjs b/packages/datadog-plugin-prisma/test/integration-test/server.mjs index 
61f8d117aa8..28eb0909f49 100644 --- a/packages/datadog-plugin-prisma/test/integration-test/server.mjs +++ b/packages/datadog-plugin-prisma/test/integration-test/server.mjs @@ -14,3 +14,5 @@ await prismaClient.user.findUnique({ id: user.id, }, }) + +await prismaClient.$disconnect() diff --git a/packages/datadog-webpack/index.js b/packages/datadog-webpack/index.js index 6ee82386e6c..0a5f91be719 100644 --- a/packages/datadog-webpack/index.js +++ b/packages/datadog-webpack/index.js @@ -21,12 +21,12 @@ for (const hook of Object.values(hooks)) { const modulesOfInterest = new Set() -for (const instrumentation of Object.values(instrumentations)) { +for (const [name, instrumentation] of Object.entries(instrumentations)) { for (const entry of instrumentation) { if (entry.file) { - modulesOfInterest.add(`${entry.name}/${entry.file}`) // e.g. "redis/my/file.js" + modulesOfInterest.add(`${name}/${entry.file}`) // e.g. "redis/my/file.js" } else { - modulesOfInterest.add(entry.name) // e.g. "redis" + modulesOfInterest.add(name) // e.g. "redis" } } } diff --git a/packages/dd-trace/index.js b/packages/dd-trace/index.js index 6f7ee42d5a6..c4175c20a30 100644 --- a/packages/dd-trace/index.js +++ b/packages/dd-trace/index.js @@ -1,23 +1,16 @@ 'use strict' if (!global._ddtrace) { - const TracerProxy = require('./src') - - Object.defineProperty(global, '_ddtrace', { - value: new TracerProxy(), - enumerable: false, - configurable: true, - writable: true, - }) - const ddTraceSymbol = Symbol.for('dd-trace') + // Set up beforeExitHandlers before loading the tracer so that modules loaded + // during require('./src') can register handlers. 
Object.defineProperty(globalThis, ddTraceSymbol, { value: { beforeExitHandlers: new Set(), }, enumerable: false, - configurable: true, // Allow this to be overridden by loading the tracer + configurable: true, writable: false, }) @@ -29,6 +22,15 @@ if (!global._ddtrace) { } }) + const TracerProxy = require('./src') + + Object.defineProperty(global, '_ddtrace', { + value: new TracerProxy(), + enumerable: false, + configurable: true, + writable: true, + }) + global._ddtrace.default = global._ddtrace global._ddtrace.tracer = global._ddtrace } diff --git a/packages/dd-trace/src/agent/url.js b/packages/dd-trace/src/agent/url.js index 82f734d9a9a..f2460ce24d6 100644 --- a/packages/dd-trace/src/agent/url.js +++ b/packages/dd-trace/src/agent/url.js @@ -1,7 +1,7 @@ 'use strict' const { URL, format } = require('url') -const defaults = require('../config/defaults') +const { defaults } = require('../config/defaults') module.exports = { getAgentUrl } @@ -12,7 +12,7 @@ module.exports = { getAgentUrl } /** * Gets the agent URL from config, constructing it from hostname/port if needed - * @param {ReturnType} config - Tracer configuration object + * @param {Partial} config - Tracer configuration object * @returns {URL} The agent URL */ function getAgentUrl (config) { diff --git a/packages/dd-trace/src/aiguard/sdk.js b/packages/dd-trace/src/aiguard/sdk.js index 64886ba092a..cbd3a486199 100644 --- a/packages/dd-trace/src/aiguard/sdk.js +++ b/packages/dd-trace/src/aiguard/sdk.js @@ -57,6 +57,10 @@ class AIGuard extends NoopAIGuard { #maxContentSize #meta + /** + * @param {import('../tracer')} tracer - Tracer instance + * @param {import('../config/config-base')} config - Tracer configuration + */ constructor (tracer, config) { super() diff --git a/packages/dd-trace/src/appsec/blocking.js b/packages/dd-trace/src/appsec/blocking.js index 3615e7ef2dc..a21aab0b76d 100644 --- a/packages/dd-trace/src/appsec/blocking.js +++ b/packages/dd-trace/src/appsec/blocking.js @@ -164,6 +164,9 @@ 
function getBlockingAction (actions) { return actions?.redirect_request || actions?.block_request } +/** + * @param {import('../config/config-base')} config - Tracer configuration + */ function setTemplates (config) { templateHtml = config.appsec.blockedTemplateHtml || blockedTemplates.html diff --git a/packages/dd-trace/src/appsec/iast/iast-plugin.js b/packages/dd-trace/src/appsec/iast/iast-plugin.js index cbabe49ff3c..3323f8b62e6 100644 --- a/packages/dd-trace/src/appsec/iast/iast-plugin.js +++ b/packages/dd-trace/src/appsec/iast/iast-plugin.js @@ -168,7 +168,7 @@ class IastPlugin extends Plugin { loadChannel.subscribe(this.onInstrumentationLoadedListener) // check for already instrumented modules - for (const name in instrumentations) { + for (const name of Object.keys(instrumentations)) { this._onInstrumentationLoaded(name) } } diff --git a/packages/dd-trace/src/appsec/iast/vulnerabilities-formatter/evidence-redaction/sensitive-handler.js b/packages/dd-trace/src/appsec/iast/vulnerabilities-formatter/evidence-redaction/sensitive-handler.js index 5039f2bb544..1964e333d98 100644 --- a/packages/dd-trace/src/appsec/iast/vulnerabilities-formatter/evidence-redaction/sensitive-handler.js +++ b/packages/dd-trace/src/appsec/iast/vulnerabilities-formatter/evidence-redaction/sensitive-handler.js @@ -3,7 +3,7 @@ const log = require('../../../../log') const vulnerabilities = require('../../vulnerabilities') -const defaults = require('../../../../config/defaults') +const { defaults } = require('../../../../config/defaults') const { contains, intersects, remove } = require('./range-utils') diff --git a/packages/dd-trace/src/appsec/iast/vulnerabilities-formatter/utils.js b/packages/dd-trace/src/appsec/iast/vulnerabilities-formatter/utils.js index 6e1c483a967..f9628c2673b 100644 --- a/packages/dd-trace/src/appsec/iast/vulnerabilities-formatter/utils.js +++ b/packages/dd-trace/src/appsec/iast/vulnerabilities-formatter/utils.js @@ -2,7 +2,7 @@ const crypto = require('crypto') 
-const defaults = require('../../../config/defaults') +const { defaults } = require('../../../config/defaults') const STRINGIFY_RANGE_KEY = 'DD_' + crypto.randomBytes(20).toString('hex') const STRINGIFY_SENSITIVE_KEY = STRINGIFY_RANGE_KEY + 'SENSITIVE' diff --git a/packages/dd-trace/src/appsec/remote_config.js b/packages/dd-trace/src/appsec/remote_config.js index a56465bd3a8..5db2d30f8d8 100644 --- a/packages/dd-trace/src/appsec/remote_config.js +++ b/packages/dd-trace/src/appsec/remote_config.js @@ -76,6 +76,7 @@ function enableOrDisableAppsec (action, rcConfig, config, appsec) { appsec.disable() } + // TODO: Use configWithOrigin /generateTelemetry instead of manually constructing the change. updateConfig([ { name: 'appsec.enabled', diff --git a/packages/dd-trace/src/appsec/sdk/index.js b/packages/dd-trace/src/appsec/sdk/index.js index 1b07e25c902..499079c2b4f 100644 --- a/packages/dd-trace/src/appsec/sdk/index.js +++ b/packages/dd-trace/src/appsec/sdk/index.js @@ -26,6 +26,10 @@ class EventTrackingV2 { } class AppsecSdk { + /** + * @param {import('../../tracer')} tracer - Tracer instance + * @param {import('../../config/config-base')} config - Tracer configuration + */ constructor (tracer, config) { this._tracer = tracer if (config) { diff --git a/packages/dd-trace/src/ci-visibility/dynamic-instrumentation/index.js b/packages/dd-trace/src/ci-visibility/dynamic-instrumentation/index.js index 273d9091b98..5df5b9c8b59 100644 --- a/packages/dd-trace/src/ci-visibility/dynamic-instrumentation/index.js +++ b/packages/dd-trace/src/ci-visibility/dynamic-instrumentation/index.js @@ -11,6 +11,9 @@ const probeIdToResolveBreakpointSet = new Map() const probeIdToResolveBreakpointRemove = new Map() class TestVisDynamicInstrumentation { + /** + * @param {import('../../config/config-base')} config - Tracer configuration + */ constructor (config) { this._config = config this.worker = null @@ -83,7 +86,6 @@ class TestVisDynamicInstrumentation { DD_TRACE_ENABLED: 'false', 
DD_TEST_FAILED_TEST_REPLAY_ENABLED: 'false', DD_CIVISIBILITY_MANUAL_API_ENABLED: 'false', - DD_TRACING_ENABLED: 'false', DD_INSTRUMENTATION_TELEMETRY_ENABLED: 'false', }, workerData: { @@ -150,6 +152,9 @@ class TestVisDynamicInstrumentation { let dynamicInstrumentation +/** + * @param {import('../../config/config-base')} config - Tracer configuration + */ module.exports = function createAndGetTestVisDynamicInstrumentation (config) { if (dynamicInstrumentation) { return dynamicInstrumentation diff --git a/packages/dd-trace/src/ci-visibility/test-api-manual/test-api-manual-plugin.js b/packages/dd-trace/src/ci-visibility/test-api-manual/test-api-manual-plugin.js index 90602ba0a1a..c6883eb0c8d 100644 --- a/packages/dd-trace/src/ci-visibility/test-api-manual/test-api-manual-plugin.js +++ b/packages/dd-trace/src/ci-visibility/test-api-manual/test-api-manual-plugin.js @@ -54,6 +54,10 @@ class TestApiManualPlugin extends CiPlugin { }) } + /** + * @param {import('../../config/config-base')} config - Tracer configuration + * @param {boolean} shouldGetEnvironmentData - Whether to get environment data + */ configure (config, shouldGetEnvironmentData) { this._config = config super.configure(config, shouldGetEnvironmentData) diff --git a/packages/dd-trace/src/config/defaults.js b/packages/dd-trace/src/config/defaults.js index 4c2af2c9bc2..041783dceda 100644 --- a/packages/dd-trace/src/config/defaults.js +++ b/packages/dd-trace/src/config/defaults.js @@ -1,177 +1,347 @@ 'use strict' -const pkg = require('../pkg') -const { isFalse, isTrue } = require('../util') -const { DD_MAJOR } = require('../../../../version') -const { getEnvironmentVariable: getEnv } = require('./helper') +const dns = require('dns') +const util = require('util') +const { DD_MAJOR } = require('../../../../version') +const { parsers, transformers, telemetryTransformers, setWarnInvalidValue } = require('./parsers') const { supportedConfigurations, } = /** @type {import('./helper').SupportedConfigurationsJson} */ 
(require('./supported-configurations.json')) -const service = getEnv('AWS_LAMBDA_FUNCTION_NAME') || - getEnv('FUNCTION_NAME') || // Google Cloud Function Name set by deprecated runtimes - getEnv('K_SERVICE') || // Google Cloud Function Name set by newer runtimes - getEnv('WEBSITE_SITE_NAME') || // set by Azure Functions - pkg.name || - 'node' +let log +let seqId = 0 +const configWithOrigin = new Map() +const parseErrors = new Map() + +if (DD_MAJOR >= 6) { + // Programmatic configuration of DD_IAST_SECURITY_CONTROLS_CONFIGURATION is not supported + // in newer major versions. This is special handled here until a better solution is found. + // TODO: Remove the programmatic configuration from supported-configurations.json once v5 is not supported anymore. + supportedConfigurations.DD_IAST_SECURITY_CONTROLS_CONFIGURATION[0].internalPropertyName = + supportedConfigurations.DD_IAST_SECURITY_CONTROLS_CONFIGURATION[0].configurationNames?.[0] + delete supportedConfigurations.DD_IAST_SECURITY_CONTROLS_CONFIGURATION[0].configurationNames +} else { + // Default value for DD_TRACE_STARTUP_LOGS is 'false' in older major versions. + // This is special handled here until a better solution is found. + // TODO: Remove this here once v5 is not supported anymore. + supportedConfigurations.DD_TRACE_STARTUP_LOGS[0].default = 'false' +} /** - * @param {string|null} raw - * @param {string} type - * @returns {string|number|boolean|Record|unknown[]|undefined} + * Warns about an invalid value for an option and adds the error to the last telemetry entry if it is not already set. + * Logging happens only if the error is not already set or the option name is different from the last telemetry entry. + * + * @param {unknown} value - The value that is invalid. + * @param {string} optionName - The name of the option. + * @param {string} source - The source of the value. + * @param {string} baseMessage - The base message to use for the warning. 
+ * @param {Error} [error] - An error that was thrown while parsing the value. */ -function parseDefaultByType (raw, type) { - if (raw === null) { - return +function warnInvalidValue (value, optionName, source, baseMessage, error) { + const canonicalName = (optionsTable[optionName]?.canonicalName ?? optionName) + source + // Lazy load log module to avoid circular dependency + if (!parseErrors.has(canonicalName)) { + // TODO: Rephrase: It will fallback to former source (or default if not set) + let message = `${baseMessage}: ${util.inspect(value)} for ${optionName} (source: ${source}), picked default` + if (error) { + error.stack = error.toString() + message += `\n\n${util.inspect(error)}` + } + parseErrors.set(canonicalName, { message }) + log ??= require('../log') + const logLevel = error ? 'error' : 'warn' + log[logLevel](message) } +} +setWarnInvalidValue(warnInvalidValue) - switch (type) { - case 'boolean': - if (isTrue(raw)) return true - if (isFalse(raw)) return false - // TODO: What should we do with these? - return - case 'int': - case 'decimal': { - return Number(raw) - } - case 'array': { - if (!raw || raw.length === 0) return [] - // TODO: Make the parsing a helper that is reused. - return raw.split(',').map(item => { - const colonIndex = item.indexOf(':') - if (colonIndex === -1) { - return item.trim() - } - const key = item.slice(0, colonIndex).trim() - const value = item.slice(colonIndex + 1).trim() - return `${key}:${value}` - }) +/** @type {import('./config-types').ConfigDefaults} */ +const defaults = { + instrumentationSource: 'manual', + isServiceUserProvided: false, + isServiceNameInferred: true, + plugins: true, + isCiVisibility: false, + lookup: dns.lookup, + logger: undefined, +} + +for (const [name, value] of Object.entries(defaults)) { + configWithOrigin.set(`${name}default`, { + name, + value: value ?? 
null, + origin: 'default', + seq_id: seqId++, + }) +} + +/** + * @param {unknown} value + * @param {string} origin + * @param {string} optionName + */ +function generateTelemetry (value = null, origin, optionName) { + const { type, canonicalName = optionName } = configurationsTable[optionName] ?? { type: typeof value } + // TODO: Consider adding a preParser hook to the parsers object. + if (canonicalName === 'OTEL_RESOURCE_ATTRIBUTES') { + value = telemetryTransformers.MAP(value) + } + // TODO: Should we not send defaults to telemetry to reduce size? + // TODO: How to handle aliases/actual names in the future? Optional fields? Normalize the name at intake? + // TODO: Validate that space separated tags are parsed by the backend. Optimizations would be possible with that. + // TODO: How to handle telemetry reporting for aliases? + if (value !== null) { + if (telemetryTransformers[type]) { + value = telemetryTransformers[type](value) + } else if (typeof value === 'object' && value !== null) { + value = value instanceof URL + ? String(value) + : JSON.stringify(value) + } else if (typeof value === 'function') { + value = value.name || 'function' } - case 'map': { - if (!raw || raw.length === 0) return {} - // TODO: Make the parsing a helper that is reused. - /** @type {Record} */ - const entries = {} - for (const item of raw.split(',')) { - const colonIndex = item.indexOf(':') - if (colonIndex === -1) { - const key = item.trim() - if (key.length > 0) { - entries[key] = '' + } + const telemetryEntry = { + name: canonicalName, + value, + origin, + seq_id: seqId++, + } + const error = parseErrors.get(`${canonicalName}${origin}`) + if (error) { + parseErrors.delete(`${canonicalName}${origin}`) + telemetryEntry.error = error + } + configWithOrigin.set(`${canonicalName}${origin}`, telemetryEntry) +} + +// Iterate over the object and always handle the leaf properties as lookup. 
+// Example entries: +// +// cloudPayloadTagging: { +// nestedProperties: [ +// 'rules', +// 'requestsEnabled', +// 'responses', +// ], +// option: { +// property: 'rules', +// parser: parsers.JSON, +// canonicalName: 'DD_TRACE_CLOUD_REQUEST_PAYLOAD_TAGGING', +// transformer: transformers.toCamelCase, +// }, +// }, +// 'cloudPayloadTagging.responses': { +// nestedProperties: [ +// 'enabled', +// ], +// }, +// 'cloudPayloadTagging.rules': {}, +// 'cloudPayloadTagging.requestsEnabled': {}, +// 'cloudPayloadTagging.responses.enabled': {} +const optionsTable = { + // Additional properties that are not supported by the supported-configurations.json file. + lookup: { + transformer (value) { + if (typeof value === 'function') { + return value + } + }, + property: 'lookup', + }, + logger: { + transformer (object) { + // Create lazily to avoid the overhead when not used. + // Match at least one log level. + const knownLogLevels = new Set(supportedConfigurations.DD_TRACE_LOG_LEVEL[0].allowed?.split('|')) + if (typeof object !== 'object' || object === null) { + return object + } + let matched = false + for (const logLevel of knownLogLevels) { + if (object[logLevel] !== undefined) { + if (typeof object[logLevel] !== 'function') { + warnInvalidValue(object[logLevel], 'logger', 'default', `Invalid log level ${logLevel}`) + return } - continue - } - const key = item.slice(0, colonIndex).trim() - const value = item.slice(colonIndex + 1).trim() - if (key.length > 0) { - entries[key] = value + matched = true } } - return entries - } - default: - return raw + if (matched) { + return object + } + }, + property: 'logger', + }, + isCiVisibility: { + property: 'isCiVisibility', + }, + plugins: { + property: 'plugins', + }, +} + +const parser = (value, optionName, source) => { + const { type, canonicalName = optionName } = configurationsTable[optionName] + const parsed = parsers[type](value, canonicalName) + if (parsed === undefined) { + warnInvalidValue(value, optionName, source, 
`Invalid ${type} input`) } + return parsed } -/** @type {Record} */ -const metadataDefaults = {} -for (const entries of Object.values(supportedConfigurations)) { +/** + * @template {import('./config-types').ConfigPath} TPath + * @type {Partial unknown, + * canonicalName?: string, + * transformer?: (value: unknown, optionName: string, source: string) => unknown, + * telemetryTransformer?: (value: unknown) => unknown + * }>>} ConfigurationsTable + */ +const configurationsTable = {} + +// One way aliases. Must be applied in apply calculated entries. +const fallbackConfigurations = new Map() + +const regExps = {} + +for (const [canonicalName, entries] of Object.entries(supportedConfigurations)) { + if (entries.length !== 1) { + // TODO: Determine if we really want to support multiple entries for a canonical name. + // This would be needed to show official support for multiple diverging implementations + // at a time with by checking for another configuration that is not the canonical name. + throw new Error( + `Multiple entries found for canonical name: ${canonicalName}. ` + + 'This is currently not supported and must be implemented, if needed.' + ) + } for (const entry of entries) { - // TODO: Replace $dynamic with method names that would be called and that - // are also called when the user passes through the value. That way the - // handling is unified and methods can be declared as default. - // The name of that method should be expressive for users. - // TODO: Add handling for all environment variable names. They should not - // need a configuration name for being listed with their default. - if (!Array.isArray(entry.configurationNames)) { - continue + const configurationNames = entry.internalPropertyName ? [entry.internalPropertyName] : entry.configurationNames + const fullPropertyName = configurationNames?.[0] ?? 
canonicalName + const type = entry.type.toUpperCase() + + let transformer = transformers[entry.transform] + if (entry.allowed) { + regExps[entry.allowed] ??= new RegExp(`^(${entry.allowed})$`, 'i') + const allowed = regExps[entry.allowed] + const originalTransform = transformer + transformer = (value, optionName, source) => { + if (!allowed.test(value)) { + warnInvalidValue(value, optionName, source, 'Invalid value') + return + } + if (originalTransform) { + value = originalTransform(value) + } + return value + } } - const parsedValue = parseDefaultByType(entry.default, entry.type) - for (const configurationName of entry.configurationNames) { - metadataDefaults[configurationName] = entry.default === null ? undefined : parsedValue + const option = { parser, type } + + if (fullPropertyName !== canonicalName) { + option.property = fullPropertyName + option.canonicalName = canonicalName + configurationsTable[fullPropertyName] = option + } + if (transformer) { + option.transformer = transformer + } + if (entry.configurationNames) { + addOption(option, type, entry.configurationNames) + } + configurationsTable[canonicalName] = option + + if (entry.default === null) { + defaults[fullPropertyName] = undefined + } else { + let parsedDefault = parser(entry.default, fullPropertyName, 'default') + if (entry.transform) { + parsedDefault = transformer(parsedDefault, fullPropertyName, 'default') + } + defaults[fullPropertyName] = parsedDefault + } + generateTelemetry(defaults[fullPropertyName], 'default', fullPropertyName) + + if (entry.aliases) { + for (const alias of entry.aliases) { + if (!supportedConfigurations[alias]) { + // An actual alias has no matching entry + continue + } + if (!supportedConfigurations[alias].aliases?.includes(canonicalName)) { + // Alias will be replaced with the full property name of the alias, if it exists. 
+ fallbackConfigurations.set(fullPropertyName, alias) + } + } } } } -// Defaults required by JS config merge/applyCalculated that are not represented in supported-configurations. -const defaultsWithoutSupportedConfigurationEntry = { - 'cloudPayloadTagging.rules': [], - 'cloudPayloadTagging.requestsEnabled': false, - 'cloudPayloadTagging.responsesEnabled': false, - isAzureFunction: false, - isCiVisibility: false, - isGCPFunction: false, - instrumentationSource: 'manual', - isServiceUserProvided: false, - isServiceNameInferred: true, - lookup: undefined, - plugins: true, +// Replace the alias with the canonical property name. +for (const [fullPropertyName, alias] of fallbackConfigurations) { + if (configurationsTable[alias].property) { + fallbackConfigurations.set(fullPropertyName, configurationsTable[alias].property) + } } -// These values are documented in supported-configurations as CI Visibility -// defaults. Keep startup baseline false and let #applyCalculated() switch them -// when CI Visibility is active. -// TODO: These entries should be removed. They are off by default -// because they rely on other configs. -const defaultsWithConditionalRuntimeBehavior = { - startupLogs: DD_MAJOR >= 6, - isGitUploadEnabled: false, - isImpactedTestsEnabled: false, - isIntelligentTestRunnerEnabled: false, - isManualApiEnabled: false, - isTestManagementEnabled: false, - // TODO: These are not conditional, they would just be of type number. - 'dogstatsd.port': '8125', - port: '8126', - // Override due to expecting numbers, not strings. TODO: Replace later. 
- 'grpc.client.error.statuses': [ - 1, - 2, - 3, - 4, - 5, - 6, - 7, - 8, - 9, - 10, - 11, - 12, - 13, - 14, - 15, - 16, - ], - 'grpc.server.error.statuses': [ - 2, - 3, - 4, - 5, - 6, - 7, - 8, - 9, - 10, - 11, - 12, - 13, - 14, - 15, - 16, - ], -} +function addOption (option, type, configurationNames) { + for (const name of configurationNames) { + let index = -1 + let lastNestedProperties + while (true) { + const nextIndex = name.indexOf('.', index + 1) + const intermediateName = nextIndex === -1 ? name : name.slice(0, nextIndex) + if (lastNestedProperties) { + lastNestedProperties.add(intermediateName.slice(index + 1)) + } -/** @type {Record} */ -const defaults = { - ...defaultsWithoutSupportedConfigurationEntry, - ...metadataDefaults, - ...defaultsWithConditionalRuntimeBehavior, - service, - version: pkg.version, + if (nextIndex === -1) { + if (optionsTable[name]) { + if (optionsTable[name].nestedProperties && !optionsTable[name].option) { + optionsTable[name].option = option + break + } + throw new Error(`Duplicate configuration name: ${name}`) + } + optionsTable[name] = option + break + } + + lastNestedProperties = new Set() + index = nextIndex + + if (!optionsTable[intermediateName]) { + optionsTable[intermediateName] = { + nestedProperties: lastNestedProperties, + } + } else if (optionsTable[intermediateName].nestedProperties) { + lastNestedProperties = optionsTable[intermediateName].nestedProperties + } else { + optionsTable[intermediateName] = { + nestedProperties: lastNestedProperties, + option: optionsTable[intermediateName], + } + } + } + } } -module.exports = defaults +module.exports = { + configurationsTable, + + defaults, + + fallbackConfigurations, + + optionsTable, + + configWithOrigin, + + parseErrors, + + generateTelemetry, +} diff --git a/packages/dd-trace/src/config/generated-config-types.d.ts b/packages/dd-trace/src/config/generated-config-types.d.ts index 71394bbd911..7b79f8c03b5 100644 --- 
a/packages/dd-trace/src/config/generated-config-types.d.ts +++ b/packages/dd-trace/src/config/generated-config-types.d.ts @@ -88,10 +88,12 @@ export interface GeneratedConfig { DD_CIVISIBILITY_TEST_MODULE_ID: string | undefined; DD_CIVISIBILITY_TEST_SESSION_ID: string | undefined; DD_CUSTOM_TRACE_ID: string | undefined; + DD_ENABLE_LAGE_PACKAGE_NAME: boolean; DD_ENABLE_NX_SERVICE_NAME: boolean; DD_EXPERIMENTAL_TEST_OPT_GIT_CACHE_DIR: string; DD_EXPERIMENTAL_TEST_OPT_GIT_CACHE_ENABLED: boolean; DD_EXPERIMENTAL_TEST_OPT_SETTINGS_CACHE: string; + DD_EXPERIMENTAL_TEST_REQUESTS_FS_CACHE: boolean; DD_EXTERNAL_ENV: string | undefined; DD_GIT_BRANCH: string | undefined; DD_GIT_COMMIT_AUTHOR_DATE: string | undefined; @@ -379,6 +381,7 @@ export interface GeneratedConfig { env: string | undefined; experimental: { aiguard: { + block: boolean; enabled: boolean; endpoint: string | undefined; maxContentSize: number; @@ -475,7 +478,6 @@ export interface GeneratedConfig { }; openAiLogsEnabled: boolean; OTEL_EXPORTER_OTLP_ENDPOINT: string | undefined; - OTEL_LOG_LEVEL: "debug" | "info" | "warn" | "error" | undefined; OTEL_LOGS_EXPORTER: "none" | "otlp" | undefined; OTEL_METRICS_EXPORTER: "none" | "otlp" | undefined; OTEL_RESOURCE_ATTRIBUTES: Record; @@ -547,6 +549,7 @@ export interface GeneratedConfig { debug: boolean; dependencyCollection: boolean; enabled: boolean; + extendedHeartbeatInterval: number; heartbeatInterval: number; logCollection: boolean; metrics: boolean; diff --git a/packages/dd-trace/src/config/helper.js b/packages/dd-trace/src/config/helper.js index 0ba7b197758..011fa0caa1a 100644 --- a/packages/dd-trace/src/config/helper.js +++ b/packages/dd-trace/src/config/helper.js @@ -9,6 +9,9 @@ * @property {string|number|boolean|null|object|unknown[]} default * @property {string[]} [aliases] * @property {string[]} [configurationNames] + * @property {string} [internalPropertyName] + * @property {string} [transform] + * @property {string} [allowed] * @property 
{string|boolean} [deprecated] */ @@ -57,6 +60,13 @@ for (const [canonical, configuration] of Object.entries(supportedConfigurations) const aliasToCanonical = {} for (const canonical of Object.keys(aliases)) { for (const alias of aliases[canonical]) { + if (supportedConfigurations[alias]) { + // Allow 'fallback' aliases to be used for other configurations. + // This is used to handle the case where an alias could be used for multiple configurations. + // For example, OTEL_EXPORTER_OTLP_ENDPOINT is used for OTEL_EXPORTER_OTLP_LOGS_ENDPOINT + // and OTEL_EXPORTER_OTLP_METRICS_ENDPOINT. + continue + } if (aliasToCanonical[alias]) { throw new Error(`The alias ${alias} is already used for ${aliasToCanonical[alias]}.`) } @@ -99,22 +109,37 @@ function loadStableConfig () { } function getValueFromSource (name, source) { - const value = source[name] + if (source[name] !== undefined) { + return source[name] + } - if (value === undefined && aliases[name]) { + if (aliases[name]) { for (const alias of aliases[name]) { if (source[alias] !== undefined) { return source[alias] } } } +} - return value +function getEnvNameFromSource (name, source) { + if (source[name] !== undefined) { + return name + } + + if (aliases[name]) { + for (const alias of aliases[name]) { + if (source[alias] !== undefined) { + return alias + } + } + } } function validateAccess (name) { - if ((name.startsWith('DD_') || name.startsWith('OTEL_') || aliasToCanonical[name]) && - !supportedConfigurations[name]) { + if ((name.startsWith('DD_') || name.startsWith('OTEL_')) && + !supportedConfigurations[name] && + !aliasToCanonical[name]) { throw new Error(`Missing ${name} env/configuration in "supported-configurations.json" file.`) } } @@ -144,10 +169,9 @@ module.exports = { * * @returns {TracerEnv} The environment variables */ - getEnvironmentVariables () { + getEnvironmentVariables (source = process.env, internalOnly = false) { const configs = {} - for (const [key, value] of Object.entries(process.env)) { - // 
TODO(BridgeAR): Handle telemetry reporting for aliases. + for (const [key, value] of Object.entries(source)) { if (key.startsWith('DD_') || key.startsWith('OTEL_') || aliasToCanonical[key]) { if (supportedConfigurations[key]) { configs[key] = value @@ -155,7 +179,7 @@ module.exports = { // The alias should only be used if the actual configuration is not set // In case that more than a single alias exist, use the one defined first in our own order for (const alias of aliases[aliasToCanonical[key]]) { - if (process.env[alias] !== undefined) { + if (source[alias] !== undefined) { configs[aliasToCanonical[key]] = value break } @@ -165,9 +189,10 @@ module.exports = { // debug( // `Missing configuration ${env} in supported-configurations file. The environment variable is ignored.` // ) + // This could be moved inside the main config logic. } deprecationMethods[key]?.() - } else { + } else if (!internalOnly) { configs[key] = value } } @@ -211,4 +236,28 @@ module.exports = { return getValueFromSource(name, localStableConfig) } }, + + /** + * Returns the actual environment variable name used for a supported configuration + * from a specific environment-based source. 
+ * + * @param {string} name Environment variable name + * @returns {string|undefined} + */ + getConfiguredEnvName (name) { + validateAccess(name) + + if (!stableConfigLoaded) { + loadStableConfig() + } + + for (const source of [fleetStableConfig, process.env, localStableConfig]) { + if (source !== undefined) { + const fromSource = getEnvNameFromSource(name, source) + if (fromSource !== undefined) { + return fromSource + } + } + } + }, } diff --git a/packages/dd-trace/src/config/index.js b/packages/dd-trace/src/config/index.js index ed0428b26a6..93bed28e84a 100644 --- a/packages/dd-trace/src/config/index.js +++ b/packages/dd-trace/src/config/index.js @@ -5,1685 +5,749 @@ const os = require('node:os') const { URL } = require('node:url') const path = require('node:path') +const rfdc = require('../../../../vendor/dist/rfdc')({ proto: false, circles: false }) const uuid = require('../../../../vendor/dist/crypto-randomuuid') // we need to keep the old uuid dep because of cypress - const set = require('../../../datadog-core/src/utils/src/set') const { DD_MAJOR } = require('../../../../version') const log = require('../log') -const tagger = require('../tagger') -const { isTrue, isFalse, normalizeProfilingEnabledValue } = require('../util') +const pkg = require('../pkg') +const { isTrue } = require('../util') const { GIT_REPOSITORY_URL, GIT_COMMIT_SHA } = require('../plugins/util/tags') -const { updateConfig } = require('../telemetry') +const telemetry = require('../telemetry') const telemetryMetrics = require('../telemetry/metrics') const { IS_SERVERLESS, getIsGCPFunction, getIsAzureFunction, - enableGCPPubSubPushSubscription, } = require('../serverless') const { ORIGIN_KEY, DATADOG_MINI_AGENT_PATH } = require('../constants') const { appendRules } = require('../payload-tagging/config') const { getGitMetadataFromGitProperties, removeUserSensitiveInfo, getRemoteOriginURL, resolveGitHeadSHA } = require('./git_properties') -const { getEnvironmentVariable: getEnv, 
getEnvironmentVariables, getStableConfigSources } = require('./helper') -const defaults = require('./defaults') - -const TELEMETRY_COUNTERS = new Map([ - ['otel.env.hiding', {}], - ['otel.env.invalid', {}], -]) -const OTEL_DD_ENV_MAPPING = new Map([ - ['OTEL_LOG_LEVEL', 'DD_TRACE_LOG_LEVEL'], - ['OTEL_PROPAGATORS', 'DD_TRACE_PROPAGATION_STYLE'], - ['OTEL_SERVICE_NAME', 'DD_SERVICE'], - ['OTEL_TRACES_SAMPLER', 'DD_TRACE_SAMPLE_RATE'], - ['OTEL_TRACES_SAMPLER_ARG', 'DD_TRACE_SAMPLE_RATE'], - ['OTEL_TRACES_EXPORTER', 'DD_TRACE_ENABLED'], - ['OTEL_METRICS_EXPORTER', 'DD_RUNTIME_METRICS_ENABLED'], - ['OTEL_RESOURCE_ATTRIBUTES', 'DD_TAGS'], - ['OTEL_SDK_DISABLED', 'DD_TRACE_OTEL_ENABLED'], - ['OTEL_LOGS_EXPORTER', undefined], -]) -const VALID_PROPAGATION_STYLES = new Set(['datadog', 'tracecontext', 'b3', 'b3 single header', 'none']) -const VALID_PROPAGATION_BEHAVIOR_EXTRACT = new Set(['continue', 'restart', 'ignore']) -const VALID_LOG_LEVELS = new Set(['debug', 'info', 'warn', 'error']) -const DEFAULT_OTLP_PORT = 4318 +const ConfigBase = require('./config-base') +const { + getEnvironmentVariable, + getEnvironmentVariables, + getStableConfigSources, + getValueFromEnvSources, +} = require('./helper') +const { + defaults, + fallbackConfigurations, + configurationsTable, + optionsTable, + configWithOrigin, + parseErrors, + generateTelemetry, +} = require('./defaults') +const { transformers } = require('./parsers') + const RUNTIME_ID = uuid() -// eslint-disable-next-line eslint-rules/eslint-process-env -- internal propagation, not user config -const ROOT_SESSION_ID = process.env.DD_ROOT_JS_SESSION_ID || RUNTIME_ID -const NAMING_VERSIONS = new Set(['v0', 'v1']) -const DEFAULT_NAMING_VERSION = 'v0' const tracerMetrics = telemetryMetrics.manager.namespace('tracers') -const changeTracker = {} +/** + * @typedef {'default' + * | 'code' + * | 'remote_config' + * | 'calculated' + * | 'env_var' + * | 'local_stable_config' + * | 'fleet_stable_config'} TelemetrySource + * @typedef 
{'remote_config' | 'calculated'} RevertibleTelemetrySource + * @typedef {import('../../../../index').TracerOptions} TracerOptions + * @typedef {import('./config-types').ConfigKey} ConfigKey + * @typedef {import('./config-types').ConfigPath} ConfigPath + * @typedef {{ + * value: import('./config-types').ConfigPathValue, + * source: TelemetrySource + * }} TrackedConfigEntry + * @typedef {{ + * baseValuesByPath: Partial>, + * remote_config: Set, + * calculated: Set, + * }} ChangeTracker + */ + +/** @type {Config | null} */ let configInstance = null +// An entry that is undefined means it is the default value. +/** @type {Map} */ +const trackedConfigOrigins = new Map() + +// ChangeTracker tracks the changes to the config up to programmatic options (code). +/** @type {ChangeTracker} */ +const changeTracker = { + baseValuesByPath: {}, + remote_config: new Set(), + calculated: new Set(), +} + +/** + * @param {Config} config + * @param {RevertibleTelemetrySource} source + */ +function undo (config, source) { + for (const name of changeTracker[source]) { + const entry = changeTracker.baseValuesByPath[name] ?? { source: 'default', value: defaults[name] } + setAndTrack(config, name, entry.value, undefined, entry.source) + } +} + +function get (object, path) { + // Fast path for simple property access. 
+ if (object[path] !== undefined) { + return object[path] + } + let index = 0 + while (true) { + const nextIndex = path.indexOf('.', index) + if (nextIndex === -1) { + return object[path.slice(index)] + } + object = object[path.slice(index, nextIndex)] + index = nextIndex + 1 + } +} + +/** + * @param {Config} config + * @template {ConfigPath} TPath + * @param {TPath} name + * @param {import('./config-types').ConfigPathValue} value + * @param {unknown} [rawValue] + * @param {TelemetrySource} [source] + */ +function setAndTrack (config, name, value, rawValue = value, source = 'calculated') { + // envs can not be undefined + if (value == null) { + // TODO: This works as before while ignoring undefined programmatic options is not ideal. + if (source !== 'default') { + return + } + } else if (source === 'calculated' || source === 'remote_config') { + if (source === 'calculated' && value === get(config, name)) { + return + } + changeTracker[source].add(name) + } else { + const copy = typeof value === 'object' && value !== null ? 
rfdc(value) : value + changeTracker.baseValuesByPath[name] = { value: copy, source } + } + set(config, name, value) + + generateTelemetry(rawValue, source, name) + if (source === 'default') { + trackedConfigOrigins.delete(name) + } else { + trackedConfigOrigins.set(name, source) + } +} + module.exports = getConfig -class Config { +// We extend from ConfigBase to make our types work +class Config extends ConfigBase { /** * parsed DD_TAGS, usable as a standalone tag set across products * @type {Record} */ - #parsedDdTags = {} - #envUnprocessed = {} - #optsUnprocessed = {} - #remoteUnprocessed = {} - #env = {} - #options = {} - #remote = {} - #defaults = {} - #optionsArg = {} - #localStableConfig = {} - #fleetStableConfig = {} - #calculated = {} + #parsedDdTags + /** + * @type {Record} + */ + get parsedDdTags () { + return this.#parsedDdTags + } + + /** + * @param {TracerOptions} [options={}] + */ constructor (options = {}) { - if (!IS_SERVERLESS) { - const configEnvSources = getStableConfigSources() - this.stableConfig = { - fleetEntries: configEnvSources.fleetStableConfig, - localEntries: configEnvSources.localStableConfig, - warnings: configEnvSources.stableConfigWarnings, - } - } + super() - options = { - ...options, - // TODO(BridgeAR): Remove the experimental prefix once we have a major version. - // That also applies to index.d.ts - appsec: options.appsec == null ? options.experimental?.appsec : options.appsec, - iast: options.iast == null ? options.experimental?.iast : options.iast, + const configEnvSources = getStableConfigSources() + this.stableConfig = { + fleetEntries: configEnvSources.fleetStableConfig ?? {}, + localEntries: configEnvSources.localStableConfig ?? 
{}, + warnings: configEnvSources.stableConfigWarnings, } // Configure the logger first so it can be used to warn about other configs - const logConfig = log.getConfig() - this.debug = log.isEnabled( - this.stableConfig?.fleetEntries?.DD_TRACE_DEBUG, - this.stableConfig?.localEntries?.DD_TRACE_DEBUG - ) - this.logger = options.logger ?? logConfig.logger - this.logLevel = log.getLogLevel( - options.logLevel, - this.stableConfig?.fleetEntries?.DD_TRACE_LOG_LEVEL, - this.stableConfig?.localEntries?.DD_TRACE_LOG_LEVEL - ) - log.use(this.logger) - log.toggle(this.debug, this.logLevel) + // TODO: Implement auto buffering of inside of log module before first + // configure call. That way the logger is always available and the + // application doesn't need to configure it first and the configuration + // happens inside of config instead of inside of log module. If the logger + // is not deactivated, the buffered logs would be discarded. That way stable + // config warnings can also be logged directly and do not need special + // handling. + this.debug = log.configure(options) // Process stable config warnings, if any for (const warning of this.stableConfig?.warnings ?? []) { log.warn(warning) } - checkIfBothOtelAndDdEnvVarSet() - - if (typeof options.appsec === 'boolean') { - options.appsec = { - enabled: options.appsec, - } - } - - if (typeof options.runtimeMetrics === 'boolean') { - options.runtimeMetrics = { - enabled: options.runtimeMetrics, - } - } - - this.#defaults = defaults this.#applyDefaults() - this.#applyStableConfig(this.stableConfig?.localEntries ?? {}, this.#localStableConfig) - this.#applyEnvironment() - this.#applyStableConfig(this.stableConfig?.fleetEntries ?? {}, this.#fleetStableConfig) - this.#applyOptions(options) + // TODO: Update origin documentation to list all valid sources. Add local_stable_config and fleet_stable_config. 
+ this.#applyEnvs(getEnvironmentVariables(this.stableConfig.localEntries, true), 'local_stable_config') + this.#applyEnvs(getEnvironmentVariables(undefined, true), 'env_var') + this.#applyEnvs(getEnvironmentVariables(this.stableConfig.fleetEntries, true), 'fleet_stable_config') + + // Experimental options are applied first, so they can be overridden by non-experimental options. + // TODO: When using programmatic options, check if there is a higher + // priority name in the same options object. Use the highest priority name. + const { experimental, ...rest } = options + if (experimental) { + // @ts-expect-error - Difficult to type this correctly. + this.#applyOptions(experimental, 'code', 'experimental') + } + this.#applyOptions(rest, 'code') this.#applyCalculated() - this.#merge() - tagger.add(this.tags, { - service: this.service, - env: this.env, - version: this.version, - 'runtime-id': RUNTIME_ID, - }) + warnWrongOtelSettings() + + if (this.gitMetadataEnabled) { + this.#loadGitMetadata() + } - this.rootSessionId = ROOT_SESSION_ID + parseErrors.clear() + } - if (this.isCiVisibility) { - tagger.add(this.tags, { - [ORIGIN_KEY]: 'ciapp-test', - }) + #applyDefaults () { + for (const [name, value] of Object.entries(defaults)) { + set(this, name, value) } + } - if (this.gitMetadataEnabled) { - this.#loadGitMetadata() + /** + * @param {import('./helper').TracerEnv} envs + * @param {'env_var' | 'local_stable_config' | 'fleet_stable_config'} source + */ + #applyEnvs (envs, source) { + for (const [name, value] of Object.entries(envs)) { + const entry = configurationsTable[name] + // TracePropagationStyle is a special case. It is a single option that is used to set both inject and extract. 
+ // TODO: Consider what to do with this later + if (name === 'DD_TRACE_PROPAGATION_STYLE') { + if ( + getValueFromEnvSources('DD_TRACE_PROPAGATION_STYLE_INJECT') !== undefined || + getValueFromEnvSources('DD_TRACE_PROPAGATION_STYLE_EXTRACT') !== undefined + ) { + log.warn( + // eslint-disable-next-line @stylistic/max-len + 'Use either DD_TRACE_PROPAGATION_STYLE or separate DD_TRACE_PROPAGATION_STYLE_INJECT and DD_TRACE_PROPAGATION_STYLE_EXTRACT environment variables' + ) + continue + } + this.#applyEnvs({ DD_TRACE_PROPAGATION_STYLE_INJECT: value, DD_TRACE_PROPAGATION_STYLE_EXTRACT: value }, source) + continue + } + const parsed = entry.parser(value, name, source) + const transformed = parsed !== undefined && entry.transformer ? entry.transformer(parsed, name, source) : parsed + const rawValue = transformed !== null && typeof transformed === 'object' ? value : parsed + setAndTrack(this, entry.property ?? name, transformed, rawValue, source) } } - get parsedDdTags () { - return this.#parsedDdTags + /** + * @param {TracerOptions} options + * @param {'code' | 'remote_config'} source + * @param {string} [root] + */ + #applyOptions (options, source, root = '') { + for (const [name, value] of Object.entries(options)) { + const fullName = root ? `${root}.${name}` : name + let entry = optionsTable[fullName] + if (!entry) { + // TODO: Fix this by by changing remote config to use env styles. + if (name !== 'tracing' || source !== 'remote_config') { + log.warn('Unknown option %s with value %o', fullName, value) + continue + } + // @ts-expect-error - The entry is defined in the configurationsTable. + entry = configurationsTable.tracing + } + + if (entry.nestedProperties) { + let matched = false + if (typeof value === 'object' && value !== null) { + for (const nestedProperty of entry.nestedProperties) { + // WARNING: if the property name might be part of the value we look at, this could conflict! 
+ // Defining an option that receives an object as value may not contain a property that is also + // potentially a nested property! + if (Object.hasOwn(value, nestedProperty)) { + this.#applyOptions(value, source, fullName) + matched = true + break + } + } + } + if (matched) { + continue + } + if (entry.option) { + entry = entry.option + } else { + if (fullName === 'tracePropagationStyle') { + // TracePropagationStyle is special. It is a single option that is used to set both inject and extract. + // @ts-expect-error - Difficult to type this correctly. + this.#applyOptions({ inject: value, extract: value }, source, 'tracePropagationStyle') + } else { + log.warn('Unknown option %s with value %o', fullName, value) + } + continue + } + } + // TODO: Coerce mismatched types to the expected type, if possible. E.g., strings <> numbers + const transformed = value !== undefined && entry.transformer ? entry.transformer(value, fullName, source) : value + setAndTrack(this, entry.property, transformed, value, source) + } } /** * Set the configuration with remote config settings. * Applies remote configuration, recalculates derived values, and merges all configuration sources. * - * @param {import('./remote_config').RemoteConfigOptions|null} options - Configurations received via Remote + * @param {TracerOptions|null} options - Configurations received via Remote * Config or null to reset all remote configuration */ setRemoteConfig (options) { // Clear all RC-managed fields to ensure previous values don't persist. // State is instead managed by the `RCClientLibConfigManager` class - this.#remote = {} - this.#remoteUnprocessed = {} + undo(this, 'remote_config') // Special case: if options is null, nothing to apply // This happens when all remote configs are removed if (options !== null) { - this.#applyRemoteConfig(options) + this.#applyOptions(options, 'remote_config') } this.#applyCalculated() - this.#merge() } - // TODO: Remove the `updateOptions` method. 
We don't want to support updating the config this way /** - * Updates the configuration with new programmatic options. - * - * @deprecated This method should not be used and will be removed in a future version. - * @param {object} options - Configuration options to apply (same format as tracer init options) + * @param {ConfigPath} name */ - updateOptions (options) { - this.#applyOptions(options) - this.#applyCalculated() - this.#merge() - } - getOrigin (name) { - for (const { container, origin } of this.#getSourcesInOrder()) { - const value = container[name] - if (value != null || container === this.#defaults) { - return origin - } - } - } - - #getSourcesInOrder () { - return [ - { container: this.#remote, origin: 'remote_config', unprocessed: this.#remoteUnprocessed }, - { container: this.#options, origin: 'code', unprocessed: this.#optsUnprocessed }, - { container: this.#fleetStableConfig, origin: 'fleet_stable_config' }, - { container: this.#env, origin: 'env_var', unprocessed: this.#envUnprocessed }, - { container: this.#localStableConfig, origin: 'local_stable_config' }, - { container: this.#calculated, origin: 'calculated' }, - { container: this.#defaults, origin: 'default' }, - ] - } - - #applyStableConfig (config, obj) { - this.#applyConfigValues(config, obj, {}) + return trackedConfigOrigins.get(name) ?? 
'default' } - // Set environment-dependent defaults that can be overridden by users - #applyDefaults () { - const defaults = this.#defaults - - if (IS_SERVERLESS) { - setBoolean(defaults, 'crashtracking.enabled', false) - setString(defaults, 'profiling.enabled', 'false') - setBoolean(defaults, 'telemetry.enabled', false) - setBoolean(defaults, 'remoteConfig.enabled', false) - } else { - setBoolean(defaults, 'crashtracking.enabled', true) + // Handles values calculated from a mixture of options and env vars + #applyCalculated () { + undo(this, 'calculated') + + if (this.DD_CIVISIBILITY_AGENTLESS_URL || + this.url || + os.type() !== 'Windows_NT' && + !trackedConfigOrigins.has('hostname') && + !trackedConfigOrigins.has('port') && + !this.DD_CIVISIBILITY_AGENTLESS_ENABLED && + fs.existsSync('/var/run/datadog/apm.socket')) { + setAndTrack( + this, + 'url', + new URL(this.DD_CIVISIBILITY_AGENTLESS_URL || this.url || 'unix:///var/run/datadog/apm.socket') + ) } - if (getEnv('JEST_WORKER_ID')) { - setBoolean(defaults, 'telemetry.enabled', false) + if (this.isCiVisibility) { + setAndTrack(this, 'isServiceUserProvided', trackedConfigOrigins.has('service')) + this.tags[ORIGIN_KEY] = 'ciapp-test' } - } + // Compute OTLP logs and metrics URLs to send payloads to the active Datadog Agent + const agentHostname = this.hostname || /** @type {URL} */ (this.url).hostname - #applyEnvironment () { - this.#applyConfigValues(getEnvironmentVariables(), this.#env, this.#envUnprocessed) - } + if (!trackedConfigOrigins.has('dogstatsd.hostname')) { + setAndTrack(this, 'dogstatsd.hostname', agentHostname) + } + // Disable log injection when OTEL logs are enabled + // OTEL logs and DD log injection are mutually exclusive + if (this.otelLogsEnabled) { + setAndTrack(this, 'logInjection', false) + } + if (this.otelMetricsEnabled && + trackedConfigOrigins.has('OTEL_METRICS_EXPORTER') && + this.OTEL_METRICS_EXPORTER === 'none') { + setAndTrack(this, 'otelMetricsEnabled', false) + } - 
#applyConfigValues (source, target, unprocessedTarget) { - const { - AWS_LAMBDA_FUNCTION_NAME, - DD_AGENT_HOST, - DD_AI_GUARD_BLOCK, - DD_AI_GUARD_ENABLED, - DD_AI_GUARD_ENDPOINT, - DD_AI_GUARD_MAX_CONTENT_SIZE, - DD_AI_GUARD_MAX_MESSAGES_LENGTH, - DD_AI_GUARD_TIMEOUT, - DD_API_KEY, - DD_API_SECURITY_ENABLED, - DD_API_SECURITY_SAMPLE_DELAY, - DD_API_SECURITY_ENDPOINT_COLLECTION_ENABLED, - DD_API_SECURITY_ENDPOINT_COLLECTION_MESSAGE_LIMIT, - DD_API_SECURITY_DOWNSTREAM_BODY_ANALYSIS_SAMPLE_RATE, - DD_API_SECURITY_MAX_DOWNSTREAM_REQUEST_BODY_ANALYSIS, - DD_APM_TRACING_ENABLED, - DD_APP_KEY, - DD_APPSEC_AUTO_USER_INSTRUMENTATION_MODE, - DD_APPSEC_COLLECT_ALL_HEADERS, - DD_APPSEC_ENABLED, - DD_APPSEC_GRAPHQL_BLOCKED_TEMPLATE_JSON, - DD_APPSEC_HEADER_COLLECTION_REDACTION_ENABLED, - DD_APPSEC_HTTP_BLOCKED_TEMPLATE_HTML, - DD_APPSEC_HTTP_BLOCKED_TEMPLATE_JSON, - DD_APPSEC_MAX_COLLECTED_HEADERS, - DD_APPSEC_MAX_STACK_TRACES, - DD_APPSEC_MAX_STACK_TRACE_DEPTH, - DD_APPSEC_OBFUSCATION_PARAMETER_KEY_REGEXP, - DD_APPSEC_OBFUSCATION_PARAMETER_VALUE_REGEXP, - DD_APPSEC_RULES, - DD_APPSEC_SCA_ENABLED, - DD_APPSEC_STACK_TRACE_ENABLED, - DD_APPSEC_RASP_ENABLED, - DD_APPSEC_RASP_COLLECT_REQUEST_BODY, - DD_APPSEC_TRACE_RATE_LIMIT, - DD_APPSEC_WAF_TIMEOUT, - DD_CRASHTRACKING_ENABLED, - DD_CODE_ORIGIN_FOR_SPANS_ENABLED, - DD_CODE_ORIGIN_FOR_SPANS_EXPERIMENTAL_EXIT_SPANS_ENABLED, - DD_DATA_STREAMS_ENABLED, - DD_DBM_PROPAGATION_MODE, - DD_DBM_INJECT_SQL_BASEHASH, - DD_DOGSTATSD_HOST, - DD_DOGSTATSD_PORT, - DD_DYNAMIC_INSTRUMENTATION_CAPTURE_TIMEOUT_MS, - DD_DYNAMIC_INSTRUMENTATION_ENABLED, - DD_DYNAMIC_INSTRUMENTATION_PROBE_FILE, - DD_DYNAMIC_INSTRUMENTATION_REDACTED_IDENTIFIERS, - DD_DYNAMIC_INSTRUMENTATION_REDACTION_EXCLUDED_IDENTIFIERS, - DD_DYNAMIC_INSTRUMENTATION_UPLOAD_INTERVAL_SECONDS, - DD_ENV, - DD_EXPERIMENTAL_APPSEC_STANDALONE_ENABLED, - DD_EXPERIMENTAL_PROPAGATE_PROCESS_TAGS_ENABLED, - DD_PROFILING_ENABLED, - DD_GRPC_CLIENT_ERROR_STATUSES, - DD_GRPC_SERVER_ERROR_STATUSES, - 
DD_HEAP_SNAPSHOT_COUNT, - DD_HEAP_SNAPSHOT_DESTINATION, - DD_HEAP_SNAPSHOT_INTERVAL, - DD_IAST_DB_ROWS_TO_TAINT, - DD_IAST_DEDUPLICATION_ENABLED, - DD_IAST_ENABLED, - DD_IAST_MAX_CONCURRENT_REQUESTS, - DD_IAST_MAX_CONTEXT_OPERATIONS, - DD_IAST_REDACTION_ENABLED, - DD_IAST_REDACTION_NAME_PATTERN, - DD_IAST_REDACTION_VALUE_PATTERN, - DD_IAST_REQUEST_SAMPLING, - DD_IAST_SECURITY_CONTROLS_CONFIGURATION, - DD_IAST_TELEMETRY_VERBOSITY, - DD_IAST_STACK_TRACE_ENABLED, - DD_INJECTION_ENABLED, - DD_INJECT_FORCE, - DD_ENABLE_NX_SERVICE_NAME, - DD_INSTRUMENTATION_TELEMETRY_ENABLED, - DD_INSTRUMENTATION_CONFIG_ID, - DD_LOGS_INJECTION, - DD_LOGS_OTEL_ENABLED, - DD_METRICS_OTEL_ENABLED, - DD_LANGCHAIN_SPAN_CHAR_LIMIT, - DD_LANGCHAIN_SPAN_PROMPT_COMPLETION_SAMPLE_RATE, - DD_LLMOBS_AGENTLESS_ENABLED, - DD_LLMOBS_ENABLED, - DD_LLMOBS_ML_APP, - DD_OPENAI_LOGS_ENABLED, - DD_OPENAI_SPAN_CHAR_LIMIT, - DD_PROFILING_EXPORTERS, - DD_PROFILING_SOURCE_MAP, - DD_INTERNAL_PROFILING_LONG_LIVED_THRESHOLD, - DD_INSTRUMENTATION_INSTALL_ID, - DD_INSTRUMENTATION_INSTALL_TIME, - DD_INSTRUMENTATION_INSTALL_TYPE, - DD_REMOTE_CONFIGURATION_ENABLED, - DD_REMOTE_CONFIG_POLL_INTERVAL_SECONDS, - DD_RUNTIME_METRICS_ENABLED, - DD_RUNTIME_METRICS_EVENT_LOOP_ENABLED, - DD_RUNTIME_METRICS_GC_ENABLED, - DD_SERVICE, - DD_SERVICE_MAPPING, - DD_SITE, - DD_SPAN_SAMPLING_RULES, - DD_SPAN_SAMPLING_RULES_FILE, - DD_TAGS, - DD_TELEMETRY_DEBUG, - DD_TELEMETRY_DEPENDENCY_COLLECTION_ENABLED, - DD_TELEMETRY_HEARTBEAT_INTERVAL, - DD_TELEMETRY_LOG_COLLECTION_ENABLED, - DD_TELEMETRY_METRICS_ENABLED, - DD_TEST_TIA_KEEP_COV_CONFIG, - DD_TRACE_128_BIT_TRACEID_GENERATION_ENABLED, - DD_TRACE_128_BIT_TRACEID_LOGGING_ENABLED, - DD_TRACE_AGENT_PORT, - DD_TRACE_AGENT_PROTOCOL_VERSION, - DD_TRACE_AWS_ADD_SPAN_POINTERS, - DD_TRACE_BAGGAGE_MAX_BYTES, - DD_TRACE_BAGGAGE_MAX_ITEMS, - DD_TRACE_BAGGAGE_TAG_KEYS, - DD_TRACE_CLIENT_IP_ENABLED, - DD_TRACE_CLIENT_IP_HEADER, - DD_TRACE_CLOUD_REQUEST_PAYLOAD_TAGGING, - 
DD_TRACE_CLOUD_RESPONSE_PAYLOAD_TAGGING, - DD_TRACE_CLOUD_PAYLOAD_TAGGING_MAX_DEPTH, - DD_TRACE_DYNAMODB_TABLE_PRIMARY_KEYS, - DD_TRACE_ENABLED, - DD_TRACE_EXPERIMENTAL_EXPORTER, - DD_TRACE_EXPERIMENTAL_GET_RUM_DATA_ENABLED, - DD_RUNTIME_METRICS_RUNTIME_ID_ENABLED, - DD_TRACE_GIT_METADATA_ENABLED, - DD_TRACE_GRAPHQL_ERROR_EXTENSIONS, - DD_TRACE_HEADER_TAGS, - DD_TRACE_LEGACY_BAGGAGE_ENABLED, - DD_TRACE_MEMCACHED_COMMAND_ENABLED, - DD_TRACE_MIDDLEWARE_TRACING_ENABLED, - DD_TRACE_OBFUSCATION_QUERY_STRING_REGEXP, - DD_TRACE_PARTIAL_FLUSH_MIN_SPANS, - DD_TRACE_FLUSH_INTERVAL, - DD_TRACE_PEER_SERVICE_MAPPING, - DD_TRACE_PROPAGATION_EXTRACT_FIRST, - DD_TRACE_PROPAGATION_BEHAVIOR_EXTRACT, - DD_TRACE_PROPAGATION_STYLE, - DD_TRACE_PROPAGATION_STYLE_INJECT, - DD_TRACE_PROPAGATION_STYLE_EXTRACT, - DD_TRACE_RATE_LIMIT, - DD_TRACE_REMOVE_INTEGRATION_SERVICE_NAMES_ENABLED, - DD_TRACE_REPORT_HOSTNAME, - DD_TRACE_RESOURCE_RENAMING_ENABLED, - DD_TRACE_SAMPLE_RATE, - DD_TRACE_SAMPLING_RULES, - DD_TRACE_SCOPE, - DD_TRACE_SPAN_ATTRIBUTE_SCHEMA, - DD_TRACE_SPAN_LEAK_DEBUG, - DD_TRACE_STARTUP_LOGS, - DD_TRACE_TAGS, - DD_TRACE_WEBSOCKET_MESSAGES_ENABLED, - DD_TRACE_WEBSOCKET_MESSAGES_INHERIT_SAMPLING, - DD_TRACE_WEBSOCKET_MESSAGES_SEPARATE_TRACES, - DD_TRACE_X_DATADOG_TAGS_MAX_LENGTH, - DD_TRACING_ENABLED, - DD_VERSION, - DD_VERTEXAI_SPAN_PROMPT_COMPLETION_SAMPLE_RATE, - DD_VERTEXAI_SPAN_CHAR_LIMIT, - DD_TRACE_INFERRED_PROXY_SERVICES_ENABLED, - DD_TRACE_NATIVE_SPAN_EVENTS, - OTEL_METRICS_EXPORTER, - OTEL_PROPAGATORS, - OTEL_RESOURCE_ATTRIBUTES, - OTEL_SERVICE_NAME, - OTEL_TRACES_SAMPLER, - OTEL_TRACES_SAMPLER_ARG, - DD_EXPERIMENTAL_FLAGGING_PROVIDER_ENABLED, - DD_EXPERIMENTAL_FLAGGING_PROVIDER_INITIALIZATION_TIMEOUT_MS, - OTEL_EXPORTER_OTLP_LOGS_ENDPOINT, - OTEL_EXPORTER_OTLP_LOGS_HEADERS, - OTEL_EXPORTER_OTLP_LOGS_PROTOCOL, - OTEL_EXPORTER_OTLP_LOGS_TIMEOUT, - OTEL_EXPORTER_OTLP_METRICS_ENDPOINT, - OTEL_EXPORTER_OTLP_METRICS_HEADERS, - OTEL_EXPORTER_OTLP_METRICS_PROTOCOL, - 
OTEL_EXPORTER_OTLP_METRICS_TIMEOUT, - OTEL_EXPORTER_OTLP_METRICS_TEMPORALITY_PREFERENCE, - OTEL_METRIC_EXPORT_TIMEOUT, - OTEL_EXPORTER_OTLP_PROTOCOL, - OTEL_EXPORTER_OTLP_ENDPOINT, - OTEL_EXPORTER_OTLP_HEADERS, - OTEL_EXPORTER_OTLP_TIMEOUT, - OTEL_BSP_SCHEDULE_DELAY, - OTEL_BSP_MAX_EXPORT_BATCH_SIZE, - OTEL_BSP_MAX_QUEUE_SIZE, - OTEL_METRIC_EXPORT_INTERVAL, - NX_TASK_TARGET_PROJECT, - } = source - - const tags = {} - - tagger.add(tags, parseSpaceSeparatedTags(handleOtel(OTEL_RESOURCE_ATTRIBUTES))) - tagger.add(tags, parseSpaceSeparatedTags(DD_TAGS)) - tagger.add(tags, DD_TRACE_TAGS) - - Object.assign(this.#parsedDdTags, tags) - - setString(target, 'apiKey', DD_API_KEY) - setBoolean(target, 'otelLogsEnabled', DD_LOGS_OTEL_ENABLED) - // Set OpenTelemetry logs configuration with specific _LOGS_ vars taking precedence over generic _EXPORTERS_ vars - if (OTEL_EXPORTER_OTLP_ENDPOINT) { - // Only set if there's a custom URL, otherwise let calc phase handle the default - setString(target, 'otelUrl', OTEL_EXPORTER_OTLP_ENDPOINT) + if (this.telemetry.heartbeatInterval) { + setAndTrack(this, 'telemetry.heartbeatInterval', Math.floor(this.telemetry.heartbeatInterval * 1000)) } - if (OTEL_EXPORTER_OTLP_ENDPOINT || OTEL_EXPORTER_OTLP_LOGS_ENDPOINT) { - setString(target, 'otelLogsUrl', OTEL_EXPORTER_OTLP_LOGS_ENDPOINT || target.otelUrl) + if (this.telemetry.extendedHeartbeatInterval) { + setAndTrack(this, 'telemetry.extendedHeartbeatInterval', + Math.floor(this.telemetry.extendedHeartbeatInterval * 1000)) } - setString(target, 'otelHeaders', OTEL_EXPORTER_OTLP_HEADERS) - setString(target, 'otelLogsHeaders', OTEL_EXPORTER_OTLP_LOGS_HEADERS || target.otelHeaders) - setString(target, 'otelProtocol', OTEL_EXPORTER_OTLP_PROTOCOL) - setString(target, 'otelLogsProtocol', OTEL_EXPORTER_OTLP_LOGS_PROTOCOL || target.otelProtocol) - const otelTimeout = nonNegInt(OTEL_EXPORTER_OTLP_TIMEOUT, 'OTEL_EXPORTER_OTLP_TIMEOUT') - if (otelTimeout !== undefined) { - target.otelTimeout = otelTimeout + 
+ // Enable resourceRenamingEnabled when appsec is enabled and only + // if DD_TRACE_RESOURCE_RENAMING_ENABLED is not explicitly set + if (!trackedConfigOrigins.has('resourceRenamingEnabled')) { + setAndTrack(this, 'resourceRenamingEnabled', this.appsec.enabled ?? false) } - const otelLogsTimeout = nonNegInt(OTEL_EXPORTER_OTLP_LOGS_TIMEOUT, 'OTEL_EXPORTER_OTLP_LOGS_TIMEOUT') - target.otelLogsTimeout = otelLogsTimeout === undefined ? target.otelTimeout : otelLogsTimeout - const otelBatchTimeout = nonNegInt(OTEL_BSP_SCHEDULE_DELAY, 'OTEL_BSP_SCHEDULE_DELAY', false) - if (otelBatchTimeout !== undefined) { - target.otelBatchTimeout = otelBatchTimeout + + if (!trackedConfigOrigins.has('spanComputePeerService') && this.spanAttributeSchema !== 'v0') { + setAndTrack(this, 'spanComputePeerService', true) } - target.otelMaxExportBatchSize = nonNegInt(OTEL_BSP_MAX_EXPORT_BATCH_SIZE, 'OTEL_BSP_MAX_EXPORT_BATCH_SIZE', false) - target.otelMaxQueueSize = nonNegInt(OTEL_BSP_MAX_QUEUE_SIZE, 'OTEL_BSP_MAX_QUEUE_SIZE', false) - - const otelMetricsExporterEnabled = OTEL_METRICS_EXPORTER?.toLowerCase() !== 'none' - setBoolean( - target, - 'otelMetricsEnabled', - DD_METRICS_OTEL_ENABLED && isTrue(DD_METRICS_OTEL_ENABLED) && otelMetricsExporterEnabled - ) - // Set OpenTelemetry metrics configuration with specific _METRICS_ vars - // taking precedence over generic _EXPORTERS_ vars - if (OTEL_EXPORTER_OTLP_ENDPOINT || OTEL_EXPORTER_OTLP_METRICS_ENDPOINT) { - setString(target, 'otelMetricsUrl', OTEL_EXPORTER_OTLP_METRICS_ENDPOINT || target.otelUrl) + + if (!this.apmTracingEnabled) { + setAndTrack(this, 'stats.enabled', false) + } else if (!trackedConfigOrigins.has('stats.enabled')) { + setAndTrack(this, 'stats.enabled', getIsGCPFunction() || getIsAzureFunction()) } - setString(target, 'otelMetricsHeaders', OTEL_EXPORTER_OTLP_METRICS_HEADERS || target.otelHeaders) - setString(target, 'otelMetricsProtocol', OTEL_EXPORTER_OTLP_METRICS_PROTOCOL || target.otelProtocol) - const otelMetricsTimeout 
= nonNegInt(OTEL_EXPORTER_OTLP_METRICS_TIMEOUT, 'OTEL_EXPORTER_OTLP_METRICS_TIMEOUT') - target.otelMetricsTimeout = otelMetricsTimeout === undefined ? target.otelTimeout : otelMetricsTimeout - target.otelMetricsExportTimeout = nonNegInt(OTEL_METRIC_EXPORT_TIMEOUT, 'OTEL_METRIC_EXPORT_TIMEOUT') - target.otelMetricsExportInterval = nonNegInt(OTEL_METRIC_EXPORT_INTERVAL, 'OTEL_METRIC_EXPORT_INTERVAL', false) - - // Parse temporality preference (default to DELTA for Datadog) - if (OTEL_EXPORTER_OTLP_METRICS_TEMPORALITY_PREFERENCE) { - const temporalityPref = OTEL_EXPORTER_OTLP_METRICS_TEMPORALITY_PREFERENCE.toUpperCase() - if (['DELTA', 'CUMULATIVE', 'LOWMEMORY'].includes(temporalityPref)) { - setString(target, 'otelMetricsTemporalityPreference', temporalityPref) + + // TODO: Remove the experimental env vars as a major or deprecate the option? + if (this.experimental?.b3) { + if (!this.tracePropagationStyle.inject.includes('b3')) { + this.tracePropagationStyle.inject.push('b3') } - } - setBoolean( - target, - 'apmTracingEnabled', - DD_APM_TRACING_ENABLED ?? 
- (DD_EXPERIMENTAL_APPSEC_STANDALONE_ENABLED && isFalse(DD_EXPERIMENTAL_APPSEC_STANDALONE_ENABLED)) - ) - setBoolean(target, 'propagateProcessTags.enabled', DD_EXPERIMENTAL_PROPAGATE_PROCESS_TAGS_ENABLED) - setString(target, 'appKey', DD_APP_KEY) - setBoolean(target, 'appsec.apiSecurity.enabled', DD_API_SECURITY_ENABLED && isTrue(DD_API_SECURITY_ENABLED)) - target['appsec.apiSecurity.sampleDelay'] = maybeFloat(DD_API_SECURITY_SAMPLE_DELAY) - setBoolean(target, 'appsec.apiSecurity.endpointCollectionEnabled', - DD_API_SECURITY_ENDPOINT_COLLECTION_ENABLED) - target['appsec.apiSecurity.endpointCollectionMessageLimit'] = - maybeInt(DD_API_SECURITY_ENDPOINT_COLLECTION_MESSAGE_LIMIT) - target['appsec.blockedTemplateGraphql'] = maybeFile(DD_APPSEC_GRAPHQL_BLOCKED_TEMPLATE_JSON) - target['appsec.blockedTemplateHtml'] = maybeFile(DD_APPSEC_HTTP_BLOCKED_TEMPLATE_HTML) - unprocessedTarget['appsec.blockedTemplateHtml'] = DD_APPSEC_HTTP_BLOCKED_TEMPLATE_HTML - target['appsec.blockedTemplateJson'] = maybeFile(DD_APPSEC_HTTP_BLOCKED_TEMPLATE_JSON) - unprocessedTarget['appsec.blockedTemplateJson'] = DD_APPSEC_HTTP_BLOCKED_TEMPLATE_JSON - setBoolean(target, 'appsec.enabled', DD_APPSEC_ENABLED) - setString(target, 'appsec.eventTracking.mode', DD_APPSEC_AUTO_USER_INSTRUMENTATION_MODE) - // TODO appsec.extendedHeadersCollection are deprecated, to delete in a major - setBoolean(target, 'appsec.extendedHeadersCollection.enabled', DD_APPSEC_COLLECT_ALL_HEADERS) - setBoolean( - target, - 'appsec.extendedHeadersCollection.redaction', - DD_APPSEC_HEADER_COLLECTION_REDACTION_ENABLED - ) - target['appsec.extendedHeadersCollection.maxHeaders'] = maybeInt(DD_APPSEC_MAX_COLLECTED_HEADERS) - unprocessedTarget['appsec.extendedHeadersCollection.maxHeaders'] = DD_APPSEC_MAX_COLLECTED_HEADERS - setString(target, 'appsec.obfuscatorKeyRegex', DD_APPSEC_OBFUSCATION_PARAMETER_KEY_REGEXP) - setString(target, 'appsec.obfuscatorValueRegex', DD_APPSEC_OBFUSCATION_PARAMETER_VALUE_REGEXP) - setBoolean(target, 
'appsec.rasp.enabled', DD_APPSEC_RASP_ENABLED) - // TODO Deprecated, to delete in a major - setBoolean(target, 'appsec.rasp.bodyCollection', DD_APPSEC_RASP_COLLECT_REQUEST_BODY) - target['appsec.rateLimit'] = maybeInt(DD_APPSEC_TRACE_RATE_LIMIT) - unprocessedTarget['appsec.rateLimit'] = DD_APPSEC_TRACE_RATE_LIMIT - setString(target, 'appsec.rules', DD_APPSEC_RULES) - // DD_APPSEC_SCA_ENABLED is never used locally, but only sent to the backend - setBoolean(target, 'appsec.sca.enabled', DD_APPSEC_SCA_ENABLED) - setBoolean(target, 'appsec.stackTrace.enabled', DD_APPSEC_STACK_TRACE_ENABLED) - target['appsec.stackTrace.maxDepth'] = maybeInt(DD_APPSEC_MAX_STACK_TRACE_DEPTH) - unprocessedTarget['appsec.stackTrace.maxDepth'] = DD_APPSEC_MAX_STACK_TRACE_DEPTH - target['appsec.stackTrace.maxStackTraces'] = maybeInt(DD_APPSEC_MAX_STACK_TRACES) - unprocessedTarget['appsec.stackTrace.maxStackTraces'] = DD_APPSEC_MAX_STACK_TRACES - target['appsec.wafTimeout'] = maybeInt(DD_APPSEC_WAF_TIMEOUT) - unprocessedTarget['appsec.wafTimeout'] = DD_APPSEC_WAF_TIMEOUT - target['appsec.apiSecurity.downstreamBodyAnalysisSampleRate'] = - maybeFloat(DD_API_SECURITY_DOWNSTREAM_BODY_ANALYSIS_SAMPLE_RATE) - target['appsec.apiSecurity.maxDownstreamRequestBodyAnalysis'] = - maybeInt(DD_API_SECURITY_MAX_DOWNSTREAM_REQUEST_BODY_ANALYSIS) - target.baggageMaxBytes = DD_TRACE_BAGGAGE_MAX_BYTES - target.baggageMaxItems = DD_TRACE_BAGGAGE_MAX_ITEMS - setArray(target, 'baggageTagKeys', DD_TRACE_BAGGAGE_TAG_KEYS) - setBoolean(target, 'clientIpEnabled', DD_TRACE_CLIENT_IP_ENABLED) - setString(target, 'clientIpHeader', DD_TRACE_CLIENT_IP_HEADER?.toLowerCase()) - if (DD_TRACE_CLOUD_REQUEST_PAYLOAD_TAGGING || DD_TRACE_CLOUD_RESPONSE_PAYLOAD_TAGGING) { - if (DD_TRACE_CLOUD_REQUEST_PAYLOAD_TAGGING) { - setBoolean(target, 'cloudPayloadTagging.requestsEnabled', true) + if (!this.tracePropagationStyle.extract.includes('b3')) { + this.tracePropagationStyle.extract.push('b3') } - if 
(DD_TRACE_CLOUD_RESPONSE_PAYLOAD_TAGGING) { - setBoolean(target, 'cloudPayloadTagging.responsesEnabled', true) + if (!this.tracePropagationStyle.inject.includes('b3 single header')) { + this.tracePropagationStyle.inject.push('b3 single header') } - target['cloudPayloadTagging.rules'] = appendRules( - splitJSONPathRules(DD_TRACE_CLOUD_REQUEST_PAYLOAD_TAGGING), - splitJSONPathRules(DD_TRACE_CLOUD_RESPONSE_PAYLOAD_TAGGING) - ) - } - if (DD_TRACE_CLOUD_PAYLOAD_TAGGING_MAX_DEPTH) { - target['cloudPayloadTagging.maxDepth'] = maybeInt(DD_TRACE_CLOUD_PAYLOAD_TAGGING_MAX_DEPTH) - } - setBoolean(target, 'crashtracking.enabled', DD_CRASHTRACKING_ENABLED) - setBoolean(target, 'codeOriginForSpans.enabled', DD_CODE_ORIGIN_FOR_SPANS_ENABLED) - setBoolean( - target, - 'codeOriginForSpans.experimental.exit_spans.enabled', - DD_CODE_ORIGIN_FOR_SPANS_EXPERIMENTAL_EXIT_SPANS_ENABLED - ) - setString(target, 'dbmPropagationMode', DD_DBM_PROPAGATION_MODE) - setBoolean(target, 'dbm.injectSqlBaseHash', DD_DBM_INJECT_SQL_BASEHASH) - setString(target, 'dogstatsd.hostname', DD_DOGSTATSD_HOST) - setString(target, 'dogstatsd.port', DD_DOGSTATSD_PORT) - setBoolean(target, 'dsmEnabled', DD_DATA_STREAMS_ENABLED) - target['dynamicInstrumentation.captureTimeoutMs'] = maybeInt(DD_DYNAMIC_INSTRUMENTATION_CAPTURE_TIMEOUT_MS) - unprocessedTarget['dynamicInstrumentation.captureTimeoutMs'] = DD_DYNAMIC_INSTRUMENTATION_CAPTURE_TIMEOUT_MS - setBoolean(target, 'dynamicInstrumentation.enabled', DD_DYNAMIC_INSTRUMENTATION_ENABLED) - setString(target, 'dynamicInstrumentation.probeFile', DD_DYNAMIC_INSTRUMENTATION_PROBE_FILE) - setArray(target, 'dynamicInstrumentation.redactedIdentifiers', - DD_DYNAMIC_INSTRUMENTATION_REDACTED_IDENTIFIERS) - setArray( - target, - 'dynamicInstrumentation.redactionExcludedIdentifiers', - DD_DYNAMIC_INSTRUMENTATION_REDACTION_EXCLUDED_IDENTIFIERS - ) - target['dynamicInstrumentation.uploadIntervalSeconds'] = - maybeFloat(DD_DYNAMIC_INSTRUMENTATION_UPLOAD_INTERVAL_SECONDS) - 
unprocessedTarget['dynamicInstrumentation.uploadInterval'] = DD_DYNAMIC_INSTRUMENTATION_UPLOAD_INTERVAL_SECONDS - setString(target, 'env', DD_ENV || tags.env) - setBoolean( - target, - 'experimental.flaggingProvider.enabled', - DD_EXPERIMENTAL_FLAGGING_PROVIDER_ENABLED - ) - if (DD_EXPERIMENTAL_FLAGGING_PROVIDER_INITIALIZATION_TIMEOUT_MS != null) { - target['experimental.flaggingProvider.initializationTimeoutMs'] = - maybeInt(DD_EXPERIMENTAL_FLAGGING_PROVIDER_INITIALIZATION_TIMEOUT_MS) + if (!this.tracePropagationStyle.extract.includes('b3 single header')) { + this.tracePropagationStyle.extract.push('b3 single header') + } + setAndTrack(this, 'tracePropagationStyle.inject', this.tracePropagationStyle.inject) + setAndTrack(this, 'tracePropagationStyle.extract', this.tracePropagationStyle.extract) } - setBoolean(target, 'traceEnabled', DD_TRACE_ENABLED) - setBoolean(target, 'experimental.aiguard.block', DD_AI_GUARD_BLOCK) - setBoolean(target, 'experimental.aiguard.enabled', DD_AI_GUARD_ENABLED) - setString(target, 'experimental.aiguard.endpoint', DD_AI_GUARD_ENDPOINT) - target['experimental.aiguard.maxContentSize'] = maybeInt(DD_AI_GUARD_MAX_CONTENT_SIZE) - unprocessedTarget['experimental.aiguard.maxContentSize'] = DD_AI_GUARD_MAX_CONTENT_SIZE - target['experimental.aiguard.maxMessagesLength'] = maybeInt(DD_AI_GUARD_MAX_MESSAGES_LENGTH) - unprocessedTarget['experimental.aiguard.maxMessagesLength'] = DD_AI_GUARD_MAX_MESSAGES_LENGTH - target['experimental.aiguard.timeout'] = maybeInt(DD_AI_GUARD_TIMEOUT) - unprocessedTarget['experimental.aiguard.timeout'] = DD_AI_GUARD_TIMEOUT - setBoolean(target, 'experimental.enableGetRumData', DD_TRACE_EXPERIMENTAL_GET_RUM_DATA_ENABLED) - setString(target, 'experimental.exporter', DD_TRACE_EXPERIMENTAL_EXPORTER) - if (AWS_LAMBDA_FUNCTION_NAME && !fs.existsSync(DATADOG_MINI_AGENT_PATH)) { - target.flushInterval = 0 - } else if (DD_TRACE_FLUSH_INTERVAL) { - target.flushInterval = maybeInt(DD_TRACE_FLUSH_INTERVAL) + + if 
(getEnvironmentVariable('AWS_LAMBDA_FUNCTION_NAME') && !fs.existsSync(DATADOG_MINI_AGENT_PATH)) { + setAndTrack(this, 'flushInterval', 0) } - target.flushMinSpans = maybeInt(DD_TRACE_PARTIAL_FLUSH_MIN_SPANS) - unprocessedTarget.flushMinSpans = DD_TRACE_PARTIAL_FLUSH_MIN_SPANS - setBoolean(target, 'gitMetadataEnabled', DD_TRACE_GIT_METADATA_ENABLED) - setIntegerRangeSet(target, 'grpc.client.error.statuses', DD_GRPC_CLIENT_ERROR_STATUSES) - setIntegerRangeSet(target, 'grpc.server.error.statuses', DD_GRPC_SERVER_ERROR_STATUSES) - setArray(target, 'headerTags', DD_TRACE_HEADER_TAGS) - target['heapSnapshot.count'] = maybeInt(DD_HEAP_SNAPSHOT_COUNT) - setString(target, 'heapSnapshot.destination', DD_HEAP_SNAPSHOT_DESTINATION) - target['heapSnapshot.interval'] = maybeInt(DD_HEAP_SNAPSHOT_INTERVAL) - setString(target, 'hostname', DD_AGENT_HOST) - target['iast.dbRowsToTaint'] = maybeInt(DD_IAST_DB_ROWS_TO_TAINT) - setBoolean(target, 'iast.deduplicationEnabled', DD_IAST_DEDUPLICATION_ENABLED) - setBoolean(target, 'iast.enabled', DD_IAST_ENABLED) - target['iast.maxConcurrentRequests'] = maybeInt(DD_IAST_MAX_CONCURRENT_REQUESTS) - unprocessedTarget['iast.maxConcurrentRequests'] = DD_IAST_MAX_CONCURRENT_REQUESTS - target['iast.maxContextOperations'] = maybeInt(DD_IAST_MAX_CONTEXT_OPERATIONS) - unprocessedTarget['iast.maxContextOperations'] = DD_IAST_MAX_CONTEXT_OPERATIONS - setBoolean(target, 'iast.redactionEnabled', DD_IAST_REDACTION_ENABLED && !isFalse(DD_IAST_REDACTION_ENABLED)) - setString(target, 'iast.redactionNamePattern', DD_IAST_REDACTION_NAME_PATTERN) - setString(target, 'iast.redactionValuePattern', DD_IAST_REDACTION_VALUE_PATTERN) - const iastRequestSampling = maybeInt(DD_IAST_REQUEST_SAMPLING) - if (iastRequestSampling !== undefined && iastRequestSampling > -1 && iastRequestSampling < 101) { - target['iast.requestSampling'] = iastRequestSampling + + if (!trackedConfigOrigins.has('apmTracingEnabled') && + 
trackedConfigOrigins.has('experimental.appsec.standalone.enabled')) { + setAndTrack(this, 'apmTracingEnabled', !this.experimental.appsec.standalone.enabled) } - unprocessedTarget['iast.requestSampling'] = DD_IAST_REQUEST_SAMPLING - setString(target, 'iast.securityControlsConfiguration', DD_IAST_SECURITY_CONTROLS_CONFIGURATION) - setString(target, 'iast.telemetryVerbosity', DD_IAST_TELEMETRY_VERBOSITY) - setBoolean(target, 'iast.stackTrace.enabled', DD_IAST_STACK_TRACE_ENABLED) - setString(target, 'installSignature.id', DD_INSTRUMENTATION_INSTALL_ID) - setString(target, 'installSignature.time', DD_INSTRUMENTATION_INSTALL_TIME) - setString(target, 'installSignature.type', DD_INSTRUMENTATION_INSTALL_TYPE) - // TODO: Why is DD_INJECTION_ENABLED a comma separated list? - setArray(target, 'injectionEnabled', DD_INJECTION_ENABLED) - if (DD_INJECTION_ENABLED !== undefined) { - setString(target, 'instrumentationSource', DD_INJECTION_ENABLED ? 'ssi' : 'manual') + + if (this.cloudPayloadTagging?.request || this.cloudPayloadTagging?.response) { + setAndTrack(this, 'cloudPayloadTagging.rules', appendRules( + this.cloudPayloadTagging.request, + this.cloudPayloadTagging.response + )) } - setBoolean(target, 'injectForce', DD_INJECT_FORCE) - setBoolean(target, 'isAzureFunction', getIsAzureFunction()) - setBoolean(target, 'isGCPFunction', getIsGCPFunction()) - setBoolean(target, 'gcpPubSubPushSubscriptionEnabled', enableGCPPubSubPushSubscription()) - target['langchain.spanCharLimit'] = maybeInt(DD_LANGCHAIN_SPAN_CHAR_LIMIT) - target['langchain.spanPromptCompletionSampleRate'] = maybeFloat(DD_LANGCHAIN_SPAN_PROMPT_COMPLETION_SAMPLE_RATE) - setBoolean(target, 'legacyBaggageEnabled', DD_TRACE_LEGACY_BAGGAGE_ENABLED) - setBoolean(target, 'llmobs.agentlessEnabled', DD_LLMOBS_AGENTLESS_ENABLED) - setBoolean(target, 'llmobs.enabled', DD_LLMOBS_ENABLED) - setString(target, 'llmobs.mlApp', DD_LLMOBS_ML_APP) - setBoolean(target, 'logInjection', DD_LOGS_INJECTION) - // Requires an accompanying 
DD_APM_OBFUSCATION_MEMCACHED_KEEP_COMMAND=true in the agent - setBoolean(target, 'memcachedCommandEnabled', DD_TRACE_MEMCACHED_COMMAND_ENABLED) - setBoolean(target, 'middlewareTracingEnabled', DD_TRACE_MIDDLEWARE_TRACING_ENABLED) - setBoolean(target, 'openAiLogsEnabled', DD_OPENAI_LOGS_ENABLED) - target['openai.spanCharLimit'] = maybeInt(DD_OPENAI_SPAN_CHAR_LIMIT) - unprocessedTarget.openaiSpanCharLimit = DD_OPENAI_SPAN_CHAR_LIMIT - if (DD_TRACE_PEER_SERVICE_MAPPING) { - target.peerServiceMapping = Object.fromEntries( - DD_TRACE_PEER_SERVICE_MAPPING.split(',').map(x => x.trim().split(':')) - ) - unprocessedTarget.peerServiceMapping = DD_TRACE_PEER_SERVICE_MAPPING + + if (this.injectionEnabled) { + setAndTrack(this, 'instrumentationSource', 'ssi') } - setString(target, 'port', DD_TRACE_AGENT_PORT) - const profilingEnabled = normalizeProfilingEnabledValue(DD_PROFILING_ENABLED) - setString(target, 'profiling.enabled', profilingEnabled) - setString(target, 'profiling.exporters', DD_PROFILING_EXPORTERS) - setBoolean(target, 'profiling.sourceMap', DD_PROFILING_SOURCE_MAP && !isFalse(DD_PROFILING_SOURCE_MAP)) - if (DD_INTERNAL_PROFILING_LONG_LIVED_THRESHOLD) { - // This is only used in testing to not have to wait 30s - target['profiling.longLivedThreshold'] = Number(DD_INTERNAL_PROFILING_LONG_LIVED_THRESHOLD) + + if (!trackedConfigOrigins.has('runtimeMetrics.enabled') && this.OTEL_METRICS_EXPORTER === 'none') { + setAndTrack(this, 'runtimeMetrics.enabled', false) } - setString(target, 'protocolVersion', DD_TRACE_AGENT_PROTOCOL_VERSION) - setString(target, 'queryStringObfuscation', DD_TRACE_OBFUSCATION_QUERY_STRING_REGEXP) - setBoolean(target, 'remoteConfig.enabled', DD_REMOTE_CONFIGURATION_ENABLED) - target['remoteConfig.pollInterval'] = maybeFloat(DD_REMOTE_CONFIG_POLL_INTERVAL_SECONDS) - unprocessedTarget['remoteConfig.pollInterval'] = DD_REMOTE_CONFIG_POLL_INTERVAL_SECONDS - setBoolean(target, 'reportHostname', DD_TRACE_REPORT_HOSTNAME) - if 
(DD_TRACE_RESOURCE_RENAMING_ENABLED !== undefined) { - setBoolean(target, 'resourceRenamingEnabled', DD_TRACE_RESOURCE_RENAMING_ENABLED) + if (!trackedConfigOrigins.has('sampleRate') && trackedConfigOrigins.has('OTEL_TRACES_SAMPLER')) { + setAndTrack(this, 'sampleRate', getFromOtelSamplerMap(this.OTEL_TRACES_SAMPLER, this.OTEL_TRACES_SAMPLER_ARG)) } - // only used to explicitly set runtimeMetrics to false - const otelSetRuntimeMetrics = String(OTEL_METRICS_EXPORTER).toLowerCase() === 'none' - ? false - : undefined - setBoolean(target, 'runtimeMetrics.enabled', DD_RUNTIME_METRICS_ENABLED || - otelSetRuntimeMetrics) - setBoolean(target, 'runtimeMetrics.eventLoop', DD_RUNTIME_METRICS_EVENT_LOOP_ENABLED) - setBoolean(target, 'runtimeMetrics.gc', DD_RUNTIME_METRICS_GC_ENABLED) - setBoolean(target, 'runtimeMetricsRuntimeId', DD_RUNTIME_METRICS_RUNTIME_ID_ENABLED) - setArray(target, 'sampler.spanSamplingRules', reformatSpanSamplingRules( - maybeJsonFile(DD_SPAN_SAMPLING_RULES_FILE) ?? - safeJsonParse(DD_SPAN_SAMPLING_RULES) - )) - setUnit( - target, - 'sampleRate', - DD_TRACE_SAMPLE_RATE || getFromOtelSamplerMap(OTEL_TRACES_SAMPLER, OTEL_TRACES_SAMPLER_ARG) - ) - target['sampler.rateLimit'] = DD_TRACE_RATE_LIMIT - setSamplingRule(target, 'sampler.rules', safeJsonParse(DD_TRACE_SAMPLING_RULES)) - unprocessedTarget['sampler.rules'] = DD_TRACE_SAMPLING_RULES - setString(target, 'scope', DD_TRACE_SCOPE) - // Priority: - // DD_SERVICE > tags.service > OTEL_SERVICE_NAME > NX_TASK_TARGET_PROJECT (if DD_ENABLE_NX_SERVICE_NAME) > default - let serviceName = DD_SERVICE || tags.service || OTEL_SERVICE_NAME - let isServiceNameInferred - if (!serviceName && NX_TASK_TARGET_PROJECT) { - if (isTrue(DD_ENABLE_NX_SERVICE_NAME)) { - isServiceNameInferred = true - serviceName = NX_TASK_TARGET_PROJECT - } else if (DD_MAJOR < 6) { - // Warn about v6 behavior change for Nx projects - log.warn( - // eslint-disable-next-line @stylistic/max-len - 'NX_TASK_TARGET_PROJECT is set but no service name 
was configured. In v6, NX_TASK_TARGET_PROJECT will be used as the default service name. Set DD_ENABLE_NX_SERVICE_NAME=true to opt-in to this behavior now, or set a service name explicitly.' - ) + + if (this.DD_SPAN_SAMPLING_RULES_FILE) { + try { + // TODO: Should we log a warning in case this is defined next to spanSamplingRules? + setAndTrack(this, 'spanSamplingRules', transformers.toCamelCase(JSON.parse(this.DD_SPAN_SAMPLING_RULES_FILE))) + } catch (error) { + log.warn('Error reading span sampling rules file %s; %o', this.DD_SPAN_SAMPLING_RULES_FILE, error) } } - setString(target, 'service', serviceName) - if (serviceName) setBoolean(target, 'isServiceNameInferred', isServiceNameInferred ?? false) - if (DD_SERVICE_MAPPING) { - target.serviceMapping = Object.fromEntries( - DD_SERVICE_MAPPING.split(',').map(x => x.trim().split(':')) - ) - } - setString(target, 'site', DD_SITE) - if (DD_TRACE_SPAN_ATTRIBUTE_SCHEMA) { - setString(target, 'spanAttributeSchema', validateNamingVersion(DD_TRACE_SPAN_ATTRIBUTE_SCHEMA)) - unprocessedTarget.spanAttributeSchema = DD_TRACE_SPAN_ATTRIBUTE_SCHEMA - } - // 0: disabled, 1: logging, 2: garbage collection + logging - target.spanLeakDebug = maybeInt(DD_TRACE_SPAN_LEAK_DEBUG) - setBoolean(target, 'spanRemoveIntegrationFromService', DD_TRACE_REMOVE_INTEGRATION_SERVICE_NAMES_ENABLED) - setBoolean(target, 'startupLogs', DD_TRACE_STARTUP_LOGS) - setTags(target, 'tags', tags) - target.tagsHeaderMaxLength = DD_TRACE_X_DATADOG_TAGS_MAX_LENGTH - setBoolean(target, 'telemetry.enabled', DD_INSTRUMENTATION_TELEMETRY_ENABLED) - setString(target, 'instrumentation_config_id', DD_INSTRUMENTATION_CONFIG_ID) - setBoolean(target, 'telemetry.debug', DD_TELEMETRY_DEBUG) - setBoolean(target, 'telemetry.dependencyCollection', DD_TELEMETRY_DEPENDENCY_COLLECTION_ENABLED) - target['telemetry.heartbeatInterval'] = maybeInt(Math.floor(DD_TELEMETRY_HEARTBEAT_INTERVAL * 1000)) - unprocessedTarget['telemetry.heartbeatInterval'] = DD_TELEMETRY_HEARTBEAT_INTERVAL - 
setBoolean(target, 'telemetry.logCollection', DD_TELEMETRY_LOG_COLLECTION_ENABLED) - setBoolean(target, 'telemetry.metrics', DD_TELEMETRY_METRICS_ENABLED) - setBoolean(target, 'isKeepingCoverageConfiguration', DD_TEST_TIA_KEEP_COV_CONFIG) - setBoolean(target, 'traceId128BitGenerationEnabled', DD_TRACE_128_BIT_TRACEID_GENERATION_ENABLED) - setBoolean(target, 'traceId128BitLoggingEnabled', DD_TRACE_128_BIT_TRACEID_LOGGING_ENABLED) - warnIfPropagationStyleConflict( - DD_TRACE_PROPAGATION_STYLE, - DD_TRACE_PROPAGATION_STYLE_INJECT, - DD_TRACE_PROPAGATION_STYLE_EXTRACT - ) - if (DD_TRACE_PROPAGATION_STYLE !== undefined) { - setArray(target, 'tracePropagationStyle.inject', normalizePropagationStyle(DD_TRACE_PROPAGATION_STYLE)) - setArray(target, 'tracePropagationStyle.extract', normalizePropagationStyle(DD_TRACE_PROPAGATION_STYLE)) - } - if (DD_TRACE_PROPAGATION_STYLE_INJECT !== undefined) { - setArray(target, 'tracePropagationStyle.inject', - normalizePropagationStyle(DD_TRACE_PROPAGATION_STYLE_INJECT)) + + // All sampler options are tracked as individual values. No need to track the sampler object as a whole. + this.sampler = { + rules: this.samplingRules, + rateLimit: this.rateLimit, + sampleRate: this.sampleRate, + spanSamplingRules: this.spanSamplingRules, } - if (DD_TRACE_PROPAGATION_STYLE_EXTRACT !== undefined) { - setArray(target, 'tracePropagationStyle.extract', - normalizePropagationStyle(DD_TRACE_PROPAGATION_STYLE_EXTRACT)) + + // For LLMObs, we want to auto enable it when other llmobs options are defined. 
+ if (!this.llmobs.enabled && + !trackedConfigOrigins.has('llmobs.enabled') && + (trackedConfigOrigins.has('llmobs.agentlessEnabled') || + trackedConfigOrigins.has('llmobs.mlApp'))) { + setAndTrack(this, 'llmobs.enabled', true) } - setBoolean(target, 'tracePropagationExtractFirst', DD_TRACE_PROPAGATION_EXTRACT_FIRST) - if (DD_TRACE_PROPAGATION_BEHAVIOR_EXTRACT !== undefined) { - const stringPropagationBehaviorExtract = String(DD_TRACE_PROPAGATION_BEHAVIOR_EXTRACT) - target.tracePropagationBehaviorExtract = - VALID_PROPAGATION_BEHAVIOR_EXTRACT.has(stringPropagationBehaviorExtract) - ? stringPropagationBehaviorExtract - : 'continue' + + if (this.OTEL_RESOURCE_ATTRIBUTES) { + for (const [key, value] of Object.entries(this.OTEL_RESOURCE_ATTRIBUTES)) { + // Not replacing existing tags keeps the order of the tags as before. + if (!this.tags[key]) { + this.tags[key] = value + } + } } - if (DD_TRACE_PROPAGATION_STYLE !== undefined || - DD_TRACE_PROPAGATION_STYLE_INJECT !== undefined || - DD_TRACE_PROPAGATION_STYLE_EXTRACT !== undefined || - OTEL_PROPAGATORS !== undefined) { - // At least one var is defined, calculate value using truthy logic - const useDdStyle = DD_TRACE_PROPAGATION_STYLE || - DD_TRACE_PROPAGATION_STYLE_INJECT || - DD_TRACE_PROPAGATION_STYLE_EXTRACT - setBoolean(target, 'tracePropagationStyle.otelPropagators', - useDdStyle ? false : !!OTEL_PROPAGATORS) - - // Use OTEL_PROPAGATORS if no DD-specific vars are set - if (!useDdStyle && OTEL_PROPAGATORS) { - const otelStyles = normalizePropagationStyle(OTEL_PROPAGATORS) - // Validate OTEL propagators - for (const style of otelStyles || []) { - if (!VALID_PROPAGATION_STYLES.has(style)) { - log.warn('unexpected value %s for OTEL_PROPAGATORS environment variable', style) - getCounter('otel.env.invalid', 'DD_TRACE_PROPAGATION_STYLE', 'OTEL_PROPAGATORS').inc() + if (this.DD_TRACE_TAGS) { + // TODO: This is a hack to keep the order of the tags as before. 
+ // That hack is not sufficient, since it does not handle other cases where the tags are set by the user. + if (trackedConfigOrigins.get('tags') === 'code') { + for (const [key, value] of Object.entries(this.DD_TRACE_TAGS)) { + // Not replacing existing tags keeps the order of the tags as before. + if (!this.tags[key]) { + this.tags[key] = value } } - // Set inject/extract from OTEL_PROPAGATORS - if (otelStyles) { - setArray(target, 'tracePropagationStyle.inject', otelStyles) - setArray(target, 'tracePropagationStyle.extract', otelStyles) - } + } else { + Object.assign(this.tags, this.DD_TRACE_TAGS) } } - setBoolean(target, 'traceWebsocketMessagesEnabled', DD_TRACE_WEBSOCKET_MESSAGES_ENABLED) - setBoolean(target, 'traceWebsocketMessagesInheritSampling', DD_TRACE_WEBSOCKET_MESSAGES_INHERIT_SAMPLING) - setBoolean(target, 'traceWebsocketMessagesSeparateTraces', DD_TRACE_WEBSOCKET_MESSAGES_SEPARATE_TRACES) - setBoolean(target, 'tracing', DD_TRACING_ENABLED) - setString(target, 'version', DD_VERSION || tags.version) - setBoolean(target, 'inferredProxyServicesEnabled', DD_TRACE_INFERRED_PROXY_SERVICES_ENABLED) - setBoolean(target, 'trace.aws.addSpanPointers', DD_TRACE_AWS_ADD_SPAN_POINTERS) - setString(target, 'trace.dynamoDb.tablePrimaryKeys', DD_TRACE_DYNAMODB_TABLE_PRIMARY_KEYS) - setArray(target, 'graphqlErrorExtensions', DD_TRACE_GRAPHQL_ERROR_EXTENSIONS) - setBoolean(target, 'trace.nativeSpanEvents', DD_TRACE_NATIVE_SPAN_EVENTS) - target['vertexai.spanPromptCompletionSampleRate'] = maybeFloat(DD_VERTEXAI_SPAN_PROMPT_COMPLETION_SAMPLE_RATE) - target['vertexai.spanCharLimit'] = maybeInt(DD_VERTEXAI_SPAN_CHAR_LIMIT) - } - #applyOptions (options) { - const opts = this.#options - const tags = {} - - options = this.#optionsArg = { ingestion: {}, ...options, ...opts } - - tagger.add(tags, options.tags) - - setBoolean(opts, 'apmTracingEnabled', options.apmTracingEnabled ?? 
- (options.experimental?.appsec?.standalone && !options.experimental.appsec.standalone.enabled) - ) - setBoolean(opts, 'appsec.apiSecurity.enabled', options.appsec?.apiSecurity?.enabled) - setBoolean(opts, 'appsec.apiSecurity.endpointCollectionEnabled', - options.appsec?.apiSecurity?.endpointCollectionEnabled) - opts['appsec.apiSecurity.endpointCollectionMessageLimit'] = - maybeInt(options.appsec?.apiSecurity?.endpointCollectionMessageLimit) - opts['appsec.blockedTemplateGraphql'] = maybeFile(options.appsec?.blockedTemplateGraphql) - opts['appsec.blockedTemplateHtml'] = maybeFile(options.appsec?.blockedTemplateHtml) - this.#optsUnprocessed['appsec.blockedTemplateHtml'] = options.appsec?.blockedTemplateHtml - opts['appsec.blockedTemplateJson'] = maybeFile(options.appsec?.blockedTemplateJson) - this.#optsUnprocessed['appsec.blockedTemplateJson'] = options.appsec?.blockedTemplateJson - setBoolean(opts, 'appsec.enabled', options.appsec?.enabled) - setString(opts, 'appsec.eventTracking.mode', options.appsec?.eventTracking?.mode) - setBoolean( - opts, - 'appsec.extendedHeadersCollection.enabled', - options.appsec?.extendedHeadersCollection?.enabled - ) - setBoolean( - opts, - 'appsec.extendedHeadersCollection.redaction', - options.appsec?.extendedHeadersCollection?.redaction - ) - opts['appsec.extendedHeadersCollection.maxHeaders'] = options.appsec?.extendedHeadersCollection?.maxHeaders - setString(opts, 'appsec.obfuscatorKeyRegex', options.appsec?.obfuscatorKeyRegex) - setString(opts, 'appsec.obfuscatorValueRegex', options.appsec?.obfuscatorValueRegex) - setBoolean(opts, 'appsec.rasp.enabled', options.appsec?.rasp?.enabled) - setBoolean(opts, 'appsec.rasp.bodyCollection', options.appsec?.rasp?.bodyCollection) - opts['appsec.rateLimit'] = maybeInt(options.appsec?.rateLimit) - this.#optsUnprocessed['appsec.rateLimit'] = options.appsec?.rateLimit - setString(opts, 'appsec.rules', options.appsec?.rules) - setBoolean(opts, 'appsec.stackTrace.enabled', 
options.appsec?.stackTrace?.enabled) - opts['appsec.stackTrace.maxDepth'] = maybeInt(options.appsec?.stackTrace?.maxDepth) - this.#optsUnprocessed['appsec.stackTrace.maxDepth'] = options.appsec?.stackTrace?.maxDepth - opts['appsec.stackTrace.maxStackTraces'] = maybeInt(options.appsec?.stackTrace?.maxStackTraces) - this.#optsUnprocessed['appsec.stackTrace.maxStackTraces'] = options.appsec?.stackTrace?.maxStackTraces - opts['appsec.wafTimeout'] = maybeInt(options.appsec?.wafTimeout) - this.#optsUnprocessed['appsec.wafTimeout'] = options.appsec?.wafTimeout - setBoolean(opts, 'clientIpEnabled', options.clientIpEnabled) - setString(opts, 'clientIpHeader', options.clientIpHeader?.toLowerCase()) - if (options.cloudPayloadTagging?.request || options.cloudPayloadTagging?.response) { - if (options.cloudPayloadTagging.request) { - setBoolean(opts, 'cloudPayloadTagging.requestsEnabled', true) - } - if (options.cloudPayloadTagging.response) { - setBoolean(opts, 'cloudPayloadTagging.responsesEnabled', true) - } - opts['cloudPayloadTagging.rules'] = appendRules( - splitJSONPathRules(options.cloudPayloadTagging.request), - splitJSONPathRules(options.cloudPayloadTagging.response) - ) - } - if (options.cloudPayloadTagging?.requestsEnabled !== undefined) { - setBoolean(opts, 'cloudPayloadTagging.requestsEnabled', options.cloudPayloadTagging.requestsEnabled) - } - if (options.cloudPayloadTagging?.responsesEnabled !== undefined) { - setBoolean(opts, 'cloudPayloadTagging.responsesEnabled', options.cloudPayloadTagging.responsesEnabled) - } - opts['cloudPayloadTagging.maxDepth'] = maybeInt(options.cloudPayloadTagging?.maxDepth) - opts.baggageMaxBytes = options.baggageMaxBytes - opts.baggageMaxItems = options.baggageMaxItems - setArray(opts, 'baggageTagKeys', options.baggageTagKeys) - setBoolean(opts, 'codeOriginForSpans.enabled', options.codeOriginForSpans?.enabled) - setBoolean( - opts, - 'codeOriginForSpans.experimental.exit_spans.enabled', - 
options.codeOriginForSpans?.experimental?.exit_spans?.enabled - ) - setString(opts, 'dbmPropagationMode', options.dbmPropagationMode) - setBoolean(opts, 'dbm.injectSqlBaseHash', options.dbm?.injectSqlBaseHash) - if (options.dogstatsd) { - setString(opts, 'dogstatsd.hostname', options.dogstatsd.hostname) - setString(opts, 'dogstatsd.port', options.dogstatsd.port) + if (!this.#parsedDdTags) { + this.#parsedDdTags = rfdc(this.tags) } - setBoolean(opts, 'dsmEnabled', options.dsmEnabled) - opts['dynamicInstrumentation.captureTimeoutMs'] = maybeInt(options.dynamicInstrumentation?.captureTimeoutMs) - this.#optsUnprocessed['dynamicInstrumentation.captureTimeoutMs'] = options.dynamicInstrumentation?.captureTimeoutMs - setBoolean(opts, 'dynamicInstrumentation.enabled', options.dynamicInstrumentation?.enabled) - setString(opts, 'dynamicInstrumentation.probeFile', options.dynamicInstrumentation?.probeFile) - setArray( - opts, - 'dynamicInstrumentation.redactedIdentifiers', - options.dynamicInstrumentation?.redactedIdentifiers - ) - setArray( - opts, - 'dynamicInstrumentation.redactionExcludedIdentifiers', - options.dynamicInstrumentation?.redactionExcludedIdentifiers - ) - opts['dynamicInstrumentation.uploadIntervalSeconds'] = - maybeFloat(options.dynamicInstrumentation?.uploadIntervalSeconds) - this.#optsUnprocessed['dynamicInstrumentation.uploadIntervalSeconds'] = - options.dynamicInstrumentation?.uploadIntervalSeconds - setString(opts, 'env', options.env || tags.env) - setBoolean(opts, 'experimental.aiguard.block', options.experimental?.aiguard?.block) - setBoolean(opts, 'experimental.aiguard.enabled', options.experimental?.aiguard?.enabled) - setString(opts, 'experimental.aiguard.endpoint', options.experimental?.aiguard?.endpoint) - opts['experimental.aiguard.maxMessagesLength'] = maybeInt(options.experimental?.aiguard?.maxMessagesLength) - this.#optsUnprocessed['experimental.aiguard.maxMessagesLength'] = options.experimental?.aiguard?.maxMessagesLength - 
opts['experimental.aiguard.maxContentSize'] = maybeInt(options.experimental?.aiguard?.maxContentSize) - this.#optsUnprocessed['experimental.aiguard.maxContentSize'] = options.experimental?.aiguard?.maxContentSize - opts['experimental.aiguard.timeout'] = maybeInt(options.experimental?.aiguard?.timeout) - this.#optsUnprocessed['experimental.aiguard.timeout'] = options.experimental?.aiguard?.timeout - setBoolean(opts, 'experimental.enableGetRumData', options.experimental?.enableGetRumData) - setString(opts, 'experimental.exporter', options.experimental?.exporter) - setBoolean(opts, 'experimental.flaggingProvider.enabled', options.experimental?.flaggingProvider?.enabled) - opts['experimental.flaggingProvider.initializationTimeoutMs'] = maybeInt( - options.experimental?.flaggingProvider?.initializationTimeoutMs - ) - this.#optsUnprocessed['experimental.flaggingProvider.initializationTimeoutMs'] = - options.experimental?.flaggingProvider?.initializationTimeoutMs - opts.flushInterval = maybeInt(options.flushInterval) - this.#optsUnprocessed.flushInterval = options.flushInterval - opts.flushMinSpans = maybeInt(options.flushMinSpans) - this.#optsUnprocessed.flushMinSpans = options.flushMinSpans - setArray(opts, 'headerTags', options.headerTags) - setString(opts, 'hostname', options.hostname) - opts['iast.dbRowsToTaint'] = maybeInt(options.iast?.dbRowsToTaint) - setBoolean(opts, 'iast.deduplicationEnabled', options.iast && options.iast.deduplicationEnabled) - setBoolean(opts, 'iast.enabled', - options.iast && (options.iast === true || options.iast.enabled === true)) - opts['iast.maxConcurrentRequests'] = maybeInt(options.iast?.maxConcurrentRequests) - this.#optsUnprocessed['iast.maxConcurrentRequests'] = options.iast?.maxConcurrentRequests - opts['iast.maxContextOperations'] = maybeInt(options.iast?.maxContextOperations) - this.#optsUnprocessed['iast.maxContextOperations'] = options.iast?.maxContextOperations - setBoolean(opts, 'iast.redactionEnabled', 
options.iast?.redactionEnabled) - setString(opts, 'iast.redactionNamePattern', options.iast?.redactionNamePattern) - setString(opts, 'iast.redactionValuePattern', options.iast?.redactionValuePattern) - const iastRequestSampling = maybeInt(options.iast?.requestSampling) - if (iastRequestSampling !== undefined && iastRequestSampling > -1 && iastRequestSampling < 101) { - opts['iast.requestSampling'] = iastRequestSampling - this.#optsUnprocessed['iast.requestSampling'] = options.iast?.requestSampling - } - if (DD_MAJOR < 6) { - opts['iast.securityControlsConfiguration'] = options.iast?.securityControlsConfiguration + + if (!this.env && this.tags.env !== undefined) { + setAndTrack(this, 'env', this.tags.env) } - setBoolean(opts, 'iast.stackTrace.enabled', options.iast?.stackTrace?.enabled) - setString(opts, 'iast.telemetryVerbosity', options.iast && options.iast.telemetryVerbosity) - setBoolean(opts, 'isCiVisibility', options.isCiVisibility) - setBoolean(opts, 'legacyBaggageEnabled', options.legacyBaggageEnabled) - setBoolean(opts, 'llmobs.agentlessEnabled', options.llmobs?.agentlessEnabled) - setString(opts, 'llmobs.mlApp', options.llmobs?.mlApp) - setBoolean(opts, 'logInjection', options.logInjection) - opts.lookup = options.lookup - setBoolean(opts, 'middlewareTracingEnabled', options.middlewareTracingEnabled) - setBoolean(opts, 'openAiLogsEnabled', options.openAiLogsEnabled) - opts.peerServiceMapping = options.peerServiceMapping - setBoolean(opts, 'plugins', options.plugins) - setString(opts, 'port', options.port) - const strProfiling = String(options.profiling) - if (['true', 'false', 'auto'].includes(strProfiling)) { - setString(opts, 'profiling.enabled', strProfiling) + + if (!this.version) { + setAndTrack(this, 'version', this.tags.version || pkg.version) + this.tags.version ??= pkg.version } - setString(opts, 'protocolVersion', options.protocolVersion) - if (options.remoteConfig) { - opts['remoteConfig.pollInterval'] = 
maybeFloat(options.remoteConfig.pollInterval) - this.#optsUnprocessed['remoteConfig.pollInterval'] = options.remoteConfig.pollInterval + + let isServiceNameInferred = false + if (!trackedConfigOrigins.has('service')) { + if (this.tags.service) { + setAndTrack(this, 'service', this.tags.service) + } else { + const NX_TASK_TARGET_PROJECT = getEnvironmentVariable('NX_TASK_TARGET_PROJECT') + if (NX_TASK_TARGET_PROJECT) { + if (this.DD_ENABLE_NX_SERVICE_NAME) { + setAndTrack(this, 'service', NX_TASK_TARGET_PROJECT) + isServiceNameInferred = true + } else if (DD_MAJOR < 6) { + log.warn( + // eslint-disable-next-line eslint-rules/eslint-log-printf-style + 'NX_TASK_TARGET_PROJECT is set but no service name was configured. In v6, NX_TASK_TARGET_PROJECT will ' + + 'be used as the default service name. Set DD_ENABLE_NX_SERVICE_NAME=true to opt-in to this behavior ' + + 'now, or set a service name explicitly.' + ) + } + } + } + + if (!this.service) { + const serverlessName = IS_SERVERLESS + ? ( + getEnvironmentVariable('AWS_LAMBDA_FUNCTION_NAME') || + getEnvironmentVariable('FUNCTION_NAME') || // Google Cloud Function Name set by deprecated runtimes + getEnvironmentVariable('K_SERVICE') || // Google Cloud Function Name set by newer runtimes + getEnvironmentVariable('WEBSITE_SITE_NAME') // set by Azure Functions + ) + : undefined + + setAndTrack(this, 'service', serverlessName || pkg.name || 'node') + this.tags.service ??= /** @type {string} */ (this.service) + isServiceNameInferred = true + } } - setBoolean(opts, 'reportHostname', options.reportHostname) - setBoolean(opts, 'runtimeMetrics.enabled', options.runtimeMetrics?.enabled) - setBoolean(opts, 'runtimeMetrics.eventLoop', options.runtimeMetrics?.eventLoop) - setBoolean(opts, 'runtimeMetrics.gc', options.runtimeMetrics?.gc) - setBoolean(opts, 'runtimeMetricsRuntimeId', options.runtimeMetricsRuntimeId) - setArray(opts, 'sampler.spanSamplingRules', reformatSpanSamplingRules(options.spanSamplingRules)) - setUnit(opts, 
'sampleRate', options.sampleRate ?? options.ingestion.sampleRate) - opts['sampler.rateLimit'] = maybeInt(options.rateLimit ?? options.ingestion.rateLimit) - setSamplingRule(opts, 'sampler.rules', options.samplingRules) - const optService = options.service || tags.service - setString(opts, 'service', optService) - if (optService) { - setBoolean(opts, 'isServiceNameInferred', false) + setAndTrack(this, 'isServiceNameInferred', isServiceNameInferred) + + // Add missing tags, in case they are defined otherwise. + if (this.service) { + this.tags.service = this.service } - opts.serviceMapping = options.serviceMapping - setString(opts, 'site', options.site) - if (options.spanAttributeSchema) { - setString(opts, 'spanAttributeSchema', validateNamingVersion(options.spanAttributeSchema)) - this.#optsUnprocessed.spanAttributeSchema = options.spanAttributeSchema + if (this.env) { + this.tags.env = this.env } - setBoolean(opts, 'spanRemoveIntegrationFromService', options.spanRemoveIntegrationFromService) - setBoolean(opts, 'startupLogs', options.startupLogs) - setTags(opts, 'tags', tags) - setBoolean(opts, 'traceId128BitGenerationEnabled', options.traceId128BitGenerationEnabled) - setBoolean(opts, 'traceId128BitLoggingEnabled', options.traceId128BitLoggingEnabled) - setBoolean(opts, 'traceWebsocketMessagesEnabled', options.traceWebsocketMessagesEnabled) - setBoolean(opts, 'traceWebsocketMessagesInheritSampling', options.traceWebsocketMessagesInheritSampling) - setBoolean(opts, 'traceWebsocketMessagesSeparateTraces', options.traceWebsocketMessagesSeparateTraces) - setString(opts, 'version', options.version || tags.version) - setBoolean(opts, 'inferredProxyServicesEnabled', options.inferredProxyServicesEnabled) - setBoolean(opts, 'graphqlErrorExtensions', options.graphqlErrorExtensions) - setBoolean(opts, 'trace.nativeSpanEvents', options.trace?.nativeSpanEvents) - if (options.tracePropagationStyle) { - setArray(opts, 'tracePropagationStyle.inject', - 
normalizePropagationStyle(options.tracePropagationStyle.inject ?? options.tracePropagationStyle)) - setArray(opts, 'tracePropagationStyle.extract', - normalizePropagationStyle(options.tracePropagationStyle.extract ?? options.tracePropagationStyle)) + if (this.version) { + this.tags.version = this.version } + this.tags['runtime-id'] = RUNTIME_ID - // For LLMObs, we want the environment variable to take precedence over the options. - // This is reliant on environment config being set before options. - // This is to make sure the origins of each value are tracked appropriately for telemetry. - // We'll only set `llmobs.enabled` on the opts when it's not set on the environment, and options.llmobs is provided. - if (this.#env['llmobs.enabled'] == null && options.llmobs) { - setBoolean(opts, 'llmobs.enabled', true) + if (IS_SERVERLESS) { + setAndTrack(this, 'telemetry.enabled', false) + setAndTrack(this, 'crashtracking.enabled', false) + setAndTrack(this, 'remoteConfig.enabled', false) } - } - - #isCiVisibility () { - return this.#optionsArg.isCiVisibility ?? this.#defaults.isCiVisibility - } - - #getHostname () { - const DD_CIVISIBILITY_AGENTLESS_URL = getEnv('DD_CIVISIBILITY_AGENTLESS_URL') - const url = DD_CIVISIBILITY_AGENTLESS_URL - ? new URL(DD_CIVISIBILITY_AGENTLESS_URL) - : getAgentUrl(this.#getTraceAgentUrl(), this.#optionsArg) - const DD_AGENT_HOST = this.#optionsArg.hostname ?? - getEnv('DD_AGENT_HOST') ?? - defaults.hostname - return DD_AGENT_HOST || url?.hostname - } - - #getSpanComputePeerService () { - const DD_TRACE_SPAN_ATTRIBUTE_SCHEMA = validateNamingVersion( - this.#optionsArg.spanAttributeSchema ?? - getEnv('DD_TRACE_SPAN_ATTRIBUTE_SCHEMA') - ) - - const peerServiceSet = ( - this.#optionsArg.hasOwnProperty('spanComputePeerService') || - getEnv('DD_TRACE_PEER_SERVICE_DEFAULTS_ENABLED') !== undefined - ) - const peerServiceValue = this.#optionsArg.spanComputePeerService ?? 
- getEnv('DD_TRACE_PEER_SERVICE_DEFAULTS_ENABLED') - - const spanComputePeerService = ( - DD_TRACE_SPAN_ATTRIBUTE_SCHEMA === 'v0' - // In v0, peer service is computed only if it is explicitly set to true - ? peerServiceSet && isTrue(peerServiceValue) - // In >v0, peer service is false only if it is explicitly set to false - : (peerServiceSet ? !isFalse(peerServiceValue) : true) - ) - - return spanComputePeerService - } - #isTraceStatsComputationEnabled () { - const apmTracingEnabled = this.#options.apmTracingEnabled !== false && - this.#env.apmTracingEnabled !== false - - return apmTracingEnabled && ( - this.#optionsArg.stats ?? - getEnv('DD_TRACE_STATS_COMPUTATION_ENABLED') ?? - (getIsGCPFunction() || getIsAzureFunction()) - ) - } - - #getTraceAgentUrl () { - return this.#optionsArg.url ?? - getEnv('DD_TRACE_AGENT_URL') ?? - null - } - - // handles values calculated from a mixture of options and env vars - #applyCalculated () { - const calc = this.#calculated - - const DD_CIVISIBILITY_AGENTLESS_URL = getEnv('DD_CIVISIBILITY_AGENTLESS_URL') - - calc.url = DD_CIVISIBILITY_AGENTLESS_URL - ? new URL(DD_CIVISIBILITY_AGENTLESS_URL) - : getAgentUrl(this.#getTraceAgentUrl(), this.#optionsArg) + // TODO: Should this unconditionally be disabled? 
+ if (getEnvironmentVariable('JEST_WORKER_ID') && !trackedConfigOrigins.has('telemetry.enabled')) { + setAndTrack(this, 'telemetry.enabled', false) + } // Experimental agentless APM span intake // When enabled, sends spans directly to Datadog intake without an agent - const agentlessEnabled = isTrue(getEnv('_DD_APM_TRACING_AGENTLESS_ENABLED')) + // TODO: Replace this with a proper configuration + const agentlessEnabled = isTrue(getEnvironmentVariable('_DD_APM_TRACING_AGENTLESS_ENABLED')) if (agentlessEnabled) { - setString(calc, 'experimental.exporter', 'agentless') - // Disable rate limiting - server-side sampling will be used - calc['sampler.rateLimit'] = -1 + setAndTrack(this, 'experimental.exporter', 'agentless') // Disable client-side stats computation - setBoolean(calc, 'stats.enabled', false) + setAndTrack(this, 'stats.enabled', false) // Enable hostname reporting - setBoolean(calc, 'reportHostname', true) + setAndTrack(this, 'reportHostname', true) + // Disable rate limiting - server-side sampling will be used + setAndTrack(this, 'sampler.rateLimit', -1) // Clear sampling rules - server-side sampling handles this - calc['sampler.rules'] = [] + setAndTrack(this, 'sampler.rules', []) // Agentless intake only accepts 64-bit trace IDs; disable 128-bit generation - setBoolean(calc, 'traceId128BitGenerationEnabled', false) - } - - if (this.#isCiVisibility()) { - setBoolean(calc, 'isEarlyFlakeDetectionEnabled', - getEnv('DD_CIVISIBILITY_EARLY_FLAKE_DETECTION_ENABLED') ?? true) - setBoolean(calc, 'isFlakyTestRetriesEnabled', getEnv('DD_CIVISIBILITY_FLAKY_RETRY_ENABLED') ?? true) - calc.flakyTestRetriesCount = maybeInt(getEnv('DD_CIVISIBILITY_FLAKY_RETRY_COUNT')) ?? 
5 - setBoolean(calc, 'isIntelligentTestRunnerEnabled', isTrue(isCiVisibilityItrEnabled())) - setBoolean(calc, 'isManualApiEnabled', !isFalse(getEnv('DD_CIVISIBILITY_MANUAL_API_ENABLED'))) - setString(calc, 'ciVisibilityTestSessionName', getEnv('DD_TEST_SESSION_NAME')) - setBoolean(calc, 'ciVisAgentlessLogSubmissionEnabled', - isTrue(getEnv('DD_AGENTLESS_LOG_SUBMISSION_ENABLED'))) - setBoolean(calc, 'isTestDynamicInstrumentationEnabled', - !isFalse(getEnv('DD_TEST_FAILED_TEST_REPLAY_ENABLED'))) - setBoolean(calc, 'isServiceUserProvided', !!this.#env.service) - setBoolean(calc, 'isTestManagementEnabled', !isFalse(getEnv('DD_TEST_MANAGEMENT_ENABLED'))) - calc.testManagementAttemptToFixRetries = maybeInt(getEnv('DD_TEST_MANAGEMENT_ATTEMPT_TO_FIX_RETRIES')) ?? 20 - setBoolean(calc, 'isImpactedTestsEnabled', - !isFalse(getEnv('DD_CIVISIBILITY_IMPACTED_TESTS_DETECTION_ENABLED'))) - } - - // Disable log injection when OTEL logs are enabled - // OTEL logs and DD log injection are mutually exclusive - if (this.#env.otelLogsEnabled) { - setBoolean(calc, 'logInjection', false) + if (!trackedConfigOrigins.has('traceId128BitGenerationEnabled')) { + setAndTrack(this, 'traceId128BitGenerationEnabled', false) + } } - calc['dogstatsd.hostname'] = this.#getHostname() - - // Compute OTLP logs and metrics URLs to send payloads to the active Datadog Agent - const agentHostname = this.#getHostname() - calc.otelLogsUrl = `http://${agentHostname}:${DEFAULT_OTLP_PORT}` - calc.otelMetricsUrl = `http://${agentHostname}:${DEFAULT_OTLP_PORT}/v1/metrics` - calc.otelUrl = `http://${agentHostname}:${DEFAULT_OTLP_PORT}` - calc['telemetry.heartbeatInterval'] = maybeInt(Math.floor(this.#defaults['telemetry.heartbeatInterval'] * 1000)) - - setBoolean(calc, 'isGitUploadEnabled', - calc.isIntelligentTestRunnerEnabled && !isFalse(getEnv('DD_CIVISIBILITY_GIT_UPLOAD_ENABLED'))) - - // Enable resourceRenamingEnabled when appsec is enabled and only - // if DD_TRACE_RESOURCE_RENAMING_ENABLED is not explicitly 
set - if (this.#env.resourceRenamingEnabled === undefined) { - const appsecEnabled = this.#options['appsec.enabled'] ?? this.#env['appsec.enabled'] - if (appsecEnabled) { - setBoolean(calc, 'resourceRenamingEnabled', true) + // Apply all fallbacks to the calculated config. + for (const [configName, alias] of fallbackConfigurations) { + if (!trackedConfigOrigins.has(configName) && trackedConfigOrigins.has(alias)) { + setAndTrack(this, configName, this[alias]) } } - setBoolean(calc, 'spanComputePeerService', this.#getSpanComputePeerService()) - setBoolean(calc, 'stats.enabled', this.#isTraceStatsComputationEnabled()) - const defaultPropagationStyle = getDefaultPropagationStyle(this.#optionsArg) - if (defaultPropagationStyle.length > 2) { - // b3 was added, so update defaults to include it - // This will only be used if no other source (options, env, stable config) set the value - calc['tracePropagationStyle.inject'] = defaultPropagationStyle - calc['tracePropagationStyle.extract'] = defaultPropagationStyle + const DEFAULT_OTLP_PORT = '4318' + if (!this.otelLogsUrl) { + setAndTrack(this, 'otelLogsUrl', `http://${agentHostname}:${DEFAULT_OTLP_PORT}`) } - } - - /** - * Applies remote configuration options from APM_TRACING configs. 
- * - * @param {import('./remote_config').RemoteConfigOptions} options - Configurations received via Remote Config - */ - #applyRemoteConfig (options) { - const opts = this.#remote - - setBoolean(opts, 'dynamicInstrumentation.enabled', options.dynamic_instrumentation_enabled) - setBoolean(opts, 'codeOriginForSpans.enabled', options.code_origin_enabled) - setUnit(opts, 'sampleRate', options.tracing_sampling_rate) - setBoolean(opts, 'logInjection', options.log_injection_enabled) - setBoolean(opts, 'tracing', options.tracing_enabled) - this.#remoteUnprocessed['sampler.rules'] = options.tracing_sampling_rules - setSamplingRule(opts, 'sampler.rules', reformatTagsFromRC(options.tracing_sampling_rules)) - - opts.headerTags = options.tracing_header_tags?.map(tag => { - return tag.tag_name ? `${tag.header}:${tag.tag_name}` : tag.header - }) - - const tags = {} - tagger.add(tags, options.tracing_tags) - if (Object.keys(tags).length) { - tags['runtime-id'] = RUNTIME_ID + if (!this.otelMetricsUrl) { + setAndTrack(this, 'otelMetricsUrl', `http://${agentHostname}:${DEFAULT_OTLP_PORT}/v1/metrics`) } - setTags(opts, 'tags', tags) - } - #setAndTrackChange ({ name, value, origin, unprocessedValue, changes }) { - set(this, name, value) - - if (!changeTracker[name]) { - changeTracker[name] = {} + if (process.platform === 'win32') { + // OOM monitoring does not work properly on Windows, so it will be disabled. + deactivateIfEnabledAndWarnOnWindows(this, 'DD_PROFILING_EXPERIMENTAL_OOM_MONITORING_ENABLED') + // Profiler sampling contexts are not available on Windows, so features + // depending on those (code hotspots and endpoint collection) need to be disabled on Windows. 
+ deactivateIfEnabledAndWarnOnWindows(this, 'DD_PROFILING_CODEHOTSPOTS_ENABLED') + deactivateIfEnabledAndWarnOnWindows(this, 'DD_PROFILING_ENDPOINT_COLLECTION_ENABLED') + deactivateIfEnabledAndWarnOnWindows(this, 'DD_PROFILING_CPU_ENABLED') + deactivateIfEnabledAndWarnOnWindows(this, 'DD_PROFILING_TIMELINE_ENABLED') + deactivateIfEnabledAndWarnOnWindows(this, 'DD_PROFILING_ASYNC_CONTEXT_FRAME_ENABLED') } - const originExists = origin in changeTracker[name] - const oldValue = changeTracker[name][origin] + // Single tags update is tracked as a calculated value. + setAndTrack(this, 'tags', this.tags) - if (!originExists || oldValue !== value) { - changeTracker[name][origin] = value - changes.push({ - name, - value: unprocessedValue || value, - origin, - }) - } - } - - // TODO: Report origin changes and errors to telemetry. - // TODO: Deeply merge configurations. - // TODO: Move change tracking to telemetry. - // for telemetry reporting, `name`s in `containers` need to be keys from: - // https://github.com/DataDog/dd-go/blob/prod/trace/apps/tracer-telemetry-intake/telemetry-payload/static/config_norm_rules.json - #merge () { - const changes = [] - const sources = this.#getSourcesInOrder() - - for (const name of Object.keys(this.#defaults)) { - // Use reverse order for merge (lowest priority first) - for (let i = sources.length - 1; i >= 0; i--) { - const { container, origin, unprocessed } = sources[i] - const value = container[name] - if (value != null || container === this.#defaults) { - this.#setAndTrackChange({ - name, - value, - origin, - unprocessedValue: unprocessed?.[name], - changes, - }) - } - } - } - this.sampler.sampleRate = this.sampleRate - updateConfig(changes, this) + telemetry.updateConfig([...configWithOrigin.values()], this) } + // TODO: Move outside of config. This is unrelated to the config system. 
#loadGitMetadata () { - // try to read Git metadata from the environment variables - this.repositoryUrl = removeUserSensitiveInfo( - getEnv('DD_GIT_REPOSITORY_URL') ?? this.tags[GIT_REPOSITORY_URL] - ) - this.commitSHA = getEnv('DD_GIT_COMMIT_SHA') ?? this.tags[GIT_COMMIT_SHA] + // Try to read Git metadata from the environment variables + this.repositoryUrl = removeUserSensitiveInfo(this.DD_GIT_REPOSITORY_URL ?? this.tags[GIT_REPOSITORY_URL]) + this.commitSHA = this.DD_GIT_COMMIT_SHA ?? this.tags[GIT_COMMIT_SHA] - // otherwise, try to read Git metadata from the git.properties file + // Otherwise, try to read Git metadata from the git.properties file if (!this.repositoryUrl || !this.commitSHA) { - const DD_GIT_PROPERTIES_FILE = getEnv('DD_GIT_PROPERTIES_FILE') + const DD_GIT_PROPERTIES_FILE = this.DD_GIT_PROPERTIES_FILE const gitPropertiesFile = DD_GIT_PROPERTIES_FILE ?? `${process.cwd()}/git.properties` - let gitPropertiesString try { - gitPropertiesString = fs.readFileSync(gitPropertiesFile, 'utf8') - } catch (e) { + const gitPropertiesString = fs.readFileSync(gitPropertiesFile, 'utf8') + const { commitSHA, repositoryUrl } = getGitMetadataFromGitProperties(gitPropertiesString) + this.commitSHA ??= commitSHA + this.repositoryUrl ??= repositoryUrl + } catch (error) { // Only log error if the user has set a git.properties path if (DD_GIT_PROPERTIES_FILE) { - log.error('Error reading DD_GIT_PROPERTIES_FILE: %s', gitPropertiesFile, e) + log.error('Error reading DD_GIT_PROPERTIES_FILE: %s', gitPropertiesFile, error) } } - if (gitPropertiesString) { - const { commitSHA, repositoryUrl } = getGitMetadataFromGitProperties(gitPropertiesString) - this.commitSHA = this.commitSHA || commitSHA - this.repositoryUrl = this.repositoryUrl || repositoryUrl - } } - // otherwise, try to read Git metadata from the .git/ folder - if (!this.repositoryUrl || !this.commitSHA) { - const DD_GIT_FOLDER_PATH = getEnv('DD_GIT_FOLDER_PATH') - const gitFolderPath = DD_GIT_FOLDER_PATH ?? 
path.join(process.cwd(), '.git') - if (!this.repositoryUrl) { - // try to read git config (repository URL) - const gitConfigPath = path.join(gitFolderPath, 'config') - try { - const gitConfigContent = fs.readFileSync(gitConfigPath, 'utf8') - if (gitConfigContent) { - this.repositoryUrl = getRemoteOriginURL(gitConfigContent) - } - } catch (e) { - // Only log error if the user has set a .git/ path - if (DD_GIT_FOLDER_PATH) { - log.error('Error reading git config: %s', gitConfigPath, e) - } + + // Otherwise, try to read Git metadata from the .git/ folder + const DD_GIT_FOLDER_PATH = this.DD_GIT_FOLDER_PATH + const gitFolderPath = DD_GIT_FOLDER_PATH ?? path.join(process.cwd(), '.git') + + if (!this.repositoryUrl) { + // Try to read git config (repository URL) + const gitConfigPath = path.join(gitFolderPath, 'config') + try { + const gitConfigContent = fs.readFileSync(gitConfigPath, 'utf8') + if (gitConfigContent) { + this.repositoryUrl = getRemoteOriginURL(gitConfigContent) } - } - if (!this.commitSHA) { - // try to read git HEAD (commit SHA) - const gitHeadSha = resolveGitHeadSHA(gitFolderPath) - if (gitHeadSha) { - this.commitSHA = gitHeadSha + } catch (error) { + // Only log error if the user has set a .git/ path + if (DD_GIT_FOLDER_PATH) { + log.error('Error reading git config: %s', gitConfigPath, error) } } } + // Try to read git HEAD (commit SHA) + this.commitSHA ??= resolveGitHeadSHA(gitFolderPath) } } -function getCounter (event, ddVar, otelVar) { - const counters = TELEMETRY_COUNTERS.get(event) - const tags = [] - const ddVarPrefix = 'config_datadog:' - const otelVarPrefix = 'config_opentelemetry:' - if (ddVar) { - ddVar = ddVarPrefix + ddVar.toLowerCase() - tags.push(ddVar) - } - if (otelVar) { - otelVar = otelVarPrefix + otelVar.toLowerCase() - tags.push(otelVar) - } - - if (!(otelVar in counters)) counters[otelVar] = {} - - const counter = tracerMetrics.count(event, tags) - counters[otelVar][ddVar] = counter - return counter -} - -function 
getFromOtelSamplerMap (otelTracesSampler, otelTracesSamplerArg) { - const OTEL_TRACES_SAMPLER_MAPPING = { - always_on: '1.0', - always_off: '0.0', - traceidratio: otelTracesSamplerArg, - parentbased_always_on: '1.0', - parentbased_always_off: '0.0', - parentbased_traceidratio: otelTracesSamplerArg, - } - return OTEL_TRACES_SAMPLER_MAPPING[otelTracesSampler] -} - /** - * Validate the type of an environment variable - * @param {string} envVar - The name of the environment variable - * @param {string} [value] - The value of the environment variable - * @returns {boolean} - True if the value is valid, false otherwise + * @param {Config} config + * @param {ConfigKey} envVar */ -function isInvalidOtelEnvironmentVariable (envVar, value) { - // Skip validation if the value is undefined (it was not set as environment variable) - if (value === undefined) return false - - switch (envVar) { - case 'OTEL_LOG_LEVEL': - return !VALID_LOG_LEVELS.has(value) - case 'OTEL_PROPAGATORS': - case 'OTEL_RESOURCE_ATTRIBUTES': - case 'OTEL_SERVICE_NAME': - return typeof value !== 'string' - case 'OTEL_TRACES_SAMPLER': - return getFromOtelSamplerMap(value, getEnv('OTEL_TRACES_SAMPLER_ARG')) === undefined - case 'OTEL_TRACES_SAMPLER_ARG': - return Number.isNaN(Number.parseFloat(value)) - case 'OTEL_SDK_DISABLED': - return value.toLowerCase() !== 'true' && value.toLowerCase() !== 'false' - case 'OTEL_TRACES_EXPORTER': - case 'OTEL_METRICS_EXPORTER': - case 'OTEL_LOGS_EXPORTER': - return value.toLowerCase() !== 'none' - default: - return true - } -} - -function checkIfBothOtelAndDdEnvVarSet () { - for (const [otelEnvVar, ddEnvVar] of OTEL_DD_ENV_MAPPING) { - const otelValue = getEnv(otelEnvVar) - - if (ddEnvVar && getEnv(ddEnvVar) && otelValue) { - log.warn('both %s and %s environment variables are set', ddEnvVar, otelEnvVar) - getCounter('otel.env.hiding', ddEnvVar, otelEnvVar).inc() +function deactivateIfEnabledAndWarnOnWindows (config, envVar) { + if (config[envVar]) { + const source = 
trackedConfigOrigins.get(envVar) + setAndTrack(config, envVar, false) + // TODO: Should we log even for default values? + if (source) { + log.warn('%s is not supported on Windows. Deactivating. (source: %s)', envVar, source) } - - if (isInvalidOtelEnvironmentVariable(otelEnvVar, otelValue)) { - log.warn('unexpected value %s for %s environment variable', otelValue, otelEnvVar) - getCounter('otel.env.invalid', ddEnvVar, otelEnvVar).inc() - } - } -} - -function maybeFile (filepath) { - if (!filepath) return - try { - return fs.readFileSync(filepath, 'utf8') - } catch (e) { - log.error('Error reading file %s', filepath, e) - } -} - -function maybeJsonFile (filepath) { - const file = maybeFile(filepath) - if (!file) return - try { - return JSON.parse(file) - } catch (e) { - log.error('Error parsing JSON file %s', filepath, e) - } -} - -function safeJsonParse (input) { - try { - return JSON.parse(input) - } catch {} -} - -function validateNamingVersion (versionString) { - if (!versionString) { - return DEFAULT_NAMING_VERSION - } - if (!NAMING_VERSIONS.has(versionString)) { - log.warn('Unexpected input for config.spanAttributeSchema, picked default', DEFAULT_NAMING_VERSION) - return DEFAULT_NAMING_VERSION } - return versionString } -/** - * Given a string of comma-separated paths, return the array of paths. - * If a blank path is provided a null is returned to signal that the feature is disabled. - * An empty array means the feature is enabled but that no rules need to be applied. - * - * @param {string | string[]} input - */ -function splitJSONPathRules (input) { - if (!input || input === '$') return - if (Array.isArray(input)) return input - if (input === 'all') return [] - return input.split(',') -} - -// Shallow clone with property name remapping -function remapify (input, mappings) { - if (!input) return - const output = {} - for (const [key, value] of Object.entries(input)) { - output[key in mappings ? 
mappings[key] : key] = value - } - return output -} - -/** - * Normalizes propagation style values to a lowercase array. - * Handles both string (comma-separated) and array inputs. - */ -function normalizePropagationStyle (value) { - if (Array.isArray(value)) { - return value.map(v => v.toLowerCase()) - } - if (typeof value === 'string') { - return value.split(',') - .filter(v => v !== '') - .map(v => v.trim().toLowerCase()) - } - if (value !== undefined) { - log.warn('Unexpected input for config.tracePropagationStyle') +function increaseCounter (event, ddVar, otelVar) { + const tags = [] + if (ddVar) { + tags.push(`config_datadog:${ddVar.toLowerCase()}`) } + tags.push(`config_opentelemetry:${otelVar.toLowerCase()}`) + tracerMetrics.count(event, tags).inc() } -/** - * Warns if both DD_TRACE_PROPAGATION_STYLE and specific inject/extract vars are set. - */ -function warnIfPropagationStyleConflict (general, inject, extract) { - if (general && (inject || extract)) { - log.warn( - // eslint-disable-next-line @stylistic/max-len - 'Use either the DD_TRACE_PROPAGATION_STYLE environment variable or separate DD_TRACE_PROPAGATION_STYLE_INJECT and DD_TRACE_PROPAGATION_STYLE_EXTRACT environment variables' - ) +function getFromOtelSamplerMap (otelTracesSampler, otelTracesSamplerArg) { + const OTEL_TRACES_SAMPLER_MAPPING = { + always_on: 1, + always_off: 0, + parentbased_always_on: 1, + parentbased_always_off: 0, } -} - -function reformatSpanSamplingRules (rules) { - if (!rules) return rules - return rules.map(rule => { - return remapify(rule, { - sample_rate: 'sampleRate', - max_per_second: 'maxPerSecond', - }) - }) -} -function getDefaultPropagationStyle (options) { - // TODO: Remove the experimental env vars as a major? - const DD_TRACE_B3_ENABLED = options.experimental?.b3 ?? 
- getEnv('DD_TRACE_EXPERIMENTAL_B3_ENABLED') - const defaultPropagationStyle = ['datadog', 'tracecontext'] - if (isTrue(DD_TRACE_B3_ENABLED)) { - defaultPropagationStyle.push('b3', 'b3 single header') + const result = OTEL_TRACES_SAMPLER_MAPPING[otelTracesSampler] ?? otelTracesSamplerArg + if (result === undefined) { + increaseCounter('otel.env.invalid', 'DD_TRACE_SAMPLE_RATE', 'OTEL_TRACES_SAMPLER') } - return defaultPropagationStyle + return result } -function isCiVisibilityItrEnabled () { - return getEnv('DD_CIVISIBILITY_ITR_ENABLED') ?? true -} - -function reformatTagsFromRC (samplingRules) { - for (const rule of (samplingRules || [])) { - if (rule.tags) { - const reformattedTags = {} - for (const tag of rule.tags) { - reformattedTags[tag.key] = tag.value_glob +function warnWrongOtelSettings () { + // This mostly works for non-aliased environment variables only. + // TODO: Adjust this to work across all sources. + for (const [otelEnvVar, ddEnvVar, key] of [ + // eslint-disable-next-line eslint-rules/eslint-env-aliases + ['OTEL_LOG_LEVEL', 'DD_TRACE_LOG_LEVEL', 'logLevel'], + // eslint-disable-next-line eslint-rules/eslint-env-aliases + ['OTEL_PROPAGATORS', 'DD_TRACE_PROPAGATION_STYLE'], + // eslint-disable-next-line eslint-rules/eslint-env-aliases + ['OTEL_SERVICE_NAME', 'DD_SERVICE', 'service'], + ['OTEL_TRACES_SAMPLER', 'DD_TRACE_SAMPLE_RATE'], + ['OTEL_TRACES_SAMPLER_ARG', 'DD_TRACE_SAMPLE_RATE'], + ['OTEL_TRACES_EXPORTER', 'DD_TRACE_ENABLED'], + ['OTEL_METRICS_EXPORTER', 'DD_RUNTIME_METRICS_ENABLED'], + ['OTEL_RESOURCE_ATTRIBUTES', 'DD_TAGS'], + ['OTEL_SDK_DISABLED', 'DD_TRACE_OTEL_ENABLED'], + ['OTEL_LOGS_EXPORTER'], + ]) { + // eslint-disable-next-line eslint-rules/eslint-process-env + const envs = process.env + const otelSource = trackedConfigOrigins.get(/** @type {ConfigPath} */ (key ?? 
otelEnvVar)) + const otelEnvValue = envs[otelEnvVar] + if (otelEnvValue) { + if (envs[ddEnvVar]) { + log.warn('Conflicting %s and %s environment variables are set for %s', ddEnvVar, otelEnvVar, otelSource) + increaseCounter('otel.env.hiding', ddEnvVar, otelEnvVar) } - rule.tags = reformattedTags - } - } - return samplingRules -} - -function setBoolean (obj, name, value) { - if (value === undefined || value === null) { - obj[name] = value - } else if (isTrue(value)) { - obj[name] = true - } else if (isFalse(value)) { - obj[name] = false - } -} - -function setUnit (obj, name, value) { - if (value === null || value === undefined) { - obj[name] = value - return - } - - value = Number.parseFloat(value) - - if (!Number.isNaN(value)) { - // TODO: Ignore out of range values instead of normalizing them. - obj[name] = Math.min(Math.max(value, 0), 1) - } -} - -function setArray (obj, name, value) { - if (value == null) { - obj[name] = null - return - } - - if (typeof value === 'string') { - value = value.split(',').map(item => { - // Trim each item and remove whitespace around the colon - const [key, val] = item.split(':').map(part => part.trim()) - return val === undefined ? key : `${key}:${val}` - }) - } - if (Array.isArray(value)) { - obj[name] = value - } -} - -function setIntegerRangeSet (obj, name, value) { - if (value == null) { - obj[name] = null - return - } - value = value.split(',') - const result = [] - - for (const val of value) { - if (val.includes('-')) { - const [start, end] = val.split('-').map(Number) - for (let i = start; i <= end; i++) { - result.push(i) + // eslint-disable-next-line eslint-rules/eslint-env-aliases + const invalidOtelValue = otelEnvVar === 'OTEL_PROPAGATORS' + ? 
trackedConfigOrigins.get(/** @type {ConfigPath} */ ('tracePropagationStyle.inject')) !== otelSource && + !envs[ddEnvVar] + : !otelSource + if (invalidOtelValue) { + increaseCounter('otel.env.invalid', ddEnvVar, otelEnvVar) } - } else { - result.push(Number(val)) } } - obj[name] = result -} - -function setSamplingRule (obj, name, value) { - if (value == null) { - obj[name] = null - return - } - - if (typeof value === 'string') { - value = value.split(',') - } - - if (Array.isArray(value)) { - value = value.map(rule => { - return remapify(rule, { - sample_rate: 'sampleRate', - }) - }) - obj[name] = value - } -} - -function setString (obj, name, value) { - obj[name] = value ? String(value) : undefined // unset for empty strings -} - -function setTags (obj, name, value) { - if (!value || Object.keys(value).length === 0) { - obj[name] = null - return - } - - obj[name] = value -} - -function handleOtel (tagString) { - return tagString - ?.replace(/(^|,)deployment\.environment=/, '$1env:') - .replace(/(^|,)service\.name=/, '$1service:') - .replace(/(^|,)service\.version=/, '$1version:') - .replaceAll('=', ':') -} - -function parseSpaceSeparatedTags (tagString) { - if (tagString && !tagString.includes(',')) { - tagString = tagString.replaceAll(/\s+/g, ',') - } - return tagString -} - -function maybeInt (number) { - const parsed = Number.parseInt(number) - return Number.isNaN(parsed) ? undefined : parsed -} - -function maybeFloat (number) { - const parsed = Number.parseFloat(number) - return Number.isNaN(parsed) ? undefined : parsed -} - -function nonNegInt (value, envVarName, allowZero = true) { - if (value === undefined) return - const parsed = Number.parseInt(value) - if (Number.isNaN(parsed) || parsed < 0 || (parsed === 0 && !allowZero)) { - log.warn('Invalid value %d for %s. 
Using default value.', parsed, envVarName) - return - } - return parsed -} - -function getAgentUrl (url, options) { - if (url) return new URL(url) - - if (os.type() === 'Windows_NT') return - - if ( - !options.hostname && - !options.port && - !getEnv('DD_AGENT_HOST') && - !getEnv('DD_TRACE_AGENT_PORT') && - !isTrue(getEnv('DD_CIVISIBILITY_AGENTLESS_ENABLED')) && - fs.existsSync('/var/run/datadog/apm.socket') - ) { - return new URL('unix:///var/run/datadog/apm.socket') - } } +/** + * @param {TracerOptions} [options] + */ function getConfig (options) { if (!configInstance) { configInstance = new Config(options) diff --git a/packages/dd-trace/src/config/parsers.js b/packages/dd-trace/src/config/parsers.js new file mode 100644 index 00000000000..7ddd29b24a3 --- /dev/null +++ b/packages/dd-trace/src/config/parsers.js @@ -0,0 +1,256 @@ +'use strict' + +const fs = require('fs') + +const tagger = require('../tagger') + +let warnInvalidValue +function setWarnInvalidValue (fn) { + warnInvalidValue = fn +} + +const VALID_PROPAGATION_STYLES = new Set([ + 'datadog', 'tracecontext', 'b3', 'b3 single header', 'b3multi', 'baggage', 'none', +]) + +function toCase (value, methodName) { + if (Array.isArray(value)) { + return value.map(item => { + return transformers[methodName](item) + }) + } + return value[methodName]() +} + +const transformers = { + setGRPCRange (value) { + if (value == null) { + return + } + value = value.split(',') + const result = [] + + for (const val of value) { + const dashIndex = val.indexOf('-') + if (dashIndex === -1) { + result.push(Number(val)) + } else { + const start = Number(val.slice(0, dashIndex)) + const end = Number(val.slice(dashIndex + 1)) + for (let i = start; i <= end; i++) { + result.push(i) + } + } + } + return result + }, + toLowerCase (value) { + return toCase(value, 'toLowerCase') + }, + toUpperCase (value) { + return toCase(value, 'toUpperCase') + }, + toCamelCase (value) { + if (Array.isArray(value)) { + return value.map(item => { + 
return transformers.toCamelCase(item) + }) + } + if (typeof value === 'object' && value !== null) { + const result = {} + for (const [key, innerValue] of Object.entries(value)) { + const camelCaseKey = key.replaceAll(/_(\w)/g, (_, letter) => letter.toUpperCase()) + result[camelCaseKey] = transformers.toCamelCase(innerValue) + } + return result + } + return value + }, + parseOtelTags (value, optionName) { + return parsers.MAP(value + ?.replace(/(^|,)deployment\.environment=/, '$1env:') + .replace(/(^|,)service\.name=/, '$1service:') + .replace(/(^|,)service\.version=/, '$1version:') + .replaceAll('=', ':'), optionName) + }, + normalizeProfilingEnabled (configValue) { + if (configValue == null) { + return + } + if (configValue === 'true' || configValue === '1') { + return 'true' + } + if (configValue === 'false' || configValue === '0') { + return 'false' + } + const lowercased = String(configValue).toLowerCase() + if (lowercased !== configValue) { + return transformers.normalizeProfilingEnabled(lowercased) + } + return configValue + }, + sampleRate (value, optionName, source) { + const number = Number(value) + if (Number.isNaN(number) || value === null) { + warnInvalidValue(value, optionName, source, 'Sample rate invalid') + return + } + const clamped = Math.min(Math.max(number, 0), 1) + if (clamped !== number) { + warnInvalidValue(value, optionName, source, 'Sample rate out of range between 0 and 1') + return clamped + } + return number + }, + readFilePath (raw, optionName, source) { + const { stackTraceLimit } = Error + Error.stackTraceLimit = 0 + try { + return fs.readFileSync(raw, 'utf8') + } catch (error) { + warnInvalidValue(raw, optionName, source, 'Error reading path', error) + } finally { + Error.stackTraceLimit = stackTraceLimit + } + }, + /** + * Given a string of comma-separated paths, return the array of paths. + * If a blank path is provided a null is returned to signal that the feature is disabled. 
+ * An empty array means the feature is enabled but that no rules need to be applied. + * + * @param {string | string[]} input + */ + splitJSONPathRules (input) { + if (!input || input === '$') return + if (Array.isArray(input)) return input + if (input === 'all') return [] + return input.split(',') + }, + stripColonWhitespace (value) { + if (Array.isArray(value)) { + return value.map(item => { + return transformers.stripColonWhitespace(item) + }) + } + return value.replaceAll(/\s*:\s*/g, ':') + }, + validatePropagationStyles (value, optionName) { + value = transformers.toLowerCase(value) + for (const propagator of value) { + if (!VALID_PROPAGATION_STYLES.has(propagator)) { + warnInvalidValue(propagator, optionName, optionName, 'Invalid propagator') + return + } + } + return value + }, +} + +const telemetryTransformers = { + JSON (object) { + return (typeof object !== 'object' || object === null) ? object : JSON.stringify(object) + }, + MAP (object) { + if (typeof object !== 'object' || object === null) { + return object + } + let result = '' + for (const [key, value] of Object.entries(object)) { + result += `${key}:${value},` + } + return result.slice(0, -1) + }, + ARRAY (array) { + return Array.isArray(array) ? array.join(',') : array + }, +} + +const parsers = { + BOOLEAN (raw) { + if (raw === 'true' || raw === '1') { + return true + } + if (raw === 'false' || raw === '0') { + return false + } + const lowercased = raw.toLowerCase() + if (lowercased !== raw) { + return parsers.BOOLEAN(lowercased) + } + }, + INT (raw) { + const parsed = Math.trunc(raw) + if (Number.isNaN(parsed)) { + return + } + return parsed + }, + DECIMAL (raw) { + const parsed = Number(raw) + if (Number.isNaN(parsed)) { + return + } + return parsed + }, + ARRAY (raw) { + // TODO: Make the parsing a helper that is reused everywhere. 
+ const result = [] + if (!raw) { + return result + } + let valueStart = 0 + for (let i = 0; i < raw.length; i++) { + const char = raw[i] + if (char === ',') { + const value = raw.slice(valueStart, i).trim() + // Auto filter empty entries. + if (value.length > 0) { + result.push(value) + } + valueStart = i + 1 + } + } + if (valueStart < raw.length) { + const value = raw.slice(valueStart).trim() + // Auto filter empty entries. + if (value.length > 0) { + result.push(value) + } + } + return result + }, + MAP (raw, optionName) { + /** @type {Record} */ + const entries = {} + if (!raw) { + return entries + } + // DD_TAGS is a special case. It may be a map of key-value pairs separated by spaces. + if (optionName === 'DD_TAGS' && !raw.includes(',')) { + raw = raw.replaceAll(/\s+/g, ',') + } + tagger.add(entries, raw) + return entries + }, + JSON (raw) { + const { stackTraceLimit } = Error + Error.stackTraceLimit = 0 + try { + return JSON.parse(raw) + } catch { + // ignore + } finally { + Error.stackTraceLimit = stackTraceLimit + } + }, + STRING (raw) { + return raw + }, +} + +module.exports = { + parsers, + transformers, + telemetryTransformers, + setWarnInvalidValue, +} diff --git a/packages/dd-trace/src/config/remote_config.js b/packages/dd-trace/src/config/remote_config.js index 54f4b3067b4..f981dd37fef 100644 --- a/packages/dd-trace/src/config/remote_config.js +++ b/packages/dd-trace/src/config/remote_config.js @@ -2,6 +2,7 @@ const RemoteConfigCapabilities = require('../remote_config/capabilities') const log = require('../log') +const tagger = require('../tagger') module.exports = { enable, @@ -194,10 +195,66 @@ function enable (rc, config, onConfigUpdated) { transaction.ack(item.path) } - // Get merged config and apply it - const mergedLibConfig = rcClientLibConfigManager.getMergedLibConfig() + /** @type {import('../config').TracerOptions|null|RemoteConfigOptions} */ + let mergedLibConfig = rcClientLibConfigManager.getMergedLibConfig() + + if (mergedLibConfig) { + 
mergedLibConfig = transformRemoteConfigToLocalOption(mergedLibConfig) + } + config.setRemoteConfig(mergedLibConfig) onConfigUpdated() }) } + +/** + * @param {RemoteConfigOptions} libConfig + * @returns {import('../config').TracerOptions} + */ +function transformRemoteConfigToLocalOption (libConfig) { + const normalizedConfig = {} + for (const [name, value] of Object.entries(libConfig)) { + if (value !== null) { + normalizedConfig[optionLookupTable[name] ?? name] = transformers[name]?.(value) ?? value + } + } + return normalizedConfig +} + +// This is intermediate solution until remote config is reworked to handle all known entries with proper names +const optionLookupTable = { + dynamic_instrumentation_enabled: 'dynamicInstrumentation.enabled', + code_origin_enabled: 'codeOriginForSpans.enabled', + tracing_sampling_rate: 'sampleRate', + log_injection_enabled: 'logInjection', + tracing_enabled: 'tracing', + tracing_sampling_rules: 'samplingRules', + tracing_header_tags: 'headerTags', + tracing_tags: 'tags', +} + +const transformers = { + tracing_sampling_rules (samplingRules) { + for (const rule of (samplingRules || [])) { + if (rule.tags) { + const reformattedTags = {} + for (const tag of rule.tags) { + reformattedTags[tag.key] = tag.value_glob + } + rule.tags = reformattedTags + } + } + return samplingRules + }, + tracing_header_tags (headerTags) { + return headerTags?.map(tag => { + return tag.tag_name ? 
`${tag.header}:${tag.tag_name}` : tag.header + }) + }, + tracing_tags (tags) { + const normalizedTags = {} + tagger.add(normalizedTags, tags) + return normalizedTags + }, +} diff --git a/packages/dd-trace/src/config/supported-configurations.json b/packages/dd-trace/src/config/supported-configurations.json index d99dc7bd585..63131eb0a37 100644 --- a/packages/dd-trace/src/config/supported-configurations.json +++ b/packages/dd-trace/src/config/supported-configurations.json @@ -13,9 +13,7 @@ "implementation": "A", "type": "boolean", "default": "false", - "configurationNames": [ - "ciVisAgentlessLogSubmissionEnabled" - ] + "internalPropertyName": "ciVisAgentlessLogSubmissionEnabled" } ], "DD_AGENTLESS_LOG_SUBMISSION_URL": [ @@ -114,9 +112,7 @@ "aliases": [ "DATADOG_API_KEY" ], - "configurationNames": [ - "apiKey" - ] + "internalPropertyName": "apiKey" } ], "DD_API_SECURITY_ENABLED": [ @@ -124,7 +120,8 @@ "implementation": "A", "type": "boolean", "configurationNames": [ - "appsec.apiSecurity.enabled" + "appsec.apiSecurity.enabled", + "experimental.appsec.apiSecurity.enabled" ], "default": "true", "aliases": [ @@ -137,7 +134,8 @@ "implementation": "A", "type": "boolean", "configurationNames": [ - "appsec.apiSecurity.endpointCollectionEnabled" + "appsec.apiSecurity.endpointCollectionEnabled", + "experimental.appsec.apiSecurity.endpointCollectionEnabled" ], "default": "true" } @@ -147,7 +145,8 @@ "implementation": "A", "type": "int", "configurationNames": [ - "appsec.apiSecurity.endpointCollectionMessageLimit" + "appsec.apiSecurity.endpointCollectionMessageLimit", + "experimental.appsec.apiSecurity.endpointCollectionMessageLimit" ], "default": "300" } @@ -156,9 +155,7 @@ { "implementation": "A", "type": "decimal", - "configurationNames": [ - "appsec.apiSecurity.downstreamBodyAnalysisSampleRate" - ], + "internalPropertyName": "appsec.apiSecurity.downstreamBodyAnalysisSampleRate", "default": "0.5" } ], @@ -166,9 +163,7 @@ { "implementation": "A", "type": "int", - 
"configurationNames": [ - "appsec.apiSecurity.maxDownstreamRequestBodyAnalysis" - ], + "internalPropertyName": "appsec.apiSecurity.maxDownstreamRequestBodyAnalysis", "default": "1" } ], @@ -177,9 +172,7 @@ "implementation": "A", "type": "decimal", "default": "30", - "configurationNames": [ - "appsec.apiSecurity.sampleDelay" - ] + "internalPropertyName": "appsec.apiSecurity.sampleDelay" } ], "DD_APM_FLUSH_DEADLINE_MILLISECONDS": [ @@ -204,7 +197,8 @@ "implementation": "E", "type": "string", "configurationNames": [ - "appsec.eventTracking.mode" + "appsec.eventTracking.mode", + "experimental.appsec.eventTracking.mode" ], "default": "identification", "aliases": [ @@ -217,7 +211,8 @@ "implementation": "A", "type": "boolean", "configurationNames": [ - "appsec.extendedHeadersCollection.enabled" + "appsec.extendedHeadersCollection.enabled", + "experimental.appsec.extendedHeadersCollection.enabled" ], "default": "false", "deprecated": true @@ -228,7 +223,10 @@ "implementation": "C", "type": "boolean", "configurationNames": [ - "appsec.enabled" + "appsec.enabled", + "appsec", + "experimental.appsec.enabled", + "experimental.appsec" ], "default": null } @@ -238,9 +236,11 @@ "implementation": "A", "type": "string", "configurationNames": [ - "appsec.blockedTemplateGraphql" + "appsec.blockedTemplateGraphql", + "experimental.appsec.blockedTemplateGraphql" ], - "default": null + "default": null, + "transform": "readFilePath" } ], "DD_APPSEC_HEADER_COLLECTION_REDACTION_ENABLED": [ @@ -248,7 +248,8 @@ "implementation": "A", "type": "boolean", "configurationNames": [ - "appsec.extendedHeadersCollection.redaction" + "appsec.extendedHeadersCollection.redaction", + "experimental.appsec.extendedHeadersCollection.redaction" ], "default": "true" } @@ -258,9 +259,11 @@ "implementation": "B", "type": "string", "configurationNames": [ - "appsec.blockedTemplateHtml" + "appsec.blockedTemplateHtml", + "experimental.appsec.blockedTemplateHtml" ], - "default": null + "default": null, + 
"transform": "readFilePath" } ], "DD_APPSEC_HTTP_BLOCKED_TEMPLATE_JSON": [ @@ -268,9 +271,11 @@ "implementation": "B", "type": "string", "configurationNames": [ - "appsec.blockedTemplateJson" + "appsec.blockedTemplateJson", + "experimental.appsec.blockedTemplateJson" ], - "default": null + "default": null, + "transform": "readFilePath" } ], "DD_APPSEC_MAX_COLLECTED_HEADERS": [ @@ -278,7 +283,8 @@ "implementation": "A", "type": "int", "configurationNames": [ - "appsec.extendedHeadersCollection.maxHeaders" + "appsec.extendedHeadersCollection.maxHeaders", + "experimental.appsec.extendedHeadersCollection.maxHeaders" ], "default": "50" } @@ -288,7 +294,11 @@ "implementation": "A", "type": "int", "configurationNames": [ - "appsec.stackTrace.maxStackTraces" + "appsec.stackTrace.maxStackTraces", + "experimental.appsec.stackTrace.maxStackTraces" + ], + "aliases": [ + "DD_APPSEC_MAX_STACKTRACES" ], "default": "2" } @@ -298,7 +308,11 @@ "implementation": "A", "type": "int", "configurationNames": [ - "appsec.stackTrace.maxDepth" + "appsec.stackTrace.maxDepth", + "experimental.appsec.stackTrace.maxDepth" + ], + "aliases": [ + "DD_APPSEC_MAX_STACKTRACE_DEPTH" ], "default": "32" } @@ -308,7 +322,8 @@ "implementation": "B", "type": "string", "configurationNames": [ - "appsec.obfuscatorKeyRegex" + "appsec.obfuscatorKeyRegex", + "experimental.appsec.obfuscatorKeyRegex" ], "default": "(?i)pass|pw(?:or)?d|secret|(?:api|private|public|access)[_-]?key|token|consumer[_-]?(?:id|key|secret)|sign(?:ed|ature)|bearer|authorization|jsessionid|phpsessid|asp\\.net[_-]sessionid|sid|jwt" } @@ -318,7 +333,8 @@ "implementation": "G", "type": "string", "configurationNames": [ - "appsec.obfuscatorValueRegex" + "appsec.obfuscatorValueRegex", + "experimental.appsec.obfuscatorValueRegex" ], "default": 
"(?i)(?:p(?:ass)?w(?:or)?d|pass(?:[_-]?phrase)?|secret(?:[_-]?key)?|(?:(?:api|private|public|access)[_-]?)key(?:[_-]?id)?|(?:(?:auth|access|id|refresh)[_-]?)?token|consumer[_-]?(?:id|key|secret)|sign(?:ed|ature)?|auth(?:entication|orization)?|jsessionid|phpsessid|asp\\.net(?:[_-]|-)sessionid|sid|jwt)(?:\\s*=([^;&]+)|\"\\s*:\\s*(\"[^\"]+\"|\\d+))|bearer\\s+([a-z0-9\\._\\-]+)|token\\s*:\\s*([a-z0-9]{13})|gh[opsu]_([0-9a-zA-Z]{36})|ey[I-L][\\w=-]+\\.(ey[I-L][\\w=-]+(?:\\.[\\w.+\\/=-]+)?)|[\\-]{5}BEGIN[a-z\\s]+PRIVATE\\sKEY[\\-]{5}([^\\-]+)[\\-]{5}END[a-z\\s]+PRIVATE\\sKEY|ssh-rsa\\s*([a-z0-9\\/\\.+]{100,})" } @@ -328,7 +344,8 @@ "implementation": "A", "type": "boolean", "configurationNames": [ - "appsec.rasp.bodyCollection" + "appsec.rasp.bodyCollection", + "experimental.appsec.rasp.bodyCollection" ], "default": "false", "deprecated": true @@ -339,7 +356,8 @@ "implementation": "A", "type": "boolean", "configurationNames": [ - "appsec.rasp.enabled" + "appsec.rasp.enabled", + "experimental.appsec.rasp.enabled" ], "default": "true" } @@ -349,7 +367,8 @@ "implementation": "B", "type": "string", "configurationNames": [ - "appsec.rules" + "appsec.rules", + "experimental.appsec.rules" ], "default": null } @@ -359,9 +378,7 @@ "implementation": "B", "type": "boolean", "default": null, - "configurationNames": [ - "appsec.sca.enabled" - ] + "internalPropertyName": "appsec.sca.enabled" } ], "DD_APPSEC_STACK_TRACE_ENABLED": [ @@ -369,7 +386,8 @@ "implementation": "A", "type": "boolean", "configurationNames": [ - "appsec.stackTrace.enabled" + "appsec.stackTrace.enabled", + "experimental.appsec.stackTrace.enabled" ], "default": "true" } @@ -379,7 +397,8 @@ "implementation": "A", "type": "int", "configurationNames": [ - "appsec.rateLimit" + "appsec.rateLimit", + "experimental.appsec.rateLimit" ], "default": "100" } @@ -389,7 +408,8 @@ "implementation": "E", "type": "int", "configurationNames": [ - "appsec.wafTimeout" + "appsec.wafTimeout", + "experimental.appsec.wafTimeout" ], 
"default": "5000" } @@ -399,9 +419,7 @@ "implementation": "A", "type": "string", "default": null, - "configurationNames": [ - "appKey" - ] + "internalPropertyName": "appKey" } ], "DD_AZURE_RESOURCE_GROUP": [ @@ -451,9 +469,7 @@ "implementation": "B", "type": "boolean", "default": "true", - "configurationNames": [ - "isEarlyFlakeDetectionEnabled" - ] + "internalPropertyName": "isEarlyFlakeDetectionEnabled" } ], "DD_CIVISIBILITY_ENABLED": [ @@ -468,9 +484,7 @@ "implementation": "A", "type": "int", "default": "5", - "configurationNames": [ - "flakyTestRetriesCount" - ] + "internalPropertyName": "flakyTestRetriesCount" } ], "DD_CIVISIBILITY_FLAKY_RETRY_ENABLED": [ @@ -478,9 +492,7 @@ "implementation": "A", "type": "boolean", "default": "true", - "configurationNames": [ - "isFlakyTestRetriesEnabled" - ] + "internalPropertyName": "isFlakyTestRetriesEnabled" } ], "DD_CIVISIBILITY_GIT_UNSHALLOW_ENABLED": [ @@ -495,9 +507,7 @@ "implementation": "A", "type": "boolean", "default": "true", - "configurationNames": [ - "isGitUploadEnabled" - ] + "internalPropertyName": "isGitUploadEnabled" } ], "DD_CIVISIBILITY_IMPACTED_TESTS_DETECTION_ENABLED": [ @@ -505,9 +515,7 @@ "implementation": "A", "type": "boolean", "default": "true", - "configurationNames": [ - "isImpactedTestsEnabled" - ] + "internalPropertyName": "isImpactedTestsEnabled" } ], "DD_CIVISIBILITY_ITR_ENABLED": [ @@ -515,9 +523,7 @@ "implementation": "A", "type": "boolean", "default": "true", - "configurationNames": [ - "isIntelligentTestRunnerEnabled" - ] + "internalPropertyName": "isIntelligentTestRunnerEnabled" } ], "DD_ENABLE_LAGE_PACKAGE_NAME": [ @@ -532,9 +538,7 @@ "implementation": "A", "type": "boolean", "default": "true", - "configurationNames": [ - "isManualApiEnabled" - ] + "internalPropertyName": "isManualApiEnabled" } ], "DD_CIVISIBILITY_RUM_FLUSH_WAIT_MILLIS": [ @@ -590,9 +594,7 @@ "implementation": "A", "type": "boolean", "default": "true", - "configurationNames": [ - "crashtracking.enabled" - ] + 
"internalPropertyName": "crashtracking.enabled" } ], "DD_CUSTOM_TRACE_ID": [ @@ -707,19 +709,19 @@ ], "DD_DYNAMIC_INSTRUMENTATION_UPLOAD_INTERVAL_SECONDS": [ { - "implementation": "A", - "type": "int", + "implementation": "C", + "type": "decimal", "configurationNames": [ "dynamicInstrumentation.uploadIntervalSeconds" ], - "default": "1" + "default": "1.0" } ], "DD_ENABLE_NX_SERVICE_NAME": [ { - "implementation": "A", - "type": "string", - "default": null + "implementation": "B", + "type": "boolean", + "default": "false" } ], "DD_ENV": [ @@ -734,9 +736,12 @@ ], "DD_EXPERIMENTAL_APPSEC_STANDALONE_ENABLED": [ { - "implementation": "A", + "implementation": "B", "type": "boolean", - "default": "true" + "default": "false", + "configurationNames": [ + "experimental.appsec.standalone.enabled" + ] } ], "DD_EXPERIMENTAL_FLAGGING_PROVIDER_INITIALIZATION_TIMEOUT_MS": [ @@ -764,9 +769,7 @@ "implementation": "B", "type": "boolean", "default": "true", - "configurationNames": [ - "propagateProcessTags.enabled" - ] + "internalPropertyName": "propagateProcessTags.enabled" } ], "DD_EXPERIMENTAL_TEST_OPT_SETTINGS_CACHE": [ @@ -918,23 +921,20 @@ ], "DD_GRPC_CLIENT_ERROR_STATUSES": [ { - "implementation": "A", - "type": "array", - "default": "1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16", - "configurationNames": [ - "grpc.client.error.statuses" - ], - "handler": "GRPC_HANDLER" + "implementation": "C", + "type": "string", + "default": "1-16", + "internalPropertyName": "grpc.client.error.statuses", + "transform": "setGRPCRange" } ], "DD_GRPC_SERVER_ERROR_STATUSES": [ { - "implementation": "A", - "type": "array", - "default": "2,3,4,5,6,7,8,9,10,11,12,13,14,15,16", - "configurationNames": [ - "grpc.server.error.statuses" - ] + "implementation": "C", + "type": "string", + "default": "2-16", + "internalPropertyName": "grpc.server.error.statuses", + "transform": "setGRPCRange" } ], "DD_HEAP_SNAPSHOT_COUNT": [ @@ -942,9 +942,7 @@ "implementation": "A", "type": "int", "default": "0", - 
"configurationNames": [ - "heapSnapshot.count" - ] + "internalPropertyName": "heapSnapshot.count" } ], "DD_HEAP_SNAPSHOT_DESTINATION": [ @@ -952,9 +950,7 @@ "implementation": "A", "type": "string", "default": "", - "configurationNames": [ - "heapSnapshot.destination" - ] + "internalPropertyName": "heapSnapshot.destination" } ], "DD_HEAP_SNAPSHOT_INTERVAL": [ @@ -962,9 +958,7 @@ "implementation": "A", "type": "int", "default": "3600", - "configurationNames": [ - "heapSnapshot.interval" - ] + "internalPropertyName": "heapSnapshot.interval" } ], "DD_IAST_DB_ROWS_TO_TAINT": [ @@ -972,7 +966,8 @@ "implementation": "A", "type": "int", "configurationNames": [ - "iast.dbRowsToTaint" + "iast.dbRowsToTaint", + "experimental.iast.dbRowsToTaint" ], "default": "1" } @@ -982,7 +977,8 @@ "implementation": "A", "type": "boolean", "configurationNames": [ - "iast.deduplicationEnabled" + "iast.deduplicationEnabled", + "experimental.iast.deduplicationEnabled" ], "default": "true" } @@ -992,7 +988,10 @@ "implementation": "B", "type": "boolean", "configurationNames": [ - "iast.enabled" + "iast.enabled", + "iast", + "experimental.iast.enabled", + "experimental.iast" ], "default": "false" } @@ -1002,7 +1001,8 @@ "implementation": "A", "type": "int", "configurationNames": [ - "iast.maxConcurrentRequests" + "iast.maxConcurrentRequests", + "experimental.iast.maxConcurrentRequests" ], "default": "2" } @@ -1012,7 +1012,8 @@ "implementation": "A", "type": "int", "configurationNames": [ - "iast.maxContextOperations" + "iast.maxContextOperations", + "experimental.iast.maxContextOperations" ], "default": "2" } @@ -1022,7 +1023,8 @@ "implementation": "A", "type": "boolean", "configurationNames": [ - "iast.redactionEnabled" + "iast.redactionEnabled", + "experimental.iast.redactionEnabled" ], "default": "true" } @@ -1032,7 +1034,8 @@ "implementation": "A", "type": "string", "configurationNames": [ - "iast.redactionNamePattern" + "iast.redactionNamePattern", + "experimental.iast.redactionNamePattern" 
], "default": "(?:p(?:ass)?w(?:or)?d|pass(?:_?phrase)?|secret|(?:api_?|private_?|public_?|access_?|secret_?)key(?:_?id)?|token|consumer_?(?:id|key|secret)|sign(?:ed|ature)?|auth(?:entication|orization)?|(?:sur|last)name|user(?:name)?|address|e?mail)" } @@ -1042,7 +1045,8 @@ "implementation": "A", "type": "string", "configurationNames": [ - "iast.redactionValuePattern" + "iast.redactionValuePattern", + "experimental.iast.redactionValuePattern" ], "default": "(?:bearer\\s+[a-z0-9\\._\\-]+|glpat-[\\w\\-]{20}|gh[opsu]_[0-9a-zA-Z]{36}|ey[I-L][\\w=\\-]+\\.ey[I-L][\\w=\\-]+(?:\\.[\\w.+/=\\-]+)?|(?:[\\-]{5}BEGIN[a-z\\s]+PRIVATE\\sKEY[\\-]{5}[^\\-]+[\\-]{5}END[a-z\\s]+PRIVATE\\sKEY[\\-]{5}|ssh-rsa\\s*[a-z0-9/\\.+]{100,})|[\\w\\.-]+@[a-zA-Z\\d\\.-]+\\.[a-zA-Z]{2,})" } @@ -1052,9 +1056,12 @@ "implementation": "A", "type": "int", "configurationNames": [ - "iast.requestSampling" + "iast.requestSampling", + "experimental.iast.requestSampling" ], - "default": "30" + "default": "30", + "allowed": "100|[1-9]?\\d", + "transform": "iastRequestSampling" } ], "DD_IAST_SECURITY_CONTROLS_CONFIGURATION": [ @@ -1062,7 +1069,8 @@ "implementation": "B", "type": "string", "configurationNames": [ - "iast.securityControlsConfiguration" + "iast.securityControlsConfiguration", + "experimental.iast.securityControlsConfiguration" ], "default": null } @@ -1072,7 +1080,8 @@ "implementation": "B", "type": "boolean", "configurationNames": [ - "iast.stackTrace.enabled" + "iast.stackTrace.enabled", + "experimental.iast.stackTrace.enabled" ], "default": "true" } @@ -1082,19 +1091,18 @@ "implementation": "B", "type": "string", "configurationNames": [ - "iast.telemetryVerbosity" + "iast.telemetryVerbosity", + "experimental.iast.telemetryVerbosity" ], "default": "INFORMATION" } ], "DD_INJECTION_ENABLED": [ { - "implementation": "A", - "type": "array", - "default": "", - "configurationNames": [ - "injectionEnabled" - ] + "implementation": "C", + "type": "string", + "default": null, + "internalPropertyName": 
"injectionEnabled" } ], "DD_INJECT_FORCE": [ @@ -1102,9 +1110,7 @@ "implementation": "A", "type": "boolean", "default": "false", - "configurationNames": [ - "injectForce" - ] + "internalPropertyName": "injectForce" } ], "DD_INSTRUMENTATION_CONFIG_ID": [ @@ -1112,9 +1118,7 @@ "implementation": "A", "type": "string", "default": null, - "configurationNames": [ - "instrumentation_config_id" - ] + "internalPropertyName": "instrumentation_config_id" } ], "DD_INSTRUMENTATION_INSTALL_ID": [ @@ -1122,9 +1126,7 @@ "implementation": "A", "type": "string", "default": null, - "configurationNames": [ - "installSignature.id" - ] + "internalPropertyName": "installSignature.id" } ], "DD_INSTRUMENTATION_INSTALL_TIME": [ @@ -1132,9 +1134,7 @@ "implementation": "A", "type": "string", "default": null, - "configurationNames": [ - "installSignature.time" - ] + "internalPropertyName": "installSignature.time" } ], "DD_INSTRUMENTATION_INSTALL_TYPE": [ @@ -1142,9 +1142,7 @@ "implementation": "A", "type": "string", "default": null, - "configurationNames": [ - "installSignature.type" - ] + "internalPropertyName": "installSignature.type" } ], "DD_INSTRUMENTATION_TELEMETRY_ENABLED": [ @@ -1155,9 +1153,7 @@ "aliases": [ "DD_TRACE_TELEMETRY_ENABLED" ], - "configurationNames": [ - "telemetry.enabled" - ] + "internalPropertyName": "telemetry.enabled" } ], "DD_INTERNAL_PROFILING_LONG_LIVED_THRESHOLD": [ @@ -1165,9 +1161,7 @@ "implementation": "A", "type": "int", "default": "30000", - "configurationNames": [ - "profiling.longLivedThreshold" - ] + "internalPropertyName": "profiling.longLivedThreshold" } ], "DD_INTERNAL_PROFILING_TIMELINE_SAMPLING_ENABLED": [ @@ -1189,9 +1183,7 @@ "implementation": "A", "type": "int", "default": "128", - "configurationNames": [ - "langchain.spanCharLimit" - ] + "internalPropertyName": "langchain.spanCharLimit" } ], "DD_LANGCHAIN_SPAN_PROMPT_COMPLETION_SAMPLE_RATE": [ @@ -1199,9 +1191,7 @@ "implementation": "A", "type": "decimal", "default": "1", - "configurationNames": 
[ - "langchain.spanPromptCompletionSampleRate" - ] + "internalPropertyName": "langchain.spanPromptCompletionSampleRate" } ], "DD_LLMOBS_AGENTLESS_ENABLED": [ @@ -1219,9 +1209,7 @@ "implementation": "A", "type": "boolean", "default": "false", - "configurationNames": [ - "llmobs.enabled" - ] + "internalPropertyName": "llmobs.enabled" } ], "DD_LLMOBS_ML_APP": [ @@ -1249,16 +1237,22 @@ "implementation": "A", "type": "boolean", "default": "false", - "configurationNames": [ - "otelLogsEnabled" - ] + "internalPropertyName": "otelLogsEnabled" } ], - "DD_LOG_LEVEL": [ + "DD_TRACE_LOG_LEVEL": [ { - "implementation": "B", + "implementation": "C", "type": "string", - "default": null + "default": "debug", + "configurationNames": [ + "logLevel" + ], + "aliases": [ + "DD_LOG_LEVEL", + "OTEL_LOG_LEVEL" + ], + "allowed": "debug|info|warn|error" } ], "DD_METRICS_OTEL_ENABLED": [ @@ -1266,9 +1260,7 @@ "implementation": "A", "type": "boolean", "default": "false", - "configurationNames": [ - "otelMetricsEnabled" - ] + "internalPropertyName": "otelMetricsEnabled" } ], "DD_MINI_AGENT_PATH": [ @@ -1293,9 +1285,7 @@ "implementation": "A", "type": "int", "default": "128", - "configurationNames": [ - "openai.spanCharLimit" - ] + "internalPropertyName": "openai.spanCharLimit" } ], "DD_PIPELINE_EXECUTION_ID": [ @@ -1370,38 +1360,30 @@ ], "DD_PROFILING_DEBUG_UPLOAD_COMPRESSION": [ { - "implementation": "A", + "implementation": "B", "type": "string", - "default": "zstd" + "default": "on", + "allowed": "on|off|(gzip|zstd)(-[1-9][0-9]?)?", + "transform": "toLowerCase" } ], "DD_PROFILING_ENABLED": [ { - "implementation": "A", - "type": "boolean", + "implementation": "B", + "type": "string", + "internalPropertyName": "profiling.enabled", "configurationNames": [ - "profiling.enabled" + "profiling" ], + "allowed": "false|true|auto|1|0", + "transform": "normalizeProfilingEnabled", "default": "false", + "__TODO__": "The alias is deprecated and should log. 
This needs an re-implementation.", "aliases": [ "DD_EXPERIMENTAL_PROFILING_ENABLED" ] } ], - "DD_EXPERIMENTAL_PROFILING_ENABLED": [ - { - "implementation": "A", - "type": "boolean", - "configurationNames": [ - "profiling.enabled" - ], - "default": "false", - "aliases": [ - "DD_PROFILING_ENABLED" - ], - "deprecated": true - } - ], "DD_PROFILING_ENDPOINT_COLLECTION_ENABLED": [ { "implementation": "A", @@ -1425,8 +1407,8 @@ ], "DD_PROFILING_EXPERIMENTAL_OOM_EXPORT_STRATEGIES": [ { - "implementation": "A", - "type": "string", + "implementation": "B", + "type": "array", "default": "process" } ], @@ -1453,19 +1435,16 @@ ], "DD_PROFILING_EXPORTERS": [ { - "implementation": "A", - "type": "string", - "default": "agent", - "configurationNames": [ - "profiling.exporters" - ] + "implementation": "B", + "type": "array", + "default": "agent" } ], "DD_PROFILING_HEAP_ENABLED": [ { - "implementation": "A", + "implementation": "B", "type": "boolean", - "default": "false" + "default": null } ], "DD_PROFILING_HEAP_SAMPLING_INTERVAL": [ @@ -1484,8 +1463,8 @@ ], "DD_PROFILING_PROFILERS": [ { - "implementation": "A", - "type": "string", + "implementation": "B", + "type": "array", "default": "space,wall" } ], @@ -1493,10 +1472,7 @@ { "implementation": "A", "type": "boolean", - "default": "true", - "configurationNames": [ - "profiling.sourceMap" - ] + "default": "true" } ], "DD_PROFILING_TIMELINE_ENABLED": [ @@ -1543,9 +1519,9 @@ ], "DD_PROFILING_WALLTIME_ENABLED": [ { - "implementation": "B", + "implementation": "A", "type": "boolean", - "default": "true" + "default": null } ], "DD_REMOTE_CONFIGURATION_ENABLED": [ @@ -1556,9 +1532,7 @@ "aliases": [ "DD_REMOTE_CONFIG_ENABLED" ], - "configurationNames": [ - "remoteConfig.enabled" - ] + "internalPropertyName": "remoteConfig.enabled" } ], "DD_REMOTE_CONFIG_POLL_INTERVAL_SECONDS": [ @@ -1576,7 +1550,8 @@ "implementation": "A", "type": "boolean", "configurationNames": [ - "runtimeMetrics.enabled" + "runtimeMetrics.enabled", + "runtimeMetrics" 
], "default": "false" } @@ -1621,6 +1596,14 @@ ] } ], + "DD_ROOT_JS_SESSION_ID": [ + { + "implementation": "A", + "type": "string", + "default": null, + "internal": true + } + ], "DD_TRACE_EXPERIMENTAL_RUNTIME_ID_ENABLED": [ { "implementation": "B", @@ -1641,8 +1624,10 @@ ], "default": null, "aliases": [ - "DD_SERVICE_NAME" - ] + "DD_SERVICE_NAME", + "OTEL_SERVICE_NAME" + ], + "allowed": ".+" } ], "DD_SERVICE_MAPPING": [ @@ -1667,20 +1652,21 @@ ], "DD_SPAN_SAMPLING_RULES": [ { - "implementation": "C", - "type": "array", + "implementation": "D", + "type": "json", "configurationNames": [ - "spanSamplingRules", - "sampler.spanSamplingRules" + "spanSamplingRules" ], - "default": null + "default": null, + "transform": "toCamelCase" } ], "DD_SPAN_SAMPLING_RULES_FILE": [ { - "implementation": "B", + "implementation": "A", "type": "string", - "default": "" + "default": null, + "transform": "readFilePath" } ], "DD_TAGS": [ @@ -1698,9 +1684,7 @@ "implementation": "A", "type": "boolean", "default": "false", - "configurationNames": [ - "telemetry.debug" - ] + "internalPropertyName": "telemetry.debug" } ], "DD_TELEMETRY_DEPENDENCY_COLLECTION_ENABLED": [ @@ -1708,9 +1692,7 @@ "implementation": "A", "type": "boolean", "default": "true", - "configurationNames": [ - "telemetry.dependencyCollection" - ] + "internalPropertyName": "telemetry.dependencyCollection" } ], "DD_TELEMETRY_FORWARDER_PATH": [ @@ -1720,14 +1702,20 @@ "default": null } ], + "DD_TELEMETRY_EXTENDED_HEARTBEAT_INTERVAL": [ + { + "implementation": "A", + "type": "int", + "default": "86400", + "internalPropertyName": "telemetry.extendedHeartbeatInterval" + } + ], "DD_TELEMETRY_HEARTBEAT_INTERVAL": [ { "implementation": "B", "type": "decimal", "default": "60.0", - "configurationNames": [ - "telemetry.heartbeatInterval" - ] + "internalPropertyName": "telemetry.heartbeatInterval" } ], "DD_TELEMETRY_LOG_COLLECTION_ENABLED": [ @@ -1735,9 +1723,7 @@ "implementation": "A", "type": "boolean", "default": "true", - 
"configurationNames": [ - "telemetry.logCollection" - ] + "internalPropertyName": "telemetry.logCollection" } ], "DD_TELEMETRY_METRICS_ENABLED": [ @@ -1745,9 +1731,7 @@ "implementation": "A", "type": "boolean", "default": "true", - "configurationNames": [ - "telemetry.metrics" - ] + "internalPropertyName": "telemetry.metrics" } ], "DD_TEST_FAILED_TEST_REPLAY_ENABLED": [ @@ -1755,9 +1739,7 @@ "implementation": "A", "type": "boolean", "default": "true", - "configurationNames": [ - "isTestDynamicInstrumentationEnabled" - ] + "internalPropertyName": "isTestDynamicInstrumentationEnabled" } ], "DD_TEST_FLEET_CONFIG_PATH": [ @@ -1779,9 +1761,7 @@ "implementation": "C", "type": "int", "default": "20", - "configurationNames": [ - "testManagementAttemptToFixRetries" - ] + "internalPropertyName": "testManagementAttemptToFixRetries" } ], "DD_TEST_MANAGEMENT_ENABLED": [ @@ -1789,9 +1769,7 @@ "implementation": "A", "type": "boolean", "default": "true", - "configurationNames": [ - "isTestManagementEnabled" - ] + "internalPropertyName": "isTestManagementEnabled" } ], "DD_TEST_TIA_KEEP_COV_CONFIG": [ @@ -1799,9 +1777,7 @@ "implementation": "A", "type": "boolean", "default": "false", - "configurationNames": [ - "isKeepingCoverageConfiguration" - ] + "internalPropertyName": "isKeepingCoverageConfiguration" } ], "DD_TEST_SESSION_NAME": [ @@ -1809,9 +1785,7 @@ "implementation": "A", "type": "string", "default": null, - "configurationNames": [ - "ciVisibilityTestSessionName" - ] + "internalPropertyName": "ciVisibilityTestSessionName" } ], "DD_TRACE_128_BIT_TRACEID_GENERATION_ENABLED": [ @@ -1963,9 +1937,7 @@ "implementation": "A", "type": "boolean", "default": "true", - "configurationNames": [ - "trace.aws.addSpanPointers" - ] + "internalPropertyName": "trace.aws.addSpanPointers" } ], "DD_TRACE_AWS_SDK_AWS_BATCH_PROPAGATION_ENABLED": [ @@ -2351,7 +2323,8 @@ "configurationNames": [ "clientIpHeader" ], - "default": null + "default": null, + "transform": "toLowerCase" } ], 
"DD_TRACE_CLOUD_PAYLOAD_TAGGING_MAX_DEPTH": [ @@ -2361,27 +2334,30 @@ "configurationNames": [ "cloudPayloadTagging.maxDepth" ], - "default": "10" + "default": "10", + "allowed": "\\d+" } ], "DD_TRACE_CLOUD_REQUEST_PAYLOAD_TAGGING": [ { - "implementation": "A", - "type": "array", + "implementation": "B", + "type": "string", "configurationNames": [ "cloudPayloadTagging.request" ], - "default": null + "default": null, + "transform": "splitJSONPathRules" } ], "DD_TRACE_CLOUD_RESPONSE_PAYLOAD_TAGGING": [ { - "implementation": "A", - "type": "array", + "implementation": "B", + "type": "string", "configurationNames": [ "cloudPayloadTagging.response" ], - "default": null + "default": null, + "transform": "splitJSONPathRules" } ], "DD_TRACE_COLLECTIONS_ENABLED": [ @@ -2494,9 +2470,7 @@ "implementation": "A", "type": "string", "default": null, - "configurationNames": [ - "trace.dynamoDb.tablePrimaryKeys" - ] + "internalPropertyName": "trace.dynamoDb.tablePrimaryKeys" } ], "DD_TRACE_ELASTICSEARCH_ENABLED": [ @@ -2525,8 +2499,9 @@ "implementation": "A", "type": "boolean", "default": "true", - "configurationNames": [ - "traceEnabled" + "internalPropertyName": "tracing", + "aliases": [ + "DD_TRACING_ENABLED" ] } ], @@ -2644,10 +2619,7 @@ { "implementation": "A", "type": "boolean", - "default": "true", - "configurationNames": [ - "isGCPPubSubPushSubscriptionEnabled" - ] + "default": "true" } ], "DD_TRACE_GENERIC_POOL_ENABLED": [ @@ -2662,9 +2634,7 @@ "implementation": "A", "type": "boolean", "default": "true", - "configurationNames": [ - "gitMetadataEnabled" - ] + "internalPropertyName": "gitMetadataEnabled" } ], "DD_TRACE_GOOGLE_CLOUD_PUBSUB_ENABLED": [ @@ -2707,9 +2677,7 @@ "implementation": "A", "type": "array", "default": "", - "configurationNames": [ - "graphqlErrorExtensions" - ] + "internalPropertyName": "graphqlErrorExtensions" } ], "DD_TRACE_GRAPHQL_TAG_ENABLED": [ @@ -2796,7 +2764,8 @@ "default": "", "configurationNames": [ "headerTags" - ] + ], + "transform": 
"stripColonWhitespace" } ], "DD_TRACE_HONO_ENABLED": [ @@ -3064,13 +3033,6 @@ "default": "true" } ], - "DD_TRACE_LOG_LEVEL": [ - { - "implementation": "C", - "type": "string", - "default": "debug" - } - ], "DD_TRACE_LOOPBACK_ENABLED": [ { "implementation": "A", @@ -3090,9 +3052,7 @@ "implementation": "A", "type": "boolean", "default": "false", - "configurationNames": [ - "memcachedCommandEnabled" - ] + "internalPropertyName": "memcachedCommandEnabled" } ], "DD_TRACE_MEMCACHED_ENABLED": [ @@ -3208,9 +3168,7 @@ "implementation": "A", "type": "boolean", "default": "false", - "configurationNames": [ - "trace.nativeSpanEvents" - ] + "internalPropertyName": "trace.nativeSpanEvents" } ], "DD_TRACE_NET_ENABLED": [ @@ -3260,9 +3218,7 @@ "implementation": "F", "type": "string", "default": "(?:p(?:ass)?w(?:or)?d|pass(?:_?phrase)?|secret|(?:api_?|private_?|public_?|access_?|secret_?)key(?:_?id)?|token|consumer_?(?:id|key|secret)|sign(?:ed|ature)?|auth(?:entication|orization)?)(?:(?:\\s|%20)*(?:=|%3D)[^&]+|(?:\"|%22)(?:\\s|%20)*(?::|%3A)(?:\\s|%20)*(?:\"|%22)(?:%2[^2]|%[^2]|[^\"%])+(?:\"|%22))|bearer(?:\\s|%20)+[a-z0-9\\._\\-]+|token(?::|%3A)[a-z0-9]{13}|gh[opsu]_[0-9a-zA-Z]{36}|ey[I-L](?:[\\w=-]|%3D)+\\.ey[I-L](?:[\\w=-]|%3D)+(?:\\.(?:[\\w.+\\/=-]|%3D|%2F|%2B)+)?|[\\-]{5}BEGIN(?:[a-z\\s]|%20)+PRIVATE(?:\\s|%20)KEY[\\-]{5}[^\\-]+[\\-]{5}END(?:[a-z\\s]|%20)+PRIVATE(?:\\s|%20)KEY|ssh-rsa(?:\\s|%20)*(?:[a-z0-9\\/\\.+]|%2F|%5C|%2B){100,}", - "configurationNames": [ - "queryStringObfuscation" - ] + "internalPropertyName": "queryStringObfuscation" } ], "DD_TRACE_OPENAI_ENABLED": [ @@ -3453,10 +3409,10 @@ { "implementation": "B", "type": "string", + "allowed": "continue|restart|ignore", + "transform": "toLowerCase", "default": "continue", - "configurationNames": [ - "tracePropagationBehaviorExtract" - ] + "internalPropertyName": "tracePropagationBehaviorExtract" } ], "DD_TRACE_PROPAGATION_EXTRACT_FIRST": [ @@ -3464,19 +3420,18 @@ "implementation": "A", "type": "boolean", "default": 
"false", - "configurationNames": [ - "tracePropagationExtractFirst" - ] + "internalPropertyName": "tracePropagationExtractFirst" } ], "DD_TRACE_PROPAGATION_STYLE": [ { "implementation": "D", "type": "array", - "configurationNames": [ - "tracePropagationStyle" - ], - "default": "datadog,tracecontext,baggage" + "default": "datadog,tracecontext,baggage", + "transform": "validatePropagationStyles", + "aliases": [ + "OTEL_PROPAGATORS" + ] } ], "DD_TRACE_PROPAGATION_STYLE_EXTRACT": [ @@ -3486,7 +3441,8 @@ "configurationNames": [ "tracePropagationStyle.extract" ], - "default": "datadog, tracecontext, baggage" + "default": "datadog, tracecontext, baggage", + "transform": "toLowerCase" } ], "DD_TRACE_PROPAGATION_STYLE_INJECT": [ @@ -3496,7 +3452,8 @@ "configurationNames": [ "tracePropagationStyle.inject" ], - "default": "datadog, tracecontext, baggage" + "default": "datadog, tracecontext, baggage", + "transform": "toLowerCase" } ], "DD_TRACE_PROTOBUFJS_ENABLED": [ @@ -3525,8 +3482,8 @@ "implementation": "A", "type": "int", "configurationNames": [ - "ingestion.rateLimit", - "sampler.rateLimit" + "rateLimit", + "ingestion.rateLimit" ], "default": "100" } @@ -3591,9 +3548,7 @@ "implementation": "A", "type": "boolean", "default": "false", - "configurationNames": [ - "resourceRenamingEnabled" - ] + "internalPropertyName": "resourceRenamingEnabled" } ], "DD_TRACE_RESTIFY_ENABLED": [ @@ -3622,20 +3577,22 @@ "implementation": "B", "type": "decimal", "configurationNames": [ + "sampleRate", "ingestion.sampleRate" ], - "default": null + "default": null, + "transform": "sampleRate" } ], "DD_TRACE_SAMPLING_RULES": [ { - "implementation": "A", - "type": "array", + "implementation": "E", + "type": "json", "configurationNames": [ - "samplingRules", - "sampler.rules" + "samplingRules" ], - "default": "" + "default": "[]", + "transform": "toCamelCase" } ], "DD_TRACE_SCOPE": [ @@ -3643,9 +3600,7 @@ "implementation": "A", "type": "string", "default": null, - "configurationNames": [ - "scope" - 
] + "internalPropertyName": "scope" } ], "DD_TRACE_SELENIUM_ENABLED": [ @@ -3687,6 +3642,8 @@ { "implementation": "B", "type": "string", + "allowed": "v0|v1", + "transform": "toLowerCase", "configurationNames": [ "spanAttributeSchema" ], @@ -3697,9 +3654,7 @@ { "implementation": "A", "type": "int", - "configurationNames": [ - "spanLeakDebug" - ], + "internalPropertyName": "spanLeakDebug", "default": "0" } ], @@ -3712,20 +3667,21 @@ ], "DD_TRACE_STARTUP_LOGS": [ { - "implementation": "D", + "implementation": "C", "type": "boolean", "configurationNames": [ "startupLogs" ], - "default": "false" + "default": "true" } ], "DD_TRACE_STATS_COMPUTATION_ENABLED": [ { "implementation": "A", "type": "boolean", + "internalPropertyName": "stats.enabled", "configurationNames": [ - "stats.enabled" + "stats" ], "default": "false" } @@ -3852,19 +3808,7 @@ "implementation": "A", "type": "int", "default": "512", - "configurationNames": [ - "tagsHeaderMaxLength" - ] - } - ], - "DD_TRACING_ENABLED": [ - { - "implementation": "A", - "type": "boolean", - "default": "true", - "configurationNames": [ - "tracing" - ] + "internalPropertyName": "tagsHeaderMaxLength" } ], "DD_VERSION": [ @@ -3882,9 +3826,7 @@ "implementation": "A", "type": "int", "default": "128", - "configurationNames": [ - "vertexai.spanCharLimit" - ] + "internalPropertyName": "vertexai.spanCharLimit" } ], "DD_VERTEXAI_SPAN_PROMPT_COMPLETION_SAMPLE_RATE": [ @@ -3892,9 +3834,7 @@ "implementation": "A", "type": "decimal", "default": "1", - "configurationNames": [ - "vertexai.spanPromptCompletionSampleRate" - ] + "internalPropertyName": "vertexai.spanPromptCompletionSampleRate" } ], "DD_VITEST_WORKER": [ @@ -3908,50 +3848,42 @@ { "implementation": "A", "type": "int", - "configurationNames": [ - "otelMaxExportBatchSize" - ], - "default": "512" + "internalPropertyName": "otelMaxExportBatchSize", + "default": "512", + "allowed": "[1-9]\\d*" } ], "OTEL_BSP_MAX_QUEUE_SIZE": [ { "implementation": "A", "type": "int", - 
"configurationNames": [ - "otelMaxQueueSize" - ], - "default": "2048" + "internalPropertyName": "otelMaxQueueSize", + "default": "2048", + "allowed": "[1-9]\\d*" } ], "OTEL_BSP_SCHEDULE_DELAY": [ { "implementation": "A", "type": "int", - "configurationNames": [ - "otelBatchTimeout" - ], - "default": "5000" + "internalPropertyName": "otelBatchTimeout", + "default": "5000", + "allowed": "[1-9]\\d*" } ], "OTEL_EXPORTER_OTLP_ENDPOINT": [ { "implementation": "A", "type": "string", - "default": null, - "configurationNames": [ - "otelUrl" - ] + "default": null } ], "OTEL_EXPORTER_OTLP_HEADERS": [ { - "implementation": "B", - "type": "map", + "implementation": "C", + "type": "string", "default": null, - "configurationNames": [ - "otelHeaders" - ] + "internalPropertyName": "otelHeaders" } ], "OTEL_EXPORTER_OTLP_LOGS_ENDPOINT": [ @@ -3959,18 +3891,20 @@ "implementation": "A", "type": "string", "default": null, - "configurationNames": [ - "otelLogsUrl" + "internalPropertyName": "otelLogsUrl", + "aliases": [ + "OTEL_EXPORTER_OTLP_ENDPOINT" ] } ], "OTEL_EXPORTER_OTLP_LOGS_HEADERS": [ { - "implementation": "B", - "type": "map", + "implementation": "A", + "type": "string", "default": null, - "configurationNames": [ - "otelLogsHeaders" + "internalPropertyName": "otelLogsHeaders", + "aliases": [ + "OTEL_EXPORTER_OTLP_HEADERS" ] } ], @@ -3979,8 +3913,9 @@ "implementation": "D", "type": "string", "default": "http/protobuf", - "configurationNames": [ - "otelLogsProtocol" + "internalPropertyName": "otelLogsProtocol", + "aliases": [ + "OTEL_EXPORTER_OTLP_PROTOCOL" ] } ], @@ -3988,10 +3923,12 @@ { "implementation": "A", "type": "int", - "configurationNames": [ - "otelLogsTimeout" - ], - "default": "10000" + "internalPropertyName": "otelLogsTimeout", + "default": "10000", + "allowed": "[1-9]\\d*", + "aliases": [ + "OTEL_EXPORTER_OTLP_TIMEOUT" + ] } ], "OTEL_EXPORTER_OTLP_METRICS_ENDPOINT": [ @@ -3999,18 +3936,20 @@ "implementation": "A", "type": "string", "default": null, - 
"configurationNames": [ - "otelMetricsUrl" + "internalPropertyName": "otelMetricsUrl", + "aliases": [ + "OTEL_EXPORTER_OTLP_ENDPOINT" ] } ], "OTEL_EXPORTER_OTLP_METRICS_HEADERS": [ { - "implementation": "A", - "type": "map", + "implementation": "B", + "type": "string", "default": null, - "configurationNames": [ - "otelMetricsHeaders" + "internalPropertyName": "otelMetricsHeaders", + "aliases": [ + "OTEL_EXPORTER_OTLP_HEADERS" ] } ], @@ -4019,8 +3958,9 @@ "implementation": "B", "type": "string", "default": "http/protobuf", - "configurationNames": [ - "otelMetricsProtocol" + "internalPropertyName": "otelMetricsProtocol", + "aliases": [ + "OTEL_EXPORTER_OTLP_PROTOCOL" ] } ], @@ -4028,9 +3968,9 @@ { "implementation": "A", "type": "string", - "configurationNames": [ - "otelMetricsTemporalityPreference" - ], + "allowed": "Delta|Cumulative|LowMemory", + "transform": "toUpperCase", + "internalPropertyName": "otelMetricsTemporalityPreference", "default": "delta" } ], @@ -4038,10 +3978,12 @@ { "implementation": "B", "type": "int", - "configurationNames": [ - "otelMetricsTimeout" - ], - "default": "10000" + "allowed": "[1-9]\\d*", + "internalPropertyName": "otelMetricsTimeout", + "default": "10000", + "aliases": [ + "OTEL_EXPORTER_OTLP_TIMEOUT" + ] } ], "OTEL_EXPORTER_OTLP_PROTOCOL": [ @@ -4049,18 +3991,15 @@ "implementation": "A", "type": "string", "default": "http/protobuf", - "configurationNames": [ - "otelProtocol" - ] + "internalPropertyName": "otelProtocol" } ], "OTEL_EXPORTER_OTLP_TIMEOUT": [ { "implementation": "A", "type": "int", - "configurationNames": [ - "otelTimeout" - ], + "allowed": "[1-9]\\d*", + "internalPropertyName": "otelTimeout", "default": "10000" } ], @@ -4068,30 +4007,26 @@ { "implementation": "A", "type": "string", - "default": null - } - ], - "OTEL_LOG_LEVEL": [ - { - "implementation": "C", - "type": "string", - "default": null + "default": null, + "allowed": "none|otlp", + "transform": "toLowerCase" } ], "OTEL_METRICS_EXPORTER": [ { 
"implementation": "C", "type": "string", - "default": null + "default": null, + "allowed": "none|otlp", + "transform": "toLowerCase" } ], "OTEL_METRIC_EXPORT_INTERVAL": [ { "implementation": "A", "type": "int", - "configurationNames": [ - "otelMetricsExportInterval" - ], + "allowed": "[1-9]\\d*", + "internalPropertyName": "otelMetricsExportInterval", "default": "10000" } ], @@ -4099,27 +4034,17 @@ { "implementation": "A", "type": "int", - "configurationNames": [ - "otelMetricsExportTimeout" - ], + "allowed": "[1-9]\\d*", + "internalPropertyName": "otelMetricsExportTimeout", "default": "7500" } ], - "OTEL_PROPAGATORS": [ - { - "implementation": "A", - "type": "array", - "default": "", - "configurationNames": [ - "tracePropagationStyle.otelPropagators" - ] - } - ], "OTEL_RESOURCE_ATTRIBUTES": [ { "implementation": "B", "type": "string", - "default": "" + "default": "", + "transform": "parseOtelTags" } ], "OTEL_SDK_DISABLED": [ @@ -4129,38 +4054,30 @@ "default": "true" } ], - "OTEL_SERVICE_NAME": [ - { - "implementation": "B", - "type": "string", - "configurationNames": [ - "service" - ], - "default": null - } - ], "OTEL_TRACES_EXPORTER": [ { "implementation": "F", "type": "string", - "default": "otlp" + "default": "otlp", + "allowed": "none|otlp", + "transform": "toLowerCase" } ], "OTEL_TRACES_SAMPLER": [ { "implementation": "E", "type": "string", - "default": "parentbased_always_on" + "default": "parentbased_always_on", + "allowed": "always_on|always_off|traceidratio|parentbased_always_on|parentbased_always_off|parentbased_traceidratio", + "transform": "toLowerCase" } ], "OTEL_TRACES_SAMPLER_ARG": [ { "implementation": "D", "type": "decimal", - "configurationNames": [ - "sampleRate" - ], - "default": null + "default": null, + "allowed": "\\d+(\\.\\d+)?" 
} ] } diff --git a/packages/dd-trace/src/crashtracking/crashtracker.js b/packages/dd-trace/src/crashtracking/crashtracker.js index 1fd2a822fb6..10b02988dc2 100644 --- a/packages/dd-trace/src/crashtracking/crashtracker.js +++ b/packages/dd-trace/src/crashtracking/crashtracker.js @@ -23,6 +23,9 @@ class Crashtracker { } } + /** + * @param {import('../config/config-base')} config - Tracer configuration + */ start (config) { if (this.#started) return this.configure(config) @@ -35,7 +38,7 @@ class Crashtracker { this.#getMetadata(config) ) } catch (e) { - log.error('Error initialising crashtracker', e) + log.error('Error initializing crashtracker', e) } } @@ -49,6 +52,9 @@ class Crashtracker { } // TODO: Send only configured values when defaults are fixed. + /** + * @param {import('../config/config-base')} config - Tracer configuration + */ #getConfig (config) { const url = getAgentUrl(config) diff --git a/packages/dd-trace/src/crashtracking/index.js b/packages/dd-trace/src/crashtracking/index.js index 5addf3864ef..2ba38e72658 100644 --- a/packages/dd-trace/src/crashtracking/index.js +++ b/packages/dd-trace/src/crashtracking/index.js @@ -1,15 +1,9 @@ 'use strict' -const { existsSync } = require('node:fs') const { isMainThread } = require('worker_threads') const log = require('../log') -// libdatadog v29 crashtracker segfaults during init on ARM64 musl (Alpine). -// The segfault bypasses JS try/catch so we must avoid loading it entirely. 
-// See: https://github.com/DataDog/libdatadog-nodejs/issues/114 -const isArm64Musl = process.arch === 'arm64' && existsSync('/etc/alpine-release') - -if (isMainThread && !isArm64Musl) { +if (isMainThread) { try { module.exports = require('./crashtracker') } catch (e) { diff --git a/packages/dd-trace/src/debugger/index.js b/packages/dd-trace/src/debugger/index.js index fe70f9dc5c2..9f3e750702e 100644 --- a/packages/dd-trace/src/debugger/index.js +++ b/packages/dd-trace/src/debugger/index.js @@ -147,7 +147,7 @@ function start (config, rcInstance) { * Sends the new configuration to the worker thread via the config channel. * Does nothing if the worker is not started. * - * @param {Config} config - The updated tracer configuration object + * @param {import('../config/config-base')} config - The updated tracer configuration object */ function configure (config) { if (configChannel === null) return diff --git a/packages/dd-trace/src/dogstatsd.js b/packages/dd-trace/src/dogstatsd.js index b9f1491febc..f3853203cb2 100644 --- a/packages/dd-trace/src/dogstatsd.js +++ b/packages/dd-trace/src/dogstatsd.js @@ -1,13 +1,11 @@ 'use strict' -const lookup = require('dns').lookup // cache to avoid instrumentation const dgram = require('dgram') const isIP = require('net').isIP const request = require('./exporters/common/request') const log = require('./log') const Histogram = require('./histogram') -const defaults = require('./config/defaults') const { getAgentUrl } = require('./agent/url') const { entityId } = require('./exporters/common/docker') @@ -23,7 +21,9 @@ const TYPE_HISTOGRAM = 'h' * @implements {DogStatsD} */ class DogStatsDClient { - constructor (options = {}) { + #lookup + constructor (options) { + this.#lookup = options.lookup if (options.metricsProxyUrl) { this._httpOptions = { method: 'POST', @@ -32,11 +32,10 @@ class DogStatsDClient { } } - this._host = options.host || defaults['dogstatsd.hostname'] + this._host = options.host this._family = isIP(this._host) - 
this._port = options.port || defaults['dogstatsd.port'] - this._prefix = options.prefix || '' - this._tags = options.tags || [] + this._port = options.port + this._tags = options.tags this._queue = [] this._buffer = '' this._offset = 0 @@ -99,7 +98,7 @@ class DogStatsDClient { _sendUdp (queue) { if (this._family === 0) { - lookup(this._host, (err, address, family) => { + this.#lookup(this._host, (err, address, family) => { if (err) return log.error('DogStatsDClient: Host not found', err) this._sendUdpFromQueue(queue, address, family) }) @@ -118,7 +117,7 @@ class DogStatsDClient { } _add (stat, value, type, tags) { - let message = `${this._prefix + stat}:${value}|${type}` + let message = `${stat}:${value}|${type}` // Don't manipulate this._tags as it is still used tags = tags ? [...this._tags, ...tags] : this._tags @@ -164,6 +163,9 @@ class DogStatsDClient { return socket } + /** + * @param {import('./config/config-base')} config - Tracer configuration + */ static generateClientConfig (config) { const tags = [] @@ -183,6 +185,7 @@ class DogStatsDClient { host: config.dogstatsd.hostname, port: config.dogstatsd.port, tags, + lookup: config.lookup, } if (config.url || config.port) { diff --git a/packages/dd-trace/src/encode/0.4.js b/packages/dd-trace/src/encode/0.4.js index 6a21762bf63..9a253325223 100644 --- a/packages/dd-trace/src/encode/0.4.js +++ b/packages/dd-trace/src/encode/0.4.js @@ -140,7 +140,7 @@ class AgentEncoder { this._traceBytes.length = 0 this._stringCount = 0 this._stringBytes.length = 0 - this._stringMap = {} + this._stringMap = Object.create(null) this._cacheString('') } diff --git a/packages/dd-trace/src/exporters/agent/writer.js b/packages/dd-trace/src/exporters/agent/writer.js index a9f5ada40b9..f880b2ba1be 100644 --- a/packages/dd-trace/src/exporters/agent/writer.js +++ b/packages/dd-trace/src/exporters/agent/writer.js @@ -1,6 +1,8 @@ 'use strict' const { inspect } = require('node:util') +const { channel } = require('dc-polyfill') + const 
request = require('../common/request') const { logIntegrations, logAgentError } = require('../../startup-log') const runtimeMetrics = require('../../runtime_metrics') @@ -10,10 +12,14 @@ const BaseWriter = require('../common/writer') const propagationHash = require('../../propagation-hash') const METRIC_PREFIX = 'datadog.tracer.node.exporter.agent' +const firstFlushChannel = channel('dd-trace:exporter:first-flush') class AgentWriter extends BaseWriter { constructor (...args) { - super(...args) + super({ + ...args[0], + beforeFirstFlush: () => firstFlushChannel.publish(), + }) const { prioritySampler, lookup, protocolVersion, headers, config = {} } = args[0] const AgentEncoder = getEncoder(protocolVersion) diff --git a/packages/dd-trace/src/exporters/common/request.js b/packages/dd-trace/src/exporters/common/request.js index f21daa0de5f..ca2e5ca752d 100644 --- a/packages/dd-trace/src/exporters/common/request.js +++ b/packages/dd-trace/src/exporters/common/request.js @@ -18,6 +18,10 @@ const maxActiveBufferSize = 1024 * 1024 * 64 let activeBufferSize = 0 +/** + * @param {string|URL|object} urlObjOrString + * @returns {object} + */ function parseUrl (urlObjOrString) { if (urlObjOrString !== null && typeof urlObjOrString === 'object') return urlToHttpOptions(urlObjOrString) @@ -33,6 +37,11 @@ function parseUrl (urlObjOrString) { return url } +/** + * @param {Buffer|string|Readable|Array} data + * @param {object} options + * @param {(error: Error|null, result: string, statusCode: number) => void} callback + */ function request (data, options, callback) { if (!options.headers) { options.headers = {} diff --git a/packages/dd-trace/src/exporters/common/writer.js b/packages/dd-trace/src/exporters/common/writer.js index 20e63ae66af..9b352a703d2 100644 --- a/packages/dd-trace/src/exporters/common/writer.js +++ b/packages/dd-trace/src/exporters/common/writer.js @@ -1,14 +1,21 @@ 'use strict' +const { channel } = require('dc-polyfill') + const log = require('../../log') const 
request = require('./request') const { safeJSONStringify } = require('./util') +const firstFlushChannel = channel('dd-trace:exporter:first-flush') + class Writer { - constructor ({ url }) { + constructor ({ url, beforeFirstFlush }) { this._url = url + this._beforeFirstFlush = beforeFirstFlush } + #isFirstFlush = true + flush (done = () => {}) { const count = this._encoder.count() @@ -16,8 +23,11 @@ class Writer { this._encoder.reset() done() } else if (count > 0) { + if (this.#isFirstFlush && firstFlushChannel.hasSubscribers && this._beforeFirstFlush) { + this.#isFirstFlush = false + this._beforeFirstFlush() + } const payload = this._encoder.makePayload() - this._sendPayload(payload, count, done) } else { done() diff --git a/packages/dd-trace/src/heap_snapshots.js b/packages/dd-trace/src/heap_snapshots.js index 35360d892c0..19ef1206a69 100644 --- a/packages/dd-trace/src/heap_snapshots.js +++ b/packages/dd-trace/src/heap_snapshots.js @@ -45,6 +45,9 @@ function getName (destination) { } module.exports = { + /** + * @param {import('./config/config-base')} config - Tracer configuration + */ async start (config) { const destination = config.heapSnapshot.destination diff --git a/packages/dd-trace/src/index.js b/packages/dd-trace/src/index.js index 582511ab6a9..0366f023e4c 100644 --- a/packages/dd-trace/src/index.js +++ b/packages/dd-trace/src/index.js @@ -1,7 +1,7 @@ 'use strict' const { getValueFromEnvSources } = require('./config/helper') -const { isFalse } = require('./util') +const { isFalse, isTrue } = require('./util') // Global `jest` is only present in Jest workers. const inJestWorker = typeof jest !== 'undefined' @@ -9,7 +9,10 @@ const inJestWorker = typeof jest !== 'undefined' const ddTraceDisabled = getValueFromEnvSources('DD_TRACE_ENABLED') ? 
isFalse(getValueFromEnvSources('DD_TRACE_ENABLED')) : String(getValueFromEnvSources('OTEL_TRACES_EXPORTER')).toLowerCase() === 'none' +const shouldUseProxyWhenTracingDisabled = + isTrue(getValueFromEnvSources('DD_DYNAMIC_INSTRUMENTATION_ENABLED')) || + isTrue(getValueFromEnvSources('DD_EXPERIMENTAL_APPSEC_STANDALONE_ENABLED')) -module.exports = ddTraceDisabled || inJestWorker +module.exports = (ddTraceDisabled && !shouldUseProxyWhenTracingDisabled) || inJestWorker ? require('./noop/proxy') : require('./proxy') diff --git a/packages/dd-trace/src/lambda/runtime/ritm.js b/packages/dd-trace/src/lambda/runtime/ritm.js index 5418a1e7fc7..40ab513f0d9 100644 --- a/packages/dd-trace/src/lambda/runtime/ritm.js +++ b/packages/dd-trace/src/lambda/runtime/ritm.js @@ -89,12 +89,12 @@ const registerLambdaHook = () => { const lambdaFilePaths = _getLambdaFilePaths(lambdaStylePath) // TODO: Redo this like any other instrumentation. - Hook(lambdaFilePaths, (moduleExports, name) => { + Hook(lambdaFilePaths, (moduleExports, name, _, moduleVersion) => { require('./patch') for (const { hook } of instrumentations[name]) { try { - moduleExports = hook(moduleExports) + moduleExports = hook(moduleExports, moduleVersion) ?? moduleExports } catch (e) { log.error('Error executing lambda hook', e) } @@ -104,16 +104,16 @@ const registerLambdaHook = () => { }) } else { const moduleToPatch = 'datadog-lambda-js' - Hook([moduleToPatch], (moduleExports, moduleName, _) => { + Hook([moduleToPatch], (moduleExports, moduleName, _, moduleVersion) => { moduleName = moduleName.replace(pathSepExpr, '/') require('./patch') - for (const { name, file, hook } of instrumentations[moduleToPatch]) { - const fullFilename = filename(name, file) + for (const { file, hook } of instrumentations[moduleToPatch]) { + const fullFilename = filename(moduleToPatch, file) if (moduleName === fullFilename) { try { - moduleExports = hook(moduleExports) + moduleExports = hook(moduleExports, moduleVersion) ?? 
moduleExports } catch (e) { log.error('Error executing lambda hook for datadog-lambda-js', e) } diff --git a/packages/dd-trace/src/llmobs/index.js b/packages/dd-trace/src/llmobs/index.js index f3a4972a7a8..1e34a5a71ab 100644 --- a/packages/dd-trace/src/llmobs/index.js +++ b/packages/dd-trace/src/llmobs/index.js @@ -43,9 +43,12 @@ let spanWriter /** @type {LLMObsEvalMetricsWriter | null} */ let evalWriter -/** @type {import('../config')} */ +/** @type {import('../config/config-base')} */ let globalTracerConfig +/** + * @param {@type import('../config/config-base')} config + */ function enable (config) { globalTracerConfig = config diff --git a/packages/dd-trace/src/llmobs/plugins/ai/index.js b/packages/dd-trace/src/llmobs/plugins/ai/index.js index 55efcb0f929..185659e43e5 100644 --- a/packages/dd-trace/src/llmobs/plugins/ai/index.js +++ b/packages/dd-trace/src/llmobs/plugins/ai/index.js @@ -18,6 +18,7 @@ const { getToolNameFromTags, getToolCallResultContent, getLlmObsSpanName, + getTelemetryMetadata, } = require('./util') /** @@ -216,6 +217,9 @@ class VercelAILLMObsPlugin extends BaseLLMObsPlugin { this._tagger.tagEmbeddingIO(span, parsedInputs, output) + const metadata = getTelemetryMetadata(tags) + this._tagger.tagMetadata(span, metadata) + const usage = tags['ai.usage.tokens'] this._tagger.tagMetrics(span, { inputTokens: usage, @@ -234,7 +238,7 @@ class VercelAILLMObsPlugin extends BaseLLMObsPlugin { this._tagger.tagTextIO(span, prompt, output) - const metadata = getGenerationMetadata(tags) ?? 
{} + const metadata = getGenerationMetadata(tags) metadata.schema = getJsonStringValue(tags['ai.schema'], {}) this._tagger.tagMetadata(span, metadata) } diff --git a/packages/dd-trace/src/llmobs/plugins/ai/util.js b/packages/dd-trace/src/llmobs/plugins/ai/util.js index d8aee774bd2..9d28567094d 100644 --- a/packages/dd-trace/src/llmobs/plugins/ai/util.js +++ b/packages/dd-trace/src/llmobs/plugins/ai/util.js @@ -10,6 +10,10 @@ const MODEL_METADATA_KEYS = new Set([ 'stop_sequences', ]) +const VERCEL_AI_TELEMETRY_METADATA_PREFIX = 'ai.telemetry.metadata.' +const VERCEL_AI_MODEL_METADATA_PREFIX = 'gen_ai.request.' +const VERCEL_AI_GENERATION_METADATA_PREFIX = 'ai.settings.' + /** * @typedef {import('../../../opentracing/span')} Span * @@ -107,17 +111,29 @@ function getJsonStringValue (str, defaultValue) { /** * Get the model metadata from the span tags (top_p, top_k, temperature, etc.) + * Additionally, set telemetry metadata from manual telemetry tags. * @param {SpanTags} tags * @returns {Record | null} */ function getModelMetadata (tags) { /** @type {Record} */ const modelMetadata = {} - for (const metadata of MODEL_METADATA_KEYS) { - const metadataTagKey = `gen_ai.request.${metadata}` - const metadataValue = tags[metadataTagKey] - if (metadataValue) { - modelMetadata[metadata] = metadataValue + for (const tag of Object.keys(tags)) { + const isModelMetadata = tag.startsWith(VERCEL_AI_MODEL_METADATA_PREFIX) + if (isModelMetadata) { + const lastCommaPosition = tag.lastIndexOf('.') + const metadataKey = lastCommaPosition === -1 ? 
tag : tag.slice(lastCommaPosition + 1) + if (metadataKey && MODEL_METADATA_KEYS.has(metadataKey)) { + modelMetadata[metadataKey] = tags[tag] + } + } else { + const isTelemetryMetadata = tag.startsWith(VERCEL_AI_TELEMETRY_METADATA_PREFIX) + if (isTelemetryMetadata) { + const metadataKey = tag.slice(VERCEL_AI_TELEMETRY_METADATA_PREFIX.length) + if (metadataKey) { + modelMetadata[metadataKey] = tags[tag] + } + } } } @@ -126,6 +142,7 @@ function getModelMetadata (tags) { /** * Get the generation metadata from the span tags (maxSteps, maxRetries, etc.) + * Additionally, set telemetry metadata from manual telemetry tags. * @param {SpanTags} tags * @returns {Record | null} */ @@ -134,14 +151,24 @@ function getGenerationMetadata (tags) { const metadata = {} for (const tag of Object.keys(tags)) { - if (!tag.startsWith('ai.settings')) continue - - const settingKey = tag.split('.').pop() - const transformedKey = settingKey.replaceAll(/[A-Z]/g, letter => '_' + letter.toLowerCase()) - if (MODEL_METADATA_KEYS.has(transformedKey)) continue + const isGenerationMetadata = tag.startsWith(VERCEL_AI_GENERATION_METADATA_PREFIX) + if (isGenerationMetadata) { + const lastCommaPosition = tag.lastIndexOf('.') + const settingKey = lastCommaPosition === -1 ? tag : tag.slice(lastCommaPosition + 1) + const transformedKey = settingKey.replaceAll(/[A-Z]/g, letter => '_' + letter.toLowerCase()) + if (MODEL_METADATA_KEYS.has(transformedKey)) continue - const settingValue = tags[tag] - metadata[settingKey] = settingValue + const settingValue = tags[tag] + metadata[settingKey] = settingValue + } else { + const isTelemetryMetadata = tag.startsWith(VERCEL_AI_TELEMETRY_METADATA_PREFIX) + if (isTelemetryMetadata) { + const metadataKey = tag.slice(VERCEL_AI_TELEMETRY_METADATA_PREFIX.length) + if (metadataKey) { + metadata[metadataKey] = tags[tag] + } + } + } } return Object.keys(metadata).length ? 
metadata : null @@ -205,6 +232,26 @@ function getLlmObsSpanName (operation, functionId) { return functionId ? `${functionId}.${operation}` : operation } +/** + * Get custom telemetry metadata from ai.telemetry.metadata.* attributes + * @param {Record} tags + * @returns {Record | null} + */ +function getTelemetryMetadata (tags) { + const metadata = {} + + for (const tag of Object.keys(tags)) { + if (!tag.startsWith(VERCEL_AI_TELEMETRY_METADATA_PREFIX)) continue + + const metadataKey = tag.slice(VERCEL_AI_TELEMETRY_METADATA_PREFIX.length) + if (metadataKey) { + metadata[metadataKey] = tags[tag] + } + } + + return Object.keys(metadata).length ? metadata : null +} + module.exports = { getSpanTags, getOperation, @@ -215,4 +262,5 @@ module.exports = { getToolNameFromTags, getToolCallResultContent, getLlmObsSpanName, + getTelemetryMetadata, } diff --git a/packages/dd-trace/src/llmobs/plugins/bedrockruntime.js b/packages/dd-trace/src/llmobs/plugins/bedrockruntime.js index 3747c0dffec..db0249b8a6b 100644 --- a/packages/dd-trace/src/llmobs/plugins/bedrockruntime.js +++ b/packages/dd-trace/src/llmobs/plugins/bedrockruntime.js @@ -65,10 +65,12 @@ class BedrockRuntimeLLMObsPlugin extends BaseLLMObsPlugin { telemetry.incrementLLMObsSpanStartCount({ autoinstrumented: true, integration: 'bedrock' }) const parent = llmobsStore.getStore()?.span + // Use full modelId and unified provider for LLMObs (required for backend cost estimation). + // Split modelProvider/modelName from parseModelId() are still used below for response parsing. 
this._tagger.registerLLMObsSpan(span, { parent, - modelName: modelName.toLowerCase(), - modelProvider: modelProvider.toLowerCase(), + modelName: request.params.modelId.toLowerCase(), + modelProvider: 'amazon_bedrock', kind: 'llm', name: 'bedrock-runtime.command', integration: 'bedrock', diff --git a/packages/dd-trace/src/llmobs/sdk.js b/packages/dd-trace/src/llmobs/sdk.js index 6e06027953c..8149b02ac77 100644 --- a/packages/dd-trace/src/llmobs/sdk.js +++ b/packages/dd-trace/src/llmobs/sdk.js @@ -29,16 +29,23 @@ class LLMObs extends NoopLLMObs { */ #hasUserSpanProcessor = false + /** + * @param {import('../tracer')} tracer - Tracer instance + * @param {import('./index')} llmobsModule - LLMObs module instance + * @param {import('../config/config-base')} config - Tracer configuration + */ constructor (tracer, llmobsModule, config) { super(tracer) + /** @type {import('../config/config-base')} */ this._config = config + this._llmobsModule = llmobsModule this._tagger = new LLMObsTagger(config) } get enabled () { - return this._config.llmobs.enabled + return this._config.llmobs.enabled ?? false } enable (options = {}) { @@ -56,13 +63,10 @@ class LLMObs extends NoopLLMObs { return } - const llmobs = { - mlApp: options.mlApp, - agentlessEnabled: options.agentlessEnabled, - } - // TODO: This will update config telemetry with the origin 'code', which is not ideal when `enable()` is called - // based on `APM_TRACING` RC product updates. - this._config.updateOptions({ llmobs }) + // TODO: These configs should be passed through directly at construction time instead. 
+ this._config.llmobs.enabled = true + this._config.llmobs.mlApp = options.mlApp + this._config.llmobs.agentlessEnabled = options.agentlessEnabled // configure writers and channel subscribers this._llmobsModule.enable(this._config) diff --git a/packages/dd-trace/src/llmobs/span_processor.js b/packages/dd-trace/src/llmobs/span_processor.js index 0e873b6a5d9..9bf7cd3bd80 100644 --- a/packages/dd-trace/src/llmobs/span_processor.js +++ b/packages/dd-trace/src/llmobs/span_processor.js @@ -49,7 +49,7 @@ class LLMObservabilitySpan { } class LLMObsSpanProcessor { - /** @type {import('../config')} */ + /** @type {import('../config/config-base')} */ #config /** @type {((span: LLMObservabilitySpan) => LLMObservabilitySpan | null) | null} */ diff --git a/packages/dd-trace/src/llmobs/tagger.js b/packages/dd-trace/src/llmobs/tagger.js index db840693daf..baa303a1d9f 100644 --- a/packages/dd-trace/src/llmobs/tagger.js +++ b/packages/dd-trace/src/llmobs/tagger.js @@ -47,8 +47,11 @@ const { storage } = require('./storage') const registry = new WeakMap() class LLMObsTagger { + /** @type {import('../config/config-base')} */ + #config + constructor (config, softFail = false) { - this._config = config + this.#config = config this.softFail = softFail } @@ -72,15 +75,15 @@ class LLMObsTagger { integration, _decorator, } = {}) { - if (!this._config.llmobs.enabled) return + if (!this.#config.llmobs.enabled) return if (!kind) return // do not register it in the map if it doesn't have an llmobs span kind const spanMlApp = mlApp || registry.get(parent)?.[ML_APP] || span.context()._trace.tags[PROPAGATED_ML_APP_KEY] || - this._config.llmobs.mlApp || - this._config.service // this should always have a default + this.#config.llmobs.mlApp || + this.#config.service // this should always have a default if (!spanMlApp) { throw new Error( @@ -624,7 +627,7 @@ class LLMObsTagger { } _register (span) { - if (!this._config.llmobs.enabled) return + if (!this.#config.llmobs.enabled) return if 
(registry.has(span)) { this.#handleFailure(`LLMObs Span "${span._name}" already registered.`) return @@ -634,7 +637,7 @@ class LLMObsTagger { } _setTag (span, key, value) { - if (!this._config.llmobs.enabled) return + if (!this.#config.llmobs.enabled) return if (!registry.has(span)) { this.#handleFailure(`Span "${span._name}" must be an LLMObs generated span.`) return diff --git a/packages/dd-trace/src/llmobs/writers/base.js b/packages/dd-trace/src/llmobs/writers/base.js index c32903dd886..eb60158ff44 100644 --- a/packages/dd-trace/src/llmobs/writers/base.js +++ b/packages/dd-trace/src/llmobs/writers/base.js @@ -45,7 +45,9 @@ class BaseLLMObsWriter { /** @type {LLMObsBuffer} */ this._buffer = new LLMObsBuffer({ events: [], size: 0, isDefault: true }) + /** @type {import('../../config/config-base')} */ this._config = config + this._endpoint = endpoint this._baseEndpoint = endpoint // should not be unset this._intake = intake diff --git a/packages/dd-trace/src/llmobs/writers/util.js b/packages/dd-trace/src/llmobs/writers/util.js index b6e6f487069..eb1150e7702 100644 --- a/packages/dd-trace/src/llmobs/writers/util.js +++ b/packages/dd-trace/src/llmobs/writers/util.js @@ -6,6 +6,9 @@ const telemetry = require('../telemetry') const { fetchAgentInfo } = require('../../agent/info') const { getAgentUrl } = require('../../agent/url') +/** + * @param {import('../../config/config-base')} config + */ function setAgentStrategy (config, setWritersAgentlessValue) { const agentlessEnabled = config.llmobs.agentlessEnabled diff --git a/packages/dd-trace/src/log/index.js b/packages/dd-trace/src/log/index.js index 7ff2a82fe99..a237325d899 100644 --- a/packages/dd-trace/src/log/index.js +++ b/packages/dd-trace/src/log/index.js @@ -1,5 +1,8 @@ 'use strict' + const { inspect } = require('util') + +const { defaults } = require('../config/defaults') const { isTrue } = require('../util') const { getValueFromEnvSources } = require('../config/helper') const { traceChannel, debugChannel, 
infoChannel, warnChannel, errorChannel } = require('./channels') @@ -8,12 +11,17 @@ const { Log, LogConfig, NoTransmitError } = require('./log') const { memoize } = require('./utils') const config = { - enabled: false, + enabled: defaults.DD_TRACE_DEBUG, logger: undefined, - logLevel: 'debug', + logLevel: defaults.logLevel, } -// in most places where we know we want to mute a log we use log.error() directly +const deprecate = memoize((code, message) => { + publishFormatted(errorChannel, null, message) + return true +}) + +// In most places where we know we want to mute a log we use log.error() directly const NO_TRANSMIT = new LogConfig(false) const log = { @@ -21,36 +29,6 @@ const log = { NO_TRANSMIT, NoTransmitError, - /** - * @returns Read-only version of logging config. To modify config, call `log.use` and `log.toggle` - */ - getConfig () { - return { ...config } - }, - - use (logger) { - config.logger = logger - logWriter.use(logger) - return log - }, - - toggle (enabled, logLevel) { - config.enabled = enabled - config.logLevel = logLevel - logWriter.toggle(enabled, logLevel) - return log - }, - - reset () { - logWriter.reset() - log._deprecate = memoize((code, message) => { - publishFormatted(errorChannel, null, message) - return true - }) - - return log - }, - trace (...args) { if (traceChannel.hasSubscribers) { const logRecord = {} @@ -66,6 +44,8 @@ const log = { publishFormatted(traceChannel, null, stack.join('\n')) } + // TODO: Why do we allow chaining here? This is likely not used anywhere. + // If it is used, that seems like a mistake. return log }, @@ -103,30 +83,23 @@ const log = { }, deprecate (code, message) { - return log._deprecate(code, message) + return deprecate(code, message) }, - isEnabled (fleetStableConfigValue, localStableConfigValue) { - return isTrue( - fleetStableConfigValue ?? + configure (options) { + config.logger = options.logger + config.logLevel = options.logLevel ?? + getValueFromEnvSources('DD_TRACE_LOG_LEVEL') ?? 
+ config.logLevel + config.enabled = isTrue( getValueFromEnvSources('DD_TRACE_DEBUG') ?? - (getValueFromEnvSources('OTEL_LOG_LEVEL') === 'debug' || undefined) ?? - localStableConfigValue ?? - config.enabled + // TODO: Handle this by adding a log buffer so that configure may be called with the actual configurations. + // eslint-disable-next-line eslint-rules/eslint-process-env + (process.env.OTEL_LOG_LEVEL === 'debug' || config.enabled) ) - }, + logWriter.configure(config.enabled, config.logLevel, options.logger) - getLogLevel ( - optionsValue, - fleetStableConfigValue, - localStableConfigValue - ) { - return optionsValue ?? - fleetStableConfigValue ?? - getValueFromEnvSources('DD_TRACE_LOG_LEVEL') ?? - getValueFromEnvSources('OTEL_LOG_LEVEL') ?? - localStableConfigValue ?? - config.logLevel + return config.enabled }, } @@ -150,8 +123,6 @@ function getErrorLog (err) { return err } -log.reset() - -log.toggle(log.isEnabled(), log.getLogLevel()) +log.configure({}) module.exports = log diff --git a/packages/dd-trace/src/log/writer.js b/packages/dd-trace/src/log/writer.js index 13ce84d92d7..358a3b680fe 100644 --- a/packages/dd-trace/src/log/writer.js +++ b/packages/dd-trace/src/log/writer.js @@ -2,6 +2,7 @@ const { storage } = require('../../../datadog-core') const { LogChannel } = require('./channels') + const defaultLogger = { debug: msg => console.debug(msg), /* eslint-disable-line no-console */ info: msg => console.info(msg), /* eslint-disable-line no-console */ @@ -17,12 +18,8 @@ function withNoop (fn) { storage('legacy').run({ noop: true }, fn) } -function unsubscribeAll () { - logChannel.unsubscribe({ trace, debug, info, warn, error }) -} - function toggleSubscription (enable, level) { - unsubscribeAll() + logChannel.unsubscribe({ trace, debug, info, warn, error }) if (enable) { logChannel = new LogChannel(level) @@ -30,23 +27,14 @@ function toggleSubscription (enable, level) { } } -function toggle (enable, level) { +function configure (enable, level, newLogger) { 
enabled = enable + logger = typeof newLogger?.debug === 'function' && typeof newLogger.error === 'function' + ? newLogger + : defaultLogger toggleSubscription(enabled, level) } -function use (newLogger) { - if (typeof newLogger?.debug === 'function' && typeof newLogger.error === 'function') { - logger = newLogger - } -} - -function reset () { - logger = defaultLogger - enabled = false - toggleSubscription(false) -} - function error (err) { withNoop(() => logger.error(err)) } @@ -69,4 +57,4 @@ function trace (log) { withNoop(() => logger.debug(log)) } -module.exports = { use, toggle, reset, error, warn, info, debug, trace } +module.exports = { configure, error, warn, info, debug, trace } diff --git a/packages/dd-trace/src/noop/proxy.js b/packages/dd-trace/src/noop/proxy.js index b223aab3a65..53dd1e709b1 100644 --- a/packages/dd-trace/src/noop/proxy.js +++ b/packages/dd-trace/src/noop/proxy.js @@ -13,6 +13,10 @@ const noopDogStatsDClient = new NoopDogStatsDClient() const noopLLMObs = new NoopLLMObsSDK(noop) const noopOpenFeatureProvider = new NoopFlaggingProvider() const noopAIGuard = new NoopAIGuardSDK() +const noopProfiling = { + setCustomLabelKeys () {}, + runWithLabels (labels, fn) { return fn() }, +} /** @type {import('../../src/index')} Proxy */ class NoopProxy { @@ -98,6 +102,10 @@ class NoopProxy { return this } + get profiling () { + return noopProfiling + } + get TracerProvider () { return require('../opentelemetry/tracer_provider') } diff --git a/packages/dd-trace/src/opentelemetry/logs/index.js b/packages/dd-trace/src/opentelemetry/logs/index.js index 2d9ec8c71d7..a36446d7dbe 100644 --- a/packages/dd-trace/src/opentelemetry/logs/index.js +++ b/packages/dd-trace/src/opentelemetry/logs/index.js @@ -33,7 +33,7 @@ const OtlpHttpLogExporter = require('./otlp_http_log_exporter') /** * Initializes OpenTelemetry Logs support - * @param {Config} config - Tracer configuration instance + * @param {import('../../config/config-base')} config - Tracer configuration 
instance */ function initializeOpenTelemetryLogs (config) { // Build resource attributes diff --git a/packages/dd-trace/src/opentelemetry/metrics/index.js b/packages/dd-trace/src/opentelemetry/metrics/index.js index c0d116e2075..914baeee330 100644 --- a/packages/dd-trace/src/opentelemetry/metrics/index.js +++ b/packages/dd-trace/src/opentelemetry/metrics/index.js @@ -35,7 +35,7 @@ const OtlpHttpMetricExporter = require('./otlp_http_metric_exporter') /** * Initializes OpenTelemetry Metrics support - * @param {Config} config - Tracer configuration instance + * @param {import('../../config/config-base')} config - Tracer configuration instance */ function initializeOpenTelemetryMetrics (config) { const resourceAttributes = { diff --git a/packages/dd-trace/src/opentracing/propagation/text_map.js b/packages/dd-trace/src/opentracing/propagation/text_map.js index 3c7b65eefb0..50efb42c9e0 100644 --- a/packages/dd-trace/src/opentracing/propagation/text_map.js +++ b/packages/dd-trace/src/opentracing/propagation/text_map.js @@ -6,6 +6,7 @@ const id = require('../../id') const DatadogSpanContext = require('../span_context') const log = require('../../log') const tags = require('../../../../../ext/tags') +const { getConfiguredEnvName } = require('../../config/helper') const { setBaggageItem, getAllBaggageItems, removeAllBaggageItems } = require('../../baggage') const telemetryMetrics = require('../../telemetry/metrics') @@ -65,8 +66,15 @@ const zeroTraceId = '0000000000000000' const hex16 = /^[0-9A-Fa-f]{16}$/ class TextMapPropagator { + #extractB3Context + constructor (config) { this._config = config + + // TODO: should match "b3 single header" in next major + const envName = getConfiguredEnvName('DD_TRACE_PROPAGATION_STYLE') + // eslint-disable-next-line eslint-rules/eslint-env-aliases + this.#extractB3Context = envName === 'OTEL_PROPAGATORS' ? 
this._extractB3SingleContext : this._extractB3MultiContext } inject (spanContext, carrier) { @@ -363,10 +371,7 @@ class TextMapPropagator { extractedContext = this._extractB3SingleContext(carrier) break case 'b3': - extractedContext = this._config.tracePropagationStyle.otelPropagators - // TODO: should match "b3 single header" in next major - ? this._extractB3SingleContext(carrier) - : this._extractB3MultiContext(carrier) + extractedContext = this.#extractB3Context(carrier) break case 'b3multi': extractedContext = this._extractB3MultiContext(carrier) diff --git a/packages/dd-trace/src/payload-tagging/config/index.js b/packages/dd-trace/src/payload-tagging/config/index.js index 1f91dd9d6e7..c103349ca8b 100644 --- a/packages/dd-trace/src/payload-tagging/config/index.js +++ b/packages/dd-trace/src/payload-tagging/config/index.js @@ -3,16 +3,17 @@ const aws = require('./aws.json') const sdks = { aws } +/** @typedef {Record} SDKRules */ /** * Builds rules per service for a given SDK, appending user-provided rules. * - * @param {Record} sdk + * @param {SDKRules} sdk * @param {string[]} requestInput * @param {string[]} responseInput - * @returns {Record} + * @returns {SDKRules} */ function getSDKRules (sdk, requestInput, responseInput) { - const sdkServiceRules = {} + const sdkServiceRules = /** @type {SDKRules} */ ({}) for (const [service, serviceRules] of Object.entries(sdk)) { sdkServiceRules[service] = { // Make a copy. 
Otherwise calling the function multiple times would append @@ -31,10 +32,10 @@ function getSDKRules (sdk, requestInput, responseInput) { * * @param {string[]} [requestInput=[]] * @param {string[]} [responseInput=[]] - * @returns {Record>} + * @returns {Record} */ function appendRules (requestInput = [], responseInput = []) { - const sdkRules = {} + const sdkRules = /** @type {Record} */ ({}) for (const [name, sdk] of Object.entries(sdks)) { sdkRules[name] = getSDKRules(sdk, requestInput, responseInput) } diff --git a/packages/dd-trace/src/plugin_manager.js b/packages/dd-trace/src/plugin_manager.js index 2bf92b390c6..3a56b0d3a42 100644 --- a/packages/dd-trace/src/plugin_manager.js +++ b/packages/dd-trace/src/plugin_manager.js @@ -67,7 +67,6 @@ function getEnabled (Plugin) { module.exports = class PluginManager { constructor (tracer) { this._tracer = tracer - this._tracerConfig = null this._pluginsByName = {} this._configsByName = {} @@ -104,7 +103,7 @@ module.exports = class PluginManager { // extracts predetermined configuration from tracer and combines it with plugin-specific config this._pluginsByName[name].configure({ - ...this._getSharedConfig(name), + ...this.#getSharedConfig(name), ...pluginConfig, }) } @@ -121,8 +120,11 @@ module.exports = class PluginManager { this.loadPlugin(name) } - // like instrumenter.enable() - configure (config = {}) { + /** + * Like instrumenter.enable() + * @param {import('./config/config-base')} config - Tracer configuration + */ + configure (config) { this._tracerConfig = config this._tracer._nomenclature.configure(config) @@ -148,7 +150,7 @@ module.exports = class PluginManager { } // TODO: figure out a better way to handle this - _getSharedConfig (name) { + #getSharedConfig (name) { const { logInjection, serviceMapping, @@ -172,7 +174,7 @@ module.exports = class PluginManager { traceWebsocketMessagesSeparateTraces, experimental, resourceRenamingEnabled, - } = this._tracerConfig + } = /** @type {import('./config/config-base')} 
*/ (this._tracerConfig) const sharedConfig = { codeOriginForSpans, diff --git a/packages/dd-trace/src/plugins/ci_plugin.js b/packages/dd-trace/src/plugins/ci_plugin.js index 9008186c107..d318174bf61 100644 --- a/packages/dd-trace/src/plugins/ci_plugin.js +++ b/packages/dd-trace/src/plugins/ci_plugin.js @@ -469,6 +469,10 @@ module.exports = class CiPlugin extends Plugin { return getSessionRequestErrorTags(this.testSessionSpan) } + /** + * @param {import('../config/config-base')} config - Tracer configuration + * @param {boolean} shouldGetEnvironmentData - Whether to get environment data + */ configure (config, shouldGetEnvironmentData = true) { super.configure(config) diff --git a/packages/dd-trace/src/plugins/plugin.js b/packages/dd-trace/src/plugins/plugin.js index 0f12da1d81c..784b71720dd 100644 --- a/packages/dd-trace/src/plugins/plugin.js +++ b/packages/dd-trace/src/plugins/plugin.js @@ -72,7 +72,7 @@ module.exports = class Plugin { * Create a new plugin instance. * * @param {object} tracer Tracer instance or wrapper containing it under `_tracer`. - * @param {object} tracerConfig Global tracer configuration object. + * @param {import('../config/config-base')} tracerConfig Global tracer configuration object. */ constructor (tracer, tracerConfig) { this._subscriptions = [] @@ -80,6 +80,8 @@ module.exports = class Plugin { this._enabled = false this._tracer = tracer this.config = {} // plugin-specific configuration, unset until .configure() is called + + /** @type {import('../config/config-base')} */ this._tracerConfig = tracerConfig // global tracer configuration } @@ -163,9 +165,10 @@ module.exports = class Plugin { /** * Enable or disable the plugin and (re)apply its configuration. * - * @param {boolean|object} config Either a boolean to enable/disable or a configuration object - * containing at least `{ enabled: boolean }`. - * @returns {void} + * TODO: Remove the overloading with `enabled` and use the config object directly. 
+ * + * @param {boolean|import('../config/config-base')} config Either a boolean to enable/disable + * or a configuration object containing at least `{ enabled: boolean }`. */ configure (config) { if (typeof config === 'boolean') { diff --git a/packages/dd-trace/src/process-tags/index.js b/packages/dd-trace/src/process-tags/index.js index 6fe87b848cb..98f7cf3a2aa 100644 --- a/packages/dd-trace/src/process-tags/index.js +++ b/packages/dd-trace/src/process-tags/index.js @@ -72,6 +72,9 @@ function buildProcessTags (config) { // Singleton with constant defaults so pre-init reads don't blow up const processTags = module.exports = { + /** + * @param {import('../config/config-base')} config + */ initialize (config) { // check if one of the properties added during build exist and if so return if (processTags.tags) return diff --git a/packages/dd-trace/src/profiler.js b/packages/dd-trace/src/profiler.js index 4990212fe92..d1bacecc31e 100644 --- a/packages/dd-trace/src/profiler.js +++ b/packages/dd-trace/src/profiler.js @@ -5,13 +5,38 @@ const { profiler } = require('./profiling') globalThis[Symbol.for('dd-trace')].beforeExitHandlers.add(() => { profiler.stop() }) module.exports = { - start: config => { + /** + * @param {import('./config/config-base')} config - Tracer configuration + */ + start (config) { // Forward the full tracer config to the profiling layer. // Profiling code is responsible for deriving the specific options it needs. return profiler.start(config) }, - stop: () => { + stop () { profiler.stop() }, + + /** + * Declares the set of custom label keys that will be used with + * {@link runWithLabels}. + * + * @param {Iterable} keys - Custom label key names + */ + setCustomLabelKeys: (keys) => { + profiler.setCustomLabelKeys(keys) + }, + + /** + * Runs a function with custom profiling labels attached to wall profiler samples. 
+ * + * @param {Record} labels - Custom labels to attach + * @param {function(): T} fn - Function to execute with the labels + * @returns {T} The return value of fn + * @template T + */ + runWithLabels: (labels, fn) => { + return profiler.runWithLabels(labels, fn) + }, } diff --git a/packages/dd-trace/src/profiling/config.js b/packages/dd-trace/src/profiling/config.js index 040618c5b58..f34135284af 100644 --- a/packages/dd-trace/src/profiling/config.js +++ b/packages/dd-trace/src/profiling/config.js @@ -6,9 +6,8 @@ const { pathToFileURL } = require('url') const satisfies = require('../../../../vendor/dist/semifies') const { GIT_REPOSITORY_URL, GIT_COMMIT_SHA } = require('../plugins/util/tags') const { getIsAzureFunction } = require('../serverless') -const { isFalse, isTrue } = require('../util') const { getAzureTagsFromMetadata, getAzureAppMetadata, getAzureFunctionMetadata } = require('../azure_metadata') -const { getEnvironmentVariable, getValueFromEnvSources } = require('../config/helper') +const { getEnvironmentVariable } = require('../config/helper') const { getAgentUrl } = require('../agent/url') const { isACFActive } = require('../../../datadog-core/src/storage') @@ -22,59 +21,22 @@ const { oomExportStrategies, snapshotKinds } = require('./constants') const { tagger } = require('./tagger') class Config { - constructor (options = {}) { - // TODO: Remove entries that were already resolved in config. - // For the others, move them over to config. 
+ constructor (options) { const AWS_LAMBDA_FUNCTION_NAME = getEnvironmentVariable('AWS_LAMBDA_FUNCTION_NAME') - // TODO: Move initialization of these values to packages/dd-trace/src/config/index.js, and just read from config - const { - DD_INTERNAL_PROFILING_TIMELINE_SAMPLING_ENABLED, - DD_PROFILING_ASYNC_CONTEXT_FRAME_ENABLED, - DD_PROFILING_CODEHOTSPOTS_ENABLED, - DD_PROFILING_CPU_ENABLED, - DD_PROFILING_DEBUG_SOURCE_MAPS, - DD_PROFILING_DEBUG_UPLOAD_COMPRESSION, - DD_PROFILING_ENDPOINT_COLLECTION_ENABLED, - DD_PROFILING_EXPERIMENTAL_OOM_EXPORT_STRATEGIES, - DD_PROFILING_EXPERIMENTAL_OOM_HEAP_LIMIT_EXTENSION_SIZE, - DD_PROFILING_EXPERIMENTAL_OOM_MAX_HEAP_EXTENSION_COUNT, - DD_PROFILING_EXPERIMENTAL_OOM_MONITORING_ENABLED, - DD_PROFILING_HEAP_ENABLED, - DD_PROFILING_HEAP_SAMPLING_INTERVAL, - DD_PROFILING_PPROF_PREFIX, - DD_PROFILING_PROFILERS, - DD_PROFILING_TIMELINE_ENABLED, - DD_PROFILING_UPLOAD_PERIOD, - DD_PROFILING_UPLOAD_TIMEOUT, - DD_PROFILING_V8_PROFILER_BUG_WORKAROUND, - DD_PROFILING_WALLTIME_ENABLED, - DD_TAGS, - } = getProfilingEnvValues() - - // Must be longer than one minute so pad with five seconds - const flushInterval = options.interval ?? (Number(DD_PROFILING_UPLOAD_PERIOD) * 1000 || 65 * 1000) - const uploadTimeout = options.uploadTimeout ?? (Number(DD_PROFILING_UPLOAD_TIMEOUT) || 60 * 1000) - const pprofPrefix = options.pprofPrefix ?? DD_PROFILING_PPROF_PREFIX ?? '' - - // TODO: Remove the fallback. Just use the value from the config. - this.service = options.service || 'node' + this.version = options.version + this.service = options.service this.env = options.env this.functionname = AWS_LAMBDA_FUNCTION_NAME - this.version = options.version - this.tags = Object.assign( - tagger.parse(DD_TAGS), - tagger.parse(options.tags), - tagger.parse({ - env: options.env, + this.tags = { + ...options.tags, + ...tagger.parse({ host: options.reportHostname ? 
require('os').hostname() : undefined, - service: this.service, - version: this.version, functionname: AWS_LAMBDA_FUNCTION_NAME, }), - getAzureTagsFromMetadata(getIsAzureFunction() ? getAzureFunctionMetadata() : getAzureAppMetadata()) - ) + ...getAzureTagsFromMetadata(getIsAzureFunction() ? getAzureFunctionMetadata() : getAzureAppMetadata()), + } // Add source code integration tags if available if (options.repositoryUrl && options.commitSHA) { @@ -82,58 +44,35 @@ class Config { this.tags[GIT_COMMIT_SHA] = options.commitSHA } - this.logger = ensureLogger(options.logger) - // Profiler sampling contexts are not available on Windows, so features - // depending on those (code hotspots and endpoint collection) need to default - // to false on Windows. - const samplingContextsAvailable = process.platform !== 'win32' - function checkOptionAllowed (option, description, condition) { - if (option && !condition) { - // injection hardening: all of these can only happen if user explicitly - // sets an environment variable to its non-default value on the platform. - // In practical terms, it'd require someone explicitly turning on OOM - // monitoring, code hotspots, endpoint profiling, or CPU profiling on - // Windows, where it is not supported. - throw new Error(`${description} not supported on ${process.platform}.`) - } - } - function checkOptionWithSamplingContextAllowed (option, description) { - checkOptionAllowed(option, description, samplingContextsAvailable) - } + // Normalize from seconds to milliseconds. Default must be longer than a minute. 
+ this.flushInterval = options.DD_PROFILING_UPLOAD_PERIOD * 1000 + this.uploadTimeout = options.DD_PROFILING_UPLOAD_TIMEOUT + this.sourceMap = options.DD_PROFILING_SOURCE_MAP + this.debugSourceMaps = options.DD_PROFILING_DEBUG_SOURCE_MAPS + this.endpointCollectionEnabled = options.DD_PROFILING_ENDPOINT_COLLECTION_ENABLED + this.pprofPrefix = options.DD_PROFILING_PPROF_PREFIX + this.v8ProfilerBugWorkaroundEnabled = options.DD_PROFILING_V8_PROFILER_BUG_WORKAROUND - this.flushInterval = flushInterval - this.uploadTimeout = uploadTimeout - this.sourceMap = options.sourceMap - this.debugSourceMaps = isTrue(options.debugSourceMaps ?? DD_PROFILING_DEBUG_SOURCE_MAPS) - this.endpointCollectionEnabled = isTrue(options.endpointCollection ?? - DD_PROFILING_ENDPOINT_COLLECTION_ENABLED ?? samplingContextsAvailable) - checkOptionWithSamplingContextAllowed(this.endpointCollectionEnabled, 'Endpoint collection') - - this.pprofPrefix = pprofPrefix - this.v8ProfilerBugWorkaroundEnabled = isTrue(options.v8ProfilerBugWorkaround ?? - DD_PROFILING_V8_PROFILER_BUG_WORKAROUND ?? true) + this.logger = ensureLogger(options.logger) this.url = getAgentUrl(options) - this.libraryInjected = options.libraryInjected - this.activation = options.activation - this.exporters = ensureExporters(options.exporters || [ - new AgentExporter(this), - ], this) + this.libraryInjected = !!options.DD_INJECTION_ENABLED - // OOM monitoring does not work well on Windows, so it is disabled by default. - const oomMonitoringSupported = process.platform !== 'win32' + let activation + if (options.profiling.enabled === 'auto') { + activation = 'auto' + } else if (options.profiling.enabled === 'true') { + activation = 'manual' + } // else activation = undefined - const oomMonitoringEnabled = isTrue(options.oomMonitoring ?? - DD_PROFILING_EXPERIMENTAL_OOM_MONITORING_ENABLED ?? 
oomMonitoringSupported) - checkOptionAllowed(oomMonitoringEnabled, 'OOM monitoring', oomMonitoringSupported) + this.activation = activation + this.exporters = ensureExporters(options.DD_PROFILING_EXPORTERS, this) - const heapLimitExtensionSize = options.oomHeapLimitExtensionSize ?? - (Number(DD_PROFILING_EXPERIMENTAL_OOM_HEAP_LIMIT_EXTENSION_SIZE) || 0) - const maxHeapExtensionCount = options.oomMaxHeapExtensionCount ?? - (Number(DD_PROFILING_EXPERIMENTAL_OOM_MAX_HEAP_EXTENSION_COUNT) || 0) + const oomMonitoringEnabled = options.DD_PROFILING_EXPERIMENTAL_OOM_MONITORING_ENABLED + const heapLimitExtensionSize = options.DD_PROFILING_EXPERIMENTAL_OOM_HEAP_LIMIT_EXTENSION_SIZE + const maxHeapExtensionCount = options.DD_PROFILING_EXPERIMENTAL_OOM_MAX_HEAP_EXTENSION_COUNT const exportStrategies = oomMonitoringEnabled - ? ensureOOMExportStrategies(options.oomExportStrategies ?? DD_PROFILING_EXPERIMENTAL_OOM_EXPORT_STRATEGIES ?? - [oomExportStrategies.PROCESS], this) + ? ensureOOMExportStrategies(options.DD_PROFILING_EXPERIMENTAL_OOM_EXPORT_STRATEGIES, this) : [] const exportCommand = oomMonitoringEnabled ? buildExportCommand(this) : undefined this.oomMonitoring = { @@ -144,61 +83,26 @@ class Config { exportCommand, } - const profilers = options.profilers || getProfilers({ - DD_PROFILING_HEAP_ENABLED, - DD_PROFILING_WALLTIME_ENABLED, - DD_PROFILING_PROFILERS, - }) + const profilers = getProfilers(options) - this.timelineEnabled = isTrue( - options.timelineEnabled ?? DD_PROFILING_TIMELINE_ENABLED ?? samplingContextsAvailable - ) - checkOptionWithSamplingContextAllowed(this.timelineEnabled, 'Timeline view') - this.timelineSamplingEnabled = isTrue( - options.timelineSamplingEnabled ?? DD_INTERNAL_PROFILING_TIMELINE_SAMPLING_ENABLED ?? 
true - ) + this.timelineEnabled = options.DD_PROFILING_TIMELINE_ENABLED + this.timelineSamplingEnabled = options.DD_INTERNAL_PROFILING_TIMELINE_SAMPLING_ENABLED + this.codeHotspotsEnabled = options.DD_PROFILING_CODEHOTSPOTS_ENABLED + this.cpuProfilingEnabled = options.DD_PROFILING_CPU_ENABLED + this.heapSamplingInterval = options.DD_PROFILING_HEAP_SAMPLING_INTERVAL - this.codeHotspotsEnabled = isTrue( - options.codeHotspotsEnabled ?? DD_PROFILING_CODEHOTSPOTS_ENABLED ?? samplingContextsAvailable - ) - checkOptionWithSamplingContextAllowed(this.codeHotspotsEnabled, 'Code hotspots') - - this.cpuProfilingEnabled = isTrue( - options.cpuProfilingEnabled ?? DD_PROFILING_CPU_ENABLED ?? samplingContextsAvailable - ) - checkOptionWithSamplingContextAllowed(this.cpuProfilingEnabled, 'CPU profiling') - - this.samplingInterval = options.samplingInterval || 1e3 / 99 // 99hz in millis - - this.heapSamplingInterval = options.heapSamplingInterval ?? - (Number(DD_PROFILING_HEAP_SAMPLING_INTERVAL) || 512 * 1024) + this.samplingInterval = 1e3 / 99 // 99hz in milliseconds const isAtLeast24 = satisfies(process.versions.node, '>=24.0.0') - const uploadCompression0 = options.uploadCompression ?? DD_PROFILING_DEBUG_UPLOAD_COMPRESSION ?? 'on' + const uploadCompression0 = options.DD_PROFILING_DEBUG_UPLOAD_COMPRESSION let [uploadCompression, level0] = uploadCompression0.split('-') - if (!['on', 'off', 'gzip', 'zstd'].includes(uploadCompression)) { - this.logger.warn(`Invalid profile upload compression method "${uploadCompression0}". Will use "on".`) - uploadCompression = 'on' - } let level = level0 ? Number.parseInt(level0, 10) : undefined if (level !== undefined) { - if (['on', 'off'].includes(uploadCompression)) { - this.logger.warn(`Compression levels are not supported for "${uploadCompression}".`) - level = undefined - } else if (Number.isNaN(level)) { - this.logger.warn( - `Invalid compression level "${level0}". 
Will use default level.`) - level = undefined - } else if (level < 1) { - this.logger.warn(`Invalid compression level ${level}. Will use 1.`) - level = 1 - } else { - const maxLevel = { gzip: 9, zstd: 22 }[uploadCompression] - if (level > maxLevel) { - this.logger.warn(`Invalid compression level ${level}. Will use ${maxLevel}.`) - level = maxLevel - } + const maxLevel = { gzip: 9, zstd: 22 }[uploadCompression] + if (level > maxLevel) { + this.logger.warn(`Invalid compression level ${level}. Will use ${maxLevel}.`) + level = maxLevel } } @@ -219,13 +123,9 @@ class Config { that.asyncContextFrameEnabled = false } - const canUseAsyncContextFrame = samplingContextsAvailable && isACFActive - - this.asyncContextFrameEnabled = isTrue(DD_PROFILING_ASYNC_CONTEXT_FRAME_ENABLED ?? canUseAsyncContextFrame) - if (this.asyncContextFrameEnabled && !canUseAsyncContextFrame) { - if (!samplingContextsAvailable) { - turnOffAsyncContextFrame(`on ${process.platform}`) - } else if (isAtLeast24) { + this.asyncContextFrameEnabled = options.DD_PROFILING_ASYNC_CONTEXT_FRAME_ENABLED ?? 
isACFActive + if (this.asyncContextFrameEnabled && !isACFActive) { + if (isAtLeast24) { turnOffAsyncContextFrame('with --no-async-context-frame') } else if (satisfies(process.versions.node, '>=22.9.0')) { turnOffAsyncContextFrame('without --experimental-async-context-frame') @@ -234,7 +134,7 @@ class Config { } } - this.heartbeatInterval = options.heartbeatInterval || 60 * 1000 // 1 minute + this.heartbeatInterval = options.telemetry.heartbeatInterval this.profilers = ensureProfilers(profilers, this) } @@ -248,7 +148,7 @@ class Config { endpointCollectionEnabled: this.endpointCollectionEnabled, heapSamplingInterval: this.heapSamplingInterval, oomMonitoring: { ...this.oomMonitoring }, - profilerTypes: this.profilers.map(p => p.type), + profilerTypes: this.profilers.map(profiler => profiler.type), sourceMap: this.sourceMap, timelineEnabled: this.timelineEnabled, timelineSamplingEnabled: this.timelineSamplingEnabled, @@ -263,7 +163,9 @@ class Config { module.exports = { Config } function getProfilers ({ - DD_PROFILING_HEAP_ENABLED, DD_PROFILING_WALLTIME_ENABLED, DD_PROFILING_PROFILERS, + DD_PROFILING_HEAP_ENABLED, + DD_PROFILING_WALLTIME_ENABLED, + DD_PROFILING_PROFILERS, }) { // First consider "legacy" DD_PROFILING_PROFILERS env variable, defaulting to space + wall // Use a Set to avoid duplicates @@ -272,26 +174,26 @@ function getProfilers ({ // snapshots the space profile won't include memory taken by profiles created // before it in the sequence. That memory is ultimately transient and will be // released when all profiles are subsequently encoded. - const profilers = new Set((DD_PROFILING_PROFILERS ?? 
'space,wall').split(',')) + const profilers = new Set(DD_PROFILING_PROFILERS) let spaceExplicitlyEnabled = false // Add/remove space depending on the value of DD_PROFILING_HEAP_ENABLED - if (DD_PROFILING_HEAP_ENABLED != null) { - if (isTrue(DD_PROFILING_HEAP_ENABLED)) { + if (DD_PROFILING_HEAP_ENABLED !== undefined) { + if (DD_PROFILING_HEAP_ENABLED) { if (!profilers.has('space')) { profilers.add('space') spaceExplicitlyEnabled = true } - } else if (isFalse(DD_PROFILING_HEAP_ENABLED)) { + } else { profilers.delete('space') } } // Add/remove wall depending on the value of DD_PROFILING_WALLTIME_ENABLED - if (DD_PROFILING_WALLTIME_ENABLED != null) { - if (isTrue(DD_PROFILING_WALLTIME_ENABLED)) { + if (DD_PROFILING_WALLTIME_ENABLED !== undefined) { + if (DD_PROFILING_WALLTIME_ENABLED) { profilers.add('wall') - } else if (isFalse(DD_PROFILING_WALLTIME_ENABLED)) { + } else { profilers.delete('wall') profilers.delete('cpu') // remove alias too } @@ -321,22 +223,12 @@ function getExportStrategy (name, options) { } function ensureOOMExportStrategies (strategies, options) { - if (!strategies) { - return [] + const set = new Set() + for (const strategy of strategies) { + set.add(getExportStrategy(strategy, options)) } - if (typeof strategies === 'string') { - strategies = strategies.split(',') - } - - for (let i = 0; i < strategies.length; i++) { - const strategy = strategies[i] - if (typeof strategy === 'string') { - strategies[i] = getExportStrategy(strategy, options) - } - } - - return [...new Set(strategies)] + return [...set] } function getExporter (name, options) { @@ -345,22 +237,13 @@ function getExporter (name, options) { return new AgentExporter(options) case 'file': return new FileExporter(options) + default: + options.logger.error(`Unknown exporter "${name}"`) } } function ensureExporters (exporters, options) { - if (typeof exporters === 'string') { - exporters = exporters.split(',') - } - - for (let i = 0; i < exporters.length; i++) { - const exporter = 
exporters[i] - if (typeof exporter === 'string') { - exporters[i] = getExporter(exporter, options) - } - } - - return exporters + return exporters.map((exporter) => getExporter(exporter, options)) } function getProfiler (name, options) { @@ -376,30 +259,26 @@ function getProfiler (name, options) { } function ensureProfilers (profilers, options) { - if (typeof profilers === 'string') { - profilers = profilers.split(',') - } + const filteredProfilers = [] for (let i = 0; i < profilers.length; i++) { - const profiler = profilers[i] - if (typeof profiler === 'string') { - profilers[i] = getProfiler(profiler, options) + const profiler = getProfiler(profilers[i], options) + if (profiler !== undefined) { + filteredProfilers.push(profiler) } } // Events profiler is a profiler that produces timeline events. It is only // added if timeline is enabled and there's a wall profiler. - if (options.timelineEnabled && profilers.some(p => p instanceof WallProfiler)) { - profilers.push(new EventsProfiler(options)) + if (options.timelineEnabled && filteredProfilers.some(profiler => profiler instanceof WallProfiler)) { + filteredProfilers.push(new EventsProfiler(options)) } - // Filter out any invalid profilers - return profilers.filter(Boolean) + return filteredProfilers } function ensureLogger (logger) { - if (typeof logger !== 'object' || - typeof logger.debug !== 'function' || + if (typeof logger?.debug !== 'function' || typeof logger.info !== 'function' || typeof logger.warn !== 'function' || typeof logger.error !== 'function') { @@ -424,50 +303,3 @@ function buildExportCommand (options) { path.join(__dirname, 'exporter_cli.js'), urls.join(','), tags, 'space'] } - -function getProfilingEnvValues () { - return { - DD_INTERNAL_PROFILING_TIMELINE_SAMPLING_ENABLED: - getValueFromEnvSources('DD_INTERNAL_PROFILING_TIMELINE_SAMPLING_ENABLED'), - DD_PROFILING_ASYNC_CONTEXT_FRAME_ENABLED: - getValueFromEnvSources('DD_PROFILING_ASYNC_CONTEXT_FRAME_ENABLED'), - 
DD_PROFILING_CODEHOTSPOTS_ENABLED: - getValueFromEnvSources('DD_PROFILING_CODEHOTSPOTS_ENABLED'), - DD_PROFILING_CPU_ENABLED: - getValueFromEnvSources('DD_PROFILING_CPU_ENABLED'), - DD_PROFILING_DEBUG_SOURCE_MAPS: - getValueFromEnvSources('DD_PROFILING_DEBUG_SOURCE_MAPS'), - DD_PROFILING_DEBUG_UPLOAD_COMPRESSION: - getValueFromEnvSources('DD_PROFILING_DEBUG_UPLOAD_COMPRESSION'), - DD_PROFILING_ENDPOINT_COLLECTION_ENABLED: - getValueFromEnvSources('DD_PROFILING_ENDPOINT_COLLECTION_ENABLED'), - DD_PROFILING_EXPERIMENTAL_OOM_EXPORT_STRATEGIES: - getValueFromEnvSources('DD_PROFILING_EXPERIMENTAL_OOM_EXPORT_STRATEGIES'), - DD_PROFILING_EXPERIMENTAL_OOM_HEAP_LIMIT_EXTENSION_SIZE: - getValueFromEnvSources('DD_PROFILING_EXPERIMENTAL_OOM_HEAP_LIMIT_EXTENSION_SIZE'), - DD_PROFILING_EXPERIMENTAL_OOM_MAX_HEAP_EXTENSION_COUNT: - getValueFromEnvSources('DD_PROFILING_EXPERIMENTAL_OOM_MAX_HEAP_EXTENSION_COUNT'), - DD_PROFILING_EXPERIMENTAL_OOM_MONITORING_ENABLED: - getValueFromEnvSources('DD_PROFILING_EXPERIMENTAL_OOM_MONITORING_ENABLED'), - DD_PROFILING_HEAP_ENABLED: - getValueFromEnvSources('DD_PROFILING_HEAP_ENABLED'), - DD_PROFILING_HEAP_SAMPLING_INTERVAL: - getValueFromEnvSources('DD_PROFILING_HEAP_SAMPLING_INTERVAL'), - DD_PROFILING_PPROF_PREFIX: - getValueFromEnvSources('DD_PROFILING_PPROF_PREFIX'), - DD_PROFILING_PROFILERS: - getValueFromEnvSources('DD_PROFILING_PROFILERS'), - DD_PROFILING_TIMELINE_ENABLED: - getValueFromEnvSources('DD_PROFILING_TIMELINE_ENABLED'), - DD_PROFILING_UPLOAD_PERIOD: - getValueFromEnvSources('DD_PROFILING_UPLOAD_PERIOD'), - DD_PROFILING_UPLOAD_TIMEOUT: - getValueFromEnvSources('DD_PROFILING_UPLOAD_TIMEOUT'), - DD_PROFILING_V8_PROFILER_BUG_WORKAROUND: - getValueFromEnvSources('DD_PROFILING_V8_PROFILER_BUG_WORKAROUND'), - DD_PROFILING_WALLTIME_ENABLED: - getValueFromEnvSources('DD_PROFILING_WALLTIME_ENABLED'), - DD_TAGS: - getValueFromEnvSources('DD_TAGS'), - } -} diff --git a/packages/dd-trace/src/profiling/exporter_cli.js 
b/packages/dd-trace/src/profiling/exporter_cli.js index cba3d6349b1..a122a334664 100644 --- a/packages/dd-trace/src/profiling/exporter_cli.js +++ b/packages/dd-trace/src/profiling/exporter_cli.js @@ -17,9 +17,6 @@ function exporterFromURL (url) { if (url.protocol === 'file:') { return new FileExporter({ pprofPrefix: fileURLToPath(url) }) } - // TODO: Why is DD_INJECTION_ENABLED a comma separated list? - const injectionEnabled = (getValueFromEnvSources('DD_INJECTION_ENABLED') ?? '').split(',') - const libraryInjected = injectionEnabled.length > 0 const profilingEnabled = (getValueFromEnvSources('DD_PROFILING_ENABLED') ?? '').toLowerCase() const activation = ['true', '1'].includes(profilingEnabled) ? 'manual' @@ -30,7 +27,7 @@ function exporterFromURL (url) { url, logger, uploadTimeout: timeoutMs, - libraryInjected, + libraryInjected: !!getValueFromEnvSources('DD_INJECTION_ENABLED'), activation, }) } diff --git a/packages/dd-trace/src/profiling/exporters/event_serializer.js b/packages/dd-trace/src/profiling/exporters/event_serializer.js index a7bd652f9e9..5929766709b 100644 --- a/packages/dd-trace/src/profiling/exporters/event_serializer.js +++ b/packages/dd-trace/src/profiling/exporters/event_serializer.js @@ -14,7 +14,7 @@ class EventSerializer { this._host = host this._service = service this._appVersion = version - this._libraryInjected = !!libraryInjected + this._libraryInjected = libraryInjected this._activation = activation || 'unknown' } @@ -22,7 +22,7 @@ class EventSerializer { return `${type}.pprof` } - getEventJSON ({ profiles, infos, start, end, tags = {}, endpointCounts }) { + getEventJSON ({ profiles, infos, start, end, tags = {}, endpointCounts, customAttributes }) { const event = { attachments: Object.keys(profiles).map(t => this.typeToFile(t)), start: start.toISOString(), @@ -80,6 +80,10 @@ class EventSerializer { }, } + if (customAttributes) { + event.custom_attributes = customAttributes + } + if (processTags.serialized) { 
event[processTags.PROFILING_FIELD_NAME] = processTags.serialized } diff --git a/packages/dd-trace/src/profiling/profiler.js b/packages/dd-trace/src/profiling/profiler.js index c107fc82750..72966640664 100644 --- a/packages/dd-trace/src/profiling/profiler.js +++ b/packages/dd-trace/src/profiling/profiler.js @@ -51,6 +51,7 @@ class Profiler extends EventEmitter { #compressionFnInitialized = false #compressionOptions #config + #customLabelKeys = new Set() #enabled = false #endpointCounts = new Map() #lastStart @@ -70,56 +71,22 @@ class Profiler extends EventEmitter { return this.#config?.flushInterval } + /** + * @param {import('../config/config-base')} config - Tracer configuration + */ start (config) { - const { - service, - version, - env, - url, - hostname, - port, - tags, - repositoryUrl, - commitSHA, - injectionEnabled, - reportHostname, - } = config - const { enabled, sourceMap, exporters } = config.profiling - const { heartbeatInterval } = config.telemetry - // TODO: Unify with main logger and rewrite template strings to use printf formatting. const logger = { - debug (message) { log.debug(message) }, - info (message) { log.info(message) }, - warn (message) { log.warn(message) }, - error (...args) { log.error(...args) }, + debug: log.debug.bind(log), + info: log.info.bind(log), + warn: log.warn.bind(log), + error: log.error.bind(log), } - const libraryInjected = injectionEnabled.length > 0 - let activation - if (enabled === 'auto') { - activation = 'auto' - } else if (enabled === 'true') { - activation = 'manual' - } // else activation = undefined - + // TODO: Rewrite this to not need to copy the config. 
const options = { - service, - version, - env, + ...config, logger, - sourceMap, - exporters, - url, - hostname, - port, - tags, - repositoryUrl, - commitSHA, - libraryInjected, - activation, - heartbeatInterval, - reportHostname, } try { @@ -135,6 +102,45 @@ class Profiler extends EventEmitter { return this.#enabled } + /** + * Declares the set of custom label keys that will be used with + * {@link runWithLabels}. This is used for profile upload metadata and + * for pprof serialization optimization (low-cardinality deduplication). + * + * @param {Iterable} keys - Custom label key names + */ + setCustomLabelKeys (keys) { + this.#customLabelKeys.clear() + for (const key of keys) { + this.#customLabelKeys.add(key) + } + if (this.#config) { + for (const profiler of this.#config.profilers) { + profiler.setCustomLabelKeys?.(this.#customLabelKeys) + } + } + } + + /** + * Runs a function with custom profiling labels attached to wall profiler samples. + * + * @param {Record} labels - Custom labels to attach + * @param {function(): T} fn - Function to execute with the labels + * @returns {T} The return value of fn + * @template T + */ + runWithLabels (labels, fn) { + if (!this.#enabled || !this.#config) { + return fn() + } + for (const profiler of this.#config.profilers) { + if (profiler.runWithLabels) { + return profiler.runWithLabels(labels, fn) + } + } + return fn() + } + #logError (err) { logError(this.#logger, err) } @@ -182,6 +188,9 @@ class Profiler extends EventEmitter { return this.#compressionFn } + /** + * @param {import('../config/config-base')} options - Tracer configuration + */ _start (options) { if (this.enabled) return true @@ -410,7 +419,10 @@ class Profiler extends EventEmitter { tags.snapshot = snapshotKind tags.profile_seq = this.#profileSeq++ - const exportSpec = { profiles, infos, start, end, tags, endpointCounts } + const customAttributes = this.#customLabelKeys.size > 0 + ? 
[...this.#customLabelKeys] + : undefined + const exportSpec = { profiles, infos, start, end, tags, endpointCounts, customAttributes } const tasks = this.#config.exporters.map(exporter => exporter.export(exportSpec).catch(err => { if (this.#logger) { diff --git a/packages/dd-trace/src/profiling/profilers/events.js b/packages/dd-trace/src/profiling/profilers/events.js index c2e5aa02fd9..eddeef7fab1 100644 --- a/packages/dd-trace/src/profiling/profilers/events.js +++ b/packages/dd-trace/src/profiling/profilers/events.js @@ -51,8 +51,7 @@ function labelFromStrStr (stringTable, keyStr, valStr) { } function getMaxSamples (options) { - const flushInterval = options.flushInterval || 65 * 1e3 // 65 seconds - const maxCpuSamples = flushInterval / options.samplingInterval + const maxCpuSamples = options.flushInterval / options.samplingInterval // The lesser of max parallelism and libuv thread pool size, plus one so we can detect // oversubscription on libuv thread pool, plus another one for GC. @@ -403,7 +402,7 @@ class EventsProfiler { get type () { return 'events' } - constructor (options = {}) { + constructor (options) { this.#maxSamples = getMaxSamples(options) this.#timelineSamplingEnabled = !!options.timelineSamplingEnabled this.#eventSerializer = new EventSerializer(this.#maxSamples) diff --git a/packages/dd-trace/src/profiling/profilers/wall.js b/packages/dd-trace/src/profiling/profilers/wall.js index 72afd547f72..54559d888cf 100644 --- a/packages/dd-trace/src/profiling/profilers/wall.js +++ b/packages/dd-trace/src/profiling/profilers/wall.js @@ -108,6 +108,8 @@ class NativeWallProfiler { #captureSpanData = false #codeHotspotsEnabled = false #cpuProfilingEnabled = false + #customLabelsActive = false + #customLabelKeys #endpointCollectionEnabled = false #flushIntervalMillis = 0 #logger @@ -245,7 +247,24 @@ class NativeWallProfiler { // context -- we simply can't tell which one it might've been across all // possible async context frames. 
if (this.#asyncContextFrameEnabled) { - this.#pprof.time.setContext(sampleContext) + if (this.#customLabelsActive) { + // Custom labels may be active in this async context. The current CPED + // context could be a 2-element array [profilingContext, customLabels]. + // Replace the profiling context while preserving the custom labels. + // This flag is monotonic (once set, stays true) because async + // continuations from runWithLabels can fire at any time after the + // synchronous runWithLabels call has returned. + const current = this.#pprof.time.getContext() + if (Array.isArray(current)) { + if (current[0] !== sampleContext) { + this.#pprof.time.setContext([sampleContext, current[1]]) + } + } else if (current !== sampleContext) { + this.#pprof.time.setContext(sampleContext) + } + } else { + this.#pprof.time.setContext(sampleContext) + } } else { const sampleCount = this._profilerState[kSampleCount] if (sampleCount !== this._lastSampleCount) { @@ -344,6 +363,13 @@ class NativeWallProfiler { const lowCardinalityLabels = Object.keys(getThreadLabels()) lowCardinalityLabels.push(TRACE_ENDPOINT_LABEL) + // Custom labels are expected to be low-cardinality (e.g. customer tier, region) + if (this.#customLabelKeys) { + for (const key of this.#customLabelKeys) { + lowCardinalityLabels.push(key) + } + } + const profile = this.#pprof.time.stop(restart, this.#boundGenerateLabels, lowCardinalityLabels) if (restart) { @@ -383,7 +409,29 @@ class NativeWallProfiler { return getThreadLabels() } - const labels = { ...getThreadLabels() } + // Native profiler doesn't set context.context for some samples, such as idle samples or when + // the context was otherwise unavailable when the sample was taken. Note that with ACF, we don't + // use the "ref" indirection. + let ref + let customLabels + const cctx = context.context + if (this.#asyncContextFrameEnabled) { + // When custom labels are active with ACF, context.context is a 2-element array: + // [profilingContext, customLabels]. 
Otherwise it's a plain object. + if (Array.isArray(cctx)) { + [ref, customLabels] = cctx + } else { + ref = cctx + } + } else { + ref = cctx?.ref + } + + // Custom labels are spread first so that internal labels always take + // precedence and overwrite them. + const labels = customLabels === undefined + ? { ...getThreadLabels() } + : { ...customLabels, ...getThreadLabels() } if (this.#timelineEnabled) { // Incoming timestamps are in microseconds, we emit nanos. @@ -395,10 +443,6 @@ class NativeWallProfiler { labels['async id'] = asyncId } - // Native profiler doesn't set context.context for some samples, such as idle samples or when - // the context was otherwise unavailable when the sample was taken. Note that with async context - // frame, we don't use the "ref" indirection. - const ref = this.#asyncContextFrameEnabled ? context.context : context.context?.ref if (typeof ref !== 'object') { return labels } @@ -421,6 +465,45 @@ class NativeWallProfiler { return labels } + /** + * Sets the custom label keys used for pprof low-cardinality deduplication. + * Called once by the top-level Profiler when keys are declared. + * + * @param {Iterable} keys + */ + setCustomLabelKeys (keys) { + this.#customLabelKeys = keys + } + + /** + * Runs a function with custom profiling labels attached to all wall profiler + * samples taken during its execution. Labels are key-value pairs that appear + * in the pprof output and can be used to filter flame graphs in the Datadog UI. + * + * Requires AsyncContextFrame (ACF) to be enabled. Supports nesting: inner + * calls merge labels with outer calls, with inner values taking precedence. 
+ * + * @param {Record} labels - Custom labels to attach + * @param {function(): T} fn - Function to execute with the labels + * @returns {T} The return value of fn + * @template T + */ + runWithLabels (labels, fn) { + if (!this.#asyncContextFrameEnabled || !this.#withContexts) { + return fn() + } + + // Read current context; merge custom labels if already in a runWithLabels scope + const current = this.#pprof.time.getContext() + const isCurrentArray = Array.isArray(current) + const customLabels = isCurrentArray ? { ...current[1], ...labels } : labels + + const profilingContext = (isCurrentArray ? current[0] : current) ?? {} + + this.#customLabelsActive = true + return this.#pprof.time.runWithContext([profilingContext, customLabels], fn) + } + profile (restart) { return this.#stop(restart) } diff --git a/packages/dd-trace/src/profiling/ssi-heuristics.js b/packages/dd-trace/src/profiling/ssi-heuristics.js index 994cf7d6a46..e83ba71b18f 100644 --- a/packages/dd-trace/src/profiling/ssi-heuristics.js +++ b/packages/dd-trace/src/profiling/ssi-heuristics.js @@ -1,6 +1,6 @@ 'use strict' -const dc = require('dc-polyfill') +const dc = /** @type {typeof import('diagnostics_channel')} */ (require('dc-polyfill')) const log = require('../log') // If the process lives for at least 30 seconds, it's considered long-lived @@ -10,6 +10,9 @@ const DEFAULT_LONG_LIVED_THRESHOLD = 30_000 * This class embodies the SSI profiler-triggering heuristics under SSI. 
*/ class SSIHeuristics { + /** + * @param {import('../config/config-base')} config - Tracer configuration + */ constructor (config) { const longLivedThreshold = config.profiling.longLivedThreshold || DEFAULT_LONG_LIVED_THRESHOLD if (typeof longLivedThreshold !== 'number' || longLivedThreshold <= 0) { diff --git a/packages/dd-trace/src/propagation-hash/index.js b/packages/dd-trace/src/propagation-hash/index.js index 74d61f3938b..29cb6069809 100644 --- a/packages/dd-trace/src/propagation-hash/index.js +++ b/packages/dd-trace/src/propagation-hash/index.js @@ -17,11 +17,12 @@ class PropagationHashManager { _cachedHash = null _cachedHashString = null _cachedHashBase64 = null + /** @type {import('../config/config-base') | null} */ _config = null /** * Configure the propagation hash manager with tracer config - * @param {object} config - Tracer configuration + * @param {import('../config/config-base')} config - Tracer configuration */ configure (config) { this._config = config diff --git a/packages/dd-trace/src/proxy.js b/packages/dd-trace/src/proxy.js index d7a4cedf7b9..0ad8a0416e8 100644 --- a/packages/dd-trace/src/proxy.js +++ b/packages/dd-trace/src/proxy.js @@ -27,9 +27,12 @@ class LazyModule { this.provider = provider } - enable (...args) { + /** + * @param {import('./config/config-base')} config - Tracer configuration + */ + enable (config, ...args) { this.module = this.provider() - this.module.enable(...args) + this.module.enable(config, ...args) } disable () { @@ -238,12 +241,16 @@ class Tracer extends NoopProxy { getDynamicInstrumentationClient(config) } } catch (e) { - log.error('Error initialising tracer', e) + log.error('Error initializing tracer', e) + // TODO: Should we stop everything started so far? 
} return this } + /** + * @param {import('./config/config-base')} config - Tracer configuration + */ _startProfiler (config) { // do not stop tracer initialization if the profiler fails to be imported try { @@ -257,6 +264,9 @@ class Tracer extends NoopProxy { } } + /** + * @param {import('./config/config-base')} config - Tracer configuration + */ #updateTracing (config) { if (config.tracing !== false) { if (config.appsec.enabled) { @@ -330,6 +340,25 @@ class Tracer extends NoopProxy { } } + /** + * @override + */ + get profiling () { + // Lazily require the profiler module and cache the result. If profiling + // is not enabled, runWithLabels still works as a passthrough (just calls fn()). + const profilerModule = require('./profiler') + const profiling = { + setCustomLabelKeys (keys) { + profilerModule.setCustomLabelKeys(keys) + }, + runWithLabels (labels, fn) { + return profilerModule.runWithLabels(labels, fn) + }, + } + Reflect.defineProperty(this, 'profiling', { value: profiling, configurable: true, enumerable: true }) + return profiling + } + /** * @override */ diff --git a/packages/dd-trace/src/remote_config/index.js b/packages/dd-trace/src/remote_config/index.js index d4451234938..83a3b016e15 100644 --- a/packages/dd-trace/src/remote_config/index.js +++ b/packages/dd-trace/src/remote_config/index.js @@ -25,6 +25,9 @@ class RemoteConfig { #products = new Set() #batchHandlers = new Map() + /** + * @param {import('../config/config-base')} config - Tracer configuration + */ constructor (config) { const pollInterval = Math.floor(config.remoteConfig.pollInterval * 1000) diff --git a/packages/dd-trace/src/require-package-json.js b/packages/dd-trace/src/require-package-json.js index 3f685993fdf..5800155c2c5 100644 --- a/packages/dd-trace/src/require-package-json.js +++ b/packages/dd-trace/src/require-package-json.js @@ -21,10 +21,14 @@ function requirePackageJson (name, module) { } for (const modulePath of module.paths) { const candidate = path.join(modulePath, name, 
'package.json') - try { - return JSON.parse(fs.readFileSync(candidate, 'utf8')) - } catch { - continue + // fs.existsSync is faster than fs.readFileSync due to not throwing an error if the file does not exist. + // The race condition should also not matter here as the time window is very small. + if (fs.existsSync(candidate)) { + try { + return JSON.parse(fs.readFileSync(candidate, 'utf8')) + } catch { + continue + } } } throw new Error(`could not find ${name}/package.json`) diff --git a/packages/dd-trace/src/ritm.js b/packages/dd-trace/src/ritm.js index 29b4d09260e..6038c99c83c 100644 --- a/packages/dd-trace/src/ritm.js +++ b/packages/dd-trace/src/ritm.js @@ -22,19 +22,44 @@ let patchedRequire = null const moduleLoadStartChannel = dc.channel('dd-trace:moduleLoadStart') const moduleLoadEndChannel = dc.channel('dd-trace:moduleLoadEnd') +function stripNodePrefix (name) { + if (typeof name !== 'string') return name + return name.startsWith('node:') ? name.slice(5) : name +} + +const builtinModules = new Set(Module.builtinModules.map(stripNodePrefix)) + +function isBuiltinModuleName (name) { + if (typeof name !== 'string') return false + return builtinModules.has(stripNodePrefix(name)) +} + +function normalizeModuleName (name) { + if (typeof name !== 'string') return name + const stripped = stripNodePrefix(name) + return builtinModules.has(stripped) ? 
stripped : name +} + +/** + * @overload + * @param {string[]} modules list of modules to hook into + * @param {object} options hook options + * @param {Function} onrequire callback to be executed upon encountering module + */ +/** + * @overload + * @param {string[]} modules list of modules to hook into + * @param {Function} onrequire callback to be executed upon encountering module + */ function Hook (modules, options, onrequire) { if (!(this instanceof Hook)) return new Hook(modules, options, onrequire) - if (typeof modules === 'function') { - onrequire = modules - modules = null - options = {} - } else if (typeof options === 'function') { + if (typeof options === 'function') { onrequire = options options = {} } - modules = modules || [] - options = options || {} + modules ??= [] + options ??= {} this.modules = modules this.options = options @@ -63,32 +88,34 @@ function Hook (modules, options, onrequire) { */ let filename try { - // @ts-expect-error Module._resolveFilename is not typed + // @ts-expect-error - Module._resolveFilename is not typed filename = Module._resolveFilename(request, this) } catch { return _origRequire.apply(this, arguments) } - const core = !filename.includes(path.sep) + + const builtin = isBuiltinModuleName(filename) + const moduleId = builtin ? normalizeModuleName(filename) : filename let name, basedir, hooks // return known patched modules immediately - if (cache[filename]) { - const externalCacheEntry = require.cache[filename] + if (cache[moduleId]) { // require.cache was potentially altered externally - if (externalCacheEntry && externalCacheEntry.exports !== cache[filename].original) { - return externalCacheEntry.exports + const cacheEntry = require.cache[filename] + if (cacheEntry && cacheEntry.exports !== cache[filename].original) { + return cacheEntry.exports } - return cache[filename].exports + return cache[moduleId].exports } // Check if this module has a patcher in-progress already. 
// Otherwise, mark this module as patching in-progress. - const patched = patching[filename] + const patched = patching[moduleId] if (patched) { // If it's already patched, just return it as-is. return origRequire.apply(this, arguments) } - patching[filename] = true + patching[moduleId] = true const payload = { filename, @@ -107,12 +134,12 @@ function Hook (modules, options, onrequire) { // The module has already been loaded, // so the patching mark can be cleaned up. - delete patching[filename] + delete patching[moduleId] - if (core) { - hooks = moduleHooks[filename] + if (builtin) { + hooks = moduleHooks[moduleId] if (!hooks) return exports // abort if module name isn't on whitelist - name = filename + name = moduleId } else { const inAWSLambda = getEnvironmentVariable('AWS_LAMBDA_FUNCTION_NAME') !== undefined const hasLambdaHandler = getValueFromEnvSources('DD_LAMBDA_HANDLER') !== undefined @@ -129,7 +156,8 @@ function Hook (modules, options, onrequire) { hooks = moduleHooks[name] if (!hooks) return exports // abort if module name isn't on whitelist - // @ts-expect-error Module._resolveLookupPaths is not typed + // figure out if this is the main module file, or a file inside the module + // @ts-expect-error - Module._resolveLookupPaths is meant to be internal and is not typed const paths = Module._resolveLookupPaths(name, this, true) if (!paths) { // abort if _resolveLookupPaths return null @@ -138,7 +166,7 @@ function Hook (modules, options, onrequire) { let res try { - // @ts-expect-error Module._findPath is not typed + // @ts-expect-error - Module._findPath is meant to be internal and is not typed res = Module._findPath(name, [basedir, ...paths]) } catch { // case where the file specified in package.json "main" doesn't exist @@ -163,17 +191,21 @@ function Hook (modules, options, onrequire) { // ensure that the cache entry is assigned a value before calling // onrequire, in case calling onrequire requires the same module. 
- cache[filename] = { exports } - cache[filename].original = exports + cache[moduleId] = { exports } + cache[moduleId].original = exports for (const hook of hooks) { - cache[filename].exports = hook(cache[filename].exports, name, basedir) + cache[moduleId].exports = hook(cache[moduleId].exports, name, basedir) } - return cache[filename].exports + return cache[moduleId].exports } } +/** + * Reset the Ritm hook. This is used to reset the hook after a test. + * TODO: Remove this and instead use proxyquire to reset the hook. + */ Hook.reset = function () { Module.prototype.require = origRequire patchedRequire = null diff --git a/packages/dd-trace/src/runtime_metrics/index.js b/packages/dd-trace/src/runtime_metrics/index.js index 9b2602844e7..72f51dae1fb 100644 --- a/packages/dd-trace/src/runtime_metrics/index.js +++ b/packages/dd-trace/src/runtime_metrics/index.js @@ -14,6 +14,9 @@ const noop = runtimeMetrics = { } module.exports = { + /** + * @param {import('../config/config-base')} config - Tracer configuration + */ start (config) { if (!config?.runtimeMetrics.enabled) return diff --git a/packages/dd-trace/src/runtime_metrics/runtime_metrics.js b/packages/dd-trace/src/runtime_metrics/runtime_metrics.js index 5e042b8484b..7fd0ccdd7c1 100644 --- a/packages/dd-trace/src/runtime_metrics/runtime_metrics.js +++ b/packages/dd-trace/src/runtime_metrics/runtime_metrics.js @@ -35,6 +35,9 @@ let eventLoopDelayObserver = null // https://github.com/DataDog/dogweb/blob/prod/integration/node/node_metadata.csv module.exports = { + /** + * @param {import('../config/config-base')} config - Tracer configuration + */ start (config) { this.stop() const clientConfig = DogStatsDClient.generateClientConfig(config) diff --git a/packages/dd-trace/src/sampler.js b/packages/dd-trace/src/sampler.js index b023c55b6de..df9eadb1dec 100644 --- a/packages/dd-trace/src/sampler.js +++ b/packages/dd-trace/src/sampler.js @@ -42,7 +42,7 @@ class Sampler { /** * Determines whether a trace/span should be 
sampled based on the configured sampling rate. * - * @param {Span|SpanContext} span - The span or span context to evaluate. + * @param {import("../../..").Span|import("../../..").SpanContext} span - The span or span context to evaluate. * @returns {boolean} `true` if the trace/span should be sampled, otherwise `false`. */ isSampled (span) { diff --git a/packages/dd-trace/src/standalone/index.js b/packages/dd-trace/src/standalone/index.js index eb43ee87d4d..699e48c220c 100644 --- a/packages/dd-trace/src/standalone/index.js +++ b/packages/dd-trace/src/standalone/index.js @@ -11,6 +11,9 @@ const startCh = channel('dd-trace:span:start') const injectCh = channel('dd-trace:span:inject') const extractCh = channel('dd-trace:span:extract') +/** + * @param {import('../config/config-base')} config - Tracer configuration + */ function configure (config) { if (startCh.hasSubscribers) startCh.unsubscribe(onSpanStart) if (injectCh.hasSubscribers) injectCh.unsubscribe(onSpanInject) diff --git a/packages/dd-trace/src/telemetry/index.js b/packages/dd-trace/src/telemetry/index.js index 05a7ec8b96b..43e884b026f 100644 --- a/packages/dd-trace/src/telemetry/index.js +++ b/packages/dd-trace/src/telemetry/index.js @@ -5,15 +5,14 @@ let telemetry // Lazy load the telemetry module to avoid the performance impact of loading it unconditionally module.exports = { start (config, ...args) { + if (!config.telemetry.enabled) return telemetry ??= require('./telemetry') telemetry.start(config, ...args) }, - stop () { - telemetry?.stop() - }, // This might be called before `start` so we have to trigger loading the // underlying module here as well. 
updateConfig (changes, config, ...args) { + if (!config.telemetry.enabled) return telemetry ??= require('./telemetry') telemetry.updateConfig(changes, config, ...args) }, diff --git a/packages/dd-trace/src/telemetry/send-data.js b/packages/dd-trace/src/telemetry/send-data.js index fb7af48e64d..ef0d86634df 100644 --- a/packages/dd-trace/src/telemetry/send-data.js +++ b/packages/dd-trace/src/telemetry/send-data.js @@ -62,19 +62,6 @@ const { getValueFromEnvSources } = require('../config/helper') * kernel_name?: string * } & Record} TelemetryHost */ -/** - * @typedef {{ - * hostname?: string, - * port?: string | number, - * url?: string | URL, - * site?: string, - * apiKey?: string, - * isCiVisibility?: boolean, - * spanAttributeSchema?: string, - * tags: Record, - * telemetry?: { debug?: boolean } - * }} TelemetryConfig - */ /** * @callback SendDataCallback * @param {Error | null | undefined} error @@ -85,23 +72,22 @@ const { getValueFromEnvSources } = require('../config/helper') let agentTelemetry = true /** - * @param {TelemetryConfig} config + * @param {import('../config/config-base')} config * @param {TelemetryApplication} application * @param {TelemetryRequestType} reqType * @returns {Record} */ function getHeaders (config, application, reqType) { - const sessionId = config.tags['runtime-id'] const headers = { 'content-type': 'application/json', 'dd-telemetry-api-version': 'v2', 'dd-telemetry-request-type': reqType, 'dd-client-library-language': application.language_name, 'dd-client-library-version': application.tracer_version, - 'dd-session-id': sessionId, + 'dd-session-id': config.tags['runtime-id'], } - if (config.rootSessionId && config.rootSessionId !== sessionId) { - headers['dd-root-session-id'] = config.rootSessionId + if (config.DD_ROOT_JS_SESSION_ID) { + headers['dd-root-session-id'] = config.DD_ROOT_JS_SESSION_ID } const debug = config.telemetry && config.telemetry.debug if (debug) { @@ -141,7 +127,7 @@ function getPayload (payload) { // 
TODO(BridgeAR): Simplify this code. A lot does not need to be recalculated on every call. /** - * @param {TelemetryConfig} config + * @param {import('../config/config-base')} config * @param {TelemetryApplication} application * @param {TelemetryHost} host * @param {TelemetryRequestType} reqType diff --git a/packages/dd-trace/src/telemetry/session-propagation.js b/packages/dd-trace/src/telemetry/session-propagation.js index 0af4968db52..7f191f02d7a 100644 --- a/packages/dd-trace/src/telemetry/session-propagation.js +++ b/packages/dd-trace/src/telemetry/session-propagation.js @@ -1,53 +1,37 @@ 'use strict' -const dc = require('dc-polyfill') - +const dc = /** @type {typeof import('diagnostics_channel')} */ (require('dc-polyfill')) const childProcessChannel = dc.tracingChannel('datadog:child_process:execution') let subscribed = false -let rootSessionId let runtimeId -function injectSessionEnv (existingEnv) { - // eslint-disable-next-line eslint-rules/eslint-process-env -- not in supported-configurations.json - const base = existingEnv == null ? process.env : existingEnv - return { - ...base, - DD_ROOT_JS_SESSION_ID: rootSessionId, - DD_PARENT_JS_SESSION_ID: runtimeId, - } +function isOptionsObject (value) { + return value != null && typeof value === 'object' && !Array.isArray(value) && value } -function findOptionsIndex (args, shell) { - if (Array.isArray(args[1])) { - return { index: 2, exists: args[2] != null && typeof args[2] === 'object' } - } - if (args[1] != null && typeof args[1] === 'object') { - return { index: 1, exists: true } - } - if (!shell && args[2] != null && typeof args[2] === 'object') { - return { index: 2, exists: true } - } - return { index: shell ? 1 : 2, exists: false } +function getEnvWithRuntimeId (env) { + // eslint-disable-next-line eslint-rules/eslint-process-env + return { ...(env ?? 
process.env), DD_ROOT_JS_SESSION_ID: runtimeId } } function onChildProcessStart (context) { - if (!context.callArgs) return - const args = context.callArgs - const { index, exists } = findOptionsIndex(args, context.shell) + if (!args) return - if (exists) { - args[index] = { ...args[index], env: injectSessionEnv(args[index].env) } + const index = Array.isArray(args[1]) || (!context.shell && !isOptionsObject(args[1])) ? 2 : 1 + const options = isOptionsObject(args[index]) ? args[index] : undefined + + if (options) { + args[index] = { ...options, env: getEnvWithRuntimeId(options.env) } return } - const opts = { env: injectSessionEnv(null) } - - if (!context.shell && !Array.isArray(args[1])) { + if (index === 2 && !Array.isArray(args[1])) { args.splice(1, 0, []) } + const opts = { env: getEnvWithRuntimeId() } if (typeof args[index] === 'function') { args.splice(index, 0, opts) } else { @@ -55,24 +39,15 @@ function onChildProcessStart (context) { } } -const handler = { start: onChildProcessStart } - function start (config) { if (!config.telemetry?.enabled || subscribed) return subscribed = true - rootSessionId = config.rootSessionId - runtimeId = config.tags['runtime-id'] - - childProcessChannel.subscribe(handler) -} + runtimeId = config.DD_ROOT_JS_SESSION_ID || config.tags['runtime-id'] -function stop () { - if (!subscribed) return - childProcessChannel.unsubscribe(handler) - subscribed = false - rootSessionId = undefined - runtimeId = undefined + childProcessChannel.subscribe( + /** @type {import('diagnostics_channel').TracingChannelSubscribers} */ ({ start: onChildProcessStart }) + ) } -module.exports = { start, stop, _onChildProcessStart: onChildProcessStart } +module.exports = { start } diff --git a/packages/dd-trace/src/telemetry/telemetry.js b/packages/dd-trace/src/telemetry/telemetry.js index e113bb9e077..e6be63cd13b 100644 --- a/packages/dd-trace/src/telemetry/telemetry.js +++ b/packages/dd-trace/src/telemetry/telemetry.js @@ -18,15 +18,17 @@ const 
sessionPropagation = require('./session-propagation') * @typedef {Record} TelemetryPayloadObject */ /** - * @typedef {string | number | boolean | null | undefined | URL | Record | unknown[]} ConfigValue + * @typedef {string | number | boolean | null | URL | Record | unknown[] | Function} ConfigValue + */ +/** + * @typedef {{ [K in keyof processTags]: typeof processTags.tagsObject[K] }} ProcessTags */ /** * @typedef {{ * name: string, * enabled: boolean, * auto_enabled: boolean, - * process_tags: typeof processTags.tagsObject - * }} Integration + * } & Partial} Integration */ /** * @typedef {{ _enabled: boolean }} Plugin @@ -56,41 +58,11 @@ const sessionPropagation = require('./session-propagation') * kernel_name?: string * }} TelemetryHost */ -/** - * @typedef {{ - * telemetry: { - * enabled: boolean, - * heartbeatInterval: number, - * debug?: boolean, - * dependencyCollection?: boolean, - * logCollection?: boolean - * }, - * service: string | undefined, - * env: string | undefined, - * version: string | undefined, - * tags: Record, - * url?: string | URL, - * hostname?: string, - * port?: string | number, - * site?: string, - * apiKey?: string, - * isCiVisibility?: boolean, - * spanAttributeSchema?: string, - * installSignature?: { id?: string, time?: string, type?: string }, - * sca?: { enabled?: boolean }, - * appsec: { enabled: boolean, apiSecurity?: { - * endpointCollectionEnabled?: boolean, - * endpointCollectionMessageLimit?: number - * } }, - * profiling: { enabled: boolean | 'true' | 'false' | 'auto' } - * }} TelemetryConfig - */ const telemetryStartChannel = dc.channel('datadog:telemetry:start') -const telemetryStopChannel = dc.channel('datadog:telemetry:stop') const telemetryAppClosingChannel = dc.channel('datadog:telemetry:app-closing') -/** @type {TelemetryConfig | undefined} */ +/** @type {import('../config/config-base') | undefined} */ let config /** @type {PluginManager} */ @@ -102,18 +74,9 @@ let application /** @type {TelemetryHost} */ const host 
= createHostObject() -/** @type {ReturnType | undefined} */ -let heartbeatInterval - -/** @type {ReturnType | undefined} */ -let extendedInterval - /** @type {Integration[]} */ let integrations -/** @type {Map} */ -const configWithOrigin = new Map() - /** * Retry information that `telemetry.js` keeps in-memory to be merged into the next payload. * @@ -130,8 +93,6 @@ let heartbeatFailedDependencies = [] const sentIntegrations = new Set() -let seqId = 0 - function getRetryData () { return retryData } @@ -184,7 +145,7 @@ function getIntegrations () { } /** - * @param {TelemetryConfig} config + * @param {import('../config/config-base')} config */ function getProducts (config) { return { @@ -199,7 +160,7 @@ function getProducts (config) { } /** - * @param {TelemetryConfig} config + * @param {import('../config/config-base')} config */ function getInstallSignature (config) { const { installSignature: sig } = config @@ -212,13 +173,11 @@ function getInstallSignature (config) { } } -/** - * @param {TelemetryConfig} config - */ +/** @param {import('../config/config-base')} config */ function appStarted (config) { const app = { products: getProducts(config), - configuration: [...configWithOrigin.values()], + configuration: latestConfiguration, } const installSignature = getInstallSignature(config) if (installSignature) { @@ -245,7 +204,7 @@ function appClosing () { } /** - * @param {TelemetryConfig} config + * @param {import('../config/config-base')} config * @returns {TelemetryApplication} */ function createAppObject (config) { @@ -320,11 +279,11 @@ function createPayload (currReqType, currPayload = {}) { } /** - * @param {TelemetryConfig} config + * @param {import('../config/config-base')} config * @param {TelemetryApplication} application */ function heartbeat (config, application) { - heartbeatInterval = setInterval(() => { + setInterval(() => { metricsManager.send(config, application, host) telemetryLogger.send(config, application, host) @@ -333,11 +292,9 @@ function 
heartbeat (config, application) { }, config.telemetry.heartbeatInterval).unref() } -/** - * @param {TelemetryConfig} config - */ +/** @param {import('../config/config-base')} config */ function extendedHeartbeat (config) { - extendedInterval = setInterval(() => { + setInterval(() => { const appPayload = appStarted(config) if (heartbeatFailedIntegrations.length > 0) { appPayload.integrations = heartbeatFailedIntegrations @@ -348,16 +305,16 @@ function extendedHeartbeat (config) { heartbeatFailedDependencies = [] } sendData(config, application, host, 'app-extended-heartbeat', appPayload) - }, 1000 * 60 * 60 * 24).unref() + }, config.telemetry.extendedHeartbeatInterval).unref() } /** - * @param {TelemetryConfig} aConfig + * @param {import('../config/config-base')} aConfig * @param {PluginManager} thePluginManager */ function start (aConfig, thePluginManager) { if (!aConfig.telemetry.enabled) { - if (aConfig.sca?.enabled) { + if (aConfig.appsec.sca.enabled) { logger.warn('DD_APPSEC_SCA_ENABLED requires enabling telemetry to work.') } @@ -376,8 +333,7 @@ function start (aConfig, thePluginManager) { sendData(config, application, host, 'app-started', appStarted(config)) if (integrations.length > 0) { - sendData(config, application, host, 'app-integrations-change', - { integrations }, updateRetryData) + sendData(config, application, host, 'app-integrations-change', { integrations }, updateRetryData) } heartbeat(config, application) @@ -388,21 +344,6 @@ function start (aConfig, thePluginManager) { telemetryStartChannel.publish(getTelemetryData()) } -function stop () { - if (!config) { - return - } - clearInterval(extendedInterval) - clearInterval(heartbeatInterval) - globalThis[Symbol.for('dd-trace')].beforeExitHandlers.delete(appClosing) - - telemetryStopChannel.publish(getTelemetryData()) - - endpoints.stop() - sessionPropagation.stop() - config = undefined -} - function updateIntegrations () { if (!config?.telemetry.enabled) { return @@ -417,121 +358,37 @@ function 
updateIntegrations () { sendData(config, application, host, reqType, payload, updateRetryData) } -/** - * @param {Record | null | undefined} map - */ -function formatMapForTelemetry (map) { - // format from an object to a string map in order for - // telemetry intake to accept the configuration - return map - ? Object.entries(map).map(([key, value]) => `${key}:${value}`).join(',') - : '' -} - -const nameMapping = { - sampleRate: 'DD_TRACE_SAMPLE_RATE', - logInjection: 'DD_LOG_INJECTION', - headerTags: 'DD_TRACE_HEADER_TAGS', - tags: 'DD_TAGS', - 'sampler.rules': 'DD_TRACE_SAMPLING_RULES', - traceEnabled: 'DD_TRACE_ENABLED', - url: 'DD_TRACE_AGENT_URL', - 'sampler.rateLimit': 'DD_TRACE_RATE_LIMIT', - queryStringObfuscation: 'DD_TRACE_OBFUSCATION_QUERY_STRING_REGEXP', - version: 'DD_VERSION', - env: 'DD_ENV', - service: 'DD_SERVICE', - clientIpHeader: 'DD_TRACE_CLIENT_IP_HEADER', - 'grpc.client.error.statuses': 'DD_GRPC_CLIENT_ERROR_STATUSES', - 'grpc.server.error.statuses': 'DD_GRPC_SERVER_ERROR_STATUSES', - traceId128BitLoggingEnabled: 'DD_TRACE_128_BIT_TRACEID_LOGGING_ENABLED', - instrumentationSource: 'instrumentation_source', - injectionEnabled: 'ssi_injection_enabled', - injectForce: 'ssi_forced_injection_enabled', - 'runtimeMetrics.enabled': 'runtimeMetrics', - otelLogsEnabled: 'DD_LOGS_OTEL_ENABLED', - otelUrl: 'OTEL_EXPORTER_OTLP_ENDPOINT', - otelEndpoint: 'OTEL_EXPORTER_OTLP_ENDPOINT', - otelHeaders: 'OTEL_EXPORTER_OTLP_HEADERS', - otelProtocol: 'OTEL_EXPORTER_OTLP_PROTOCOL', - otelTimeout: 'OTEL_EXPORTER_OTLP_TIMEOUT', - otelLogsHeaders: 'OTEL_EXPORTER_OTLP_LOGS_HEADERS', - otelLogsProtocol: 'OTEL_EXPORTER_OTLP_LOGS_PROTOCOL', - otelLogsTimeout: 'OTEL_EXPORTER_OTLP_LOGS_TIMEOUT', - otelLogsUrl: 'OTEL_EXPORTER_OTLP_LOGS_ENDPOINT', - otelBatchTimeout: 'OTEL_BSP_SCHEDULE_DELAY', - otelMaxExportBatchSize: 'OTEL_BSP_MAX_EXPORT_BATCH_SIZE', - otelMaxQueueSize: 'OTEL_BSP_MAX_QUEUE_SIZE', - otelMetricsEnabled: 'DD_METRICS_OTEL_ENABLED', - otelMetricsHeaders: 
'OTEL_EXPORTER_OTLP_METRICS_HEADERS', - otelMetricsProtocol: 'OTEL_EXPORTER_OTLP_METRICS_PROTOCOL', - otelMetricsTimeout: 'OTEL_EXPORTER_OTLP_METRICS_TIMEOUT', - otelMetricsExportTimeout: 'OTEL_METRIC_EXPORT_TIMEOUT', - otelMetricsUrl: 'OTEL_EXPORTER_OTLP_METRICS_ENDPOINT', - otelMetricsExportInterval: 'OTEL_METRIC_EXPORT_INTERVAL', - otelMetricsTemporalityPreference: 'OTEL_EXPORTER_OTLP_METRICS_TEMPORALITY_PREFERENCE', -} - -const namesNeedFormatting = new Set(['DD_TAGS', 'peerServiceMapping', 'serviceMapping']) +let latestConfiguration = [] /** - * @param {{ name: string, value: ConfigValue, origin: string }[]} changes - * @param {TelemetryConfig} config + * @param {{ name: string, value: ConfigValue, origin: string, seq_id: number }[]} configuration + * @param {import('../config/config-base')} config */ -function updateConfig (changes, config) { +function updateConfig (configuration, config) { if (!config.telemetry.enabled) return - if (changes.length === 0) return - logger.trace(changes) + logger.trace(configuration) const application = createAppObject(config) - const changed = configWithOrigin.size > 0 - - for (const change of changes) { - const name = nameMapping[change.name] || change.name - const { origin, value } = change - const entry = { name, value, origin, seq_id: seqId++ } - - if (namesNeedFormatting.has(name)) { - // @ts-expect-error entry.value is known to be a map for these config names - entry.value = formatMapForTelemetry(value) - } else if (name === 'url') { - if (value) { - entry.value = value.toString() - } - } else if (name === 'DD_TRACE_SAMPLING_RULES') { - entry.value = JSON.stringify(value) - } else if (Array.isArray(value)) { - entry.value = value.join(',') - } - - // Use composite key to support multiple origins for same config name - configWithOrigin.set(`${name}|${origin}`, entry) - } - - if (changed) { - // update configWithOrigin to contain up-to-date full list of config values for app-extended-heartbeat + if 
(latestConfiguration.length) { const { reqType, payload } = createPayload('app-client-configuration-change', { - configuration: [...configWithOrigin.values()], + configuration, }) sendData(config, application, host, reqType, payload, updateRetryData) } + latestConfiguration = configuration } /** - * @param {TelemetryConfig['profiling']['enabled']} profilingEnabled + * @param {import('../config/config-base')['profiling']['enabled']} profilingEnabled */ function profilingEnabledToBoolean (profilingEnabled) { - if (typeof profilingEnabled === 'boolean') { - return profilingEnabled - } return profilingEnabled === 'true' || profilingEnabled === 'auto' } module.exports = { start, - stop, updateIntegrations, updateConfig, appClosing, diff --git a/packages/dd-trace/src/util.js b/packages/dd-trace/src/util.js index a902f3b99de..f4cce7196fd 100644 --- a/packages/dd-trace/src/util.js +++ b/packages/dd-trace/src/util.js @@ -67,14 +67,6 @@ function calculateDDBasePath (dirname) { return dirSteps.slice(0, packagesIndex).join(path.sep) + path.sep } -function normalizeProfilingEnabledValue (configValue) { - return isTrue(configValue) - ? 'true' - : isFalse(configValue) - ? 'false' - : configValue === 'auto' ? 
'auto' : undefined -} - function normalizePluginEnvName (envPluginName, makeLowercase = false) { if (envPluginName.startsWith('@')) { envPluginName = envPluginName.slice(1) @@ -89,6 +81,5 @@ module.exports = { isError, globMatch, ddBasePath: globalThis.__DD_ESBUILD_BASEPATH || calculateDDBasePath(__dirname), - normalizeProfilingEnabledValue, normalizePluginEnvName, } diff --git a/packages/dd-trace/test/agent/info.spec.js b/packages/dd-trace/test/agent/info.spec.js index de1576f46f7..421b546933c 100644 --- a/packages/dd-trace/test/agent/info.spec.js +++ b/packages/dd-trace/test/agent/info.spec.js @@ -8,10 +8,10 @@ const sinon = require('sinon') require('../setup/core') const { fetchAgentInfo, clearCache } = require('../../src/agent/info') +const { defaults: { hostname, port } } = require('../../src/config/defaults') describe('agent/info', () => { - const port = 8126 - const url = `http://127.0.0.1:${port}` + const url = `http://${hostname}:${port}` describe('fetchAgentInfo', () => { afterEach(() => { @@ -130,7 +130,7 @@ describe('agent/info', () => { }) it('should clear cache when URL changes', (done) => { - const url2 = `http://127.0.0.1:${port + 1}` + const url2 = `http://${hostname}:${port + 1}` const agentInfo1 = { endpoints: ['/evp_proxy/v2'] } const agentInfo2 = { endpoints: ['/evp_proxy/v3'] } diff --git a/packages/dd-trace/test/agent/url.spec.js b/packages/dd-trace/test/agent/url.spec.js index 1fd8bcd92b1..08c518a58c6 100644 --- a/packages/dd-trace/test/agent/url.spec.js +++ b/packages/dd-trace/test/agent/url.spec.js @@ -7,7 +7,7 @@ const { describe, it } = require('mocha') require('../setup/core') const { getAgentUrl } = require('../../src/agent/url') -const defaults = require('../../src/config/defaults') +const { defaults: { hostname, port } } = require('../../src/config/defaults') describe('agent/url', () => { describe('getAgentUrl', () => { @@ -41,7 +41,7 @@ describe('agent/url', () => { const result = getAgentUrl(config) - 
assert.strictEqual(result.hostname, defaults.hostname) + assert.strictEqual(result.hostname, hostname) assert.strictEqual(result.port, '9999') }) @@ -53,7 +53,7 @@ describe('agent/url', () => { const result = getAgentUrl(config) assert.strictEqual(result.hostname, 'custom-host') - assert.strictEqual(result.port, defaults.port) + assert.strictEqual(result.port, String(port)) assert.strictEqual(result.protocol, 'http:') }) @@ -62,8 +62,8 @@ describe('agent/url', () => { const result = getAgentUrl(config) - assert.strictEqual(result.hostname, defaults.hostname) - assert.strictEqual(result.port, defaults.port) + assert.strictEqual(result.hostname, hostname) + assert.strictEqual(result.port, String(port)) assert.strictEqual(result.protocol, 'http:') }) diff --git a/packages/dd-trace/test/appsec/iast/vulnerability-formatter/evidence-redaction/sensitive-handler.spec.js b/packages/dd-trace/test/appsec/iast/vulnerability-formatter/evidence-redaction/sensitive-handler.spec.js index a350ac6e801..6996716396f 100644 --- a/packages/dd-trace/test/appsec/iast/vulnerability-formatter/evidence-redaction/sensitive-handler.spec.js +++ b/packages/dd-trace/test/appsec/iast/vulnerability-formatter/evidence-redaction/sensitive-handler.spec.js @@ -7,7 +7,7 @@ const sinon = require('sinon') const sensitiveHandler = require('../../../../../src/appsec/iast/vulnerabilities-formatter/evidence-redaction/sensitive-handler') -const defaults = require('../../../../../src/config/defaults') +const { defaults } = require('../../../../../src/config/defaults') const { suite } = require('../resources/evidence-redaction-suite.json') const DEFAULT_IAST_REDACTION_NAME_PATTERN = /** @type {string} */ (defaults['iast.redactionNamePattern']) diff --git a/packages/dd-trace/test/ci-visibility/exporters/agent-proxy/agent-proxy.spec.js b/packages/dd-trace/test/ci-visibility/exporters/agent-proxy/agent-proxy.spec.js index 6961a1b7453..1ac470eb007 100644 --- 
a/packages/dd-trace/test/ci-visibility/exporters/agent-proxy/agent-proxy.spec.js +++ b/packages/dd-trace/test/ci-visibility/exporters/agent-proxy/agent-proxy.spec.js @@ -15,6 +15,7 @@ const DynamicInstrumentationLogsWriter = require('../../../../src/ci-visibility/ const CoverageWriter = require('../../../../src/ci-visibility/exporters/agentless/coverage-writer') const AgentWriter = require('../../../../src/exporters/agent/writer') const { clearCache } = require('../../../../src/agent/info') +const { defaults: { hostname, port } } = require('../../../../src/config/defaults') describe('AgentProxyCiVisibilityExporter', () => { beforeEach(() => { @@ -24,8 +25,7 @@ describe('AgentProxyCiVisibilityExporter', () => { }) const flushInterval = 50 - const port = 8126 - const url = `http://127.0.0.1:${port}` + const url = `http://${hostname}:${port}` const queryDelay = 50 const tags = {} diff --git a/packages/dd-trace/test/ci-visibility/exporters/ci-visibility-exporter.spec.js b/packages/dd-trace/test/ci-visibility/exporters/ci-visibility-exporter.spec.js index b65d3dc5534..c4864efc1a3 100644 --- a/packages/dd-trace/test/ci-visibility/exporters/ci-visibility-exporter.spec.js +++ b/packages/dd-trace/test/ci-visibility/exporters/ci-visibility-exporter.spec.js @@ -13,10 +13,10 @@ const nock = require('nock') const { assertObjectContains } = require('../../../../../integration-tests/helpers') require('../../../../dd-trace/test/setup/core') const CiVisibilityExporter = require('../../../src/ci-visibility/exporters/ci-visibility-exporter') +const { defaults: { hostname, port } } = require('../../../src/config/defaults') describe('CI Visibility Exporter', () => { - const port = 8126 - const url = `http://127.0.0.1:${port}` + const url = `http://${hostname}:${port}` beforeEach(() => { // to make sure `isShallowRepository` in `git.js` returns false diff --git a/packages/dd-trace/test/config/disabled_instrumentations.spec.js 
b/packages/dd-trace/test/config/disabled_instrumentations.spec.js index eeefecf3a61..0706003da25 100644 --- a/packages/dd-trace/test/config/disabled_instrumentations.spec.js +++ b/packages/dd-trace/test/config/disabled_instrumentations.spec.js @@ -1,35 +1,96 @@ 'use strict' -const assert = require('node:assert/strict') +const assert = require('node:assert') +const proxyquire = require('proxyquire') const { describe, it } = require('mocha') require('../setup/core') -describe('config/disabled_instrumentations', () => { - it('should disable loading instrumentations completely', () => { - process.env.DD_TRACE_DISABLED_INSTRUMENTATIONS = 'express' - const handleBefore = require('express').application.handle - const tracer = require('../../../..') - const handleAfterImport = require('express').application.handle - tracer.init() - const handleAfterInit = require('express').application.handle - - assert.strictEqual(handleBefore, handleAfterImport) - assert.strictEqual(handleBefore, handleAfterInit) - delete process.env.DD_TRACE_DISABLED_INSTRUMENTATIONS - }) +describe('config/instrumentations', () => { + const httpRequest = require('http').request + const expressHandle = require('express').application.handle + + function getTracer () { + const register = proxyquire.noPreserveCache()('../../../datadog-instrumentations/src/helpers/register', {}) + const instrumentations = proxyquire('../../../datadog-instrumentations/src/helpers/instrumentations', { + './src/helpers/register': register, + }) + const pluginManager = proxyquire('../../src/plugin_manager', { + '../../datadog-instrumentations': instrumentations, + }) + const proxy = proxyquire('../../src/proxy', { + './plugin_manager': pluginManager, + }) + const TracerProxy = proxyquire('../../src', { + './proxy': proxy, + }) + return proxyquire('../../', { + './src': TracerProxy, + }) + } + + ['disable', 'enable'].forEach((mode) => { + /** @type {(a: unknown, b: unknown) => void} */ + const assertionMethod = mode === 'disable' 
? assert.strictEqual : assert.notStrictEqual + + describe(`config/${mode}_instrumentations`, () => { + it(`should ${mode} node prefixed and unprefixed http instrumentations completely`, () => { + if (mode === 'disable') { + process.env.DD_TRACE_DISABLED_INSTRUMENTATIONS = 'http,express' + } + const tracer = getTracer() + const prefixedHandleAfterImport = require('node:http').request + const handleAfterImport = require('http').request + tracer.init() + const prefixedHandleAfterInit = require('http').request + const handleAfterInit = require('http').request + + assertionMethod(httpRequest, handleAfterImport) + assertionMethod(httpRequest, handleAfterInit) + assertionMethod(httpRequest, prefixedHandleAfterImport) + assertionMethod(httpRequest, prefixedHandleAfterInit) + assert.strictEqual(handleAfterImport, handleAfterInit) + assert.strictEqual(prefixedHandleAfterImport, prefixedHandleAfterInit) + delete process.env.DD_TRACE_DISABLED_INSTRUMENTATIONS + }) + + it(`should ${mode} loading instrumentations completely`, () => { + if (mode === 'disable') { + process.env.DD_TRACE_DISABLED_INSTRUMENTATIONS = 'express' + } + const tracer = getTracer() + // Ensure Express is reloaded through the instrumentation hook by clearing Node's require cache. + delete require.cache[require.resolve('express')] + // @ts-expect-error Express handle is not typed as it is an internal property + const handleAfterImport = require('express').application.handle + tracer.init() + // Reload again post-init to validate behavior after tracer initialization. 
+ // @ts-expect-error Express handle is not typed as it is an internal property + const handleAfterInit = require('express').application.handle + + assertionMethod(expressHandle, handleAfterImport) + assertionMethod(expressHandle, handleAfterInit) + delete process.env.DD_TRACE_DISABLED_INSTRUMENTATIONS + }) + + if (mode === 'disable') { + it('should not disable loading instrumentations using DD_TRACE__ENABLED', () => { + process.env.DD_TRACE_EXPRESS_ENABLED = 'false' + const tracer = getTracer() + delete require.cache[require.resolve('express')] + // @ts-expect-error Express handle is not typed as it is an internal property + const handleAfterImport = require('express').application.handle + tracer.init() + // @ts-expect-error Express handle is not typed as it is an internal property + const handleAfterInit = require('express').application.handle - it('should disable loading instrumentations using DD_TRACE__ENABLED', () => { - process.env.DD_TRACE_EXPRESS_ENABLED = 'false' - const handleBefore = require('express').application.handle - const tracer = require('../../../..') - const handleAfterImport = require('express').application.handle - tracer.init() - const handleAfterInit = require('express').application.handle - - assert.strictEqual(handleBefore, handleAfterImport) - assert.strictEqual(handleBefore, handleAfterInit) - delete process.env.DD_TRACE_EXPRESS_ENABLED + assert.notStrictEqual(expressHandle, handleAfterImport) + assert.notStrictEqual(expressHandle, handleAfterInit) + assert.strictEqual(handleAfterImport, handleAfterInit) + delete process.env.DD_TRACE_EXPRESS_ENABLED + }) + } + }) }) }) diff --git a/packages/dd-trace/test/config/generated-config-types.spec.js b/packages/dd-trace/test/config/generated-config-types.spec.js index 452595157f5..20ce9b08b36 100644 --- a/packages/dd-trace/test/config/generated-config-types.spec.js +++ b/packages/dd-trace/test/config/generated-config-types.spec.js @@ -10,8 +10,7 @@ const { OUTPUT_PATH, } = 
require('../../../../scripts/generate-config-types') -// TODO: Re-enable when landing the actual change. -describe.skip('generated config types', () => { +describe('generated config types', () => { it('should stay in sync with supported-configurations.json', () => { assert.strictEqual( readFileSync(OUTPUT_PATH, 'utf8').replaceAll('\r\n', '\n'), diff --git a/packages/dd-trace/test/config/helper.spec.js b/packages/dd-trace/test/config/helper.spec.js index e7af52ff1fc..e1fd0fe7b15 100644 --- a/packages/dd-trace/test/config/helper.spec.js +++ b/packages/dd-trace/test/config/helper.spec.js @@ -81,6 +81,7 @@ describe('config-helper stable config sources', () => { describe('config-helper env resolution', () => { let getValueFromEnvSources + let getConfiguredEnvName let getEnvironmentVariable let resetModule let originalEnv @@ -89,6 +90,7 @@ describe('config-helper env resolution', () => { // Ensure we always get a fresh copy of the module when needed const mod = proxyquire('../../src/config/helper', overrides) getValueFromEnvSources = mod.getValueFromEnvSources + getConfiguredEnvName = mod.getConfiguredEnvName getEnvironmentVariable = mod.getEnvironmentVariable resetModule = () => {} } @@ -144,6 +146,23 @@ describe('config-helper env resolution', () => { assert.strictEqual(value, 'canonical-hostname') }) + it('returns the env name used for canonical values', () => { + process.env.DD_TRACE_AGENT_HOSTNAME = 'alias-hostname' + process.env.DD_AGENT_HOST = 'canonical-hostname' + + const envName = getConfiguredEnvName('DD_AGENT_HOST') + + assert.strictEqual(envName, 'DD_AGENT_HOST') + }) + + it('returns the env alias name when alias is used', () => { + process.env.DD_TRACE_AGENT_HOSTNAME = 'alias-hostname' + + const envName = getConfiguredEnvName('DD_AGENT_HOST') + + assert.strictEqual(envName, 'DD_TRACE_AGENT_HOSTNAME') + }) + it('throws for unsupported DD_ configuration', () => { assert.throws( () => getEnvironmentVariable('DD_UNSUPPORTED_CONFIG'), diff --git 
a/packages/dd-trace/test/config/index.spec.js b/packages/dd-trace/test/config/index.spec.js index a8913892917..11fca885cc8 100644 --- a/packages/dd-trace/test/config/index.spec.js +++ b/packages/dd-trace/test/config/index.spec.js @@ -2,6 +2,7 @@ const { readFileSync, mkdtempSync, rmSync, writeFileSync } = require('node:fs') const assert = require('node:assert/strict') +const dns = require('node:dns') const { once } = require('node:events') const path = require('node:path') const os = require('node:os') @@ -12,7 +13,7 @@ const context = describe const proxyquire = require('proxyquire') require('../setup/core') -const defaults = require('../../src/config/defaults') +const { defaults } = require('../../src/config/defaults') const { getEnvironmentVariable, getEnvironmentVariables } = require('../../src/config/helper') const { assertObjectContains } = require('../../../../integration-tests/helpers') const { DD_MAJOR } = require('../../../../version') @@ -22,7 +23,6 @@ const GRPC_CLIENT_ERROR_STATUSES = defaults['grpc.client.error.statuses'] const GRPC_SERVER_ERROR_STATUSES = defaults['grpc.server.error.statuses'] describe('Config', () => { - let getConfig let log let pkg let env @@ -30,6 +30,7 @@ describe('Config', () => { let existsSyncParam let existsSyncReturn let updateConfig + const isWindows = process.platform === 'win32' const RECOMMENDED_JSON_PATH = require.resolve('../../src/appsec/recommended.json') const RULES_JSON_PATH = require.resolve('../fixtures/config/appsec-rules.json') @@ -42,35 +43,42 @@ describe('Config', () => { const comparator = (a, b) => a.name.localeCompare(b.name) || a.origin.localeCompare(b.origin) - function reloadLoggerAndConfig () { - log = proxyquire('../../src/log', {}) - log.use = sinon.spy() - log.toggle = sinon.spy() - log.warn = sinon.spy() - log.error = sinon.spy() + function assertConfigUpdateContains (actual, expected) { + for (const entry of expected) { + const match = actual.find(actualEntry => actualEntry.name === entry.name && 
actualEntry.origin === entry.origin) - const configDefaults = proxyquire('../../src/config/defaults', { - '../pkg': pkg, - }) - - // Reload the config module with each call to getConfig to ensure we get a new instance of the config. - getConfig = (options) => { - const supportedConfigurations = proxyquire.noPreserveCache()('../../src/config/supported-configurations.json', {}) - const configHelper = proxyquire.noPreserveCache()('../../src/config/helper', { - './supported-configurations.json': supportedConfigurations, - }) - const serverless = proxyquire.noPreserveCache()('../../src/serverless', {}) - return proxyquire.noPreserveCache()('../../src/config', { - './defaults': configDefaults, - '../log': log, - '../telemetry': { updateConfig }, - '../serverless': serverless, - 'node:fs': fs, - './helper': configHelper, - })(options) + assert.ok(match, `Expected update for ${entry.name} (${entry.origin})`) + assertObjectContains(match, entry) } } + // Reload the config module with each call to getConfig to ensure we get a new instance of the config. 
+ const getConfig = (options) => { + log = proxyquire('../../src/log', {}) + sinon.spy(log, 'warn') + sinon.spy(log, 'error') + const parsers = proxyquire.noPreserveCache()('../../src/config/parsers', {}) + const supportedConfigurations = proxyquire.noPreserveCache()('../../src/config/supported-configurations.json', {}) + const configDefaults = proxyquire.noPreserveCache()('../../src/config/defaults', { + './supported-configurations.json': supportedConfigurations, + '../log': log, + './parsers': parsers, + }) + const configHelper = proxyquire.noPreserveCache()('../../src/config/helper', { + './supported-configurations.json': supportedConfigurations, + }) + const serverless = proxyquire.noPreserveCache()('../../src/serverless', {}) + return proxyquire.noPreserveCache()('../../src/config', { + './defaults': configDefaults, + '../log': log, + '../telemetry': { updateConfig }, + '../serverless': serverless, + 'node:fs': fs, + './helper': configHelper, + '../pkg': pkg, + })(options) + } + beforeEach(() => { pkg = { name: '', @@ -90,8 +98,6 @@ describe('Config', () => { mkdtempSync, writeFileSync, } - - reloadLoggerAndConfig() }) afterEach(() => { @@ -110,9 +116,7 @@ describe('Config', () => { it('should return aliased value', () => { process.env.DATADOG_API_KEY = '12345' - assert.throws(() => getEnvironmentVariable('DATADOG_API_KEY'), { - message: /Missing DATADOG_API_KEY env\/configuration in "supported-configurations.json" file./, - }) + assert.strictEqual(getEnvironmentVariable('DATADOG_API_KEY'), '12345') assert.strictEqual(getEnvironmentVariable('DD_API_KEY'), '12345') const { DD_API_KEY, DATADOG_API_KEY } = getEnvironmentVariables() assert.strictEqual(DATADOG_API_KEY, undefined) @@ -161,8 +165,6 @@ describe('Config', () => { process.env.DD_TRACE_DEBUG = 'true' process.env.DD_TRACE_LOG_LEVEL = 'error' - reloadLoggerAndConfig() - const config = getConfig() assertObjectContains(config, { @@ -191,9 +193,6 @@ describe('Config', () => { 
process.env.DD_TRACE_PROPAGATION_STYLE_EXTRACT = 'b3,tracecontext' process.env.OTEL_PROPAGATORS = 'datadog,tracecontext' - // required if we want to check updates to config.debug and config.logLevel which is fetched from logger - reloadLoggerAndConfig() - const config = getConfig() assertObjectContains(config, { @@ -211,7 +210,6 @@ describe('Config', () => { tracePropagationStyle: { inject: ['b3', 'tracecontext'], extract: ['b3', 'tracecontext'], - otelPropagators: false, }, }) @@ -230,9 +228,6 @@ describe('Config', () => { process.env.OTEL_RESOURCE_ATTRIBUTES = 'foo=bar1,baz=qux1' process.env.OTEL_PROPAGATORS = 'b3,datadog' - // required if we want to check updates to config.debug and config.logLevel which is fetched from logger - reloadLoggerAndConfig() - const config = getConfig() assertObjectContains(config, { @@ -250,7 +245,6 @@ describe('Config', () => { tracePropagationStyle: { inject: ['b3', 'datadog'], extract: ['b3', 'datadog'], - otelPropagators: true, }, }) @@ -260,6 +254,85 @@ describe('Config', () => { assert.strictEqual(indexFile, noop) }) + it('should use proxy when dynamic instrumentation is enabled with DD_TRACE_ENABLED=false', () => { + process.env.DD_TRACE_ENABLED = 'false' + process.env.DD_DYNAMIC_INSTRUMENTATION_ENABLED = 'true' + + const config = getConfig() + + assert.strictEqual(config.tracing, false) + assert.strictEqual(config.dynamicInstrumentation.enabled, true) + + delete require.cache[require.resolve('../../src/index')] + const indexFile = require('../../src/index') + const proxy = require('../../src/proxy') + assert.strictEqual(indexFile, proxy) + }) + + it('should use proxy when dynamic instrumentation is enabled with DD_TRACING_ENABLED=false', () => { + process.env.DD_TRACING_ENABLED = 'false' + process.env.DD_DYNAMIC_INSTRUMENTATION_ENABLED = 'true' + + const config = getConfig() + + assert.strictEqual(config.tracing, false) + assert.strictEqual(config.dynamicInstrumentation.enabled, true) + + delete 
require.cache[require.resolve('../../src/index')] + const indexFile = require('../../src/index') + const proxy = require('../../src/proxy') + assert.strictEqual(indexFile, proxy) + }) + + it('should use proxy when appsec standalone is enabled with DD_TRACE_ENABLED=false', () => { + process.env.DD_TRACE_ENABLED = 'false' + process.env.DD_EXPERIMENTAL_APPSEC_STANDALONE_ENABLED = 'true' + + const config = getConfig() + + assert.strictEqual(config.tracing, false) + assert.strictEqual(config.apmTracingEnabled, false) + + delete require.cache[require.resolve('../../src/index')] + const indexFile = require('../../src/index') + const proxy = require('../../src/proxy') + assert.strictEqual(indexFile, proxy) + }) + + it('should prefer DD propagation style over OTEL propagators', () => { + process.env.DD_TRACE_PROPAGATION_STYLE = 'tracecontext' + process.env.OTEL_PROPAGATORS = 'b3,datadog' + + const config = getConfig() + + assert.deepStrictEqual(config.tracePropagationStyle?.inject, ['tracecontext']) + assert.deepStrictEqual(config.tracePropagationStyle?.extract, ['tracecontext']) + }) + + it('should use generic OTLP exporter config for logs and metrics when specific config is not set', () => { + process.env.OTEL_EXPORTER_OTLP_ENDPOINT = 'http://collector:4318' + process.env.OTEL_EXPORTER_OTLP_HEADERS = 'x-test=value' + process.env.OTEL_EXPORTER_OTLP_PROTOCOL = 'grpc' + process.env.OTEL_EXPORTER_OTLP_TIMEOUT = '1234' + + const config = getConfig() + + assertObjectContains(config, { + OTEL_EXPORTER_OTLP_ENDPOINT: 'http://collector:4318', + otelLogsUrl: 'http://collector:4318', + otelMetricsUrl: 'http://collector:4318', + otelHeaders: 'x-test=value', + otelLogsHeaders: 'x-test=value', + otelMetricsHeaders: 'x-test=value', + otelProtocol: 'grpc', + otelLogsProtocol: 'grpc', + otelMetricsProtocol: 'grpc', + otelTimeout: 1234, + otelLogsTimeout: 1234, + otelMetricsTimeout: 1234, + }) + }) + it('should correctly map OTEL_RESOURCE_ATTRIBUTES', () => { 
process.env.OTEL_RESOURCE_ATTRIBUTES = 'deployment.environment=test1,service.name=test2,service.version=5,foo=bar1,baz=qux1' @@ -362,7 +435,7 @@ describe('Config', () => { debug: false, dogstatsd: { hostname: '127.0.0.1', - port: '8125', + port: 8125, }, dynamicInstrumentation: { enabled: false, @@ -437,7 +510,6 @@ describe('Config', () => { spanAttributeSchema: 'v0', spanComputePeerService: false, spanRemoveIntegrationFromService: false, - traceEnabled: true, traceId128BitGenerationEnabled: true, traceId128BitLoggingEnabled: true, tracePropagationBehaviorExtract: 'continue', @@ -446,7 +518,7 @@ describe('Config', () => { assert.deepStrictEqual(config.dynamicInstrumentation?.redactionExcludedIdentifiers, []) assert.deepStrictEqual(config.grpc.client.error.statuses, GRPC_CLIENT_ERROR_STATUSES) assert.deepStrictEqual(config.grpc.server.error.statuses, GRPC_SERVER_ERROR_STATUSES) - assert.deepStrictEqual(config.injectionEnabled, []) + assert.deepStrictEqual(config.injectionEnabled, undefined) assert.deepStrictEqual(config.serviceMapping, {}) assert.deepStrictEqual(config.tracePropagationStyle?.extract, ['datadog', 'tracecontext', 'baggage']) assert.deepStrictEqual(config.tracePropagationStyle?.inject, ['datadog', 'tracecontext', 'baggage']) @@ -456,164 +528,169 @@ describe('Config', () => { sinon.assert.calledOnce(updateConfig) - assertObjectContains(updateConfig.getCall(0).args[0].sort(comparator), [ - { name: 'apmTracingEnabled', value: true, origin: 'default' }, - { name: 'appsec.apiSecurity.enabled', value: true, origin: 'default' }, - { name: 'appsec.apiSecurity.sampleDelay', value: 30, origin: 'default' }, - { name: 'appsec.apiSecurity.endpointCollectionEnabled', value: true, origin: 'default' }, - { name: 'appsec.apiSecurity.endpointCollectionMessageLimit', value: 300, origin: 'default' }, - { name: 'appsec.apiSecurity.downstreamBodyAnalysisSampleRate', value: 0.5, origin: 'default' }, - { name: 'appsec.apiSecurity.maxDownstreamRequestBodyAnalysis', value: 1, 
origin: 'default' }, - { name: 'appsec.blockedTemplateHtml', value: undefined, origin: 'default' }, - { name: 'appsec.blockedTemplateJson', value: undefined, origin: 'default' }, - { name: 'appsec.enabled', value: undefined, origin: 'default' }, - { name: 'appsec.eventTracking.mode', value: 'identification', origin: 'default' }, - { name: 'appsec.extendedHeadersCollection.enabled', value: false, origin: 'default' }, - { name: 'appsec.extendedHeadersCollection.maxHeaders', value: 50, origin: 'default' }, - { name: 'appsec.extendedHeadersCollection.redaction', value: true, origin: 'default' }, + assertConfigUpdateContains(updateConfig.getCall(0).args[0], [ + { name: 'DD_APM_TRACING_ENABLED', value: true, origin: 'default' }, + { name: 'DD_API_SECURITY_ENABLED', value: true, origin: 'default' }, + { name: 'DD_API_SECURITY_SAMPLE_DELAY', value: 30, origin: 'default' }, + { name: 'DD_API_SECURITY_ENDPOINT_COLLECTION_ENABLED', value: true, origin: 'default' }, + { name: 'DD_API_SECURITY_ENDPOINT_COLLECTION_MESSAGE_LIMIT', value: 300, origin: 'default' }, + { name: 'DD_API_SECURITY_DOWNSTREAM_BODY_ANALYSIS_SAMPLE_RATE', value: 0.5, origin: 'default' }, + { name: 'DD_API_SECURITY_MAX_DOWNSTREAM_REQUEST_BODY_ANALYSIS', value: 1, origin: 'default' }, + { name: 'DD_APPSEC_HTTP_BLOCKED_TEMPLATE_HTML', value: null, origin: 'default' }, + { name: 'DD_APPSEC_HTTP_BLOCKED_TEMPLATE_JSON', value: null, origin: 'default' }, + { name: 'DD_APPSEC_ENABLED', value: null, origin: 'default' }, + { name: 'DD_APPSEC_AUTO_USER_INSTRUMENTATION_MODE', value: 'identification', origin: 'default' }, + { name: 'DD_APPSEC_COLLECT_ALL_HEADERS', value: false, origin: 'default' }, + { name: 'DD_APPSEC_MAX_COLLECTED_HEADERS', value: 50, origin: 'default' }, + { name: 'DD_APPSEC_HEADER_COLLECTION_REDACTION_ENABLED', value: true, origin: 'default' }, { - name: 'appsec.obfuscatorKeyRegex', + name: 'DD_APPSEC_OBFUSCATION_PARAMETER_KEY_REGEXP', // eslint-disable-next-line @stylistic/max-len value: 
'(?i)pass|pw(?:or)?d|secret|(?:api|private|public|access)[_-]?key|token|consumer[_-]?(?:id|key|secret)|sign(?:ed|ature)|bearer|authorization|jsessionid|phpsessid|asp\\.net[_-]sessionid|sid|jwt', origin: 'default', }, { - name: 'appsec.obfuscatorValueRegex', + name: 'DD_APPSEC_OBFUSCATION_PARAMETER_VALUE_REGEXP', // eslint-disable-next-line @stylistic/max-len value: '(?i)(?:p(?:ass)?w(?:or)?d|pass(?:[_-]?phrase)?|secret(?:[_-]?key)?|(?:(?:api|private|public|access)[_-]?)key(?:[_-]?id)?|(?:(?:auth|access|id|refresh)[_-]?)?token|consumer[_-]?(?:id|key|secret)|sign(?:ed|ature)?|auth(?:entication|orization)?|jsessionid|phpsessid|asp\\.net(?:[_-]|-)sessionid|sid|jwt)(?:\\s*=([^;&]+)|"\\s*:\\s*("[^"]+"|\\d+))|bearer\\s+([a-z0-9\\._\\-]+)|token\\s*:\\s*([a-z0-9]{13})|gh[opsu]_([0-9a-zA-Z]{36})|ey[I-L][\\w=-]+\\.(ey[I-L][\\w=-]+(?:\\.[\\w.+\\/=-]+)?)|[\\-]{5}BEGIN[a-z\\s]+PRIVATE\\sKEY[\\-]{5}([^\\-]+)[\\-]{5}END[a-z\\s]+PRIVATE\\sKEY|ssh-rsa\\s*([a-z0-9\\/\\.+]{100,})', origin: 'default', }, - { name: 'appsec.rasp.bodyCollection', value: false, origin: 'default' }, - { name: 'appsec.rasp.enabled', value: true, origin: 'default' }, - { name: 'appsec.rateLimit', value: 100, origin: 'default' }, - { name: 'appsec.rules', value: undefined, origin: 'default' }, - { name: 'appsec.sca.enabled', value: undefined, origin: 'default' }, - { name: 'appsec.stackTrace.enabled', value: true, origin: 'default' }, - { name: 'appsec.stackTrace.maxDepth', value: 32, origin: 'default' }, - { name: 'appsec.stackTrace.maxStackTraces', value: 2, origin: 'default' }, - { name: 'appsec.wafTimeout', value: 5e3, origin: 'default' }, - { name: 'ciVisAgentlessLogSubmissionEnabled', value: false, origin: 'default' }, - { name: 'ciVisibilityTestSessionName', value: undefined, origin: 'default' }, - { name: 'clientIpEnabled', value: false, origin: 'default' }, - { name: 'clientIpHeader', value: undefined, origin: 'default' }, - { name: 'codeOriginForSpans.enabled', value: true, origin: 'default' }, - { 
name: 'codeOriginForSpans.experimental.exit_spans.enabled', value: false, origin: 'default' }, - { name: 'dbmPropagationMode', value: 'disabled', origin: 'default' }, - { name: 'dogstatsd.hostname', value: '127.0.0.1', origin: 'calculated' }, - { name: 'dogstatsd.port', value: '8125', origin: 'default' }, - { name: 'dsmEnabled', value: false, origin: 'default' }, - { name: 'dynamicInstrumentation.enabled', value: false, origin: 'default' }, - { name: 'dynamicInstrumentation.probeFile', value: undefined, origin: 'default' }, - { name: 'dynamicInstrumentation.redactedIdentifiers', value: [], origin: 'default' }, - { name: 'dynamicInstrumentation.redactionExcludedIdentifiers', value: [], origin: 'default' }, - { name: 'dynamicInstrumentation.uploadIntervalSeconds', value: 1, origin: 'default' }, - { name: 'env', value: undefined, origin: 'default' }, - { name: 'experimental.aiguard.block', value: false, origin: 'default' }, - { name: 'experimental.aiguard.enabled', value: false, origin: 'default' }, - { name: 'experimental.aiguard.endpoint', value: undefined, origin: 'default' }, - { name: 'experimental.aiguard.maxContentSize', value: 512 * 1024, origin: 'default' }, - { name: 'experimental.aiguard.maxMessagesLength', value: 16, origin: 'default' }, - { name: 'experimental.aiguard.timeout', value: 10_000, origin: 'default' }, - { name: 'experimental.enableGetRumData', value: false, origin: 'default' }, - { name: 'experimental.exporter', value: '', origin: 'default' }, - { name: 'flakyTestRetriesCount', value: 5, origin: 'default' }, - { name: 'flushInterval', value: 2000, origin: 'default' }, - { name: 'flushMinSpans', value: 1000, origin: 'default' }, - { name: 'gitMetadataEnabled', value: true, origin: 'default' }, - { name: 'headerTags', value: [], origin: 'default' }, - { name: 'hostname', value: '127.0.0.1', origin: 'default' }, - { name: 'iast.dbRowsToTaint', value: 1, origin: 'default' }, - { name: 'iast.deduplicationEnabled', value: true, origin: 'default' }, 
- { name: 'iast.enabled', value: false, origin: 'default' }, - { name: 'iast.maxConcurrentRequests', value: 2, origin: 'default' }, - { name: 'iast.maxContextOperations', value: 2, origin: 'default' }, - { name: 'iast.redactionEnabled', value: true, origin: 'default' }, - { name: 'iast.redactionNamePattern', value: defaults['iast.redactionNamePattern'], origin: 'default' }, - { name: 'iast.redactionValuePattern', value: defaults['iast.redactionValuePattern'], origin: 'default' }, - { name: 'iast.requestSampling', value: 30, origin: 'default' }, - { name: 'iast.securityControlsConfiguration', value: undefined, origin: 'default' }, - { name: 'iast.stackTrace.enabled', value: true, origin: 'default' }, - { name: 'iast.telemetryVerbosity', value: 'INFORMATION', origin: 'default' }, - { name: 'injectForce', value: false, origin: 'default' }, - { name: 'injectionEnabled', value: [], origin: 'default' }, + { name: 'DD_APPSEC_RASP_COLLECT_REQUEST_BODY', value: false, origin: 'default' }, + { name: 'DD_APPSEC_RASP_ENABLED', value: true, origin: 'default' }, + { name: 'DD_APPSEC_TRACE_RATE_LIMIT', value: 100, origin: 'default' }, + { name: 'DD_APPSEC_RULES', value: null, origin: 'default' }, + { name: 'DD_APPSEC_SCA_ENABLED', value: null, origin: 'default' }, + { name: 'DD_APPSEC_STACK_TRACE_ENABLED', value: true, origin: 'default' }, + { name: 'DD_APPSEC_MAX_STACK_TRACE_DEPTH', value: 32, origin: 'default' }, + { name: 'DD_APPSEC_MAX_STACK_TRACES', value: 2, origin: 'default' }, + { name: 'DD_APPSEC_WAF_TIMEOUT', value: 5e3, origin: 'default' }, + { name: 'DD_AGENTLESS_LOG_SUBMISSION_ENABLED', value: false, origin: 'default' }, + { name: 'DD_TEST_SESSION_NAME', value: null, origin: 'default' }, + { name: 'DD_TRACE_CLIENT_IP_ENABLED', value: false, origin: 'default' }, + { name: 'DD_TRACE_CLIENT_IP_HEADER', value: null, origin: 'default' }, + { name: 'DD_CODE_ORIGIN_FOR_SPANS_ENABLED', value: true, origin: 'default' }, + { name: 
'DD_CODE_ORIGIN_FOR_SPANS_EXPERIMENTAL_EXIT_SPANS_ENABLED', value: false, origin: 'default' }, + { name: 'DD_DBM_PROPAGATION_MODE', value: 'disabled', origin: 'default' }, + { name: 'DD_DOGSTATSD_HOST', value: 'localhost', origin: 'default' }, + { name: 'DD_DOGSTATSD_PORT', value: 8125, origin: 'default' }, + { name: 'DD_DATA_STREAMS_ENABLED', value: false, origin: 'default' }, + { name: 'DD_DYNAMIC_INSTRUMENTATION_ENABLED', value: false, origin: 'default' }, + { name: 'DD_DYNAMIC_INSTRUMENTATION_PROBE_FILE', value: null, origin: 'default' }, + { name: 'DD_DYNAMIC_INSTRUMENTATION_REDACTED_IDENTIFIERS', value: '', origin: 'default' }, + { name: 'DD_DYNAMIC_INSTRUMENTATION_REDACTION_EXCLUDED_IDENTIFIERS', value: '', origin: 'default' }, + { name: 'DD_DYNAMIC_INSTRUMENTATION_UPLOAD_INTERVAL_SECONDS', value: 1, origin: 'default' }, + { name: 'DD_ENV', value: null, origin: 'default' }, + { name: 'DD_AI_GUARD_ENABLED', value: false, origin: 'default' }, + { name: 'DD_AI_GUARD_BLOCK', value: false, origin: 'default' }, + { name: 'DD_AI_GUARD_ENDPOINT', value: null, origin: 'default' }, + { name: 'DD_AI_GUARD_MAX_CONTENT_SIZE', value: 512 * 1024, origin: 'default' }, + { name: 'DD_AI_GUARD_MAX_MESSAGES_LENGTH', value: 16, origin: 'default' }, + { name: 'DD_AI_GUARD_TIMEOUT', value: 10_000, origin: 'default' }, + { name: 'DD_TRACE_EXPERIMENTAL_GET_RUM_DATA_ENABLED', value: false, origin: 'default' }, + { name: 'DD_TRACE_EXPERIMENTAL_EXPORTER', value: '', origin: 'default' }, + { name: 'DD_CIVISIBILITY_FLAKY_RETRY_COUNT', value: 5, origin: 'default' }, + { name: 'DD_TRACE_FLUSH_INTERVAL', value: 2000, origin: 'default' }, + { name: 'DD_TRACE_PARTIAL_FLUSH_MIN_SPANS', value: 1000, origin: 'default' }, + { name: 'DD_TRACE_GIT_METADATA_ENABLED', value: true, origin: 'default' }, + { name: 'DD_TRACE_HEADER_TAGS', value: '', origin: 'default' }, + { name: 'DD_AGENT_HOST', value: '127.0.0.1', origin: 'default' }, + { name: 'DD_IAST_DB_ROWS_TO_TAINT', value: 1, origin: 'default' }, 
+ { name: 'DD_IAST_DEDUPLICATION_ENABLED', value: true, origin: 'default' }, + { name: 'DD_IAST_ENABLED', value: false, origin: 'default' }, + { name: 'DD_IAST_MAX_CONCURRENT_REQUESTS', value: 2, origin: 'default' }, + { name: 'DD_IAST_MAX_CONTEXT_OPERATIONS', value: 2, origin: 'default' }, + { name: 'DD_IAST_REDACTION_ENABLED', value: true, origin: 'default' }, + { name: 'DD_IAST_REDACTION_NAME_PATTERN', value: defaults['iast.redactionNamePattern'], origin: 'default' }, + { name: 'DD_IAST_REDACTION_VALUE_PATTERN', value: defaults['iast.redactionValuePattern'], origin: 'default' }, + { name: 'DD_IAST_REQUEST_SAMPLING', value: 30, origin: 'default' }, + { name: 'DD_IAST_SECURITY_CONTROLS_CONFIGURATION', value: null, origin: 'default' }, + { name: 'DD_IAST_STACK_TRACE_ENABLED', value: true, origin: 'default' }, + { name: 'DD_IAST_TELEMETRY_VERBOSITY', value: 'INFORMATION', origin: 'default' }, + { name: 'DD_INJECT_FORCE', value: false, origin: 'default' }, + { name: 'DD_INJECTION_ENABLED', value: null, origin: 'default' }, { name: 'instrumentationSource', value: 'manual', origin: 'default' }, { name: 'isCiVisibility', value: false, origin: 'default' }, - { name: 'isEarlyFlakeDetectionEnabled', value: true, origin: 'default' }, - { name: 'isFlakyTestRetriesEnabled', value: true, origin: 'default' }, - { name: 'isGCPFunction', value: false, origin: 'env_var' }, - { name: 'isGitUploadEnabled', value: false, origin: 'default' }, - { name: 'isIntelligentTestRunnerEnabled', value: false, origin: 'default' }, - { name: 'isManualApiEnabled', value: false, origin: 'default' }, - { name: 'langchain.spanCharLimit', value: 128, origin: 'default' }, - { name: 'langchain.spanPromptCompletionSampleRate', value: 1.0, origin: 'default' }, - { name: 'llmobs.agentlessEnabled', value: undefined, origin: 'default' }, - { name: 'llmobs.mlApp', value: undefined, origin: 'default' }, - { name: 'isTestDynamicInstrumentationEnabled', value: true, origin: 'default' }, - { name: 'logInjection', 
value: true, origin: 'default' }, - { name: 'lookup', value: undefined, origin: 'default' }, - { name: 'middlewareTracingEnabled', value: true, origin: 'default' }, - { name: 'openai.spanCharLimit', value: 128, origin: 'default' }, - { name: 'openAiLogsEnabled', value: false, origin: 'default' }, - { name: 'peerServiceMapping', value: {}, origin: 'default' }, + { name: 'DD_CIVISIBILITY_EARLY_FLAKE_DETECTION_ENABLED', value: true, origin: 'default' }, + { name: 'DD_CIVISIBILITY_FLAKY_RETRY_ENABLED', value: true, origin: 'default' }, + { name: 'DD_CIVISIBILITY_GIT_UPLOAD_ENABLED', value: true, origin: 'default' }, + { name: 'DD_CIVISIBILITY_ITR_ENABLED', value: true, origin: 'default' }, + { name: 'DD_CIVISIBILITY_MANUAL_API_ENABLED', value: true, origin: 'default' }, + { name: 'DD_LANGCHAIN_SPAN_CHAR_LIMIT', value: 128, origin: 'default' }, + { name: 'DD_LANGCHAIN_SPAN_PROMPT_COMPLETION_SAMPLE_RATE', value: 1.0, origin: 'default' }, + { name: 'DD_LLMOBS_AGENTLESS_ENABLED', value: null, origin: 'default' }, + { name: 'DD_LLMOBS_ML_APP', value: null, origin: 'default' }, + { name: 'DD_TEST_FAILED_TEST_REPLAY_ENABLED', value: true, origin: 'default' }, + { name: 'DD_LOGS_INJECTION', value: true, origin: 'default' }, + { name: 'lookup', value: dns.lookup, origin: 'default' }, + { name: 'DD_TRACE_MIDDLEWARE_TRACING_ENABLED', value: true, origin: 'default' }, + { name: 'DD_OPENAI_SPAN_CHAR_LIMIT', value: 128, origin: 'default' }, + { name: 'DD_OPENAI_LOGS_ENABLED', value: false, origin: 'default' }, + { name: 'DD_TRACE_PEER_SERVICE_MAPPING', value: '', origin: 'default' }, { name: 'plugins', value: true, origin: 'default' }, - { name: 'port', value: '8126', origin: 'default' }, - { name: 'profiling.enabled', value: false, origin: 'default' }, - { name: 'profiling.exporters', value: 'agent', origin: 'default' }, - { name: 'profiling.sourceMap', value: true, origin: 'default' }, - { name: 'protocolVersion', value: '0.4', origin: 'default' }, + { name: 'DD_TRACE_AGENT_PORT', 
value: 8126, origin: 'default' }, + { name: 'DD_PROFILING_ENABLED', value: 'false', origin: 'default' }, + { name: 'DD_PROFILING_EXPORTERS', value: 'agent', origin: 'default' }, + { name: 'DD_PROFILING_SOURCE_MAP', value: true, origin: 'default' }, + { name: 'DD_TRACE_AGENT_PROTOCOL_VERSION', value: '0.4', origin: 'default' }, { - name: 'queryStringObfuscation', + name: 'DD_TRACE_OBFUSCATION_QUERY_STRING_REGEXP', value: config.queryStringObfuscation, origin: 'default', }, - { name: 'remoteConfig.enabled', value: true, origin: 'default' }, - { name: 'remoteConfig.pollInterval', value: 5, origin: 'default' }, - { name: 'reportHostname', value: false, origin: 'default' }, - { name: 'runtimeMetrics.enabled', value: false, origin: 'default' }, - { name: 'runtimeMetricsRuntimeId', value: false, origin: 'default' }, - { name: 'sampleRate', value: undefined, origin: 'default' }, - { name: 'sampler.rateLimit', value: 100, origin: 'default' }, - { name: 'sampler.rules', value: [], origin: 'default' }, - { name: 'scope', value: undefined, origin: 'default' }, - { name: 'service', value: 'node', origin: 'default' }, - { name: 'site', value: 'datadoghq.com', origin: 'default' }, - { name: 'spanAttributeSchema', value: 'v0', origin: 'default' }, - { name: 'spanComputePeerService', value: false, origin: 'calculated' }, - { name: 'spanRemoveIntegrationFromService', value: false, origin: 'default' }, - { name: 'startupLogs', value: DD_MAJOR >= 6, origin: 'default' }, - { name: 'stats.enabled', value: false, origin: 'calculated' }, - { name: 'tagsHeaderMaxLength', value: 512, origin: 'default' }, - { name: 'telemetry.debug', value: false, origin: 'default' }, - { name: 'telemetry.dependencyCollection', value: true, origin: 'default' }, - { name: 'telemetry.enabled', value: true, origin: 'default' }, - { name: 'telemetry.heartbeatInterval', value: 60, origin: 'default' }, - { name: 'telemetry.logCollection', value: true, origin: 'default' }, - { name: 'telemetry.metrics', value: 
true, origin: 'default' }, - { name: 'traceEnabled', value: true, origin: 'default' }, - { name: 'traceId128BitGenerationEnabled', value: true, origin: 'default' }, - { name: 'traceId128BitLoggingEnabled', value: true, origin: 'default' }, - { name: 'tracing', value: true, origin: 'default' }, - { name: 'url', value: '', origin: 'default' }, - { name: 'version', value: '', origin: 'default' }, - { name: 'vertexai.spanCharLimit', value: 128, origin: 'default' }, - { name: 'vertexai.spanPromptCompletionSampleRate', value: 1.0, origin: 'default' }, + { name: 'DD_REMOTE_CONFIGURATION_ENABLED', value: true, origin: 'default' }, + { name: 'DD_REMOTE_CONFIG_POLL_INTERVAL_SECONDS', value: 5, origin: 'default' }, + { name: 'DD_TRACE_REPORT_HOSTNAME', value: false, origin: 'default' }, + { name: 'DD_RUNTIME_METRICS_ENABLED', value: false, origin: 'default' }, + { name: 'DD_RUNTIME_METRICS_RUNTIME_ID_ENABLED', value: false, origin: 'default' }, + { name: 'DD_TRACE_SAMPLE_RATE', value: null, origin: 'default' }, + { name: 'DD_TRACE_RATE_LIMIT', value: 100, origin: 'default' }, + { name: 'DD_TRACE_SAMPLING_RULES', value: '[]', origin: 'default' }, + { name: 'DD_TRACE_SCOPE', value: null, origin: 'default' }, + { name: 'DD_SERVICE', value: null, origin: 'default' }, + { name: 'DD_SITE', value: 'datadoghq.com', origin: 'default' }, + { name: 'DD_TRACE_SPAN_ATTRIBUTE_SCHEMA', value: 'v0', origin: 'default' }, + { name: 'DD_TRACE_REMOVE_INTEGRATION_SERVICE_NAMES_ENABLED', value: false, origin: 'default' }, + { name: 'DD_TRACE_STARTUP_LOGS', value: DD_MAJOR >= 6, origin: 'default' }, + { name: 'DD_TRACE_X_DATADOG_TAGS_MAX_LENGTH', value: 512, origin: 'default' }, + { name: 'DD_TELEMETRY_DEBUG', value: false, origin: 'default' }, + { name: 'DD_TELEMETRY_DEPENDENCY_COLLECTION_ENABLED', value: true, origin: 'default' }, + { name: 'DD_INSTRUMENTATION_TELEMETRY_ENABLED', value: true, origin: 'default' }, + { name: 'DD_TELEMETRY_HEARTBEAT_INTERVAL', value: 60, origin: 'default' }, + { 
name: 'DD_TELEMETRY_LOG_COLLECTION_ENABLED', value: true, origin: 'default' }, + { name: 'DD_TELEMETRY_METRICS_ENABLED', value: true, origin: 'default' }, + { name: 'DD_TRACE_ENABLED', value: true, origin: 'default' }, + { name: 'DD_TRACE_128_BIT_TRACEID_GENERATION_ENABLED', value: true, origin: 'default' }, + { name: 'DD_TRACE_128_BIT_TRACEID_LOGGING_ENABLED', value: true, origin: 'default' }, + { name: 'DD_TRACE_ENABLED', value: true, origin: 'default' }, + { name: 'DD_TRACE_AGENT_URL', value: '', origin: 'default' }, + { name: 'DD_VERSION', value: null, origin: 'default' }, + { name: 'DD_VERTEXAI_SPAN_CHAR_LIMIT', value: 128, origin: 'default' }, + { name: 'DD_VERTEXAI_SPAN_PROMPT_COMPLETION_SAMPLE_RATE', value: 1.0, origin: 'default' }, + { name: 'DD_TRACE_PEER_SERVICE_DEFAULTS_ENABLED', value: false, origin: 'default' }, + { name: 'DD_TRACE_STATS_COMPUTATION_ENABLED', value: false, origin: 'default' }, ].sort(comparator)) }) it('should support logging', () => { - const config = getConfig({ - logger: {}, - debug: true, - }) + process.env.DD_TRACE_DEBUG = 'true' + const logger = { + debug: sinon.spy(), + error: sinon.spy(), + } + getConfig({ logger }) - sinon.assert.calledWith(log.use, config.logger) - sinon.assert.calledWith(log.toggle, config.debug) + log.debug('debug') + log.error('error') + + sinon.assert.calledOnceWithExactly(logger.debug, 'debug') + sinon.assert.calledOnce(logger.error) + assert.ok(logger.error.firstCall.args[0] instanceof Error) + assert.strictEqual(logger.error.firstCall.args[0].message, 'error') }) it('should not warn on undefined DD_TRACE_SPAN_ATTRIBUTE_SCHEMA', () => { const config = getConfig({ logger: {}, - debug: true, }) sinon.assert.notCalled(log.warn) assert.strictEqual(config.spanAttributeSchema, 'v0') @@ -621,7 +698,6 @@ describe('Config', () => { it('should initialize from the default service', () => { pkg.name = 'test' - reloadLoggerAndConfig() const config = getConfig() @@ -631,7 +707,6 @@ describe('Config', () => { 
it('should initialize from the default version', () => { pkg.version = '1.2.3' - reloadLoggerAndConfig() const config = getConfig() @@ -762,9 +837,6 @@ describe('Config', () => { process.env.DD_VERTEXAI_SPAN_CHAR_LIMIT = '50' process.env.DD_VERTEXAI_SPAN_PROMPT_COMPLETION_SAMPLE_RATE = '0.5' - // required if we want to check updates to config.debug and config.logLevel which is fetched from logger - reloadLoggerAndConfig() - const config = getConfig() assertObjectContains(config, { @@ -825,7 +897,7 @@ describe('Config', () => { debug: true, dogstatsd: { hostname: 'dsd-agent', - port: '5218', + port: 5218, }, dynamicInstrumentation: { enabled: true, @@ -900,11 +972,10 @@ describe('Config', () => { version: '1.0.0', env: 'test', }, - traceEnabled: true, traceId128BitGenerationEnabled: true, traceId128BitLoggingEnabled: true, tracePropagationBehaviorExtract: 'restart', - tracing: false, + tracing: true, version: '1.0.0', }) assert.deepStrictEqual(config.grpc.client.error.statuses, [3, 13, 400, 401, 402, 403]) @@ -916,7 +987,7 @@ describe('Config', () => { assert.deepStrictEqual(config.peerServiceMapping, { c: 'cc', d: 'dd' }) assert.deepStrictEqual(config.sampler, { sampleRate: 0.5, - rateLimit: '-1', + rateLimit: -1, rules: [ { service: 'usersvc', name: 'healthcheck', sampleRate: 0.0 }, { service: 'usersvc', sampleRate: 0.5 }, @@ -936,102 +1007,107 @@ describe('Config', () => { sinon.assert.calledOnce(updateConfig) - assertObjectContains(updateConfig.getCall(0).args[0].sort(comparator), [ - { name: 'apmTracingEnabled', value: false, origin: 'env_var' }, - { name: 'appsec.apiSecurity.enabled', value: true, origin: 'env_var' }, - { name: 'appsec.apiSecurity.sampleDelay', value: 25, origin: 'env_var' }, - { name: 'appsec.apiSecurity.endpointCollectionEnabled', value: false, origin: 'env_var' }, - { name: 'appsec.apiSecurity.endpointCollectionMessageLimit', value: 500, origin: 'env_var' }, - { name: 'appsec.apiSecurity.downstreamBodyAnalysisSampleRate', value: 0.75, 
origin: 'env_var' }, - { name: 'appsec.apiSecurity.maxDownstreamRequestBodyAnalysis', value: 2, origin: 'env_var' }, - { name: 'appsec.blockedTemplateHtml', value: BLOCKED_TEMPLATE_HTML_PATH, origin: 'env_var' }, - { name: 'appsec.blockedTemplateJson', value: BLOCKED_TEMPLATE_JSON_PATH, origin: 'env_var' }, - { name: 'appsec.enabled', value: true, origin: 'env_var' }, - { name: 'appsec.eventTracking.mode', value: 'extended', origin: 'env_var' }, - { name: 'appsec.extendedHeadersCollection.enabled', value: true, origin: 'env_var' }, - { name: 'appsec.extendedHeadersCollection.maxHeaders', value: '42', origin: 'env_var' }, - { name: 'appsec.extendedHeadersCollection.redaction', value: false, origin: 'env_var' }, - { name: 'appsec.obfuscatorKeyRegex', value: '.*', origin: 'env_var' }, - { name: 'appsec.obfuscatorValueRegex', value: '.*', origin: 'env_var' }, - { name: 'appsec.rasp.bodyCollection', value: true, origin: 'env_var' }, - { name: 'appsec.rasp.enabled', value: false, origin: 'env_var' }, - { name: 'appsec.rateLimit', value: '42', origin: 'env_var' }, - { name: 'appsec.rules', value: RULES_JSON_PATH, origin: 'env_var' }, - { name: 'appsec.sca.enabled', value: true, origin: 'env_var' }, - { name: 'appsec.stackTrace.enabled', value: false, origin: 'env_var' }, - { name: 'appsec.stackTrace.maxDepth', value: '42', origin: 'env_var' }, - { name: 'appsec.stackTrace.maxStackTraces', value: '5', origin: 'env_var' }, - { name: 'appsec.wafTimeout', value: '42', origin: 'env_var' }, - { name: 'clientIpEnabled', value: true, origin: 'env_var' }, - { name: 'clientIpHeader', value: 'x-true-client-ip', origin: 'env_var' }, - { name: 'codeOriginForSpans.enabled', value: false, origin: 'env_var' }, - { name: 'codeOriginForSpans.experimental.exit_spans.enabled', value: true, origin: 'env_var' }, - { name: 'crashtracking.enabled', value: false, origin: 'env_var' }, - { name: 'dogstatsd.hostname', value: 'dsd-agent', origin: 'env_var' }, - { name: 'dogstatsd.port', value: 
'5218', origin: 'env_var' }, - { name: 'dynamicInstrumentation.enabled', value: true, origin: 'env_var' }, - { name: 'dynamicInstrumentation.probeFile', value: 'probes.json', origin: 'env_var' }, - { name: 'dynamicInstrumentation.redactedIdentifiers', value: ['foo', 'bar'], origin: 'env_var' }, - { name: 'dynamicInstrumentation.redactionExcludedIdentifiers', value: ['a', 'b', 'c'], origin: 'env_var' }, - { name: 'dynamicInstrumentation.uploadIntervalSeconds', value: 0.1, origin: 'env_var' }, - { name: 'env', value: 'test', origin: 'env_var' }, - { name: 'experimental.aiguard.block', value: true, origin: 'env_var' }, - { name: 'experimental.aiguard.enabled', value: true, origin: 'env_var' }, - { name: 'experimental.aiguard.endpoint', value: 'https://dd.datad0g.com/api/unstable/ai-guard', origin: 'env_var' }, - { name: 'experimental.aiguard.maxContentSize', value: String(1024 * 1024), origin: 'env_var' }, - { name: 'experimental.aiguard.maxMessagesLength', value: '32', origin: 'env_var' }, - { name: 'experimental.aiguard.timeout', value: '2000', origin: 'env_var' }, - { name: 'experimental.enableGetRumData', value: true, origin: 'env_var' }, - { name: 'experimental.exporter', value: 'log', origin: 'env_var' }, - { name: 'hostname', value: 'agent', origin: 'env_var' }, - { name: 'iast.dbRowsToTaint', value: 2, origin: 'env_var' }, - { name: 'iast.deduplicationEnabled', value: false, origin: 'env_var' }, - { name: 'iast.enabled', value: true, origin: 'env_var' }, - { name: 'iast.maxConcurrentRequests', value: '3', origin: 'env_var' }, - { name: 'iast.maxContextOperations', value: '4', origin: 'env_var' }, - { name: 'iast.redactionEnabled', value: false, origin: 'env_var' }, - { name: 'iast.redactionNamePattern', value: 'REDACTION_NAME_PATTERN', origin: 'env_var' }, - { name: 'iast.redactionValuePattern', value: 'REDACTION_VALUE_PATTERN', origin: 'env_var' }, - { name: 'iast.requestSampling', value: '40', origin: 'env_var' }, + 
assertConfigUpdateContains(updateConfig.getCall(0).args[0], [ + { name: 'DD_APM_TRACING_ENABLED', value: false, origin: 'env_var' }, + { name: 'DD_API_SECURITY_ENABLED', value: true, origin: 'env_var' }, + { name: 'DD_API_SECURITY_SAMPLE_DELAY', value: 25, origin: 'env_var' }, + { name: 'DD_API_SECURITY_ENDPOINT_COLLECTION_ENABLED', value: false, origin: 'env_var' }, + { name: 'DD_API_SECURITY_ENDPOINT_COLLECTION_MESSAGE_LIMIT', value: 500, origin: 'env_var' }, + { name: 'DD_API_SECURITY_DOWNSTREAM_BODY_ANALYSIS_SAMPLE_RATE', value: 0.75, origin: 'env_var' }, + { name: 'DD_API_SECURITY_MAX_DOWNSTREAM_REQUEST_BODY_ANALYSIS', value: 2, origin: 'env_var' }, + { name: 'DD_APPSEC_HTTP_BLOCKED_TEMPLATE_HTML', value: BLOCKED_TEMPLATE_HTML_PATH, origin: 'env_var' }, + { name: 'DD_APPSEC_HTTP_BLOCKED_TEMPLATE_JSON', value: BLOCKED_TEMPLATE_JSON_PATH, origin: 'env_var' }, + { name: 'DD_APPSEC_ENABLED', value: true, origin: 'env_var' }, + { name: 'DD_APPSEC_AUTO_USER_INSTRUMENTATION_MODE', value: 'extended', origin: 'env_var' }, + { name: 'DD_APPSEC_COLLECT_ALL_HEADERS', value: true, origin: 'env_var' }, + { name: 'DD_APPSEC_MAX_COLLECTED_HEADERS', value: 42, origin: 'env_var' }, + { name: 'DD_APPSEC_HEADER_COLLECTION_REDACTION_ENABLED', value: false, origin: 'env_var' }, + { name: 'DD_APPSEC_OBFUSCATION_PARAMETER_KEY_REGEXP', value: '.*', origin: 'env_var' }, + { name: 'DD_APPSEC_OBFUSCATION_PARAMETER_VALUE_REGEXP', value: '.*', origin: 'env_var' }, + { name: 'DD_APPSEC_RASP_COLLECT_REQUEST_BODY', value: true, origin: 'env_var' }, + { name: 'DD_APPSEC_RASP_ENABLED', value: false, origin: 'env_var' }, + { name: 'DD_APPSEC_TRACE_RATE_LIMIT', value: 42, origin: 'env_var' }, + { name: 'DD_APPSEC_RULES', value: RULES_JSON_PATH, origin: 'env_var' }, + { name: 'DD_APPSEC_SCA_ENABLED', value: true, origin: 'env_var' }, + { name: 'DD_APPSEC_STACK_TRACE_ENABLED', value: false, origin: 'env_var' }, + { name: 'DD_APPSEC_MAX_STACK_TRACE_DEPTH', value: 42, origin: 'env_var' }, + { name: 
'DD_APPSEC_MAX_STACK_TRACES', value: 5, origin: 'env_var' }, + { name: 'DD_APPSEC_WAF_TIMEOUT', value: 42, origin: 'env_var' }, + { name: 'DD_TRACE_CLIENT_IP_ENABLED', value: true, origin: 'env_var' }, + { name: 'DD_TRACE_CLIENT_IP_HEADER', value: 'x-true-client-ip', origin: 'env_var' }, + { name: 'DD_CODE_ORIGIN_FOR_SPANS_ENABLED', value: false, origin: 'env_var' }, + { name: 'DD_CODE_ORIGIN_FOR_SPANS_EXPERIMENTAL_EXIT_SPANS_ENABLED', value: true, origin: 'env_var' }, + { name: 'DD_CRASHTRACKING_ENABLED', value: false, origin: 'env_var' }, + { name: 'DD_DOGSTATSD_HOST', value: 'dsd-agent', origin: 'env_var' }, + { name: 'DD_DOGSTATSD_PORT', value: 5218, origin: 'env_var' }, + { name: 'DD_DYNAMIC_INSTRUMENTATION_ENABLED', value: true, origin: 'env_var' }, + { name: 'DD_DYNAMIC_INSTRUMENTATION_PROBE_FILE', value: 'probes.json', origin: 'env_var' }, + { name: 'DD_DYNAMIC_INSTRUMENTATION_REDACTED_IDENTIFIERS', value: 'foo,bar', origin: 'env_var' }, + { name: 'DD_DYNAMIC_INSTRUMENTATION_REDACTION_EXCLUDED_IDENTIFIERS', value: 'a,b,c', origin: 'env_var' }, + { name: 'DD_DYNAMIC_INSTRUMENTATION_UPLOAD_INTERVAL_SECONDS', value: 0.1, origin: 'env_var' }, + { name: 'DD_ENV', value: 'test', origin: 'env_var' }, + { name: 'DD_AI_GUARD_ENABLED', value: false, origin: 'default' }, + { name: 'DD_AI_GUARD_BLOCK', value: false, origin: 'default' }, + { name: 'DD_AI_GUARD_ENDPOINT', value: null, origin: 'default' }, + { name: 'DD_AI_GUARD_MAX_CONTENT_SIZE', value: 512 * 1024, origin: 'default' }, + { name: 'DD_AI_GUARD_MAX_MESSAGES_LENGTH', value: 16, origin: 'default' }, + { name: 'DD_AI_GUARD_TIMEOUT', value: 10_000, origin: 'default' }, + { name: 'DD_AI_GUARD_ENABLED', value: true, origin: 'env_var' }, + { name: 'DD_AI_GUARD_BLOCK', value: true, origin: 'env_var' }, + { name: 'DD_AI_GUARD_ENDPOINT', value: 'https://dd.datad0g.com/api/unstable/ai-guard', origin: 'env_var' }, + { name: 'DD_AI_GUARD_TIMEOUT', value: 2000, origin: 'env_var' }, + { name: 
'DD_AI_GUARD_MAX_CONTENT_SIZE', value: 1024 * 1024, origin: 'env_var' }, + { name: 'DD_AI_GUARD_MAX_MESSAGES_LENGTH', value: 32, origin: 'env_var' }, + { name: 'DD_TRACE_EXPERIMENTAL_GET_RUM_DATA_ENABLED', value: true, origin: 'env_var' }, + { name: 'DD_TRACE_EXPERIMENTAL_EXPORTER', value: 'log', origin: 'env_var' }, + { name: 'DD_AGENT_HOST', value: 'agent', origin: 'env_var' }, + { name: 'DD_IAST_DB_ROWS_TO_TAINT', value: 2, origin: 'env_var' }, + { name: 'DD_IAST_DEDUPLICATION_ENABLED', value: false, origin: 'env_var' }, + { name: 'DD_IAST_ENABLED', value: true, origin: 'env_var' }, + { name: 'DD_IAST_MAX_CONCURRENT_REQUESTS', value: 3, origin: 'env_var' }, + { name: 'DD_IAST_MAX_CONTEXT_OPERATIONS', value: 4, origin: 'env_var' }, + { name: 'DD_IAST_REDACTION_ENABLED', value: false, origin: 'env_var' }, + { name: 'DD_IAST_REDACTION_NAME_PATTERN', value: 'REDACTION_NAME_PATTERN', origin: 'env_var' }, + { name: 'DD_IAST_REDACTION_VALUE_PATTERN', value: 'REDACTION_VALUE_PATTERN', origin: 'env_var' }, + { name: 'DD_IAST_REQUEST_SAMPLING', value: 40, origin: 'env_var' }, { - name: 'iast.securityControlsConfiguration', + name: 'DD_IAST_SECURITY_CONTROLS_CONFIGURATION', value: 'SANITIZER:CODE_INJECTION:sanitizer.js:method', origin: 'env_var', }, - { name: 'iast.stackTrace.enabled', value: false, origin: 'env_var' }, - { name: 'iast.telemetryVerbosity', value: 'DEBUG', origin: 'env_var' }, - { name: 'injectForce', value: false, origin: 'env_var' }, - { name: 'injectionEnabled', value: ['tracer'], origin: 'env_var' }, - { name: 'instrumentation_config_id', value: 'abcdef123', origin: 'env_var' }, - { name: 'instrumentationSource', value: 'ssi', origin: 'env_var' }, - { name: 'isGCPFunction', value: false, origin: 'env_var' }, - { name: 'langchain.spanCharLimit', value: 50, origin: 'env_var' }, - { name: 'langchain.spanPromptCompletionSampleRate', value: 0.5, origin: 'env_var' }, - { name: 'llmobs.agentlessEnabled', value: true, origin: 'env_var' }, - { name: 
'llmobs.mlApp', value: 'myMlApp', origin: 'env_var' }, - { name: 'middlewareTracingEnabled', value: false, origin: 'env_var' }, - { name: 'peerServiceMapping', value: process.env.DD_TRACE_PEER_SERVICE_MAPPING, origin: 'env_var' }, - { name: 'port', value: '6218', origin: 'env_var' }, - { name: 'profiling.enabled', value: 'true', origin: 'env_var' }, - { name: 'protocolVersion', value: '0.5', origin: 'env_var' }, - { name: 'queryStringObfuscation', value: '.*', origin: 'env_var' }, - { name: 'remoteConfig.enabled', value: false, origin: 'env_var' }, - { name: 'remoteConfig.pollInterval', value: '42', origin: 'env_var' }, - { name: 'reportHostname', value: true, origin: 'env_var' }, - { name: 'runtimeMetrics.enabled', value: true, origin: 'env_var' }, - { name: 'runtimeMetricsRuntimeId', value: true, origin: 'env_var' }, - { name: 'sampler.rateLimit', value: '-1', origin: 'env_var' }, - { name: 'sampler.rules', value: process.env.DD_TRACE_SAMPLING_RULES, origin: 'env_var' }, - { name: 'sampleRate', value: 0.5, origin: 'env_var' }, - { name: 'service', value: 'service', origin: 'env_var' }, - { name: 'spanAttributeSchema', value: 'v1', origin: 'env_var' }, - { name: 'spanRemoveIntegrationFromService', value: true, origin: 'env_var' }, - { name: 'traceId128BitGenerationEnabled', value: true, origin: 'env_var' }, - { name: 'traceId128BitLoggingEnabled', value: true, origin: 'env_var' }, - { name: 'tracing', value: false, origin: 'env_var' }, - { name: 'version', value: '1.0.0', origin: 'env_var' }, - { name: 'vertexai.spanCharLimit', value: 50, origin: 'env_var' }, - { name: 'vertexai.spanPromptCompletionSampleRate', value: 0.5, origin: 'env_var' }, + { name: 'DD_IAST_STACK_TRACE_ENABLED', value: false, origin: 'env_var' }, + { name: 'DD_IAST_TELEMETRY_VERBOSITY', value: 'DEBUG', origin: 'env_var' }, + { name: 'DD_INJECT_FORCE', value: false, origin: 'env_var' }, + { name: 'DD_INJECTION_ENABLED', value: 'tracer', origin: 'env_var' }, + { name: 
'DD_INSTRUMENTATION_CONFIG_ID', value: 'abcdef123', origin: 'env_var' }, + { name: 'DD_LANGCHAIN_SPAN_CHAR_LIMIT', value: 50, origin: 'env_var' }, + { name: 'DD_LANGCHAIN_SPAN_PROMPT_COMPLETION_SAMPLE_RATE', value: 0.5, origin: 'env_var' }, + { name: 'DD_LLMOBS_AGENTLESS_ENABLED', value: true, origin: 'env_var' }, + { name: 'DD_LLMOBS_ML_APP', value: 'myMlApp', origin: 'env_var' }, + { name: 'DD_TRACE_MIDDLEWARE_TRACING_ENABLED', value: false, origin: 'env_var' }, + { name: 'DD_TRACE_PEER_SERVICE_MAPPING', value: 'c:cc, d:dd', origin: 'env_var' }, + { name: 'DD_TRACE_AGENT_PORT', value: 6218, origin: 'env_var' }, + { name: 'DD_PROFILING_ENABLED', value: 'true', origin: 'env_var' }, + { name: 'DD_TRACE_AGENT_PROTOCOL_VERSION', value: '0.5', origin: 'env_var' }, + { name: 'DD_TRACE_OBFUSCATION_QUERY_STRING_REGEXP', value: '.*', origin: 'env_var' }, + { name: 'DD_REMOTE_CONFIGURATION_ENABLED', value: false, origin: 'env_var' }, + { name: 'DD_REMOTE_CONFIG_POLL_INTERVAL_SECONDS', value: 42, origin: 'env_var' }, + { name: 'DD_TRACE_REPORT_HOSTNAME', value: true, origin: 'env_var' }, + { name: 'DD_RUNTIME_METRICS_ENABLED', value: true, origin: 'env_var' }, + { name: 'DD_RUNTIME_METRICS_RUNTIME_ID_ENABLED', value: true, origin: 'env_var' }, + { name: 'DD_TRACE_RATE_LIMIT', value: -1, origin: 'env_var' }, + { name: 'DD_TRACE_SAMPLING_RULES', value: process.env.DD_TRACE_SAMPLING_RULES, origin: 'env_var' }, + { name: 'DD_TRACE_SAMPLE_RATE', value: 0.5, origin: 'env_var' }, + { name: 'DD_SERVICE', value: 'service', origin: 'env_var' }, + { name: 'DD_TRACE_SPAN_ATTRIBUTE_SCHEMA', value: 'v1', origin: 'env_var' }, + { name: 'DD_TRACE_REMOVE_INTEGRATION_SERVICE_NAMES_ENABLED', value: true, origin: 'env_var' }, + { name: 'DD_TRACE_128_BIT_TRACEID_GENERATION_ENABLED', value: true, origin: 'env_var' }, + { name: 'DD_TRACE_128_BIT_TRACEID_LOGGING_ENABLED', value: true, origin: 'env_var' }, + { name: 'DD_TRACE_ENABLED', value: true, origin: 'env_var' }, + { name: 'DD_VERSION', value: 
'1.0.0', origin: 'env_var' }, + { name: 'DD_VERTEXAI_SPAN_CHAR_LIMIT', value: 50, origin: 'env_var' }, + { name: 'DD_VERTEXAI_SPAN_PROMPT_COMPLETION_SAMPLE_RATE', value: 0.5, origin: 'env_var' }, + { name: 'instrumentationSource', value: 'ssi', origin: 'calculated' }, ].sort(comparator)) }) @@ -1042,7 +1118,7 @@ describe('Config', () => { assertObjectContains(config, { service: 'node', - env: undefined, + env: '', version: '', }) @@ -1052,7 +1128,7 @@ describe('Config', () => { assertObjectContains(config, { service: 'node', - env: undefined, + env: '', version: '', }) }) @@ -1121,7 +1197,7 @@ describe('Config', () => { process.env.DD_SITE = 'datadoghq.eu' process.env.DD_TRACE_AGENT_HOSTNAME = 'agent' process.env.DD_TRACE_AGENT_PORT = '6218' - process.env.DD_TRACING_ENABLED = 'false' + process.env.DD_TRACE_ENABLED = 'false' process.env.DD_SERVICE = 'service' process.env.DD_ENV = 'test' @@ -1178,7 +1254,9 @@ describe('Config', () => { }) it('should initialize from the options', () => { - const logger = {} + const logger = { + warn: sinon.spy(), + } const tags = { foo: 'bar', } @@ -1189,6 +1267,11 @@ describe('Config', () => { { service: 'authsvc', sampleRate: 1.0 }, { sampleRate: 0.1 }, ] + const samplingRulesString = '[{"service":"usersvc","name":"healthcheck","sampleRate":0},' + + '{"service":"usersvc","sampleRate":0.5},' + + '{"service":"authsvc","sampleRate":1},' + + '{"sampleRate":0.1}]' + const config = getConfig({ appsec: false, clientIpEnabled: true, @@ -1201,10 +1284,9 @@ describe('Config', () => { }, }, }, - debug: true, dogstatsd: { hostname: 'agent-dsd', - port: 5218, + port: '5218', }, dynamicInstrumentation: { enabled: true, @@ -1213,6 +1295,8 @@ describe('Config', () => { redactionExcludedIdentifiers: ['a', 'b', 'c'], uploadIntervalSeconds: 0.1, }, + // 'enabled' does not exist as property. This is added to test for the + // warning that is logged when a non-existent property is set. 
enabled: false, env: 'test', experimental: { @@ -1243,7 +1327,6 @@ describe('Config', () => { }, telemetryVerbosity: 'DEBUG', }, - traceparent: true, }, flushInterval: 5000, flushMinSpans: 500, @@ -1251,7 +1334,6 @@ describe('Config', () => { llmobs: { mlApp: 'myMlApp', agentlessEnabled: true, - apiKey: 'myApiKey', }, logger, logLevel, @@ -1321,12 +1403,6 @@ describe('Config', () => { dynamicInstrumentation: { enabled: true, probeFile: 'probes.json', - }, - }) - assert.deepStrictEqual(config.dynamicInstrumentation?.redactedIdentifiers, ['foo', 'bar']) - assert.deepStrictEqual(config.dynamicInstrumentation?.redactionExcludedIdentifiers, ['a', 'b', 'c']) - assertObjectContains(config, { - dynamicInstrumentation: { uploadIntervalSeconds: 0.1, }, env: 'test', @@ -1335,13 +1411,7 @@ describe('Config', () => { block: true, enabled: true, endpoint: 'https://dd.datad0g.com/api/unstable/ai-guard', - }, - }, - }) - assert.strictEqual(config.experimental?.aiguard?.maxContentSize, 1024 * 1024) - assertObjectContains(config, { - experimental: { - aiguard: { + maxContentSize: 1024 * 1024, maxMessagesLength: 32, timeout: 2000, }, @@ -1361,15 +1431,6 @@ describe('Config', () => { redactionNamePattern: 'REDACTION_NAME_PATTERN', redactionValuePattern: 'REDACTION_VALUE_PATTERN', requestSampling: 50, - }, - }) - if (DD_MAJOR < 6) { - assert.strictEqual(config.iast?.securityControlsConfiguration, 'SANITIZER:CODE_INJECTION:sanitizer.js:method') - } else { - assert.ok(!('iast.securityControlsConfiguration' in config)) - } - assertObjectContains(config, { - iast: { stackTrace: { enabled: false, }, @@ -1379,14 +1440,12 @@ describe('Config', () => { agentlessEnabled: true, mlApp: 'myMlApp', }, - }) - assertObjectContains(config, { logLevel, logger, middlewareTracingEnabled: false, peerServiceMapping: { d: 'dd' }, plugins: false, - port: '6218', + port: 6218, protocolVersion: '0.5', remoteConfig: { pollInterval: 42, @@ -1399,7 +1458,27 @@ describe('Config', () => { }, 
runtimeMetricsRuntimeId: true, sampleRate: 0.5, + service: 'service', + site: 'datadoghq.eu', + spanComputePeerService: true, + spanRemoveIntegrationFromService: true, + tags: { + env: 'test', + foo: 'bar', + service: 'service', + version: '0.1.0', + }, + traceId128BitGenerationEnabled: true, + traceId128BitLoggingEnabled: true, + version: '0.1.0', }) + assert.deepStrictEqual(config.dynamicInstrumentation?.redactedIdentifiers, ['foo', 'bar']) + assert.deepStrictEqual(config.dynamicInstrumentation?.redactionExcludedIdentifiers, ['a', 'b', 'c']) + if (DD_MAJOR < 6) { + assert.strictEqual(config.iast?.securityControlsConfiguration, 'SANITIZER:CODE_INJECTION:sanitizer.js:method') + } else { + assert.ok(!('iast.securityControlsConfiguration' in config)) + } assert.deepStrictEqual(config.sampler, { rateLimit: 1000, rules: [ @@ -1416,103 +1495,106 @@ describe('Config', () => { { sampleRate: 0.1 }, ], }) - assert.strictEqual(config.service, 'service') assert.deepStrictEqual(config.serviceMapping, { a: 'aa', b: 'bb' }) - assertObjectContains(config, { - site: 'datadoghq.eu', - spanComputePeerService: true, - spanRemoveIntegrationFromService: true, - }) - assert.ok(Object.hasOwn(config, 'tags')) - assertObjectContains(config.tags, { - env: 'test', - foo: 'bar', - }) assert.ok(Object.hasOwn(config.tags, 'runtime-id')) assert.match(config.tags['runtime-id'], /^[0-9a-f]{8}(-[0-9a-f]{4}){3}-[0-9a-f]{12}$/) - assertObjectContains(config.tags, { - service: 'service', - version: '0.1.0', - }) - assertObjectContains(config, { - traceId128BitGenerationEnabled: true, - traceId128BitLoggingEnabled: true, - }) - assert.deepStrictEqual(config.tracePropagationStyle?.extract, ['datadog']) - assert.deepStrictEqual(config.tracePropagationStyle?.inject, ['datadog']) - assert.strictEqual(config.version, '0.1.0') + assert.deepStrictEqual(config.tracePropagationStyle?.extract, ['datadog', 'b3', 'b3 single header']) + assert.deepStrictEqual(config.tracePropagationStyle?.inject, ['datadog', 'b3', 
'b3 single header']) + + if (DD_MAJOR < 6) { + sinon.assert.calledOnce(log.warn) + } else { + sinon.assert.calledTwice(log.warn) + sinon.assert.calledWithExactly( + log.warn, + 'Unknown option %s with value %o', + 'experimental.iast.securityControlsConfiguration', + 'SANITIZER:CODE_INJECTION:sanitizer.js:method', + ) + } + sinon.assert.calledWithExactly(log.warn, 'Unknown option %s with value %o', 'enabled', false) sinon.assert.calledOnce(updateConfig) - assertObjectContains(updateConfig.getCall(0).args[0].sort(comparator), [ - { name: 'appsec.enabled', value: false, origin: 'code' }, - { name: 'clientIpEnabled', value: true, origin: 'code' }, - { name: 'clientIpHeader', value: 'x-true-client-ip', origin: 'code' }, - { name: 'codeOriginForSpans.enabled', value: false, origin: 'code' }, - { name: 'codeOriginForSpans.experimental.exit_spans.enabled', value: true, origin: 'code' }, - { name: 'dogstatsd.hostname', value: 'agent-dsd', origin: 'code' }, - { name: 'dogstatsd.port', value: '5218', origin: 'code' }, - { name: 'dynamicInstrumentation.enabled', value: true, origin: 'code' }, - { name: 'dynamicInstrumentation.probeFile', value: 'probes.json', origin: 'code' }, - { name: 'dynamicInstrumentation.redactedIdentifiers', value: ['foo', 'bar'], origin: 'code' }, - { name: 'dynamicInstrumentation.redactionExcludedIdentifiers', value: ['a', 'b', 'c'], origin: 'code' }, - { name: 'dynamicInstrumentation.uploadIntervalSeconds', value: 0.1, origin: 'code' }, - { name: 'env', value: 'test', origin: 'code' }, - { name: 'experimental.aiguard.block', value: true, origin: 'code' }, - { name: 'experimental.aiguard.enabled', value: true, origin: 'code' }, - { name: 'experimental.aiguard.endpoint', value: 'https://dd.datad0g.com/api/unstable/ai-guard', origin: 'code' }, - { name: 'experimental.aiguard.maxContentSize', value: 1024 * 1024, origin: 'code' }, - { name: 'experimental.aiguard.maxMessagesLength', value: 32, origin: 'code' }, - { name: 'experimental.aiguard.timeout', 
value: 2_000, origin: 'code' }, - { name: 'experimental.enableGetRumData', value: true, origin: 'code' }, - { name: 'experimental.exporter', value: 'log', origin: 'code' }, - { name: 'flushInterval', value: 5000, origin: 'code' }, - { name: 'flushMinSpans', value: 500, origin: 'code' }, - { name: 'hostname', value: 'agent', origin: 'code' }, - { name: 'iast.dbRowsToTaint', value: 2, origin: 'code' }, - { name: 'iast.deduplicationEnabled', value: false, origin: 'code' }, - { name: 'iast.enabled', value: true, origin: 'code' }, - { name: 'iast.maxConcurrentRequests', value: 4, origin: 'code' }, - { name: 'iast.maxContextOperations', value: 5, origin: 'code' }, - { name: 'iast.redactionEnabled', value: false, origin: 'code' }, - { name: 'iast.redactionNamePattern', value: 'REDACTION_NAME_PATTERN', origin: 'code' }, - { name: 'iast.redactionValuePattern', value: 'REDACTION_VALUE_PATTERN', origin: 'code' }, - { name: 'iast.requestSampling', value: 50, origin: 'code' }, + assert.ok( + updateConfig.getCall(0).args[0].every( + entry => entry.name !== 'DD_TRACE_STATS_COMPUTATION_ENABLED' || entry.origin !== 'calculated' + ), + ) + + assertConfigUpdateContains(updateConfig.getCall(0).args[0], [ + { name: 'DD_APPSEC_ENABLED', value: false, origin: 'code' }, + { name: 'DD_TRACE_CLIENT_IP_ENABLED', value: true, origin: 'code' }, + { name: 'DD_TRACE_CLIENT_IP_HEADER', value: 'x-true-client-ip', origin: 'code' }, + { name: 'DD_CODE_ORIGIN_FOR_SPANS_ENABLED', value: false, origin: 'code' }, + { name: 'DD_CODE_ORIGIN_FOR_SPANS_EXPERIMENTAL_EXIT_SPANS_ENABLED', value: true, origin: 'code' }, + { name: 'DD_DOGSTATSD_HOST', value: 'agent-dsd', origin: 'code' }, + { name: 'DD_DOGSTATSD_PORT', value: '5218', origin: 'code' }, + { name: 'DD_DYNAMIC_INSTRUMENTATION_ENABLED', value: true, origin: 'code' }, + { name: 'DD_DYNAMIC_INSTRUMENTATION_PROBE_FILE', value: 'probes.json', origin: 'code' }, + { name: 'DD_DYNAMIC_INSTRUMENTATION_REDACTED_IDENTIFIERS', value: 'foo,bar', origin: 'code' 
}, + { name: 'DD_DYNAMIC_INSTRUMENTATION_REDACTION_EXCLUDED_IDENTIFIERS', value: 'a,b,c', origin: 'code' }, + { name: 'DD_DYNAMIC_INSTRUMENTATION_UPLOAD_INTERVAL_SECONDS', value: 0.1, origin: 'code' }, + { name: 'DD_ENV', value: 'test', origin: 'code' }, + { name: 'DD_AI_GUARD_ENABLED', value: true, origin: 'code' }, + { name: 'DD_AI_GUARD_BLOCK', value: true, origin: 'code' }, + { name: 'DD_AI_GUARD_ENDPOINT', value: 'https://dd.datad0g.com/api/unstable/ai-guard', origin: 'code' }, + { name: 'DD_AI_GUARD_MAX_CONTENT_SIZE', value: 1024 * 1024, origin: 'code' }, + { name: 'DD_AI_GUARD_MAX_MESSAGES_LENGTH', value: 32, origin: 'code' }, + { name: 'DD_AI_GUARD_TIMEOUT', value: 2_000, origin: 'code' }, + { name: 'DD_TRACE_EXPERIMENTAL_GET_RUM_DATA_ENABLED', value: true, origin: 'code' }, + { name: 'DD_TRACE_EXPERIMENTAL_EXPORTER', value: 'log', origin: 'code' }, + { name: 'DD_TRACE_FLUSH_INTERVAL', value: 5000, origin: 'code' }, + { name: 'DD_TRACE_PARTIAL_FLUSH_MIN_SPANS', value: 500, origin: 'code' }, + { name: 'DD_AGENT_HOST', value: 'agent', origin: 'code' }, + { name: 'DD_IAST_DB_ROWS_TO_TAINT', value: 2, origin: 'code' }, + { name: 'DD_IAST_DEDUPLICATION_ENABLED', value: false, origin: 'code' }, + { name: 'DD_IAST_ENABLED', value: true, origin: 'code' }, + { name: 'DD_IAST_MAX_CONCURRENT_REQUESTS', value: 4, origin: 'code' }, + { name: 'DD_IAST_MAX_CONTEXT_OPERATIONS', value: 5, origin: 'code' }, + { name: 'DD_IAST_REDACTION_ENABLED', value: false, origin: 'code' }, + { name: 'DD_IAST_REDACTION_NAME_PATTERN', value: 'REDACTION_NAME_PATTERN', origin: 'code' }, + { name: 'DD_IAST_REDACTION_VALUE_PATTERN', value: 'REDACTION_VALUE_PATTERN', origin: 'code' }, + { name: 'DD_IAST_REQUEST_SAMPLING', value: 50, origin: 'code' }, DD_MAJOR < 6 && { - name: 'iast.securityControlsConfiguration', + name: 'DD_IAST_SECURITY_CONTROLS_CONFIGURATION', value: 'SANITIZER:CODE_INJECTION:sanitizer.js:method', origin: 'code', }, - { name: 'iast.stackTrace.enabled', value: false, origin: 
'code' }, - { name: 'iast.telemetryVerbosity', value: 'DEBUG', origin: 'code' }, - { name: 'llmobs.agentlessEnabled', value: true, origin: 'code' }, - { name: 'llmobs.mlApp', value: 'myMlApp', origin: 'code' }, - { name: 'middlewareTracingEnabled', value: false, origin: 'code' }, - { name: 'peerServiceMapping', value: { d: 'dd' }, origin: 'code' }, + { name: 'DD_IAST_STACK_TRACE_ENABLED', value: false, origin: 'code' }, + { name: 'DD_IAST_TELEMETRY_VERBOSITY', value: 'DEBUG', origin: 'code' }, + { name: 'DD_LLMOBS_AGENTLESS_ENABLED', value: true, origin: 'code' }, + { name: 'DD_LLMOBS_ML_APP', value: 'myMlApp', origin: 'code' }, + { name: 'DD_TRACE_MIDDLEWARE_TRACING_ENABLED', value: false, origin: 'code' }, + { name: 'DD_TRACE_PEER_SERVICE_MAPPING', value: 'd:dd', origin: 'code' }, { name: 'plugins', value: false, origin: 'code' }, - { name: 'port', value: '6218', origin: 'code' }, - { name: 'protocolVersion', value: '0.5', origin: 'code' }, - { name: 'remoteConfig.pollInterval', value: 42, origin: 'code' }, - { name: 'reportHostname', value: true, origin: 'code' }, - { name: 'runtimeMetrics.enabled', value: true, origin: 'code' }, - { name: 'runtimeMetricsRuntimeId', value: true, origin: 'code' }, - { name: 'sampler.rateLimit', value: 1000, origin: 'code' }, - { name: 'sampler.rules', value: samplingRules, origin: 'code' }, - { name: 'sampleRate', value: 0.5, origin: 'code' }, - { name: 'service', value: 'service', origin: 'code' }, - { name: 'site', value: 'datadoghq.eu', origin: 'code' }, - { name: 'spanAttributeSchema', value: 'v1', origin: 'code' }, - { name: 'spanComputePeerService', value: true, origin: 'calculated' }, - { name: 'spanRemoveIntegrationFromService', value: true, origin: 'code' }, - { name: 'stats.enabled', value: false, origin: 'calculated' }, - { name: 'traceId128BitGenerationEnabled', value: true, origin: 'code' }, - { name: 'traceId128BitLoggingEnabled', value: true, origin: 'code' }, - { name: 'version', value: '0.1.0', origin: 'code' }, 
+ { name: 'DD_TRACE_AGENT_PORT', value: 6218, origin: 'code' }, + { name: 'DD_TRACE_AGENT_PROTOCOL_VERSION', value: '0.5', origin: 'code' }, + { name: 'DD_REMOTE_CONFIG_POLL_INTERVAL_SECONDS', value: 42, origin: 'code' }, + { name: 'DD_TRACE_REPORT_HOSTNAME', value: true, origin: 'code' }, + { name: 'DD_RUNTIME_METRICS_ENABLED', value: true, origin: 'code' }, + { name: 'DD_RUNTIME_METRICS_RUNTIME_ID_ENABLED', value: true, origin: 'code' }, + { name: 'DD_TRACE_RATE_LIMIT', value: 1000, origin: 'code' }, + { name: 'DD_TRACE_SAMPLING_RULES', value: samplingRulesString, origin: 'code' }, + { name: 'DD_TRACE_SAMPLE_RATE', value: 0.5, origin: 'code' }, + { name: 'DD_SERVICE', value: 'service', origin: 'code' }, + { name: 'DD_SITE', value: 'datadoghq.eu', origin: 'code' }, + { name: 'DD_TRACE_SPAN_ATTRIBUTE_SCHEMA', value: 'v1', origin: 'code' }, + { name: 'DD_TRACE_REMOVE_INTEGRATION_SERVICE_NAMES_ENABLED', value: true, origin: 'code' }, + { name: 'DD_TRACE_128_BIT_TRACEID_GENERATION_ENABLED', value: true, origin: 'code' }, + { name: 'DD_TRACE_128_BIT_TRACEID_LOGGING_ENABLED', value: true, origin: 'code' }, + { name: 'DD_VERSION', value: '0.1.0', origin: 'code' }, + { name: 'DD_TRACE_PEER_SERVICE_DEFAULTS_ENABLED', value: true, origin: 'code' }, ].filter(v => v).sort(comparator)) }) it('should initialize from the options with url taking precedence', () => { - const logger = {} + const logger = { + warn: sinon.spy(), + error: sinon.spy(), + info: sinon.spy(), + debug: sinon.spy(), + } const tags = { foo: 'bar' } const config = getConfig({ hostname: 'agent', @@ -1552,9 +1634,8 @@ describe('Config', () => { getConfig() - sinon.assert.calledWith(log.warn, 'Use either the DD_TRACE_PROPAGATION_STYLE ' + - 'environment variable or separate DD_TRACE_PROPAGATION_STYLE_INJECT and ' + - 'DD_TRACE_PROPAGATION_STYLE_EXTRACT environment variables') + sinon.assert.calledWith(log.warn, 'Use either DD_TRACE_PROPAGATION_STYLE or separate ' + + 'DD_TRACE_PROPAGATION_STYLE_INJECT and 
DD_TRACE_PROPAGATION_STYLE_EXTRACT environment variables') }) it('should warn if mixing shared and inject propagation style env vars', () => { @@ -1563,9 +1644,8 @@ describe('Config', () => { getConfig() - sinon.assert.calledWith(log.warn, 'Use either the DD_TRACE_PROPAGATION_STYLE ' + - 'environment variable or separate DD_TRACE_PROPAGATION_STYLE_INJECT and ' + - 'DD_TRACE_PROPAGATION_STYLE_EXTRACT environment variables') + sinon.assert.calledWith(log.warn, 'Use either DD_TRACE_PROPAGATION_STYLE or separate ' + + 'DD_TRACE_PROPAGATION_STYLE_INJECT and DD_TRACE_PROPAGATION_STYLE_EXTRACT environment variables') }) it('should warn if defaulting to v0 span attribute schema', () => { @@ -1573,7 +1653,10 @@ describe('Config', () => { const config = getConfig() - sinon.assert.calledWith(log.warn, 'Unexpected input for config.spanAttributeSchema, picked default', 'v0') + sinon.assert.calledWithExactly( + log.warn, + "Invalid value: 'foo' for DD_TRACE_SPAN_ATTRIBUTE_SCHEMA (source: env_var), picked default", + ) assert.strictEqual(config.spanAttributeSchema, 'v0') }) @@ -1800,7 +1883,6 @@ describe('Config', () => { timeout: 2000, }, b3: false, - traceparent: false, exporter: 'agent', enableGetRumData: false, }, @@ -1812,7 +1894,7 @@ describe('Config', () => { enabled: true, redactionNamePattern: 'REDACTION_NAME_PATTERN', redactionValuePattern: 'REDACTION_VALUE_PATTERN', - securityControlsConfiguration: 'SANITIZER:CODE_INJECTION:sanitizer.js:method2', + ...DD_MAJOR < 6 && { securityControlsConfiguration: 'SANITIZER:CODE_INJECTION:sanitizer.js:method2' }, stackTrace: { enabled: false, }, @@ -1826,7 +1908,6 @@ describe('Config', () => { d: 'dd', }, port: 7777, - protocol: 'https', protocolVersion: '0.5', remoteConfig: { pollInterval: 42, @@ -1847,10 +1928,7 @@ describe('Config', () => { }, traceId128BitGenerationEnabled: false, traceId128BitLoggingEnabled: false, - tracePropagationStyle: { - inject: [], - extract: [], - }, + tracePropagationStyle: ['abc'], version: '1.0.0', 
}) @@ -1901,7 +1979,7 @@ describe('Config', () => { }, dogstatsd: { hostname: 'server', - port: '8888', + port: 8888, }, dynamicInstrumentation: { enabled: false, @@ -1971,8 +2049,8 @@ describe('Config', () => { env: 'development', }, tracePropagationStyle: { - extract: [], - inject: [], + extract: ['abc'], + inject: ['abc'], }, }) assert.strictEqual(config.url.toString(), 'https://agent2:6218/') @@ -2079,9 +2157,9 @@ describe('Config', () => { downstreamBodyAnalysisSampleRate: 0.5, maxDownstreamRequestBodyAnalysis: 1, }, - blockedTemplateGraphql: undefined, - blockedTemplateHtml: undefined, - blockedTemplateJson: undefined, + blockedTemplateGraphql: BLOCKED_TEMPLATE_GRAPHQL, + blockedTemplateHtml: BLOCKED_TEMPLATE_HTML, + blockedTemplateJson: BLOCKED_TEMPLATE_JSON, enabled: true, eventTracking: { mode: 'disabled', @@ -2098,7 +2176,7 @@ describe('Config', () => { bodyCollection: true, }, rateLimit: 42, - rules: undefined, + rules: RULES_JSON_PATH, sca: { enabled: undefined, }, @@ -2137,7 +2215,6 @@ describe('Config', () => { const config = getConfig({ url: 'https://agent3:7778', - protocol: 'http', hostname: 'server', port: 7777, service: 'test', @@ -2156,7 +2233,8 @@ describe('Config', () => { process.env.DD_SERVICE = 'test' process.env.DD_ENV = 'dev' process.env.DD_VERSION = '1.0.0' - process.env.DD_TAGS = 'service=foo,env=bar,version=0.0.0' + // TODO: Is that correct? Did we support equal signs in DD_TAGS before? 
+ process.env.DD_TAGS = 'service:foo,env:bar,version:0.0.0' const config = getConfig() @@ -2384,21 +2462,34 @@ describe('Config', () => { it('should send empty array when remote config is called on empty options', () => { const config = getConfig() + sinon.assert.calledOnce(updateConfig) + + const length = updateConfig.getCall(0).args[0].length + + updateConfig.resetHistory() + config.setRemoteConfig({}) - sinon.assert.calledTwice(updateConfig) - assert.deepStrictEqual(updateConfig.getCall(1).args[0], []) + for (const entry of updateConfig.getCall(0).args[0].slice(length)) { + assert.notStrictEqual(entry.origin, 'remote_config') + } + + sinon.assert.calledOnce(updateConfig) }) it('should send remote config changes to telemetry', () => { const config = getConfig() + // Reset the changes array. This would normally be done by updateConfig. + updateConfig.getCall(0).args[0].length = 0 + updateConfig.resetHistory() + config.setRemoteConfig({ - tracing_sampling_rate: 0, + sampleRate: 0, }) - assert.deepStrictEqual(updateConfig.getCall(1).args[0], [ - { name: 'sampleRate', value: 0, origin: 'remote_config' }, + assertConfigUpdateContains(updateConfig.getCall(0).args[0], [ + { name: 'DD_TRACE_SAMPLE_RATE', value: 0, origin: 'remote_config' }, ]) }) @@ -2406,13 +2497,10 @@ describe('Config', () => { const config = getConfig() config.setRemoteConfig({ - tracing_sampling_rules: [ + samplingRules: [ { resource: '*', - tags: [ - { key: 'tag-a', value_glob: 'tag-a-val*' }, - { key: 'tag-b', value_glob: 'tag-b-val*' }, - ], + tags: { 'tag-a': 'tag-a-val*', 'tag-b': 'tag-b-val*' }, provenance: 'customer', }, ], @@ -2435,7 +2523,7 @@ describe('Config', () => { const config = getConfig() const runtimeId = config.tags['runtime-id'] config.setRemoteConfig({ - tracing_tags: { foo: 'bar' }, + tags: { foo: 'bar' }, }) assert.strictEqual(config.tags?.foo, 'bar') @@ -2467,10 +2555,33 @@ describe('Config', () => { ]) }) - it('should skip appsec config files if they do not exist', () => { - 
const error = new Error('file not found') - fs.readFileSync = () => { throw error } + it('should warn when span sampling rules file contains invalid JSON', function () { + if (isWindows) { + this.skip() + return + } + const tempDir = mkdtempSync(path.join(process.cwd(), 'dd-trace-span-sampling-rules-')) + const rulesPath = path.join(tempDir, 'span-sampling-rules.json') + writeFileSync(rulesPath, '{"sample_rate":') + + process.env.DD_SPAN_SAMPLING_RULES_FILE = rulesPath + + try { + const config = getConfig() + + assert.strictEqual(config.sampler?.spanSamplingRules, undefined) + sinon.assert.calledWithMatch( + log.warn, + 'Error reading span sampling rules file %s; %o', + '{"sample_rate":', + sinon.match.instanceOf(SyntaxError) + ) + } finally { + rmSync(tempDir, { recursive: true, force: true }) + } + }) + it('should skip appsec config files if they do not exist', () => { const config = getConfig({ appsec: { enabled: true, @@ -2482,9 +2593,41 @@ describe('Config', () => { }) sinon.assert.callCount(log.error, 3) - sinon.assert.calledWithExactly(log.error.firstCall, 'Error reading file %s', 'DOES_NOT_EXIST.json', error) - sinon.assert.calledWithExactly(log.error.secondCall, 'Error reading file %s', 'DOES_NOT_EXIST.html', error) - sinon.assert.calledWithExactly(log.error.thirdCall, 'Error reading file %s', 'DOES_NOT_EXIST.json', error) + const assertMissingAppsecTemplateError = (message, optionName, fileName) => { + const escapedFileName = fileName.replaceAll('.', '\\.') + const escapedOptionName = optionName.replaceAll('.', '\\.') + const escapedPathSuffix = `[\\\\/]${escapedFileName}` + + assert.match( + message, + new RegExp( + '^Error reading path: \'' + escapedFileName + '\' for ' + escapedOptionName + + ' \\(source: code\\), picked default\\n\\n' + + '\\[Error: ENOENT: no such file or directory, open \'(?:.*' + + escapedPathSuffix + '|' + escapedFileName + ')\'\\]' + ) + ) + assert.match(message, /errno: -(2|4058)/) + assert.match(message, /code: 'ENOENT'/) + 
assert.match(message, /syscall: 'open'/) + assert.match(message, new RegExp(`path: '(?:.*${escapedPathSuffix}|${escapedFileName})'`)) + } + + assertMissingAppsecTemplateError( + log.error.firstCall.args[0], + 'appsec.blockedTemplateHtml', + 'DOES_NOT_EXIST.html' + ) + assertMissingAppsecTemplateError( + log.error.secondCall.args[0], + 'appsec.blockedTemplateJson', + 'DOES_NOT_EXIST.json' + ) + assertMissingAppsecTemplateError( + log.error.thirdCall.args[0], + 'appsec.blockedTemplateGraphql', + 'DOES_NOT_EXIST.json' + ) assertObjectContains(config, { appsec: { @@ -2752,8 +2895,9 @@ describe('Config', () => { process.env.DD_CIVISIBILITY_GIT_UPLOAD_ENABLED = 'true' const config = getConfig(options) assertObjectContains(config, { - isIntelligentTestRunnerEnabled: false, - isGitUploadEnabled: false, + isCiVisibility: false, + isIntelligentTestRunnerEnabled: true, + isGitUploadEnabled: true, }) }) }) @@ -2882,8 +3026,8 @@ describe('Config', () => { assert.strictEqual(config.llmobs.enabled, false) // check origin computation - assertObjectContains(updateConfig.getCall(0).args[0], [{ - name: 'llmobs.enabled', value: false, origin: 'default', + assertConfigUpdateContains(updateConfig.getCall(0).args[0], [{ + name: 'DD_LLMOBS_ENABLED', value: false, origin: 'default', }]) }) @@ -2893,8 +3037,8 @@ describe('Config', () => { assert.strictEqual(config.llmobs.enabled, true) // check origin computation - assertObjectContains(updateConfig.getCall(0).args[0], [{ - name: 'llmobs.enabled', value: true, origin: 'env_var', + assertConfigUpdateContains(updateConfig.getCall(0).args[0], [{ + name: 'DD_LLMOBS_ENABLED', value: true, origin: 'env_var', }]) }) @@ -2904,29 +3048,29 @@ describe('Config', () => { assert.strictEqual(config.llmobs.enabled, false) // check origin computation - assertObjectContains(updateConfig.getCall(0).args[0], [{ - name: 'llmobs.enabled', value: false, origin: 'env_var', + assertConfigUpdateContains(updateConfig.getCall(0).args[0], [{ + name: 
'DD_LLMOBS_ENABLED', value: false, origin: 'env_var', }]) }) it('should enable llmobs with options and DD_LLMOBS_ENABLED is not set', () => { - const config = getConfig({ llmobs: {} }) + const config = getConfig({ llmobs: { agentlessEnabled: true } }) assert.strictEqual(config.llmobs.enabled, true) // check origin computation - assertObjectContains(updateConfig.getCall(0).args[0], [{ - name: 'llmobs.enabled', value: true, origin: 'code', + assertConfigUpdateContains(updateConfig.getCall(0).args[0], [{ + name: 'DD_LLMOBS_ENABLED', value: true, origin: 'calculated', }]) }) it('should have DD_LLMOBS_ENABLED take priority over options', () => { process.env.DD_LLMOBS_ENABLED = 'false' - const config = getConfig({ llmobs: {} }) + const config = getConfig({ llmobs: { agentlessEnabled: true } }) assert.strictEqual(config.llmobs.enabled, false) // check origin computation - assertObjectContains(updateConfig.getCall(0).args[0], [{ - name: 'llmobs.enabled', value: false, origin: 'env_var', + assertConfigUpdateContains(updateConfig.getCall(0).args[0], [{ + name: 'DD_LLMOBS_ENABLED', value: false, origin: 'env_var', }]) }) }) @@ -2947,8 +3091,8 @@ describe('Config', () => { it('defaults', () => { const taggingConfig = getConfig().cloudPayloadTagging assertObjectContains(taggingConfig, { - requestsEnabled: false, - responsesEnabled: false, + request: undefined, + response: undefined, maxDepth: 10, }) }) @@ -2957,9 +3101,10 @@ describe('Config', () => { process.env.DD_TRACE_CLOUD_REQUEST_PAYLOAD_TAGGING = 'all' const taggingConfig = getConfig().cloudPayloadTagging assertObjectContains(taggingConfig, { - requestsEnabled: true, - responsesEnabled: false, + request: [], + response: undefined, maxDepth: 10, + rules: { aws: { dynamodb: { request: [], response: [], expand: [] } } }, }) const awsRules = taggingConfig.rules.aws for (const [serviceName, service] of Object.entries(awsRules)) { @@ -2971,8 +3116,8 @@ describe('Config', () => { 
process.env.DD_TRACE_CLOUD_REQUEST_PAYLOAD_TAGGING = '$.foo.bar' const taggingConfig = getConfig().cloudPayloadTagging assertObjectContains(taggingConfig, { - requestsEnabled: true, - responsesEnabled: false, + request: ['$.foo.bar'], + response: undefined, maxDepth: 10, }) const awsRules = taggingConfig.rules.aws @@ -2987,9 +3132,10 @@ describe('Config', () => { process.env.DD_TRACE_CLOUD_RESPONSE_PAYLOAD_TAGGING = 'all' const taggingConfig = getConfig().cloudPayloadTagging assertObjectContains(taggingConfig, { - requestsEnabled: false, - responsesEnabled: true, + request: undefined, + response: [], maxDepth: 10, + rules: { aws: { dynamodb: { request: [], response: [], expand: [] } } }, }) const awsRules = taggingConfig.rules.aws for (const [serviceName, service] of Object.entries(awsRules)) { @@ -3001,9 +3147,10 @@ describe('Config', () => { process.env.DD_TRACE_CLOUD_RESPONSE_PAYLOAD_TAGGING = '$.foo.bar' const taggingConfig = getConfig().cloudPayloadTagging assertObjectContains(taggingConfig, { - requestsEnabled: false, - responsesEnabled: true, + request: undefined, + response: ['$.foo.bar'], maxDepth: 10, + rules: { aws: { dynamodb: { request: [], response: [], expand: [] } } }, }) const awsRules = taggingConfig.rules.aws for (const [, service] of Object.entries(awsRules)) { @@ -3021,8 +3168,9 @@ describe('Config', () => { let { cloudPayloadTagging } = getConfig() assertObjectContains(cloudPayloadTagging, { maxDepth: 7, - requestsEnabled: true, - responsesEnabled: true, + request: [], + response: [], + rules: { aws: { dynamodb: { request: [], response: [], expand: [] } } }, }) delete process.env.DD_TRACE_CLOUD_PAYLOAD_TAGGING_MAX_DEPTH @@ -3030,8 +3178,9 @@ describe('Config', () => { ; ({ cloudPayloadTagging } = getConfig({ cloudPayloadTagging: { maxDepth: 7 } })) assertObjectContains(cloudPayloadTagging, { maxDepth: 7, - requestsEnabled: true, - responsesEnabled: true, + request: [], + response: [], + rules: { aws: { dynamodb: { request: [], response: [], 
expand: [] } } }, }) }) @@ -3048,7 +3197,10 @@ describe('Config', () => { ; ({ cloudPayloadTagging } = getConfig({ cloudPayloadTagging: { maxDepth: NaN } })) assertObjectContains(cloudPayloadTagging, { maxDepth: 10, + request: undefined, + response: undefined, }) + assert.ok(!(Object.hasOwn(cloudPayloadTagging, 'rules'))) }) }) @@ -3096,8 +3248,9 @@ describe('Config', () => { }, }) - assertObjectContains(updateConfig.getCall(0).args[0], [ - { name: 'stats.enabled', value: true, origin: 'calculated' }, + assertConfigUpdateContains(updateConfig.getCall(0).args[0], [ + { name: 'DD_TRACE_STATS_COMPUTATION_ENABLED', value: false, origin: 'default' }, + { name: 'DD_TRACE_STATS_COMPUTATION_ENABLED', value: true, origin: 'env_var' }, ]) }) @@ -3113,8 +3266,10 @@ describe('Config', () => { }, }) - assertObjectContains(updateConfig.getCall(0).args[0], [ - { name: 'stats.enabled', value: false, origin: 'calculated' }, + assertConfigUpdateContains(updateConfig.getCall(0).args[0], [ + { name: 'DD_TRACE_STATS_COMPUTATION_ENABLED', value: false, origin: 'default' }, + { name: 'DD_TRACE_STATS_COMPUTATION_ENABLED', value: true, origin: 'env_var' }, + { name: 'DD_TRACE_STATS_COMPUTATION_ENABLED', value: false, origin: 'calculated' }, ]) }) @@ -3146,7 +3301,7 @@ describe('Config', () => { fleetConfigPath = path.join(tempDir, 'fleet.yaml') process.env.DD_TEST_LOCAL_CONFIG_PATH = localConfigPath process.env.DD_TEST_FLEET_CONFIG_PATH = fleetConfigPath - reloadLoggerAndConfig() + getConfig() }) afterEach(() => { @@ -3279,7 +3434,11 @@ apm_configuration_default: process.env.AWS_LAMBDA_FUNCTION_NAME = 'my-great-lambda-function' const stableConfig = getConfig() - assert.ok(!(Object.hasOwn(stableConfig, 'stableConfig'))) + assert.deepStrictEqual(stableConfig.stableConfig, { + fleetEntries: {}, + localEntries: {}, + warnings: undefined, + }) }) it('should support all extended configs across product areas', () => { @@ -3333,9 +3492,8 @@ apm_configuration_default: llmobs: { mlApp: 'my-llm-app', 
}, - profiling: { - exporters: 'agent', - }, + DD_PROFILING_EXPORTERS: ['agent'], + profiling: {}, dynamicInstrumentation: { probeFile: '/tmp/probes', }, @@ -3368,7 +3526,7 @@ apm_configuration_default: type: 'local_install', }, cloudPayloadTagging: { - requestsEnabled: true, + request: [], maxDepth: 5, }, }) @@ -3420,8 +3578,8 @@ rules: type: 'fleet_install', }, cloudPayloadTagging: { - requestsEnabled: false, - responsesEnabled: true, + request: undefined, + response: [], maxDepth: 15, }, }) @@ -3546,7 +3704,6 @@ rules: process.env.NX_TASK_TARGET_PROJECT = 'my-nx-project' pkg.name = 'default-service' - reloadLoggerAndConfig() const config = getConfig() @@ -3568,7 +3725,6 @@ rules: } pkg.name = 'default-service' - reloadLoggerAndConfig() const config = getConfig() @@ -3581,7 +3737,6 @@ rules: delete process.env.DD_ENABLE_NX_SERVICE_NAME delete process.env.DD_SERVICE pkg.name = 'default-service' - reloadLoggerAndConfig() getConfig() @@ -3602,7 +3757,6 @@ rules: process.env.DD_ENABLE_NX_SERVICE_NAME = 'true' delete process.env.DD_SERVICE pkg.name = 'default-service' - reloadLoggerAndConfig() getConfig() @@ -3613,7 +3767,6 @@ rules: process.env.NX_TASK_TARGET_PROJECT = 'my-nx-project' process.env.DD_SERVICE = 'explicit-service' delete process.env.DD_ENABLE_NX_SERVICE_NAME - reloadLoggerAndConfig() getConfig() @@ -3742,49 +3895,51 @@ rules: it('should map dynamic_instrumentation_enabled to dynamicInstrumentation.enabled', () => { const config = getConfig() assert.strictEqual(config.dynamicInstrumentation.enabled, false) - config.setRemoteConfig({ dynamic_instrumentation_enabled: true }) + config.setRemoteConfig({ 'dynamicInstrumentation.enabled': true }) assert.strictEqual(config.dynamicInstrumentation.enabled, true) }) it('should map code_origin_enabled to codeOriginForSpans.enabled', () => { const config = getConfig() assert.strictEqual(config.codeOriginForSpans.enabled, true) - config.setRemoteConfig({ code_origin_enabled: false }) + config.setRemoteConfig({ 
'codeOriginForSpans.enabled': false }) assert.strictEqual(config.codeOriginForSpans.enabled, false) }) it('should map tracing_sampling_rate to sampleRate', () => { const config = getConfig() assert.strictEqual(config.sampleRate, undefined) - config.setRemoteConfig({ tracing_sampling_rate: 0.5 }) + config.setRemoteConfig({ sampleRate: 0.5 }) assert.strictEqual(config.sampleRate, 0.5) }) it('should map log_injection_enabled to logInjection', () => { const config = getConfig() assert.strictEqual(config.logInjection, true) - config.setRemoteConfig({ log_injection_enabled: false }) + config.setRemoteConfig({ logInjection: false }) assert.strictEqual(config.logInjection, false) }) it('should map tracing_enabled to tracing', () => { - const config = getConfig() + // Tracing is not exposed as programmatic option and will be ignored. + const config = getConfig({ tracing: false }) assert.strictEqual(config.tracing, true) - config.setRemoteConfig({ tracing_enabled: false }) + config.setRemoteConfig({ tracing: false }) assert.strictEqual(config.tracing, false) }) - it('should map tracing_sampling_rules to sampler.rules', () => { + it('should map tracing_sampling_rules to samplingRules', () => { const config = getConfig() assert.deepStrictEqual(config.sampler.rules, []) - config.setRemoteConfig({ tracing_sampling_rules: [{ sample_rate: 0.5 }] }) + config.setRemoteConfig({ samplingRules: [{ sample_rate: 0.5 }] }) + assert.deepStrictEqual(config.samplingRules, [{ sampleRate: 0.5 }]) assert.deepStrictEqual(config.sampler.rules, [{ sampleRate: 0.5 }]) }) it('should map tracing_header_tags to headerTags', () => { - const config = getConfig({ headerTags: ['foo:bar'] }) + const config = getConfig({ headerTags: ['foo :bar'] }) assert.deepStrictEqual(config.headerTags, ['foo:bar']) - config.setRemoteConfig({ tracing_header_tags: [{ header: 'x-custom-header', tag_name: 'custom.tag' }] }) + config.setRemoteConfig({ headerTags: ['x-custom-header:custom.tag'] }) 
assert.deepStrictEqual(config.headerTags, [ // TODO: There's an unrelated bug in the tracer resulting in headerTags not being merged. // 'foo:bar', @@ -3796,7 +3951,7 @@ rules: const config = getConfig({ tags: { foo: 'bar' } }) assertObjectContains(config.tags, { foo: 'bar' }) assert.strictEqual(config.tags.team, undefined) - config.setRemoteConfig({ tracing_tags: ['team:backend'] }) + config.setRemoteConfig({ tags: { team: 'backend' } }) assertObjectContains(config.tags, { // TODO: There's an unrelated bug in the tracer resulting in tags not being merged. // foo: 'bar', @@ -3809,7 +3964,13 @@ rules: it('should clear RC fields when setRemoteConfig is called with null', () => { const config = getConfig({ logInjection: true, sampleRate: 0.5 }) - config.setRemoteConfig({ tracing_enabled: false }) + assertObjectContains(config, { + tracing: true, + logInjection: true, + sampleRate: 0.5, + }) + + config.setRemoteConfig({ tracing: false }) assertObjectContains(config, { tracing: false, @@ -3828,25 +3989,133 @@ rules: it('should ignore null values', () => { const config = getConfig({ sampleRate: 0.5 }) - config.setRemoteConfig({ tracing_sampling_rate: null }) + config.setRemoteConfig({ sampleRate: null }) assert.strictEqual(config.sampleRate, 0.5) }) it('should treat null values as unset', () => { - const config = getConfig({ sampleRate: 0.5 }) - config.setRemoteConfig({ tracing_sampling_rate: 0.8 }) + const config = getConfig({ sampleRate: 0.5, tracing: true }) + assert.strictEqual(config.sampleRate, 0.5) + assert.strictEqual(config.tracing, true) + config.setRemoteConfig({ sampleRate: 0.8, tracing: false }) assert.strictEqual(config.sampleRate, 0.8) - config.setRemoteConfig({ tracing_sampling_rate: null }) + assert.strictEqual(config.tracing, false) + assert.strictEqual(config.logInjection, true) + config.setRemoteConfig({ logInjection: false }) + assert.strictEqual(config.sampleRate, 0.5) + assert.strictEqual(config.tracing, true) + 
assert.strictEqual(config.logInjection, false) + }) + + it('should restore tracked origins when an individual RC option falls back to code', () => { + const config = getConfig({ sampleRate: 0.5, logInjection: true }) + + updateConfig.resetHistory() + + config.setRemoteConfig({ + sampleRate: 0.8, + logInjection: false, + }) + + assert.strictEqual(config.getOrigin('sampleRate'), 'remote_config') + assert.strictEqual(config.getOrigin('logInjection'), 'remote_config') + + config.setRemoteConfig({ + logInjection: false, + }) + assert.strictEqual(config.sampleRate, 0.5) + assert.strictEqual(config.getOrigin('sampleRate'), 'code') + assert.strictEqual(config.getOrigin('logInjection'), 'remote_config') + }) + + it('should update telemetry when an individual RC option falls back to a previous source', () => { + const config = getConfig({ sampleRate: 0.5, logInjection: true }) + + updateConfig.resetHistory() + + config.setRemoteConfig({ + sampleRate: 0.8, + logInjection: false, + }) + config.setRemoteConfig({ + logInjection: false, + }) + + sinon.assert.calledTwice(updateConfig) + + const telemetry = updateConfig.getCall(1).args[0] + + assertObjectContains(telemetry.sort((a, b) => a.seq_id - b.seq_id), [ + { name: 'DD_TRACE_SAMPLE_RATE', value: 0.8, origin: 'remote_config' }, + { name: 'DD_TRACE_SAMPLE_RATE', value: 0.5, origin: 'code' }, + { name: 'DD_LOGS_INJECTION', value: true, origin: 'code' }, + { name: 'DD_LOGS_INJECTION', value: false, origin: 'remote_config' }, + ]) + }) + + it('should restore default origins when an individual RC option falls back to defaults', () => { + const config = getConfig() + + updateConfig.resetHistory() + + config.setRemoteConfig({ + tracing: false, + sampleRate: 0.8, + }) + + assert.strictEqual(config.getOrigin('tracing'), 'remote_config') + assert.strictEqual(config.getOrigin('sampleRate'), 'remote_config') + + config.setRemoteConfig({ + sampleRate: 0.8, + }) + + assert.strictEqual(config.tracing, true) + 
assert.strictEqual(config.sampleRate, 0.8) + assert.strictEqual(config.getOrigin('tracing'), 'default') + assert.strictEqual(config.getOrigin('sampleRate'), 'remote_config') + }) + + it('should update telemetry when an individual RC option falls back to defaults', () => { + const config = getConfig() + + updateConfig.resetHistory() + + config.setRemoteConfig({ + tracing: false, + sampleRate: 0.1, + }) + config.setRemoteConfig({ + sampleRate: 0.8, + }) + + sinon.assert.calledTwice(updateConfig) + + const telemetry = updateConfig.getCall(1).args[0] + + assertObjectContains(telemetry.sort((a, b) => a.seq_id - b.seq_id), [ + { name: 'DD_TRACE_ENABLED', value: false, origin: 'remote_config' }, + { name: 'DD_TRACE_ENABLED', value: true, origin: 'default' }, + { name: 'DD_TRACE_SAMPLE_RATE', value: null, origin: 'default' }, + { name: 'DD_TRACE_SAMPLE_RATE', value: 0.8, origin: 'remote_config' }, + ]) }) it('should replace all RC fields with each update', () => { const config = getConfig() + assertObjectContains(config, { + tracing: true, + logInjection: true, + }) + + assert.strictEqual(config.sampleRate, undefined) + config.setRemoteConfig({ - tracing_enabled: true, - log_injection_enabled: false, - tracing_sampling_rate: 0.8, + tracing: true, + logInjection: false, + sampleRate: 0.8, }) assertObjectContains(config, { @@ -3856,7 +4125,7 @@ rules: }) config.setRemoteConfig({ - tracing_enabled: false, + tracing: false, }) assertObjectContains(config, { diff --git a/packages/dd-trace/test/config/remote_config.spec.js b/packages/dd-trace/test/config/remote_config.spec.js index 3ef775c80b7..1e047f1f95d 100644 --- a/packages/dd-trace/test/config/remote_config.spec.js +++ b/packages/dd-trace/test/config/remote_config.spec.js @@ -150,7 +150,7 @@ describe('Tracing Remote Config', () => { // Service config should win const lastCall = config.setRemoteConfig.lastCall - sinon.assert.match(lastCall.args[0], { tracing_sampling_rate: 0.8 }) + sinon.assert.match(lastCall.args[0], { 
sampleRate: 0.8 }) }) it('should handle config removal', () => { @@ -181,7 +181,7 @@ describe('Tracing Remote Config', () => { // Lower priority should now apply const lastCall = config.setRemoteConfig.lastCall - sinon.assert.match(lastCall.args[0], { tracing_sampling_rate: 0.5 }) + sinon.assert.match(lastCall.args[0], { sampleRate: 0.5 }) }) it('should filter configs by service/env', () => { @@ -232,8 +232,8 @@ describe('Tracing Remote Config', () => { // Service config sampling rate should win, but log_injection should come from org const lastCall = config.setRemoteConfig.lastCall sinon.assert.match(lastCall.args[0], { - tracing_sampling_rate: 0.8, - log_injection_enabled: true, + sampleRate: 0.8, + logInjection: true, }) }) diff --git a/packages/dd-trace/test/dogstatsd.spec.js b/packages/dd-trace/test/dogstatsd.spec.js index 7b90f2586c8..2337d96141e 100644 --- a/packages/dd-trace/test/dogstatsd.spec.js +++ b/packages/dd-trace/test/dogstatsd.spec.js @@ -72,7 +72,6 @@ describe('dogstatsd', () => { const dogstatsd = proxyquire.noPreserveCache().noCallThru()('../src/dogstatsd', { dgram, - dns, './exporters/common/docker': docker, }) DogStatsDClient = dogstatsd.DogStatsDClient @@ -122,8 +121,29 @@ describe('dogstatsd', () => { sockets.forEach(socket => socket.destroy()) }) + function createDogStatsDClient (options) { + return new DogStatsDClient({ + host: '127.0.0.1', + lookup: dns.lookup, + port: 8125, + tags: [], + ...options, + }) + } + + function createCustomMetrics (CustomMetricsCtor = CustomMetrics) { + return new CustomMetricsCtor({ + dogstatsd: { + hostname: '127.0.0.1', + port: 8125, + }, + lookup: dns.lookup, + runtimeMetricsRuntimeId: false, + }) + } + it('should send gauges', () => { - client = new DogStatsDClient() + client = createDogStatsDClient() client.gauge('test.avg', 10) client.flush() @@ -132,12 +152,12 @@ describe('dogstatsd', () => { assert.strictEqual(udp4.send.firstCall.args[0].toString(), 'test.avg:10|g\n') 
assert.strictEqual(udp4.send.firstCall.args[1], 0) assert.strictEqual(udp4.send.firstCall.args[2], 14) - assert.strictEqual(udp4.send.firstCall.args[3], '8125') + assert.strictEqual(udp4.send.firstCall.args[3], 8125) assert.strictEqual(udp4.send.firstCall.args[4], '127.0.0.1') }) it('should send histograms', () => { - client = new DogStatsDClient() + client = createDogStatsDClient() client.histogram('test.histogram', 10) client.flush() @@ -146,12 +166,12 @@ describe('dogstatsd', () => { assert.strictEqual(udp4.send.firstCall.args[0].toString(), 'test.histogram:10|h\n') assert.strictEqual(udp4.send.firstCall.args[1], 0) assert.strictEqual(udp4.send.firstCall.args[2], 20) - assert.strictEqual(udp4.send.firstCall.args[3], '8125') + assert.strictEqual(udp4.send.firstCall.args[3], 8125) assert.strictEqual(udp4.send.firstCall.args[4], '127.0.0.1') }) it('should send counters', () => { - client = new DogStatsDClient() + client = createDogStatsDClient() client.increment('test.count', 10) client.flush() @@ -162,7 +182,7 @@ describe('dogstatsd', () => { }) it('should send multiple metrics', () => { - client = new DogStatsDClient() + client = createDogStatsDClient() client.gauge('test.avg', 10) client.increment('test.count', 10) @@ -175,7 +195,7 @@ describe('dogstatsd', () => { }) it('should support tags', () => { - client = new DogStatsDClient() + client = createDogStatsDClient() client.gauge('test.avg', 10, ['foo:bar', 'baz:qux']) client.flush() @@ -189,7 +209,7 @@ describe('dogstatsd', () => { const value = new Array(1000).map(() => 'a').join() const tags = [`foo:${value}`] - client = new DogStatsDClient() + client = createDogStatsDClient() client.gauge('test.avg', 1, tags) client.gauge('test.avg', 1, tags) @@ -199,7 +219,7 @@ describe('dogstatsd', () => { }) it('should not flush if the queue is empty', () => { - client = new DogStatsDClient() + client = createDogStatsDClient() client.flush() @@ -209,7 +229,7 @@ describe('dogstatsd', () => { }) it('should not flush if the 
dns lookup fails', () => { - client = new DogStatsDClient({ + client = createDogStatsDClient({ host: 'invalid', }) @@ -222,7 +242,7 @@ describe('dogstatsd', () => { }) it('should not call DNS if the host is an IPv4 address', () => { - client = new DogStatsDClient({ + client = createDogStatsDClient({ host: '127.0.0.1', }) @@ -234,7 +254,7 @@ describe('dogstatsd', () => { }) it('should not call DNS if the host is an IPv6 address', () => { - client = new DogStatsDClient({ + client = createDogStatsDClient({ host: '2001:db8:3333:4444:5555:6666:7777:8888', }) @@ -246,10 +266,9 @@ describe('dogstatsd', () => { }) it('should support configuration', () => { - client = new DogStatsDClient({ + client = createDogStatsDClient({ host: '::1', port: 7777, - prefix: 'prefix.', tags: ['foo:bar'], }) @@ -257,9 +276,9 @@ describe('dogstatsd', () => { client.flush() sinon.assert.called(udp6.send) - assert.strictEqual(udp6.send.firstCall.args[0].toString(), 'prefix.test.avg:1|g|#foo:bar,baz:qux\n') + assert.strictEqual(udp6.send.firstCall.args[0].toString(), 'test.avg:1|g|#foo:bar,baz:qux\n') assert.strictEqual(udp6.send.firstCall.args[1], 0) - assert.strictEqual(udp6.send.firstCall.args[2], 37) + assert.strictEqual(udp6.send.firstCall.args[2], 30) assert.strictEqual(udp6.send.firstCall.args[3], 7777) assert.strictEqual(udp6.send.firstCall.args[4], '::1') }) @@ -275,7 +294,7 @@ describe('dogstatsd', () => { } } - client = new DogStatsDClient({ + client = createDogStatsDClient({ metricsProxyUrl: `unix://${udsPath}`, }) @@ -294,7 +313,7 @@ describe('dogstatsd', () => { } } - client = new DogStatsDClient({ + client = createDogStatsDClient({ metricsProxyUrl: `http://localhost:${httpPort}`, }) @@ -313,7 +332,7 @@ describe('dogstatsd', () => { } } - client = new DogStatsDClient({ + client = createDogStatsDClient({ metricsProxyUrl: new URL(`http://localhost:${httpPort}`), }) @@ -338,7 +357,7 @@ describe('dogstatsd', () => { statusCode = 404 - client = new DogStatsDClient({ + client = 
createDogStatsDClient({ metricsProxyUrl: `http://localhost:${httpPort}`, }) @@ -362,7 +381,7 @@ describe('dogstatsd', () => { statusCode = null // host exists but port does not, ECONNREFUSED - client = new DogStatsDClient({ + client = createDogStatsDClient({ metricsProxyUrl: 'http://localhost:32700', host: 'localhost', port: 8125, @@ -375,7 +394,7 @@ describe('dogstatsd', () => { describe('CustomMetrics', () => { it('.gauge()', () => { - client = new CustomMetrics({ dogstatsd: {} }) + client = createCustomMetrics() client.gauge('test.avg', 10, { foo: 'bar' }) client.gauge('test.avg', 10, { foo: 'bar' }) @@ -386,7 +405,7 @@ describe('dogstatsd', () => { }) it('.gauge() with tags', () => { - client = new CustomMetrics({ dogstatsd: {} }) + client = createCustomMetrics() client.gauge('test.avg', 10, { foo: 'bar' }) client.gauge('test.avg', 10, { foo: 'bar', baz: 'qux' }) @@ -401,7 +420,7 @@ describe('dogstatsd', () => { }) it('.increment()', () => { - client = new CustomMetrics({ dogstatsd: {} }) + client = createCustomMetrics() client.increment('test.count', 10) client.increment('test.count', 10) @@ -412,7 +431,7 @@ describe('dogstatsd', () => { }) it('.increment() with default', () => { - client = new CustomMetrics({ dogstatsd: {} }) + client = createCustomMetrics() client.increment('test.count') client.increment('test.count') @@ -423,7 +442,7 @@ describe('dogstatsd', () => { }) it('.increment() with tags', () => { - client = new CustomMetrics({ dogstatsd: {} }) + client = createCustomMetrics() client.increment('test.count', 10, { foo: 'bar' }) client.increment('test.count', 10, { foo: 'bar', baz: 'qux' }) @@ -438,7 +457,7 @@ describe('dogstatsd', () => { }) it('.decrement()', () => { - client = new CustomMetrics({ dogstatsd: {} }) + client = createCustomMetrics() client.decrement('test.count', 10) client.decrement('test.count', 10) @@ -449,7 +468,7 @@ describe('dogstatsd', () => { }) it('.decrement() with default', () => { - client = new CustomMetrics({ dogstatsd: 
{} }) + client = createCustomMetrics() client.decrement('test.count') client.decrement('test.count') @@ -460,7 +479,7 @@ describe('dogstatsd', () => { }) it('.distribution()', () => { - client = new CustomMetrics({ dogstatsd: {} }) + client = createCustomMetrics() client.distribution('test.dist', 10) client.distribution('test.dist', 10) @@ -471,7 +490,7 @@ describe('dogstatsd', () => { }) it('.histogram()', () => { - client = new CustomMetrics({ dogstatsd: {} }) + client = createCustomMetrics() client.histogram('test.histogram', 10) client.histogram('test.histogram', 10) @@ -491,7 +510,7 @@ describe('dogstatsd', () => { }) it('.histogram() with tags', () => { - client = new CustomMetrics({ dogstatsd: {} }) + client = createCustomMetrics() client.histogram('test.histogram', 10, { foo: 'bar' }) client.histogram('test.histogram', 10, { foo: 'bar', baz: 'qux' }) @@ -520,7 +539,7 @@ describe('dogstatsd', () => { }) it('should support array-based tags for gauge', () => { - client = new CustomMetrics({ dogstatsd: {} }) + client = createCustomMetrics() client.gauge('test.avg', 10, ['foo:bar', 'baz:qux']) client.flush() @@ -530,7 +549,7 @@ describe('dogstatsd', () => { }) it('should support array-based tags for increment', () => { - client = new CustomMetrics({ dogstatsd: {} }) + client = createCustomMetrics() client.increment('test.count', 10, ['foo:bar', 'baz:qux']) client.flush() @@ -540,7 +559,7 @@ describe('dogstatsd', () => { }) it('should support array-based tags for decrement', () => { - client = new CustomMetrics({ dogstatsd: {} }) + client = createCustomMetrics() client.decrement('test.count', 10, ['foo:bar', 'baz:qux']) client.flush() @@ -550,7 +569,7 @@ describe('dogstatsd', () => { }) it('should support array-based tags for distribution', () => { - client = new CustomMetrics({ dogstatsd: {} }) + client = createCustomMetrics() client.distribution('test.dist', 10, ['foo:bar', 'baz:qux']) client.flush() @@ -560,7 +579,7 @@ describe('dogstatsd', () => { }) 
it('should support array-based tags for histogram', () => { - client = new CustomMetrics({ dogstatsd: {} }) + client = createCustomMetrics() client.histogram('test.histogram', 10, ['foo:bar', 'baz:qux']) client.flush() @@ -579,7 +598,7 @@ describe('dogstatsd', () => { }) it('should handle empty array of tags', () => { - client = new CustomMetrics({ dogstatsd: {} }) + client = createCustomMetrics() client.gauge('test.avg', 10, []) client.flush() @@ -589,7 +608,7 @@ describe('dogstatsd', () => { }) it('should handle mixed tag formats', () => { - client = new CustomMetrics({ dogstatsd: {} }) + client = createCustomMetrics() client.gauge('test.avg', 10, { foo: 'bar' }) client.gauge('test.avg', 20, ['baz:qux']) @@ -607,16 +626,20 @@ describe('dogstatsd', () => { toFake: ['Date', 'setTimeout', 'clearTimeout', 'setInterval', 'clearInterval'], }) - client = new CustomMetrics({ dogstatsd: {} }) + try { + client = createCustomMetrics() - client.gauge('test.avg', 10, { foo: 'bar' }) + client.gauge('test.avg', 10, { foo: 'bar' }) - sinon.assert.notCalled(udp4.send) + sinon.assert.notCalled(udp4.send) - clock.tick(10 * 1000) + clock.tick(10 * 1000) - sinon.assert.called(udp4.send) - assert.strictEqual(udp4.send.firstCall.args[0].toString(), 'test.avg:10|g|#foo:bar\n') + sinon.assert.called(udp4.send) + assert.strictEqual(udp4.send.firstCall.args[0].toString(), 'test.avg:10|g|#foo:bar\n') + } finally { + clock.restore() + } }) it('should send the Docker entity ID when available', () => { @@ -624,11 +647,10 @@ describe('dogstatsd', () => { const { CustomMetrics } = proxyquire.noPreserveCache()('../src/dogstatsd', { dgram, - dns, './exporters/common/docker': docker, }) - client = new CustomMetrics({ dogstatsd: {} }) + client = createCustomMetrics(CustomMetrics) client.gauge('test.avg', 10, { foo: 'bar' }) client.flush() diff --git a/packages/dd-trace/test/exporters/agent/writer.spec.js b/packages/dd-trace/test/exporters/agent/writer.spec.js index 6b07fb511c0..c5f805e85eb 100644 
--- a/packages/dd-trace/test/exporters/agent/writer.spec.js +++ b/packages/dd-trace/test/exporters/agent/writer.spec.js @@ -7,6 +7,7 @@ const { describe, it, beforeEach } = require('mocha') const context = describe const sinon = require('sinon') const proxyquire = require('proxyquire') +const { channel } = require('dc-polyfill') const { assertObjectContains } = require('../../../../../integration-tests/helpers') require('../../setup/core') @@ -183,6 +184,22 @@ function describeWriter (protocolVersion) { }) }) + it('should publish event on first flush with data', () => { + const ch = channel('dd-trace:exporter:first-flush') + let published = false + const onFirstFlush = () => { published = !published } + ch.subscribe(onFirstFlush) + + encoder.count.returns(1) + writer.flush() + + assert.strictEqual(published, true) + writer.flush() + // should only publish on first flush, hence published should mantain as true + assert.strictEqual(published, true) + ch.unsubscribe(onFirstFlush) + }) + context('with the url as a unix socket', () => { beforeEach(() => { url = new URL('unix:/path/to/somesocket.sock') diff --git a/packages/dd-trace/test/helpers/config.js b/packages/dd-trace/test/helpers/config.js index 0e83e73f4be..aa325ea58c9 100644 --- a/packages/dd-trace/test/helpers/config.js +++ b/packages/dd-trace/test/helpers/config.js @@ -2,11 +2,13 @@ const proxyquire = require('proxyquire') -// Resolve the config module from within the test package -const CONFIG_PATH = require.resolve('../../src/config') - function getConfigFresh (options) { - return proxyquire.noPreserveCache()(CONFIG_PATH, {})(options) + const helper = proxyquire.noPreserveCache()('../../src/config/helper.js', {}) + const defaults = proxyquire.noPreserveCache()('../../src/config/defaults.js', {}) + return proxyquire.noPreserveCache()('../../src/config', { + './defaults': defaults, + './helper': helper, + })(options) } module.exports = { diff --git a/packages/dd-trace/test/llmobs/plugins/ai/index.spec.js 
b/packages/dd-trace/test/llmobs/plugins/ai/index.spec.js index 3c9ebefcc33..27f4a8aa07e 100644 --- a/packages/dd-trace/test/llmobs/plugins/ai/index.spec.js +++ b/packages/dd-trace/test/llmobs/plugins/ai/index.spec.js @@ -30,6 +30,12 @@ function getAiSdkOpenAiPackage (vercelAiVersion) { } } +const MOCK_TELEMETRY_METADATA = { + userId: '12345', + organizationId: 'orgAbc123', + conversationId: 'convAbc123', +} + describe('Plugin', () => { useEnv({ OPENAI_API_KEY: '', @@ -73,6 +79,9 @@ describe('Plugin', () => { system: 'You are a helpful assistant', prompt: 'Hello, OpenAI!', temperature: 0.5, + experimental_telemetry: { + metadata: MOCK_TELEMETRY_METADATA, + }, } if (semifies(realVersion, '>=5.0.0')) { @@ -85,7 +94,9 @@ describe('Plugin', () => { const { apmSpans, llmobsSpans } = await getEvents() - const expectedWorkflowMetadata = {} + const expectedWorkflowMetadata = { + ...MOCK_TELEMETRY_METADATA, + } if (semifies(realVersion, '>=5.0.0')) { expectedWorkflowMetadata.maxRetries = MOCK_NUMBER expectedWorkflowMetadata.maxOutputTokens = 100 @@ -117,6 +128,7 @@ describe('Plugin', () => { metadata: { max_tokens: 100, temperature: 0.5, + ...MOCK_TELEMETRY_METADATA, }, metrics: { input_tokens: MOCK_NUMBER, output_tokens: MOCK_NUMBER, total_tokens: MOCK_NUMBER }, tags: { ml_app: 'test', integration: 'ai' }, @@ -139,6 +151,9 @@ describe('Plugin', () => { model: openai('gpt-4o-mini'), schema, prompt: 'Invent a character for a video game', + experimental_telemetry: { + metadata: MOCK_TELEMETRY_METADATA, + }, }) const { apmSpans, llmobsSpans } = await getEvents() @@ -146,6 +161,7 @@ describe('Plugin', () => { const expectedWorkflowMetadata = { schema: MOCK_OBJECT, output: 'object', + ...MOCK_TELEMETRY_METADATA, } if (semifies(realVersion, '>=5.0.0')) { expectedWorkflowMetadata.maxRetries = MOCK_NUMBER @@ -171,6 +187,7 @@ describe('Plugin', () => { inputMessages: [{ content: 'Invent a character for a video game', role: 'user' }], outputMessages: [{ content: MOCK_STRING, role: 
'assistant' }], metrics: { input_tokens: MOCK_NUMBER, output_tokens: MOCK_NUMBER, total_tokens: MOCK_NUMBER }, + metadata: MOCK_TELEMETRY_METADATA, tags: { ml_app: 'test', integration: 'ai' }, }) }) @@ -179,6 +196,9 @@ describe('Plugin', () => { await ai.embed({ model: openai.embedding('text-embedding-ada-002'), value: 'hello world', + experimental_telemetry: { + metadata: MOCK_TELEMETRY_METADATA, + }, }) const { apmSpans, llmobsSpans } = await getEvents() @@ -189,13 +209,14 @@ describe('Plugin', () => { spanKind: 'workflow', inputValue: 'hello world', outputValue: '[1 embedding(s) returned with size 1536]', + metadata: { + ...MOCK_TELEMETRY_METADATA, + }, tags: { ml_app: 'test', integration: 'ai' }, } if (semifies(realVersion, '>=5.0.0')) { - expectedWorkflowSpanEvent.metadata = { - maxRetries: MOCK_NUMBER, - } + expectedWorkflowSpanEvent.metadata.maxRetries = MOCK_NUMBER } assertLlmObsSpanEvent(llmobsSpans[0], expectedWorkflowSpanEvent) @@ -210,6 +231,7 @@ describe('Plugin', () => { inputDocuments: [{ text: 'hello world' }], outputValue: '[1 embedding(s) returned with size 1536]', metrics: { input_tokens: MOCK_NUMBER, total_tokens: MOCK_NUMBER }, + metadata: MOCK_TELEMETRY_METADATA, tags: { ml_app: 'test', integration: 'ai' }, }) }) @@ -218,6 +240,13 @@ describe('Plugin', () => { await ai.embedMany({ model: openai.embedding('text-embedding-ada-002'), values: ['hello world', 'goodbye world'], + experimental_telemetry: { + metadata: { + userId: '12345', + organizationId: 'orgAbc123', + conversationId: 'convAbc123', + }, + }, }) const { apmSpans, llmobsSpans } = await getEvents() @@ -229,11 +258,14 @@ describe('Plugin', () => { inputValue: JSON.stringify(['hello world', 'goodbye world']), outputValue: '[2 embedding(s) returned with size 1536]', tags: { ml_app: 'test', integration: 'ai' }, + metadata: { + userId: '12345', + organizationId: 'orgAbc123', + conversationId: 'convAbc123', + }, } if (semifies(realVersion, '>=5.0.0')) { - expectedWorkflowSpanEvent.metadata 
= { - maxRetries: MOCK_NUMBER, - } + expectedWorkflowSpanEvent.metadata.maxRetries = MOCK_NUMBER } assertLlmObsSpanEvent(llmobsSpans[0], expectedWorkflowSpanEvent) @@ -248,6 +280,11 @@ describe('Plugin', () => { inputDocuments: [{ text: 'hello world' }, { text: 'goodbye world' }], outputValue: '[2 embedding(s) returned with size 1536]', metrics: { input_tokens: MOCK_NUMBER, total_tokens: MOCK_NUMBER }, + metadata: { + userId: '12345', + organizationId: 'orgAbc123', + conversationId: 'convAbc123', + }, tags: { ml_app: 'test', integration: 'ai' }, }) }) @@ -259,6 +296,9 @@ describe('Plugin', () => { prompt: 'Hello, OpenAI!', maxTokens: 100, temperature: 0.5, + experimental_telemetry: { + metadata: MOCK_TELEMETRY_METADATA, + }, } if (semifies(realVersion, '>=5.0.0')) { options.maxOutputTokens = 100 @@ -278,6 +318,8 @@ describe('Plugin', () => { ? { maxRetries: MOCK_NUMBER, maxOutputTokens: 100 } : { maxSteps: MOCK_NUMBER } + Object.assign(expectedMetadata, MOCK_TELEMETRY_METADATA) + assertLlmObsSpanEvent(llmobsSpans[0], { span: apmSpans[0], name: 'streamText', @@ -303,6 +345,7 @@ describe('Plugin', () => { metadata: { max_tokens: 100, temperature: 0.5, + ...MOCK_TELEMETRY_METADATA, }, metrics: { input_tokens: MOCK_NUMBER, output_tokens: MOCK_NUMBER, total_tokens: MOCK_NUMBER }, tags: { ml_app: 'test', integration: 'ai' }, @@ -325,6 +368,9 @@ describe('Plugin', () => { model: openai('gpt-4o-mini'), schema, prompt: 'Invent a character for a video game', + experimental_telemetry: { + metadata: MOCK_TELEMETRY_METADATA, + }, }) const partialObjectStream = result.partialObjectStream @@ -338,6 +384,7 @@ describe('Plugin', () => { const expectedWorkflowMetadata = { schema: MOCK_OBJECT, output: 'object', + ...MOCK_TELEMETRY_METADATA, } if (semifies(realVersion, '>=5.0.0')) { expectedWorkflowMetadata.maxRetries = MOCK_NUMBER @@ -366,6 +413,7 @@ describe('Plugin', () => { role: 'assistant', }], metrics: { input_tokens: MOCK_NUMBER, output_tokens: MOCK_NUMBER, total_tokens: 
MOCK_NUMBER }, + metadata: MOCK_TELEMETRY_METADATA, tags: { ml_app: 'test', integration: 'ai' }, }) }) diff --git a/packages/dd-trace/test/llmobs/plugins/aws-sdk/bedrockruntime.spec.js b/packages/dd-trace/test/llmobs/plugins/aws-sdk/bedrockruntime.spec.js index 91409841038..73f521d264f 100644 --- a/packages/dd-trace/test/llmobs/plugins/aws-sdk/bedrockruntime.spec.js +++ b/packages/dd-trace/test/llmobs/plugins/aws-sdk/bedrockruntime.spec.js @@ -85,8 +85,8 @@ describe('Plugin', () => { cache_read_input_tokens: model.response.cacheReadTokens, cache_write_input_tokens: model.response.cacheWriteTokens, }, - modelName: model.modelId.split('.')[1].toLowerCase(), - modelProvider: model.provider.toLowerCase(), + modelName: model.modelId.toLowerCase(), + modelProvider: 'amazon_bedrock', metadata: { temperature: modelConfig.temperature, max_tokens: modelConfig.maxTokens, @@ -134,8 +134,8 @@ describe('Plugin', () => { cache_read_input_tokens: model.response.cacheReadTokens, cache_write_input_tokens: model.response.cacheWriteTokens, }, - modelName: model.modelId.split('.')[1].toLowerCase(), - modelProvider: model.provider.toLowerCase(), + modelName: model.modelId.toLowerCase(), + modelProvider: 'amazon_bedrock', metadata: { temperature: modelConfig.temperature, max_tokens: modelConfig.maxTokens, @@ -181,8 +181,8 @@ describe('Plugin', () => { cache_read_input_tokens: cacheWriteRequest.response.cacheReadTokens, cache_write_input_tokens: cacheWriteRequest.response.cacheWriteTokens, }, - modelName: cacheWriteRequest.modelId.split('.')[2].toLowerCase(), - modelProvider: cacheWriteRequest.provider.toLowerCase(), + modelName: cacheWriteRequest.modelId.toLowerCase(), + modelProvider: 'amazon_bedrock', metadata: { temperature: cacheWriteRequest.requestBody.temperature, max_tokens: cacheWriteRequest.requestBody.max_tokens, @@ -226,8 +226,8 @@ describe('Plugin', () => { cache_read_input_tokens: cacheWriteRequest.response.cacheReadTokens, cache_write_input_tokens: 
cacheWriteRequest.response.cacheWriteTokens, }, - modelName: cacheWriteRequest.modelId.split('.')[2].toLowerCase(), - modelProvider: cacheWriteRequest.provider.toLowerCase(), + modelName: cacheWriteRequest.modelId.toLowerCase(), + modelProvider: 'amazon_bedrock', metadata: { temperature: cacheWriteRequest.requestBody.temperature, max_tokens: cacheWriteRequest.requestBody.max_tokens, @@ -274,8 +274,8 @@ describe('Plugin', () => { cache_read_input_tokens: cacheReadRequest.response.cacheReadTokens, cache_write_input_tokens: cacheReadRequest.response.cacheWriteTokens, }, - modelName: cacheReadRequest.modelId.split('.')[2].toLowerCase(), - modelProvider: cacheReadRequest.provider.toLowerCase(), + modelName: cacheReadRequest.modelId.toLowerCase(), + modelProvider: 'amazon_bedrock', metadata: { temperature: cacheReadRequest.requestBody.temperature, max_tokens: cacheReadRequest.requestBody.max_tokens, @@ -319,8 +319,8 @@ describe('Plugin', () => { cache_read_input_tokens: cacheReadRequest.response.cacheReadTokens, cache_write_input_tokens: cacheReadRequest.response.cacheWriteTokens, }, - modelName: cacheReadRequest.modelId.split('.')[2].toLowerCase(), - modelProvider: cacheReadRequest.provider.toLowerCase(), + modelName: cacheReadRequest.modelId.toLowerCase(), + modelProvider: 'amazon_bedrock', metadata: { temperature: cacheReadRequest.requestBody.temperature, max_tokens: cacheReadRequest.requestBody.max_tokens, diff --git a/packages/dd-trace/test/llmobs/sdk/index.spec.js b/packages/dd-trace/test/llmobs/sdk/index.spec.js index d2c8ce7a586..ceeec2dd691 100644 --- a/packages/dd-trace/test/llmobs/sdk/index.spec.js +++ b/packages/dd-trace/test/llmobs/sdk/index.spec.js @@ -150,7 +150,9 @@ describe('sdk', () => { } const config = getConfigFresh({ - llmobs: {}, + llmobs: { + agentlessEnabled: false, + }, }) const enabledLLMObs = new LLMObsSDK(tracer._tracer, llmobsModule, config) diff --git a/packages/dd-trace/test/llmobs/sdk/typescript/index.spec.js 
b/packages/dd-trace/test/llmobs/sdk/typescript/index.spec.js index e54d5839f5b..b0445b00b77 100644 --- a/packages/dd-trace/test/llmobs/sdk/typescript/index.spec.js +++ b/packages/dd-trace/test/llmobs/sdk/typescript/index.spec.js @@ -27,6 +27,7 @@ const testVersions = [ '^3', '^4', '^5', + '^6', ] const testCases = [ @@ -100,7 +101,7 @@ describe('typescript', () => { // compile typescript execSync( - `tsc --target ES6 --experimentalDecorators --module commonjs --sourceMap ${file}.ts`, + `tsc --target ES6 --experimentalDecorators --module commonjs --sourceMap --types node ${file}.ts`, { cwd, stdio: 'inherit' } ) diff --git a/packages/dd-trace/test/log.spec.js b/packages/dd-trace/test/log.spec.js index e7ce165e6dd..93bba4b1875 100644 --- a/packages/dd-trace/test/log.spec.js +++ b/packages/dd-trace/test/log.spec.js @@ -15,6 +15,41 @@ describe('log', () => { describe('config', () => { let env + /** + * @param {{ + * fleetEntries?: Record, + * localEntries?: Record, + * isServerless?: boolean + * }} [options] + */ + const reloadLog = (options = {}) => { + const { fleetEntries, localEntries, isServerless = true } = options + const logWriter = { + configure: sinon.spy(), + } + const configHelper = isServerless + ? 
proxyquire.noPreserveCache()('../src/config/helper', { + '../serverless': { IS_SERVERLESS: true }, + }) + : proxyquire.noPreserveCache()('../src/config/helper', { + '../serverless': { IS_SERVERLESS: false }, + './stable': function StableConfigStub () { + this.localEntries = localEntries + this.fleetEntries = fleetEntries + this.warnings = [] + }, + }) + + const log = proxyquire.noPreserveCache()('../src/log', { + '../config/helper': configHelper, + './writer': logWriter, + }) + + logWriter.configure.resetHistory() + + return { log, logWriter } + } + beforeEach(() => { env = process.env process.env = {} @@ -24,110 +59,176 @@ describe('log', () => { process.env = env }) - it('should have getConfig function', () => { - const log = require('../src/log') - assert.strictEqual(typeof log.getConfig, 'function') + it('should have configure function', () => { + const { log } = reloadLog() + assert.strictEqual(typeof log.configure, 'function') }) - it('should be configured with default config if no environment variables are set', () => { - const log = require('../src/log') - assert.deepStrictEqual(log.getConfig(), { - enabled: false, - logger: undefined, - logLevel: 'debug', - }) + it('should configure with default config if no environment variables are set', () => { + const { log, logWriter } = reloadLog() + + assert.strictEqual(log.configure({}), false) + sinon.assert.calledOnceWithExactly(logWriter.configure, false, 'debug', undefined) }) - it('should not be possbile to mutate config object returned by getConfig', () => { - const log = require('../src/log') - const config = log.getConfig() - config.enabled = 1 - config.logger = 1 - config.logLevel = 1 - assert.deepStrictEqual(log.getConfig(), { - enabled: false, - logger: undefined, - logLevel: 'debug', - }) + it('should pass the logger option to the writer', () => { + const { log, logWriter } = reloadLog() + const logger = { + debug: () => {}, + error: () => {}, + } + + log.configure({ logger }) + + 
sinon.assert.calledOnceWithExactly(logWriter.configure, false, 'debug', logger) }) it('should initialize from environment variables with DD env vars taking precedence OTEL env vars', () => { process.env.DD_TRACE_LOG_LEVEL = 'error' process.env.DD_TRACE_DEBUG = 'false' process.env.OTEL_LOG_LEVEL = 'debug' - const config = proxyquire('../src/log', {}).getConfig() - assert.strictEqual(config.enabled, false) - assert.strictEqual(config.logLevel, 'error') + const { log, logWriter } = reloadLog() + + assert.strictEqual(log.configure({}), false) + sinon.assert.calledOnceWithExactly(logWriter.configure, false, 'error', undefined) }) it('should initialize with OTEL environment variables when DD env vars are not set', () => { process.env.OTEL_LOG_LEVEL = 'debug' - const config = proxyquire('../src/log', {}).getConfig() - assert.strictEqual(config.enabled, true) - assert.strictEqual(config.logLevel, 'debug') + const { log, logWriter } = reloadLog() + + assert.strictEqual(log.configure({}), true) + sinon.assert.calledOnceWithExactly(logWriter.configure, true, 'debug', undefined) }) it('should initialize from environment variables', () => { process.env.DD_TRACE_DEBUG = 'true' - const config = proxyquire('../src/log', {}).getConfig() - assert.strictEqual(config.enabled, true) + const { log, logWriter } = reloadLog() + + assert.strictEqual(log.configure({}), true) + sinon.assert.calledOnceWithExactly(logWriter.configure, true, 'debug', undefined) }) it('should read case-insensitive booleans from environment variables', () => { process.env.DD_TRACE_DEBUG = 'TRUE' - const config = proxyquire('../src/log', {}).getConfig() - assert.strictEqual(config.enabled, true) + const { log, logWriter } = reloadLog() + + assert.strictEqual(log.configure({}), true) + sinon.assert.calledOnceWithExactly(logWriter.configure, true, 'debug', undefined) }) - describe('isEnabled', () => { + describe('configure', () => { it('prefers fleetStableConfigValue over env and local', () => { - const log = 
proxyquire('../src/log', {}) - assert.strictEqual(log.isEnabled('true', 'false'), true) - assert.strictEqual(log.isEnabled('false', 'true'), false) + process.env.DD_TRACE_DEBUG = 'false' + + let loaded = reloadLog({ + fleetEntries: { DD_TRACE_DEBUG: 'true' }, + isServerless: false, + localEntries: { DD_TRACE_DEBUG: 'false' }, + }) + assert.strictEqual(loaded.log.configure({}), true) + + process.env.DD_TRACE_DEBUG = 'true' + + loaded = reloadLog({ + fleetEntries: { DD_TRACE_DEBUG: 'false' }, + isServerless: false, + localEntries: { DD_TRACE_DEBUG: 'true' }, + }) + assert.strictEqual(loaded.log.configure({}), false) }) it('uses DD_TRACE_DEBUG when fleetStableConfigValue is not set', () => { process.env.DD_TRACE_DEBUG = 'true' - let log = proxyquire('../src/log', {}) - assert.strictEqual(log.isEnabled(undefined, 'false'), true) + let loaded = reloadLog({ + isServerless: false, + localEntries: { DD_TRACE_DEBUG: 'false' }, + }) + assert.strictEqual(loaded.log.configure({}), true) process.env.DD_TRACE_DEBUG = 'false' - log = proxyquire('../src/log', {}) - assert.strictEqual(log.isEnabled(undefined, 'true'), false) + loaded = reloadLog({ + isServerless: false, + localEntries: { DD_TRACE_DEBUG: 'true' }, + }) + assert.strictEqual(loaded.log.configure({}), false) }) it('uses OTEL_LOG_LEVEL=debug when DD vars are not set', () => { process.env.OTEL_LOG_LEVEL = 'debug' - let log = proxyquire('../src/log', {}) - assert.strictEqual(log.isEnabled(undefined, undefined), true) + let loaded = reloadLog({ + isServerless: false, + localEntries: { OTEL_LOG_LEVEL: 'info' }, + }) + assert.strictEqual(loaded.log.configure({}), true) process.env.OTEL_LOG_LEVEL = 'info' - log = proxyquire('../src/log', {}) - assert.strictEqual(log.isEnabled(undefined, undefined), false) + loaded = reloadLog({ + isServerless: false, + localEntries: { OTEL_LOG_LEVEL: 'debug' }, + }) + assert.strictEqual(loaded.log.configure({}), false) }) it('falls back to localStableConfigValue', () => { - const log = 
proxyquire('../src/log', {}) - assert.strictEqual(log.isEnabled(undefined, 'false'), false) - assert.strictEqual(log.isEnabled(undefined, 'true'), true) + let loaded = reloadLog({ + isServerless: false, + localEntries: { DD_TRACE_DEBUG: 'false' }, + }) + assert.strictEqual(loaded.log.configure({}), false) + + loaded = reloadLog({ + isServerless: false, + localEntries: { DD_TRACE_DEBUG: 'true' }, + }) + assert.strictEqual(loaded.log.configure({}), true) }) it('falls back to internal config.enabled when nothing else provided', () => { - const log = proxyquire('../src/log', {}) - log.toggle(true) - assert.strictEqual(log.isEnabled(), true) - log.toggle(false) - assert.strictEqual(log.isEnabled(), false) + const { log, logWriter } = reloadLog({ + fleetEntries: {}, + isServerless: false, + localEntries: {}, + }) + + process.env.OTEL_LOG_LEVEL = 'debug' + assert.strictEqual(log.configure({}), true) + + process.env = {} + assert.strictEqual(log.configure({}), true) + sinon.assert.calledWithExactly(logWriter.configure.secondCall, true, 'debug', undefined) + }) + + it('falls back to the previous log level when no override is provided', () => { + const { log, logWriter } = reloadLog() + + log.configure({ logLevel: 'error' }) + log.configure({}) + + sinon.assert.calledWithExactly(logWriter.configure.secondCall, false, 'error', undefined) }) }) }) describe('general usage', () => { + let env let log let logger let error + function loadConfiguredLog (options = {}, envEntries = {}) { + process.env = { + DD_TRACE_DEBUG: 'true', + ...envEntries, + } + log = proxyquire.noPreserveCache()('../src/log', {}) + log.configure(options) + return log + } + beforeEach(() => { + env = process.env + process.env = {} sinon.stub(console, 'info') sinon.stub(console, 'error') sinon.stub(console, 'warn') @@ -140,12 +241,11 @@ describe('log', () => { error: sinon.spy(), } - log = proxyquire('../src/log', {}) - log.toggle(true) + loadConfiguredLog() }) afterEach(() => { - log.reset() + process.env = 
env console.info.restore() console.error.restore() console.warn.restore() @@ -153,12 +253,11 @@ describe('log', () => { }) it('should support chaining', () => { + loadConfiguredLog({ logger }) + log - .use(logger) - .toggle(true) .error('error') .debug('debug') - .reset() }) it('should call the logger in a noop context', () => { @@ -167,7 +266,8 @@ describe('log', () => { assert.strictEqual(storage('legacy').getStore().noop, true) } - log.use(logger).debug('debug') + loadConfiguredLog({ logger }) + log.debug('debug') }) describe('debug', () => { @@ -198,7 +298,7 @@ describe('log', () => { } } - log.toggle(true, 'trace') + loadConfiguredLog({ logLevel: 'trace' }) log.trace('argument', { hello: 'world' }, new Foo()) sinon.assert.calledOnce(console.debug) @@ -310,9 +410,9 @@ describe('log', () => { }) }) - describe('toggle', () => { - it('should disable the logger', () => { - log.toggle(false) + describe('configure', () => { + it('should disable the logger when DD_TRACE_DEBUG is false', () => { + loadConfiguredLog({}, { DD_TRACE_DEBUG: 'false' }) log.debug('debug') log.error(error) @@ -320,9 +420,8 @@ describe('log', () => { sinon.assert.notCalled(console.error) }) - it('should enable the logger', () => { - log.toggle(false) - log.toggle(true) + it('should enable the logger when OTEL_LOG_LEVEL is debug', () => { + loadConfiguredLog({}, { OTEL_LOG_LEVEL: 'debug' }) log.debug('debug') log.error(error) @@ -330,8 +429,8 @@ describe('log', () => { sinon.assert.calledWith(console.error, error) }) - it('should set minimum log level when enabled with logLevel argument set to a valid string', () => { - log.toggle(true, 'error') + it('should set minimum log level when configured with a valid string', () => { + loadConfiguredLog({ logLevel: 'error' }) log.debug('debug') log.error(error) @@ -339,8 +438,8 @@ describe('log', () => { sinon.assert.calledWith(console.error, error) }) - it('should set default log level when enabled with logLevel argument set to an invalid string', () 
=> { - log.toggle(true, 'not a real log level') + it('should set default log level when configured with an invalid string', () => { + loadConfiguredLog({ logLevel: 'not a real log level' }) log.debug('debug') log.error(error) @@ -348,8 +447,8 @@ describe('log', () => { sinon.assert.calledWith(console.error, error) }) - it('should set min log level when enabled w/logLevel arg set to valid string w/wrong case or whitespace', () => { - log.toggle(true, ' ErRoR ') + it('should set min log level when configured with valid string with wrong case or whitespace', () => { + loadConfiguredLog({ logLevel: ' ErRoR ' }) log.debug('debug') log.error(error) @@ -358,7 +457,7 @@ describe('log', () => { }) it('should log all log levels greater than or equal to minimum log level', () => { - log.toggle(true, 'debug') + loadConfiguredLog({ logLevel: 'debug' }) log.debug('debug') log.error(error) @@ -366,8 +465,8 @@ describe('log', () => { sinon.assert.calledWith(console.error, error) }) - it('should enable default log level when enabled with logLevel argument set to invalid input', () => { - log.toggle(true, ['trace', 'info', 'eror']) + it('should enable default log level when configured with invalid input', () => { + loadConfiguredLog({ logLevel: ['trace', 'info', 'eror'] }) log.debug('debug') log.error(error) @@ -375,8 +474,8 @@ describe('log', () => { sinon.assert.calledWith(console.error, error) }) - it('should enable default log level when enabled without logLevel argument', () => { - log.toggle(true) + it('should enable default log level when configured without logLevel argument', () => { + loadConfiguredLog() log.debug('debug') log.error(error) @@ -385,9 +484,9 @@ describe('log', () => { }) }) - describe('use', () => { + describe('logger option', () => { it('should set the underlying logger when valid', () => { - log.use(logger) + loadConfiguredLog({ logger }) log.debug('debug') log.error(error) @@ -396,7 +495,7 @@ describe('log', () => { }) it('be a no op with an empty logger', 
() => { - log.use(null) + loadConfiguredLog({ logger: null }) log.debug('debug') log.error(error) @@ -405,42 +504,7 @@ describe('log', () => { }) it('be a no op with an invalid logger', () => { - log.use('invalid') - log.debug('debug') - log.error(error) - - sinon.assert.calledWith(console.debug, 'debug') - sinon.assert.calledWith(console.error, error) - }) - }) - - describe('reset', () => { - it('should reset the logger', () => { - log.use(logger) - log.reset() - log.toggle(true) - log.debug('debug') - log.error(error) - - sinon.assert.calledWith(console.debug, 'debug') - sinon.assert.calledWith(console.error, error) - }) - - it('should reset the toggle', () => { - log.use(logger) - log.reset() - log.debug('debug') - log.error(error) - - sinon.assert.notCalled(console.debug) - sinon.assert.notCalled(console.error) - }) - - it('should reset the minimum log level to defaults', () => { - log.use(logger) - log.toggle(true, 'error') - log.reset() - log.toggle(true) + loadConfiguredLog({ logger: 'invalid' }) log.debug('debug') log.error(error) @@ -471,11 +535,7 @@ describe('log', () => { let logWriter beforeEach(() => { - logWriter = require('../src/log/writer') - }) - - afterEach(() => { - logWriter.reset() + logWriter = proxyquire.noPreserveCache()('../src/log/writer', {}) }) describe('error', () => { @@ -486,7 +546,7 @@ describe('log', () => { }) it('should call console.error no matter enable flag value', () => { - logWriter.toggle(false) + logWriter.configure(false) logWriter.error(error) sinon.assert.calledOnceWithExactly(console.error, error) @@ -501,14 +561,14 @@ describe('log', () => { }) it('should call logger debug if warn is not provided', () => { - logWriter.use(logger) + logWriter.configure(false, undefined, logger) logWriter.warn('warn') sinon.assert.calledOnceWithExactly(logger.debug, 'warn') }) it('should call console.warn no matter enable flag value', () => { - logWriter.toggle(false) + logWriter.configure(false) logWriter.warn('warn') 
sinon.assert.calledOnceWithExactly(console.warn, 'warn') @@ -523,14 +583,14 @@ describe('log', () => { }) it('should call logger debug if info is not provided', () => { - logWriter.use(logger) + logWriter.configure(false, undefined, logger) logWriter.info('info') sinon.assert.calledOnceWithExactly(logger.debug, 'info') }) it('should call console.info no matter enable flag value', () => { - logWriter.toggle(false) + logWriter.configure(false) logWriter.info('info') sinon.assert.calledOnceWithExactly(console.info, 'info') @@ -545,7 +605,7 @@ describe('log', () => { }) it('should call console.debug no matter enable flag value', () => { - logWriter.toggle(false) + logWriter.configure(false) logWriter.debug('debug') sinon.assert.calledOnceWithExactly(console.debug, 'debug') diff --git a/packages/dd-trace/test/opentelemetry/metrics.spec.js b/packages/dd-trace/test/opentelemetry/metrics.spec.js index 5e3b82ad449..9422dd13fc7 100644 --- a/packages/dd-trace/test/opentelemetry/metrics.spec.js +++ b/packages/dd-trace/test/opentelemetry/metrics.spec.js @@ -29,7 +29,15 @@ describe('OpenTelemetry Meter Provider', () => { process.env.OTEL_METRIC_EXPORT_INTERVAL = '100' process.env.OTEL_EXPORTER_OTLP_METRICS_TIMEOUT = '5000' } - Object.assign(process.env, envOverrides) + if (envOverrides) { + for (const [key, value] of Object.entries(envOverrides)) { + if (value === undefined) { + delete process.env[key] + } else { + process.env[key] = value + } + } + } const dogstatsd = proxyquire.noPreserveCache()('../../src/dogstatsd', {}) @@ -673,9 +681,11 @@ describe('OpenTelemetry Meter Provider', () => { meter.removeBatchObservableCallback(() => {}, []) assert.strictEqual(warnSpy.callCount, 2) - assert.strictEqual(warnSpy.firstCall.args[0], 'addBatchObservableCallback is not implemented') - assert.strictEqual(warnSpy.secondCall.args[0], 'removeBatchObservableCallback is not implemented') + assert.deepStrictEqual( + warnSpy.getCalls().map(call => format(...call.args)), + 
['addBatchObservableCallback is not implemented', 'removeBatchObservableCallback is not implemented'] + ) warnSpy.restore() }) }) @@ -786,7 +796,7 @@ describe('OpenTelemetry Meter Provider', () => { }) }) - describe('NonNegInt Configuration Validation', () => { + describe('Allowed Integer Configuration Validation', () => { let log, warnSpy beforeEach(() => { @@ -798,6 +808,10 @@ describe('OpenTelemetry Meter Provider', () => { warnSpy.restore() }) + function hasWarning (message) { + return warnSpy.getCalls().some(call => format(...call.args).includes(message)) + } + it('rejects zero for metrics configs with allowZero=false', () => { setupTracer({ OTEL_BSP_SCHEDULE_DELAY: '0', @@ -808,16 +822,16 @@ describe('OpenTelemetry Meter Provider', () => { OTEL_METRIC_EXPORT_TIMEOUT: '0', OTEL_BSP_MAX_EXPORT_BATCH_SIZE: '0', }, false) - assert(warnSpy.getCalls().some(call => /Invalid value 0 for OTEL_BSP_SCHEDULE_DELAY/.test(format(...call.args)))) - assert(warnSpy.getCalls().some(call => /Invalid value 0 for OTEL_METRIC_EXPORT_INTERVAL/.test(format(...call.args)))) - assert(warnSpy.getCalls().some(call => /Invalid value 0 for OTEL_BSP_MAX_EXPORT_BATCH_SIZE/.test(format(...call.args)))) - assert(warnSpy.getCalls().some(call => /Invalid value 0 for OTEL_BSP_MAX_QUEUE_SIZE/.test(format(...call.args)))) - assert(!warnSpy.getCalls().some(call => /Invalid value 0 for OTEL_EXPORTER_OTLP_TIMEOUT/.test(format(...call.args)))) - assert(!warnSpy.getCalls().some(call => /Invalid value 0 for OTEL_METRIC_EXPORT_TIMEOUT/.test(format(...call.args)))) - assert(!warnSpy.getCalls().some(call => /Invalid value 0 for OTEL_EXPORTER_OTLP_METRICS_TIMEOUT/.test(format(...call.args)))) + assert(hasWarning('Invalid value: 0 for OTEL_BSP_SCHEDULE_DELAY')) + assert(hasWarning('Invalid value: 0 for OTEL_METRIC_EXPORT_INTERVAL')) + assert(hasWarning('Invalid value: 0 for OTEL_BSP_MAX_QUEUE_SIZE')) + assert(hasWarning('Invalid value: 0 for OTEL_EXPORTER_OTLP_TIMEOUT')) + assert(hasWarning('Invalid value: 0 
for OTEL_EXPORTER_OTLP_METRICS_TIMEOUT')) + assert(hasWarning('Invalid value: 0 for OTEL_METRIC_EXPORT_TIMEOUT')) + assert(hasWarning('Invalid value: 0 for OTEL_BSP_MAX_EXPORT_BATCH_SIZE')) }) - it('rejects negative values for all configs', () => { + it('rejects negative values for non-negative integer configs', () => { setupTracer({ OTEL_EXPORTER_OTLP_TIMEOUT: '-1', OTEL_EXPORTER_OTLP_LOGS_TIMEOUT: '-1', @@ -828,17 +842,17 @@ describe('OpenTelemetry Meter Provider', () => { OTEL_BSP_MAX_EXPORT_BATCH_SIZE: '-1', OTEL_BSP_MAX_QUEUE_SIZE: '-1', }, false) - assert(warnSpy.getCalls().some(call => /Invalid value -1 for OTEL_EXPORTER_OTLP_TIMEOUT/.test(format(...call.args)))) - assert(warnSpy.getCalls().some(call => /Invalid value -1 for OTEL_EXPORTER_OTLP_LOGS_TIMEOUT/.test(format(...call.args)))) - assert(warnSpy.getCalls().some(call => /Invalid value -1 for OTEL_EXPORTER_OTLP_METRICS_TIMEOUT/.test(format(...call.args)))) - assert(warnSpy.getCalls().some(call => /Invalid value -1 for OTEL_METRIC_EXPORT_TIMEOUT/.test(format(...call.args)))) - assert(warnSpy.getCalls().some(call => /Invalid value -1 for OTEL_METRIC_EXPORT_INTERVAL/.test(format(...call.args)))) - assert(warnSpy.getCalls().some(call => /Invalid value -1 for OTEL_BSP_SCHEDULE_DELAY/.test(format(...call.args)))) - assert(warnSpy.getCalls().some(call => /Invalid value -1 for OTEL_BSP_MAX_EXPORT_BATCH_SIZE/.test(format(...call.args)))) - assert(warnSpy.getCalls().some(call => /Invalid value -1 for OTEL_BSP_MAX_QUEUE_SIZE/.test(format(...call.args)))) + assert(hasWarning('Invalid value: -1 for OTEL_EXPORTER_OTLP_TIMEOUT')) + assert(hasWarning('Invalid value: -1 for OTEL_EXPORTER_OTLP_LOGS_TIMEOUT')) + assert(hasWarning('Invalid value: -1 for OTEL_EXPORTER_OTLP_METRICS_TIMEOUT')) + assert(hasWarning('Invalid value: -1 for OTEL_METRIC_EXPORT_TIMEOUT')) + assert(hasWarning('Invalid value: -1 for OTEL_METRIC_EXPORT_INTERVAL')) + assert(hasWarning('Invalid value: -1 for OTEL_BSP_SCHEDULE_DELAY')) + 
assert(hasWarning('Invalid value: -1 for OTEL_BSP_MAX_EXPORT_BATCH_SIZE')) + assert(hasWarning('Invalid value: -1 for OTEL_BSP_MAX_QUEUE_SIZE')) }) - it('rejects values that are not numbers for all configs', () => { + it('rejects values that are not numbers for integer-based configs', () => { setupTracer({ OTEL_EXPORTER_OTLP_TIMEOUT: 'not a number', OTEL_EXPORTER_OTLP_LOGS_TIMEOUT: 'invalid', @@ -849,14 +863,14 @@ describe('OpenTelemetry Meter Provider', () => { OTEL_BSP_MAX_EXPORT_BATCH_SIZE: 'abc', OTEL_BSP_MAX_QUEUE_SIZE: 'xyz', }, false) - assert(warnSpy.getCalls().some(call => /Invalid value NaN for OTEL_EXPORTER_OTLP_TIMEOUT/.test(format(...call.args)))) - assert(warnSpy.getCalls().some(call => /Invalid value NaN for OTEL_EXPORTER_OTLP_LOGS_TIMEOUT/.test(format(...call.args)))) - assert(warnSpy.getCalls().some(call => /Invalid value NaN for OTEL_EXPORTER_OTLP_METRICS_TIMEOUT/.test(format(...call.args)))) - assert(warnSpy.getCalls().some(call => /Invalid value NaN for OTEL_METRIC_EXPORT_TIMEOUT/.test(format(...call.args)))) - assert(warnSpy.getCalls().some(call => /Invalid value NaN for OTEL_METRIC_EXPORT_INTERVAL/.test(format(...call.args)))) - assert(warnSpy.getCalls().some(call => /Invalid value NaN for OTEL_BSP_SCHEDULE_DELAY/.test(format(...call.args)))) - assert(warnSpy.getCalls().some(call => /Invalid value NaN for OTEL_BSP_MAX_EXPORT_BATCH_SIZE/.test(format(...call.args)))) - assert(warnSpy.getCalls().some(call => /Invalid value NaN for OTEL_BSP_MAX_QUEUE_SIZE/.test(format(...call.args)))) + assert(hasWarning("Invalid INT input: 'not a number' for OTEL_EXPORTER_OTLP_TIMEOUT")) + assert(hasWarning("Invalid INT input: 'invalid' for OTEL_EXPORTER_OTLP_LOGS_TIMEOUT")) + assert(hasWarning("Invalid INT input: 'hi sir' for OTEL_EXPORTER_OTLP_METRICS_TIMEOUT")) + assert(hasWarning("Invalid INT input: '@weeeeee' for OTEL_METRIC_EXPORT_TIMEOUT")) + assert(hasWarning("Invalid INT input: 'python!' 
for OTEL_METRIC_EXPORT_INTERVAL")) + assert(hasWarning("Invalid INT input: 'NaN' for OTEL_BSP_SCHEDULE_DELAY")) + assert(hasWarning("Invalid INT input: 'abc' for OTEL_BSP_MAX_EXPORT_BATCH_SIZE")) + assert(hasWarning("Invalid INT input: 'xyz' for OTEL_BSP_MAX_QUEUE_SIZE")) }) }) diff --git a/packages/dd-trace/test/opentracing/propagation/text_map.spec.js b/packages/dd-trace/test/opentracing/propagation/text_map.spec.js index 5cf016ffb55..45683a7c6a4 100644 --- a/packages/dd-trace/test/opentracing/propagation/text_map.spec.js +++ b/packages/dd-trace/test/opentracing/propagation/text_map.spec.js @@ -660,15 +660,15 @@ describe('TextMapPropagator', () => { // should not add baggage when key list is empty config = getConfigFresh({ - baggageTagKeys: '', + baggageTagKeys: [], }) propagator = new TextMapPropagator(config) const spanContextC = propagator.extract(carrier) assert.deepStrictEqual(spanContextC._trace.tags, {}) - // should not add baggage when key list is empty + // should not add baggage when key list does not contain the key config = getConfigFresh({ - baggageTagKeys: 'customKey', + baggageTagKeys: ['customKey'], }) propagator = new TextMapPropagator(config) carrier = { @@ -683,7 +683,7 @@ describe('TextMapPropagator', () => { // should add all baggage to span tags config = getConfigFresh({ - baggageTagKeys: '*', + baggageTagKeys: ['*'], }) propagator = new TextMapPropagator(config) carrier = { @@ -1215,6 +1215,76 @@ describe('TextMapPropagator', () => { }) }) + describe('with B3 propagation from DD_TRACE_PROPAGATION_STYLE', () => { + beforeEach(() => { + config.tracePropagationStyle.extract = ['b3'] + config.getOrigin = sinon.stub().withArgs('tracePropagationStyle.extract').returns('env_var') + + delete textMap['x-datadog-trace-id'] + delete textMap['x-datadog-parent-id'] + + TextMapPropagator = proxyquire('../../../src/opentracing/propagation/text_map', { + '../../config/helper': { + getConfiguredEnvName: sinon.stub().withArgs('DD_TRACE_PROPAGATION_STYLE') + 
.returns('DD_TRACE_PROPAGATION_STYLE'), + }, + '../../log': log, + '../../telemetry/metrics': telemetryMetrics, + }) + propagator = new TextMapPropagator(config) + }) + + it('should extract B3 as multiple headers', () => { + textMap['x-b3-traceid'] = '0000000000000123' + textMap['x-b3-spanid'] = '0000000000000456' + textMap['x-b3-sampled'] = '1' + + const spanContext = propagator.extract(textMap) + + assert.deepStrictEqual(spanContext, createContext({ + traceId: id('123', 16), + spanId: id('456', 16), + sampling: { + priority: AUTO_KEEP, + }, + })) + }) + }) + + describe('with B3 propagation from OTEL_PROPAGATORS', () => { + beforeEach(() => { + config.tracePropagationStyle.extract = ['b3'] + config.getOrigin = sinon.stub().withArgs('tracePropagationStyle.extract').returns('env_var') + + delete textMap['x-datadog-trace-id'] + delete textMap['x-datadog-parent-id'] + + TextMapPropagator = proxyquire('../../../src/opentracing/propagation/text_map', { + '../../config/helper': { + getConfiguredEnvName: sinon.stub().withArgs('DD_TRACE_PROPAGATION_STYLE') + .returns('OTEL_PROPAGATORS'), + }, + '../../log': log, + '../../telemetry/metrics': telemetryMetrics, + }) + propagator = new TextMapPropagator(config) + }) + + it('should extract B3 as a single header', () => { + textMap.b3 = '0000000000000123-0000000000000456-1' + + const spanContext = propagator.extract(textMap) + + assert.deepStrictEqual(spanContext, createContext({ + traceId: id('123', 16), + spanId: id('456', 16), + sampling: { + priority: AUTO_KEEP, + }, + })) + }) + }) + describe('with B3 propagation as a single header', () => { beforeEach(() => { config.tracePropagationStyle.extract = ['b3 single header'] diff --git a/packages/dd-trace/test/plugin_manager.spec.js b/packages/dd-trace/test/plugin_manager.spec.js index 9a2f9e1c884..9ce71da9ef5 100644 --- a/packages/dd-trace/test/plugin_manager.spec.js +++ b/packages/dd-trace/test/plugin_manager.spec.js @@ -23,6 +23,15 @@ describe('Plugin Manager', () => { let 
Eight let pm + function makeTracerConfig (overrides = {}) { + return { + plugins: true, + spanAttributeSchema: 'v0', + spanRemoveIntegrationFromService: false, + ...overrides, + } + } + beforeEach(() => { tracer = { _nomenclature: nomenclature, @@ -31,8 +40,10 @@ describe('Plugin Manager', () => { class FakePlugin { constructor (aTracer) { assert.strictEqual(aTracer, tracer) - instantiated.push(this.constructor.id) + instantiated.push(/** @type {{ id: string }} */ (/** @type {unknown} */ (this.constructor)).id) } + + configure () {} } const plugins = { @@ -108,7 +119,7 @@ describe('Plugin Manager', () => { it('should keep the config for future configure calls', () => { pm.configurePlugin('two', { foo: 'bar' }) - pm.configure() + pm.configure(makeTracerConfig()) loadChannel.publish({ name: 'two' }) sinon.assert.calledWithMatch(Two.prototype.configure, { enabled: true, @@ -118,7 +129,7 @@ describe('Plugin Manager', () => { }) describe('without env vars', () => { - beforeEach(() => pm.configure()) + beforeEach(() => pm.configure(makeTracerConfig())) it('works with no config param', () => { pm.configurePlugin('two') @@ -158,7 +169,7 @@ describe('Plugin Manager', () => { }) describe('with disabled plugins', () => { - beforeEach(() => pm.configure()) + beforeEach(() => pm.configure(makeTracerConfig())) it('should not call configure on individual enable override', () => { pm.configurePlugin('five', { enabled: true }) @@ -167,7 +178,7 @@ describe('Plugin Manager', () => { }) it('should not configure all disabled plugins', () => { - pm.configure({}) + pm.configure(makeTracerConfig()) loadChannel.publish({ name: 'five' }) sinon.assert.notCalled(Five.prototype.configure) sinon.assert.notCalled(Six.prototype.configure) @@ -175,7 +186,7 @@ describe('Plugin Manager', () => { }) describe('with env var true', () => { - beforeEach(() => pm.configure()) + beforeEach(() => pm.configure(makeTracerConfig())) beforeEach(() => { process.env.DD_TRACE_TWO_ENABLED = '1' @@ -223,7 +234,7 @@ 
describe('Plugin Manager', () => { }) describe('with env var false', () => { - beforeEach(() => pm.configure()) + beforeEach(() => pm.configure(makeTracerConfig())) beforeEach(() => { process.env.DD_TRACE_TWO_ENABLED = '0' @@ -274,7 +285,7 @@ describe('Plugin Manager', () => { describe('configure', () => { describe('without the load event', () => { it('should not instantiate plugins', () => { - pm.configure() + pm.configure(makeTracerConfig()) pm.configurePlugin('two') assert.strictEqual(instantiated.length, 0) sinon.assert.notCalled(Two.prototype.configure) @@ -283,13 +294,13 @@ describe('Plugin Manager', () => { describe('with an experimental plugin', () => { it('should disable the plugin by default', () => { - pm.configure() + pm.configure(makeTracerConfig()) loadChannel.publish({ name: 'eight' }) sinon.assert.calledWithMatch(Eight.prototype.configure, { enabled: false }) }) it('should enable the plugin when configured programmatically', () => { - pm.configure() + pm.configure(makeTracerConfig()) pm.configurePlugin('eight') loadChannel.publish({ name: 'eight' }) sinon.assert.calledWithMatch(Eight.prototype.configure, { enabled: true }) @@ -297,24 +308,24 @@ describe('Plugin Manager', () => { it('should enable the plugin when configured with an environment variable', () => { process.env.DD_TRACE_EIGHT_ENABLED = 'true' - pm.configure() + pm.configure(makeTracerConfig()) loadChannel.publish({ name: 'eight' }) sinon.assert.calledWithMatch(Eight.prototype.configure, { enabled: true }) }) }) it('instantiates plugin classes', () => { - pm.configure() + pm.configure(makeTracerConfig()) loadChannel.publish({ name: 'two' }) loadChannel.publish({ name: 'four' }) assert.deepStrictEqual(instantiated, ['two', 'four']) }) describe('service naming schema manager', () => { - const config = { + const config = makeTracerConfig({ foo: { bar: 1 }, baz: 2, - } + }) let configureSpy beforeEach(() => { @@ -331,19 +342,19 @@ describe('Plugin Manager', () => { }) }) - it('skips 
configuring plugins entirely when plugins is false', () => { - pm.configurePlugin = sinon.spy() - pm.configure({ plugins: false }) - sinon.assert.notCalled(pm.configurePlugin) + it('disables plugins globally when plugins is false', () => { + pm.configure(makeTracerConfig({ plugins: false })) + loadChannel.publish({ name: 'two' }) + sinon.assert.calledWithMatch(Two.prototype.configure, { enabled: false }) }) it('observes configuration options', () => { - pm.configure({ + pm.configure(makeTracerConfig({ serviceMapping: { two: 'deux' }, logInjection: true, queryStringObfuscation: '.*', clientIpEnabled: true, - }) + })) loadChannel.publish({ name: 'two' }) loadChannel.publish({ name: 'four' }) sinon.assert.calledWithMatch(Two.prototype.configure, { @@ -363,7 +374,7 @@ describe('Plugin Manager', () => { }) describe('destroy', () => { - beforeEach(() => pm.configure()) + beforeEach(() => pm.configure(makeTracerConfig())) it('should disable the plugins', () => { loadChannel.publish({ name: 'two' }) diff --git a/packages/dd-trace/test/plugins/versions/package.json b/packages/dd-trace/test/plugins/versions/package.json index 51504963a7e..c91739244a8 100644 --- a/packages/dd-trace/test/plugins/versions/package.json +++ b/packages/dd-trace/test/plugins/versions/package.json @@ -101,7 +101,7 @@ "cookie": "1.1.1", "cookie-parser": "1.4.7", "couchbase": "4.6.0", - "cypress": "15.9.0", + "cypress": "15.13.0", "cypress-fail-fast": "7.1.1", "dd-trace-api": "1.0.0", "ejs": "4.0.1", @@ -200,7 +200,7 @@ "stripe": "20.1.0", "tedious": "19.2.0", "tinypool": "2.1.0", - "typescript": "5.9.3", + "typescript": "6.0.2", "undici": "7.18.2", "vitest": "4.1.0", "when": "3.7.8", diff --git a/packages/dd-trace/test/profiling/config.spec.js b/packages/dd-trace/test/profiling/config.spec.js index ead276ba385..60361002fb6 100644 --- a/packages/dd-trace/test/profiling/config.spec.js +++ b/packages/dd-trace/test/profiling/config.spec.js @@ -9,6 +9,7 @@ const satisfies = require('semifies') const { 
assertObjectContains } = require('../../../../integration-tests/helpers') require('../setup/core') +const { getConfigFresh } = require('../helpers/config') const { AgentExporter } = require('../../src/profiling/exporters/agent') const { FileExporter } = require('../../src/profiling/exporters/file') const WallProfiler = require('../../src/profiling/profilers/wall') @@ -22,37 +23,12 @@ const oomMonitoringSupported = process.platform !== 'win32' const isAtLeast24 = satisfies(process.versions.node, '>=24.0.0') const zstdOrGzip = isAtLeast24 ? 'zstd' : 'gzip' +/** @typedef {InstanceType<(typeof import('../../src/profiling/config'))['Config']>} ProfilerConfig */ + describe('config', () => { - let Config let env - const nullLogger = { - debug () { }, - info () { }, - warn () { }, - error () { }, - } beforeEach(() => { - const ProfilingConfig = require('../../src/profiling/config').Config - // Wrap the real profiling Config so tests see a valid default URL when none - // is provided, matching what the tracer Config singleton would provide at runtime. 
- Config = class TestConfig extends ProfilingConfig { - constructor (options = {}) { - const hasAddress = - options.url !== undefined || - options.hostname !== undefined || - options.port !== undefined - - if (hasAddress) { - super(options) - } else { - super({ - url: 'http://127.0.0.1:8126', - ...options, - }) - } - } - } env = process.env process.env = {} }) @@ -61,82 +37,115 @@ describe('config', () => { process.env = env }) + /** + * @param {Record} [tracerOptions] + * @returns {{config: ProfilerConfig, warnings: string[], errors: string[]}} + */ + function getProfilerConfig (tracerOptions) { + process.env.DD_PROFILING_ENABLED = '1' + + const tracerConfig = getConfigFresh(tracerOptions) + + const ProfilingConfig = require('../../src/profiling/config').Config + const config = /** @type {ProfilerConfig} */ (new ProfilingConfig(tracerConfig)) + + return { + config, + warnings: [], + errors: [], + } + } + it('should have the correct defaults', () => { - const config = new Config() + const { config } = getProfilerConfig() assertObjectContains(config, { - service: 'node', flushInterval: 65 * 1000, + activation: 'manual', + v8ProfilerBugWorkaroundEnabled: true, + cpuProfilingEnabled: samplingContextsAvailable, + uploadCompression: { + method: zstdOrGzip, + level: undefined, + }, }) - - assert.deepStrictEqual(config.tags, { - service: 'node', + assert.strictEqual(typeof config.service, 'string') + assert.ok(config.service.length > 0) + assert.strictEqual(typeof config.version, 'string') + assertObjectContains(config.tags, { + service: config.service, + version: config.version, }) - + assert.strictEqual(config.tags.host, undefined) assert.ok(config.logger instanceof ConsoleLogger) - assert.ok(config.exporters[0] instanceof AgentExporter) - assert.ok(config.profilers[0] instanceof SpaceProfiler) - assert.ok(config.profilers[1] instanceof WallProfiler) - assert.strictEqual(config.profilers[1].codeHotspotsEnabled(), samplingContextsAvailable) - 
assert.strictEqual(config.v8ProfilerBugWorkaroundEnabled, true) - assert.strictEqual(config.cpuProfilingEnabled, samplingContextsAvailable) - assert.strictEqual(config.uploadCompression.method, zstdOrGzip) - assert.strictEqual(config.uploadCompression.level, undefined) + assert.deepStrictEqual( + config.profilers.slice(0, 2).map(profiler => profiler.constructor), + [SpaceProfiler, WallProfiler] + ) + assert.strictEqual( + /** @type {InstanceType} */ (config.profilers[1]).codeHotspotsEnabled(), + samplingContextsAvailable + ) + assert.deepStrictEqual(config.exporters.map(exporter => exporter.constructor), [AgentExporter]) }) it('should support configuration options', () => { - const options = { + process.env = { + DD_PROFILING_EXPORTERS: 'agent,file', + DD_PROFILING_PROFILERS: 'space,wall', + DD_PROFILING_CODEHOTSPOTS_ENABLED: '0', + } + + const { config } = getProfilerConfig({ service: 'test', version: '1.2.3-test.0', - logger: nullLogger, - exporters: 'agent,file', - profilers: 'space,wall', url: 'http://localhost:1234/', - codeHotspotsEnabled: false, reportHostname: true, - } - - const config = new Config(options) + }) - assert.strictEqual(config.service, options.service) - assert.strictEqual(typeof config.tags.host, 'string') - assert.strictEqual(config.version, options.version) - assert.ok(typeof config.tags === 'object' && config.tags !== null) + assertObjectContains(config, { + service: 'test', + version: '1.2.3-test.0', + flushInterval: 65 * 1000, + tags: { + service: 'test', + version: '1.2.3-test.0', + }, + }) assert.strictEqual(typeof config.tags.host, 'string') - assert.strictEqual(config.tags.service, options.service) - assert.strictEqual(config.tags.version, options.version) - assert.strictEqual(config.flushInterval, 65 * 1000) - assert.ok(Array.isArray(config.exporters)) - assert.strictEqual(config.exporters.length, 2) - assert.ok(config.exporters[0] instanceof AgentExporter) - assert.strictEqual(config.exporters[0]._url.toString(), options.url) - 
assert.ok(config.exporters[1] instanceof FileExporter) - assert.ok(Array.isArray(config.profilers)) - assert.strictEqual(config.profilers.length, 2 + (samplingContextsAvailable ? 1 : 0)) - assert.ok(config.profilers[0] instanceof SpaceProfiler) - assert.ok(config.profilers[1] instanceof WallProfiler) - assert.strictEqual(config.profilers[1].codeHotspotsEnabled(), false) - if (samplingContextsAvailable) { - assert.ok(config.profilers[2] instanceof EventsProfiler) - } + assert.strictEqual(config.exporters[0]._url.toString(), 'http://localhost:1234/') + assert.deepStrictEqual( + config.exporters.map(exporter => exporter.constructor), + [AgentExporter, FileExporter] + ) + assert.deepStrictEqual( + config.profilers.map(profiler => profiler.constructor), + samplingContextsAvailable + ? [SpaceProfiler, WallProfiler, EventsProfiler] + : [SpaceProfiler, WallProfiler] + ) + assert.strictEqual( + /** @type {InstanceType} */ (config.profilers[1]).codeHotspotsEnabled(), + false + ) }) it('should not include host tag when reportHostname is false', () => { - const config = new Config({ reportHostname: false }) + const { config } = getProfilerConfig({ reportHostname: false }) assert.strictEqual(config.tags.host, undefined) assert.ok(!('host' in config.tags)) }) it('should not include host tag when reportHostname is not set', () => { - const config = new Config({}) + const { config } = getProfilerConfig() assert.strictEqual(config.tags.host, undefined) assert.ok(!('host' in config.tags)) }) it('should include host tag when reportHostname is true', () => { - const config = new Config({ reportHostname: true }) + const { config } = getProfilerConfig({ reportHostname: true }) assert.strictEqual(typeof config.tags.host, 'string') assert.ok(config.tags.host.length > 0) @@ -144,41 +153,38 @@ describe('config', () => { }) it('should filter out invalid profilers', () => { + process.env = { + DD_PROFILING_PROFILERS: 'nope,also_nope', + } + + /** @type {string[]} */ const errors = [] - const 
options = { - logger: { - debug () {}, - info () {}, - warn () {}, - error (error) { - errors.push(error) - }, + const logger = { + debug () {}, + info () {}, + warn () {}, + error (message) { + errors.push(String(message)) }, - profilers: 'nope,also_nope', } - const config = new Config(options) - - assert.ok(Array.isArray(config.profilers)) - assert.strictEqual(config.profilers.length, 0) + const { config } = getProfilerConfig({ logger }) - assert.strictEqual(errors.length, 2) - assert.strictEqual(errors[0], 'Unknown profiler "nope"') - assert.strictEqual(errors[1], 'Unknown profiler "also_nope"') + assert.deepStrictEqual(config.profilers.map(profiler => profiler.constructor), []) + assert.deepStrictEqual(errors, [ + 'Unknown profiler "nope"', + 'Unknown profiler "also_nope"', + ]) }) it('should support profiler config with empty DD_PROFILING_PROFILERS', () => { process.env = { DD_PROFILING_PROFILERS: '', } - const options = { - logger: nullLogger, - } - const config = new Config(options) + const { config } = getProfilerConfig() - assert.ok(Array.isArray(config.profilers)) - assert.strictEqual(config.profilers.length, 0) + assert.deepStrictEqual(config.profilers.map(profiler => profiler.constructor), []) }) it('should support profiler config with DD_PROFILING_PROFILERS', () => { @@ -186,24 +192,23 @@ describe('config', () => { DD_PROFILING_PROFILERS: 'wall', DD_PROFILING_V8_PROFILER_BUG_WORKAROUND: '0', } - if (samplingContextsAvailable) { - process.env.DD_PROFILING_EXPERIMENTAL_CPU_ENABLED = '1' - } - const options = { - logger: nullLogger, - } - const config = new Config(options) + const { config } = getProfilerConfig() - assert.ok(Array.isArray(config.profilers)) - assert.strictEqual(config.profilers.length, 1 + (samplingContextsAvailable ? 
1 : 0)) - assert.ok(config.profilers[0] instanceof WallProfiler) - assert.strictEqual(config.profilers[0].codeHotspotsEnabled(), samplingContextsAvailable) - if (samplingContextsAvailable) { - assert.ok(config.profilers[1] instanceof EventsProfiler) - } - assert.strictEqual(config.v8ProfilerBugWorkaroundEnabled, false) - assert.strictEqual(config.cpuProfilingEnabled, samplingContextsAvailable) + assertObjectContains(config, { + v8ProfilerBugWorkaroundEnabled: false, + cpuProfilingEnabled: samplingContextsAvailable, + }) + assert.deepStrictEqual( + config.profilers.map(profiler => profiler.constructor), + samplingContextsAvailable + ? [WallProfiler, EventsProfiler] + : [WallProfiler] + ) + assert.strictEqual( + /** @type {InstanceType} */ (config.profilers[0]).codeHotspotsEnabled(), + samplingContextsAvailable + ) }) it('should support profiler config with DD_PROFILING_XXX_ENABLED', () => { @@ -212,15 +217,10 @@ describe('config', () => { DD_PROFILING_WALLTIME_ENABLED: '0', DD_PROFILING_HEAP_ENABLED: '1', } - const options = { - logger: nullLogger, - } - const config = new Config(options) + const { config } = getProfilerConfig() - assert.ok(Array.isArray(config.profilers)) - assert.strictEqual(config.profilers.length, 1) - assert.ok(config.profilers[0] instanceof SpaceProfiler) + assert.deepStrictEqual(config.profilers.map(profiler => profiler.constructor), [SpaceProfiler]) }) it('should ensure space profiler is ordered first with DD_PROFILING_HEAP_ENABLED', () => { @@ -228,16 +228,15 @@ describe('config', () => { DD_PROFILING_PROFILERS: 'wall', DD_PROFILING_HEAP_ENABLED: '1', } - const options = { - logger: nullLogger, - } - const config = new Config(options) + const { config } = getProfilerConfig() - assert.ok(Array.isArray(config.profilers)) - assert.strictEqual(config.profilers.length, 2 + (samplingContextsAvailable ? 
1 : 0)) - assert.ok(config.profilers[0] instanceof SpaceProfiler) - assert.ok(config.profilers[1] instanceof WallProfiler) + assert.deepStrictEqual( + config.profilers.map(profiler => profiler.constructor), + samplingContextsAvailable + ? [SpaceProfiler, WallProfiler, EventsProfiler] + : [SpaceProfiler, WallProfiler] + ) }) it('should ensure space profiler order is preserved when explicitly set with DD_PROFILING_PROFILERS', () => { @@ -245,20 +244,18 @@ describe('config', () => { DD_PROFILING_PROFILERS: 'wall,space', DD_PROFILING_HEAP_ENABLED: '1', } - const options = { - logger: nullLogger, - } - const config = new Config(options) + const { config } = getProfilerConfig() - assert.ok(Array.isArray(config.profilers)) - assert.strictEqual(config.profilers.length, 2 + (samplingContextsAvailable ? 1 : 0)) - assert.ok(config.profilers[0] instanceof WallProfiler) - assert.ok(config.profilers[1] instanceof SpaceProfiler) + assert.deepStrictEqual( + config.profilers.map(profiler => profiler.constructor), + samplingContextsAvailable + ? 
[WallProfiler, SpaceProfiler, EventsProfiler] + : [WallProfiler, SpaceProfiler] + ) }) it('should be able to read some env vars', () => { - const oldenv = process.env process.env = { DD_PROFILING_DEBUG_SOURCE_MAPS: '1', DD_PROFILING_HEAP_SAMPLING_INTERVAL: '1000', @@ -267,18 +264,15 @@ describe('config', () => { DD_PROFILING_TIMELINE_ENABLED: '0', } - const options = { - logger: nullLogger, - } - - const config = new Config(options) - assert.strictEqual(config.debugSourceMaps, true) - assert.strictEqual(config.heapSamplingInterval, 1000) - assert.strictEqual(config.pprofPrefix, 'test-prefix') - assert.strictEqual(config.uploadTimeout, 10000) - assert.strictEqual(config.timelineEnabled, false) + const { config } = getProfilerConfig() - process.env = oldenv + assertObjectContains(config, { + debugSourceMaps: true, + heapSamplingInterval: 1000, + pprofPrefix: 'test-prefix', + uploadTimeout: 10000, + timelineEnabled: false, + }) }) it('should deduplicate profilers', () => { @@ -286,48 +280,20 @@ describe('config', () => { DD_PROFILING_PROFILERS: 'wall,wall', DD_PROFILING_WALLTIME_ENABLED: '1', } - const options = { - logger: nullLogger, - } - - const config = new Config(options) - - assert.ok(Array.isArray(config.profilers)) - assert.strictEqual(config.profilers.length, 1 + (samplingContextsAvailable ? 
1 : 0)) - assert.ok(config.profilers[0] instanceof WallProfiler) - if (samplingContextsAvailable) { - assert.ok(config.profilers[1] instanceof EventsProfiler) - } - }) - - it('should prioritize options over env variables', () => { - if (!samplingContextsAvailable) { - return - } - - process.env = { - DD_PROFILING_PROFILERS: 'space', - DD_PROFILING_ENDPOINT_COLLECTION_ENABLED: '1', - } - const options = { - logger: nullLogger, - profilers: ['wall'], - codeHotspotsEnabled: false, - endpointCollection: false, - } - const config = new Config(options) + const { config } = getProfilerConfig() - assert.ok(Array.isArray(config.profilers)) - assert.strictEqual(config.profilers.length, 2) - assert.ok(config.profilers[0] instanceof WallProfiler) - assert.strictEqual(config.profilers[0].codeHotspotsEnabled(), false) - assert.strictEqual(config.profilers[0].endpointCollectionEnabled(), false) - assert.ok(config.profilers[1] instanceof EventsProfiler) + assert.deepStrictEqual( + config.profilers.map(profiler => profiler.constructor), + samplingContextsAvailable + ? 
[WallProfiler, EventsProfiler] + : [WallProfiler] + ) }) - it('should prioritize non-experimental env variables and warn about experimental ones', () => { + it('should prioritize non-experimental env variables and warn about experimental ones', function () { if (!samplingContextsAvailable) { + this.skip() return } @@ -338,66 +304,71 @@ describe('config', () => { DD_PROFILING_ENDPOINT_COLLECTION_ENABLED: '0', DD_PROFILING_EXPERIMENTAL_ENDPOINT_COLLECTION_ENABLED: '1', } - const warnings = [] - const options = { - logger: { - debug () {}, - info () {}, - warn (warning) { - warnings.push(warning) - }, - error () {}, - }, - } - const config = new Config(options) + const { config } = getProfilerConfig() - assert.ok(Array.isArray(config.profilers)) - assert.strictEqual(config.profilers.length, 2) - assert.ok(config.profilers[0] instanceof WallProfiler) - assert.strictEqual(config.profilers[0].codeHotspotsEnabled(), false) - assert.strictEqual(config.profilers[0].endpointCollectionEnabled(), false) - assert.ok(config.profilers[1] instanceof EventsProfiler) + assert.deepStrictEqual( + config.profilers.map(profiler => profiler.constructor), + [WallProfiler, EventsProfiler] + ) + assert.strictEqual( + /** @type {InstanceType} */ (config.profilers[0]).codeHotspotsEnabled(), + false + ) + assert.strictEqual( + /** @type {InstanceType} */ (config.profilers[0]).endpointCollectionEnabled(), + false + ) }) - function optionOnlyWorksWithGivenCondition (property, name, condition) { - const options = { - [property]: true, + it('should disable code hotspots on unsupported platforms', function () { + process.env = { + DD_PROFILING_CODEHOTSPOTS_ENABLED: '1', } - if (condition) { - // should silently succeed - // eslint-disable-next-line no-new - new Config(options) - } else { - // should throw - // eslint-disable-next-line no-new - assert.throws(() => { new Config(options) }, `${name} not supported on `) + const { config } = getProfilerConfig() + + 
assert.strictEqual(config.codeHotspotsEnabled, samplingContextsAvailable) + }) + + it('should disable endpoint collection on unsupported platforms', function () { + process.env = { + DD_PROFILING_ENDPOINT_COLLECTION_ENABLED: '1', } - } - function optionOnlyWorksWithSamplingContexts (property, name) { - optionOnlyWorksWithGivenCondition(property, name, samplingContextsAvailable) - } + const { config } = getProfilerConfig() - it('should only allow code hotspots on supported platforms', () => { - optionOnlyWorksWithSamplingContexts('codeHotspotsEnabled', 'Code hotspots') + assert.strictEqual(config.endpointCollectionEnabled, samplingContextsAvailable) }) - it('should only allow endpoint collection on supported platforms', () => { - optionOnlyWorksWithSamplingContexts('endpointCollection', 'Endpoint collection') - }) + it('should disable CPU profiling on unsupported platforms', function () { + process.env = { + DD_PROFILING_CPU_ENABLED: '1', + } + + const { config } = getProfilerConfig() - it('should only allow CPU profiling on supported platforms', () => { - optionOnlyWorksWithSamplingContexts('cpuProfilingEnabled', 'CPU profiling') + assert.strictEqual(config.cpuProfilingEnabled, samplingContextsAvailable) }) - it('should only allow timeline view on supported platforms', () => { - optionOnlyWorksWithSamplingContexts('timelineEnabled', 'Timeline view') + it('should disable timeline view on unsupported platforms', function () { + process.env = { + DD_PROFILING_TIMELINE_ENABLED: '1', + } + + const { config } = getProfilerConfig() + + assert.strictEqual(config.timelineEnabled, samplingContextsAvailable) }) - it('should only allow OOM monitoring on supported platforms', () => { - optionOnlyWorksWithGivenCondition('oomMonitoring', 'OOM monitoring', oomMonitoringSupported) + it('should disable OOM monitoring on unsupported platforms', function () { + process.env = { + DD_PROFILING_EXPERIMENTAL_OOM_MONITORING_ENABLED: '1', + } + + const { config } = getProfilerConfig() + + 
assert.strictEqual(config.oomMonitoring.enabled, oomMonitoringSupported) }) it('should support tags', () => { @@ -405,7 +376,7 @@ describe('config', () => { env: 'dev', } - const config = new Config({ tags }) + const { config } = getProfilerConfig({ tags }) assertObjectContains(config.tags, tags) }) @@ -420,7 +391,7 @@ describe('config', () => { version: '3.2.1', } - const config = new Config({ env, service, version, tags }) + const { config } = getProfilerConfig({ env, service, version, tags }) assertObjectContains(config.tags, { env, service, version }) }) @@ -429,21 +400,22 @@ describe('config', () => { const DUMMY_GIT_SHA = '13851f2b092e97acebab1b73f6c0e7818e795b50' const DUMMY_REPOSITORY_URL = 'git@github.com:DataDog/sci_git_example.git' - const config = new Config({ - repositoryUrl: DUMMY_REPOSITORY_URL, - commitSHA: DUMMY_GIT_SHA, - }) + process.env = { + DD_GIT_COMMIT_SHA: DUMMY_GIT_SHA, + DD_GIT_REPOSITORY_URL: DUMMY_REPOSITORY_URL, + } + + const { config } = getProfilerConfig() assertObjectContains(config.tags, { 'git.repository_url': DUMMY_REPOSITORY_URL, 'git.commit.sha': DUMMY_GIT_SHA }) }) it('should support IPv6 hostname', () => { - const options = { + const { config } = getProfilerConfig({ hostname: '::1', port: '8126', - } + }) - const config = new Config(options) const exporterUrl = config.exporters[0]._url.toString() const expectedUrl = new URL('http://[::1]:8126').toString() @@ -454,7 +426,8 @@ describe('config', () => { process.env = { DD_PROFILING_EXPERIMENTAL_OOM_MONITORING_ENABLED: 'false', } - const config = new Config({}) + + const { config } = getProfilerConfig() assert.deepStrictEqual(config.oomMonitoring, { enabled: false, @@ -465,12 +438,17 @@ describe('config', () => { }) }) + function assertOomExportCommand (config) { + assert.ok(config.oomMonitoring.exportCommand[3].includes(`service:${config.service}`)) + assert.ok(config.oomMonitoring.exportCommand[3].includes('snapshot:on_oom')) + } + it('should enable OOM heap profiler by 
default and use process as default strategy', () => { - const config = new Config({ reportHostname: true }) + const { config } = getProfilerConfig({ reportHostname: true }) if (oomMonitoringSupported) { - assert.deepStrictEqual(config.oomMonitoring, { - enabled: oomMonitoringSupported, + assertObjectContains(config.oomMonitoring, { + enabled: true, heapLimitExtensionSize: 0, maxHeapExtensionCount: 0, exportStrategies: ['process'], @@ -478,51 +456,54 @@ describe('config', () => { process.execPath, path.normalize(path.join(__dirname, '../../src/profiling', 'exporter_cli.js')), 'http://127.0.0.1:8126/', - `host:${config.tags.host},service:node,snapshot:on_oom`, 'space', ], }) + assertOomExportCommand(config) } else { assert.strictEqual(config.oomMonitoring.enabled, false) } }) - it('should allow configuring exporters by string or string array', async () => { + it('should allow configuring exporters through DD_PROFILING_EXPORTERS', () => { + /** @type {Array<[string, (typeof AgentExporter | typeof FileExporter)[]]>} */ const checks = [ - 'agent', - ['agent'], + ['agent', [AgentExporter]], + ['agent,file', [AgentExporter, FileExporter]], ] - for (const exporters of checks) { - const config = new Config({ - sourceMap: false, - exporters, - }) + for (const [exporters, expected] of checks) { + process.env = { + DD_PROFILING_EXPORTERS: exporters, + } + + const { config } = getProfilerConfig() - assert.strictEqual(typeof config.exporters[0].export, 'function') + assert.deepStrictEqual(config.exporters.map(exporter => exporter.constructor), expected) } }) - it('should allow configuring profilers by string or string arrays', async () => { + it('should allow configuring profilers through DD_PROFILING_PROFILERS', () => { + /** @type {Array>} */ const checks = [ ['space', SpaceProfiler], ['wall', WallProfiler, EventsProfiler], ['space,wall', SpaceProfiler, WallProfiler, EventsProfiler], ['wall,space', WallProfiler, SpaceProfiler, EventsProfiler], - [['space', 'wall'], 
SpaceProfiler, WallProfiler, EventsProfiler], - [['wall', 'space'], WallProfiler, SpaceProfiler, EventsProfiler], ].map(profilers => profilers.filter(profiler => samplingContextsAvailable || profiler !== EventsProfiler)) - for (const [profilers, ...expected] of checks) { - const config = new Config({ - sourceMap: false, - profilers, - }) - - assert.strictEqual(config.profilers.length, expected.length) - for (let i = 0; i < expected.length; i++) { - assert.ok(config.profilers[i] instanceof expected[i]) + for (const check of checks) { + const profilers = /** @type {string} */ (check[0]) + const expected = /** @type {Array} */ ( + check.slice(1) + ) + process.env = { + DD_PROFILING_PROFILERS: profilers, } + + const { config } = getProfilerConfig() + + assert.deepStrictEqual(config.profilers.map(profiler => profiler.constructor), expected) } }) @@ -535,9 +516,9 @@ describe('config', () => { DD_PROFILING_EXPERIMENTAL_OOM_EXPORT_STRATEGIES: 'process,async,process', } - const config = new Config({ reportHostname: true }) + const { config } = getProfilerConfig({ reportHostname: true, tags: {} }) - assert.deepStrictEqual(config.oomMonitoring, { + assertObjectContains(config.oomMonitoring, { enabled: true, heapLimitExtensionSize: 1000000, maxHeapExtensionCount: 2, @@ -546,10 +527,10 @@ describe('config', () => { process.execPath, path.normalize(path.join(__dirname, '../../src/profiling', 'exporter_cli.js')), 'http://127.0.0.1:8126/', - `host:${config.tags.host},service:node,snapshot:on_oom`, 'space', ], }) + assertOomExportCommand(config) }) } @@ -560,7 +541,7 @@ describe('config', () => { if (!isSupported) { this.skip() } else { - const config = new Config({}) + const { config } = getProfilerConfig() assert.strictEqual(config.asyncContextFrameEnabled, true) } }) @@ -569,16 +550,12 @@ describe('config', () => { if (!isSupported) { this.skip() } else { - process.env.DD_PROFILING_ASYNC_CONTEXT_FRAME_ENABLED = '0' - try { - const config = new Config({ - // In production this 
comes from the tracer Config singleton; we mimic it here. - url: 'http://127.0.0.1:8126', - }) - assert.strictEqual(config.asyncContextFrameEnabled, false) - } finally { - delete process.env.DD_PROFILING_ASYNC_CONTEXT_FRAME_ENABLED + process.env = { + DD_PROFILING_ASYNC_CONTEXT_FRAME_ENABLED: '0', } + + const { config } = getProfilerConfig() + assert.strictEqual(config.asyncContextFrameEnabled, false) } }) }) @@ -588,7 +565,7 @@ describe('config', () => { if (isSupported) { this.skip() } else { - const config = new Config({}) + const { config } = getProfilerConfig() assert.strictEqual(config.asyncContextFrameEnabled, false) } }) @@ -597,13 +574,12 @@ describe('config', () => { if (isSupported) { this.skip() } else { - process.env.DD_PROFILING_ASYNC_CONTEXT_FRAME_ENABLED = '1' - try { - const config = new Config() - assert.strictEqual(config.asyncContextFrameEnabled, false) - } finally { - delete process.env.DD_PROFILING_ASYNC_CONTEXT_FRAME_ENABLED + process.env = { + DD_PROFILING_ASYNC_CONTEXT_FRAME_ENABLED: '1', } + + const { config } = getProfilerConfig() + assert.strictEqual(config.asyncContextFrameEnabled, false) } }) }) @@ -611,30 +587,33 @@ describe('config', () => { describe('upload compression settings', () => { const expectConfig = (env, method, level, warning) => { - process.env = { - DD_PROFILING_DEBUG_UPLOAD_COMPRESSION: env, - } + process.env = env === undefined + ? {} + : { DD_PROFILING_DEBUG_UPLOAD_COMPRESSION: env } + + process.env.DD_TRACE_DEBUG = '1' + /** @type {string[]} */ + const warnings = [] const logger = { - warnings: [], debug () {}, info () {}, warn (message) { - this.warnings.push(message) + warnings.push(message) }, error () {}, } - const config = new Config({ - logger, - // In production this comes from the tracer Config singleton; we mimic it here. 
- url: 'http://127.0.0.1:8126', + + const { config } = getProfilerConfig({ logger }) + const compressionWarnings = warnings.filter(message => { + return message.includes('DD_PROFILING_DEBUG_UPLOAD_COMPRESSION') || + message.includes('Invalid compression level ') }) if (warning) { - assert.strictEqual(logger.warnings.length, 1) - assert.strictEqual(logger.warnings[0], warning) + assert.match(compressionWarnings.join('\n'), new RegExp(RegExp.escape(warning))) } else { - assert.strictEqual(logger.warnings.length, 0) + assert.deepStrictEqual(compressionWarnings, []) } assert.deepStrictEqual(config.uploadCompression, { method, level }) @@ -649,11 +628,13 @@ describe('config', () => { }) it('should reject unknown methods', () => { - expectConfig('foo', zstdOrGzip, undefined, 'Invalid profile upload compression method "foo". Will use "on".') + expectConfig('foo', zstdOrGzip, undefined, "Invalid value: 'foo' for ") }) it('should accept supported compression levels in methods that support levels', () => { - [['gzip', 9], ['zstd', 22]].forEach(([method, maxLevel]) => { + /** @type {Array<[string, number]>} */ + const methods = [['gzip', 9], ['zstd', 22]] + methods.forEach(([method, maxLevel]) => { for (let i = 1; i <= maxLevel; i++) { expectConfig(`${method}-${i}`, method, i) } @@ -662,28 +643,27 @@ describe('config', () => { it('should reject invalid compression levels in methods that support levels', () => { ['gzip', 'zstd'].forEach((method) => { - expectConfig(`${method}-foo`, method, undefined, - 'Invalid compression level "foo". Will use default level.') + expectConfig(`${method}-foo`, zstdOrGzip, undefined, + `Invalid value: '${method}-foo' for DD_PROFILING_DEBUG_UPLOAD_COMPRESSION (source: env_var), picked default`) }) }) it('should reject compression levels in methods that do not support levels', () => { ['on', 'off'].forEach((method) => { - const effectiveMethod = method === 'on' ? 
zstdOrGzip : method - expectConfig(`${method}-3`, effectiveMethod, undefined, - `Compression levels are not supported for "${method}".`) - expectConfig(`${method}-foo`, effectiveMethod, undefined, - `Compression levels are not supported for "${method}".`) + expectConfig(`${method}-3`, zstdOrGzip, undefined, + `Invalid value: '${method}-3' for DD_PROFILING_DEBUG_UPLOAD_COMPRESSION (source: env_var), picked default`) + expectConfig(`${method}-foo`, zstdOrGzip, undefined, + `Invalid value: '${method}-foo' for DD_PROFILING_DEBUG_UPLOAD_COMPRESSION (source: env_var), picked default`) }) }) it('should normalize compression levels', () => { - expectConfig('gzip-0', 'gzip', 1, 'Invalid compression level 0. Will use 1.') + expectConfig('gzip-0', zstdOrGzip, undefined, "Invalid value: 'gzip-0'") expectConfig('gzip-10', 'gzip', 9, 'Invalid compression level 10. Will use 9.') - expectConfig('gzip-3.14', 'gzip', 3) - expectConfig('zstd-0', 'zstd', 1, 'Invalid compression level 0. Will use 1.') + expectConfig('gzip-3.14', zstdOrGzip, undefined, "Invalid value: 'gzip-3.14'") + expectConfig('zstd-0', zstdOrGzip, undefined, "Invalid value: 'zstd-0'") expectConfig('zstd-23', 'zstd', 22, 'Invalid compression level 23. Will use 22.') - expectConfig('zstd-3.14', 'zstd', 3) + expectConfig('zstd-3.14', zstdOrGzip, undefined, "Invalid value: 'zstd-3.14'") }) }) }) diff --git a/packages/dd-trace/test/profiling/profiler.spec.js b/packages/dd-trace/test/profiling/profiler.spec.js index 0b0b405e8a8..a1c3ba59b82 100644 --- a/packages/dd-trace/test/profiling/profiler.spec.js +++ b/packages/dd-trace/test/profiling/profiler.spec.js @@ -27,6 +27,29 @@ describe('profiler', function () { let SourceMapperStub let mapperInstance let interval + let flushInterval + + class ConfigStub { + constructor (options) { + const compression = process.env.DD_PROFILING_DEBUG_UPLOAD_COMPRESSION ?? 'off' + const [method, level0] = compression.split('-') + const level = level0 ? 
Number.parseInt(level0, 10) : undefined + + this.endpointCollectionEnabled = false + this.debugSourceMaps = false + this.exporters = options.exporters ?? exporters + this.flushInterval = options.flushInterval ?? flushInterval + this.logger = options.logger ?? logger + this.profilers = options.profilers ?? profilers + this.sourceMap = options.sourceMap ?? false + this.systemInfoReport = {} + this.tags = { ...options.tags } + this.uploadCompression = { + method, + level: Number.isNaN(level) ? undefined : level, + } + } + } function waitForExport () { return Promise.all([ @@ -39,7 +62,8 @@ describe('profiler', function () { } function setUpProfiler () { - interval = 65 * 1000 + flushInterval = 65 * 1000 + interval = flushInterval clock = sinon.useFakeTimers({ toFake: ['Date', 'setTimeout', 'clearTimeout', 'setInterval', 'clearInterval'], }) @@ -87,7 +111,7 @@ describe('profiler', function () { SourceMapperStub = sinon.stub().returns(mapperInstance) } - function makeStartOptions (overrides = {}) { + function makeStartOptions (overrides) { return { profilers, exporters, @@ -99,6 +123,9 @@ describe('profiler', function () { describe('not serverless', function () { function initProfiler () { Profiler = proxyquire('../../src/profiling/profiler', { + './config': { + Config: ConfigStub, + }, '@datadog/pprof': { SourceMapper: SourceMapperStub, }, @@ -434,6 +461,9 @@ describe('profiler', function () { function initServerlessProfiler () { Profiler = proxyquire('../../src/profiling/profiler', { + './config': { + Config: ConfigStub, + }, '@datadog/pprof': { SourceMapper: SourceMapperStub, }, diff --git a/packages/dd-trace/test/profiling/profilers/events.spec.js b/packages/dd-trace/test/profiling/profilers/events.spec.js index b2bbf3c5784..14532657d0f 100644 --- a/packages/dd-trace/test/profiling/profilers/events.spec.js +++ b/packages/dd-trace/test/profiling/profilers/events.spec.js @@ -7,15 +7,25 @@ const dc = require('dc-polyfill') require('../../setup/core') const { storage } 
= require('../../../../datadog-core') +const { getConfigFresh } = require('../../helpers/config') const { availableParallelism, effectiveLibuvThreadCount } = require('../../../src/profiling/libuv-size') const EventsProfiler = require('../../../src/profiling/profilers/events') const startCh = dc.channel('apm:dns:lookup:start') const finishCh = dc.channel('apm:dns:lookup:finish') +function getProfilerConfig (tracerOptions) { + const tracerConfig = getConfigFresh(tracerOptions) + const ProfilingConfig = require('../../../src/profiling/config').Config + return new ProfilingConfig({ + url: 'http://127.0.0.1:8126', + ...tracerConfig, + }) +} + describe('profilers/events', () => { it('should provide info', () => { - const info = new EventsProfiler({ samplingInterval: 1 }).getInfo() + const info = new EventsProfiler(getProfilerConfig()).getInfo() assert(info.maxSamples > 0) }) diff --git a/packages/dd-trace/test/profiling/profilers/wall.spec.js b/packages/dd-trace/test/profiling/profilers/wall.spec.js index feb6e854ed2..d87b349c0b4 100644 --- a/packages/dd-trace/test/profiling/profilers/wall.spec.js +++ b/packages/dd-trace/test/profiling/profilers/wall.spec.js @@ -296,6 +296,404 @@ describe('profilers/native/wall', () => { }) }) + describe('_generateLabels with custom labels (ACF)', () => { + it('should include custom labels from array context', () => { + const profiler = new NativeWallProfiler({ + timelineEnabled: true, + asyncContextFrameEnabled: true, + }) + profiler.start() + profiler.stop() + + const shared = require('../../../src/profiling/profilers/shared') + const nativeThreadId = shared.getThreadLabels()['os thread id'] + const threadInfo = { + 'thread name': 'Main Event Loop', + 'thread id': '0', + 'os thread id': nativeThreadId, + } + + // Array context: [profilingContext, customLabels] + const actual = profiler._generateLabels({ + node: {}, + context: { + timestamp: 1234n, + context: [ + { spanId: '123', rootSpanId: '456' }, + { customer: 'acme', region: 
'us-east' }, + ], + }, + }) + + assert.deepStrictEqual(actual, { + ...threadInfo, + end_timestamp_ns: 1234000n, + 'span id': '123', + 'local root span id': '456', + customer: 'acme', + region: 'us-east', + }) + }) + + it('should handle array context with empty profiling context', () => { + const profiler = new NativeWallProfiler({ + asyncContextFrameEnabled: true, + codeHotspotsEnabled: true, + }) + profiler.start() + profiler.stop() + + const shared = require('../../../src/profiling/profilers/shared') + const nativeThreadId = shared.getThreadLabels()['os thread id'] + const threadInfo = { + 'thread name': 'Main Event Loop', + 'thread id': '0', + 'os thread id': nativeThreadId, + } + + // ref is not an object (e.g. undefined) but custom labels exist + const actual = profiler._generateLabels({ + node: {}, + context: { + context: [undefined, { tier: 'premium' }], + }, + }) + + assert.deepStrictEqual(actual, { + ...threadInfo, + tier: 'premium', + }) + }) + + it('should not treat non-ACF ref context as array', () => { + const profiler = new NativeWallProfiler({ + timelineEnabled: true, + asyncContextFrameEnabled: false, + }) + profiler.start() + profiler.stop() + + const shared = require('../../../src/profiling/profilers/shared') + const nativeThreadId = shared.getThreadLabels()['os thread id'] + const threadInfo = { + 'thread name': 'Main Event Loop', + 'thread id': '0', + 'os thread id': nativeThreadId, + } + + // In non-ACF mode, context.context.ref is used, not context.context + const actual = profiler._generateLabels({ + node: {}, + context: { + timestamp: 1234n, + context: { ref: { spanId: '789' } }, + }, + }) + + assert.deepStrictEqual(actual, { + ...threadInfo, + end_timestamp_ns: 1234000n, + 'span id': '789', + }) + }) + }) + + describe('runWithLabels', () => { + let enterCh + let currentStore + let localPprof + let WallProfiler + + beforeEach(() => { + enterCh = dc.channel('dd-trace:storage:enter') + currentStore = null + + localPprof = { + ...pprof, + time: 
{ + ...pprof.time, + setContext: sinon.stub(), + getContext: sinon.stub(), + runWithContext: sinon.stub(), + }, + } + + WallProfiler = proxyquire('../../../src/profiling/profilers/wall', { + '@datadog/pprof': localPprof, + '../../../../datadog-core': { + storage: () => ({ + getStore: () => currentStore, + enterWith () {}, + run (store, cb, ...args) { return cb(...args) }, + }), + }, + }) + }) + + it('should call runWithContext with array context when ACF is enabled', () => { + localPprof.time.getContext.returns({ spanId: '123' }) + localPprof.time.runWithContext.callsFake((ctx, fn) => fn()) + + const profiler = new WallProfiler({ + asyncContextFrameEnabled: true, + codeHotspotsEnabled: true, + }) + profiler.start() + + let called = false + profiler.runWithLabels({ customer: 'acme' }, () => { called = true }) + + assert.ok(called) + sinon.assert.calledOnce(localPprof.time.runWithContext) + const [ctx] = localPprof.time.runWithContext.firstCall.args + assert.ok(Array.isArray(ctx)) + assert.deepStrictEqual(ctx[0], { spanId: '123' }) + assert.deepStrictEqual(ctx[1], { customer: 'acme' }) + + profiler.stop() + }) + + it('should merge labels when nested', () => { + // Outer call: no existing array context + localPprof.time.getContext.onFirstCall().returns({ spanId: '123' }) + // Inner call: existing array context from outer call + localPprof.time.getContext.onSecondCall().returns([{ spanId: '123' }, { customer: 'acme' }]) + localPprof.time.runWithContext.callsFake((ctx, fn) => fn()) + + const profiler = new WallProfiler({ + asyncContextFrameEnabled: true, + codeHotspotsEnabled: true, + }) + profiler.start() + + profiler.runWithLabels({ customer: 'acme' }, () => { + profiler.runWithLabels({ region: 'us-east' }, () => {}) + }) + + const innerCtx = localPprof.time.runWithContext.secondCall.args[0] + assert.ok(Array.isArray(innerCtx)) + assert.deepStrictEqual(innerCtx[0], { spanId: '123' }) + assert.deepStrictEqual(innerCtx[1], { customer: 'acme', region: 'us-east' }) + + 
profiler.stop() + }) + + it('should override outer labels with inner labels of same key', () => { + localPprof.time.getContext.onFirstCall().returns({}) + localPprof.time.getContext.onSecondCall().returns([{}, { customer: 'acme' }]) + localPprof.time.runWithContext.callsFake((ctx, fn) => fn()) + + const profiler = new WallProfiler({ + asyncContextFrameEnabled: true, + codeHotspotsEnabled: true, + }) + profiler.start() + + profiler.runWithLabels({ customer: 'acme' }, () => { + profiler.runWithLabels({ customer: 'beta' }, () => {}) + }) + + const innerCtx = localPprof.time.runWithContext.secondCall.args[0] + assert.deepStrictEqual(innerCtx[1], { customer: 'beta' }) + + profiler.stop() + }) + + it('should passthrough when ACF is not enabled', () => { + const profiler = new WallProfiler({ + asyncContextFrameEnabled: false, + codeHotspotsEnabled: true, + }) + profiler.start() + + let called = false + const result = profiler.runWithLabels({ customer: 'acme' }, () => { + called = true + return 42 + }) + + assert.ok(called) + assert.strictEqual(result, 42) + sinon.assert.notCalled(localPprof.time.runWithContext) + + profiler.stop() + }) + + it('should passthrough when contexts are not enabled', () => { + const profiler = new WallProfiler({ + asyncContextFrameEnabled: true, + }) + profiler.start() + + let called = false + profiler.runWithLabels({ customer: 'acme' }, () => { called = true }) + + assert.ok(called) + sinon.assert.notCalled(localPprof.time.runWithContext) + + profiler.stop() + }) + + it('should let internal labels overwrite custom labels with same key', () => { + const profiler = new WallProfiler({ + asyncContextFrameEnabled: true, + codeHotspotsEnabled: true, + }) + profiler.start() + profiler.stop() + + const shared = require('../../../src/profiling/profilers/shared') + const nativeThreadId = shared.getThreadLabels()['os thread id'] + + // Custom label collides with internal 'span id' label + const actual = profiler._generateLabels({ + node: {}, + context: { 
+ context: [ + { spanId: '123' }, + { 'span id': 'should-be-overwritten', customer: 'acme' }, + ], + }, + }) + + assert.deepStrictEqual(actual, { + 'thread name': 'Main Event Loop', + 'thread id': '0', + 'os thread id': nativeThreadId, + 'span id': '123', + customer: 'acme', + }) + }) + + it('should preserve custom labels in #enter when custom labels are active', () => { + const customLabelsCtx = [{ spanId: '123' }, { customer: 'acme' }] + localPprof.time.getContext.returns(customLabelsCtx) + localPprof.time.runWithContext.callsFake((ctx, fn) => { + // Simulate #enter being called inside runWithContext scope + currentStore = { span: null } + enterCh.publish() + return fn() + }) + + const profiler = new WallProfiler({ + asyncContextFrameEnabled: true, + codeHotspotsEnabled: true, + }) + profiler.start() + + profiler.runWithLabels({ customer: 'acme' }, () => {}) + + // Verify that setContext was called with an array preserving custom labels + const setContextCall = localPprof.time.setContext.lastCall + assert.ok(setContextCall, 'setContext should have been called') + const setCtx = setContextCall.args[0] + assert.ok(Array.isArray(setCtx), 'setContext should receive an array when custom labels are active') + assert.deepStrictEqual(setCtx[1], { customer: 'acme' }) + + profiler.stop() + }) + + it('should skip setContext when profiling context is unchanged (array)', () => { + const profiler = new WallProfiler({ + asyncContextFrameEnabled: true, + codeHotspotsEnabled: true, + }) + profiler.start() + + // Activate custom labels + localPprof.time.getContext.returns({}) + localPprof.time.runWithContext.callsFake((ctx, fn) => fn()) + profiler.runWithLabels({ customer: 'acme' }, () => {}) + + // Now simulate #enter where the profiling context is the same object + const sameCtx = { spanId: '123' } + localPprof.time.getContext.returns([sameCtx, { customer: 'acme' }]) + + // Make getActiveSpan return a span that produces sameCtx + const spanCtx = { _spanId: {}, _parentId: null, 
_tags: {}, _trace: { started: [] } } + const span = { context: () => spanCtx } + spanCtx._trace.started.push(span) + currentStore = { span } + + // First enter — sets context + enterCh.publish() + const callCountAfterFirst = localPprof.time.setContext.callCount + + // Second enter with same span — getActiveSpan returns same cached context + const lastCtx = localPprof.time.setContext.lastCall?.args[0]?.[0] ?? sameCtx + localPprof.time.getContext.returns([lastCtx, { customer: 'acme' }]) + enterCh.publish() + + // setContext should not have been called again since the profiling context is the same object + assert.strictEqual(localPprof.time.setContext.callCount, callCountAfterFirst) + + profiler.stop() + }) + + it('should skip setContext when context is unchanged (non-array)', () => { + const profiler = new WallProfiler({ + asyncContextFrameEnabled: true, + codeHotspotsEnabled: true, + }) + profiler.start() + + // Activate the monotonic flag + localPprof.time.getContext.returns({}) + localPprof.time.runWithContext.callsFake((ctx, fn) => fn()) + profiler.runWithLabels({ customer: 'acme' }, () => {}) + + // Simulate an async context without custom labels + const sameCtx = { spanId: '456' } + const spanCtx = { _spanId: {}, _parentId: null, _tags: {}, _trace: { started: [] } } + const span = { context: () => spanCtx } + spanCtx._trace.started.push(span) + currentStore = { span } + + // First enter — sets context + localPprof.time.getContext.returns(sameCtx) + enterCh.publish() + const callCountAfterFirst = localPprof.time.setContext.callCount + + // Second enter — getContext returns the same object that was just set + const lastSet = localPprof.time.setContext.lastCall.args[0] + localPprof.time.getContext.returns(lastSet) + enterCh.publish() + + // setContext should not have been called again + assert.strictEqual(localPprof.time.setContext.callCount, callCountAfterFirst) + + profiler.stop() + }) + + it('should preserve custom labels in #enter for async continuations after 
runWithLabels returns', () => { + // After runWithLabels returns, async continuations still carry the custom + // labels in their CPED frame. The monotonic flag ensures #enter checks. + const profiler = new WallProfiler({ + asyncContextFrameEnabled: true, + codeHotspotsEnabled: true, + }) + profiler.start() + + // First call sets the monotonic flag + localPprof.time.getContext.returns({}) + localPprof.time.runWithContext.callsFake((ctx, fn) => fn()) + profiler.runWithLabels({ customer: 'acme' }, () => {}) + + // Now simulate an async continuation where getContext returns array + // (CPED frame from the runWithContext scope is restored) + localPprof.time.getContext.returns([{ spanId: '789' }, { customer: 'acme' }]) + currentStore = { span: null } + enterCh.publish() + + // #enter should have preserved the custom labels + const setCtx = localPprof.time.setContext.lastCall.args[0] + assert.ok(Array.isArray(setCtx), 'setContext should receive an array for async continuations') + assert.deepStrictEqual(setCtx[1], { customer: 'acme' }) + + profiler.stop() + }) + }) + describe('webTags caching in getProfilingContext', () => { // TracingPlugin.startSpan() calls storage.enterWith({span}) immediately on span // creation, before the plugin calls addRequestTags() to set span.type='web'. 
diff --git a/packages/dd-trace/test/proxy.spec.js b/packages/dd-trace/test/proxy.spec.js index fe80590fd37..4c42a2f2a4f 100644 --- a/packages/dd-trace/test/proxy.spec.js +++ b/packages/dd-trace/test/proxy.spec.js @@ -151,9 +151,8 @@ describe('TracerProxy', () => { enabled: true, }, }, - injectionEnabled: [], + injectionEnabled: undefined, logger: 'logger', - debug: true, profiling: {}, apmTracingEnabled: false, appsec: {}, @@ -412,12 +411,12 @@ describe('TracerProxy', () => { sinon.assert.notCalled(appsec.enable) sinon.assert.notCalled(iast.enable) - let conf = { tracing_enabled: false } + let conf = { tracing: false } handlers.get('APM_TRACING')(createApmTracingTransaction('test-config-1', conf)) sinon.assert.notCalled(appsec.disable) sinon.assert.notCalled(iast.disable) - conf = { tracing_enabled: true } + conf = { tracing: true } handlers.get('APM_TRACING')(createApmTracingTransaction('test-config-1', conf, 'modify')) sinon.assert.calledOnce(DatadogTracer) sinon.assert.calledOnce(AppsecSdk) @@ -439,7 +438,7 @@ describe('TracerProxy', () => { config.appsec.enabled = true config.iast.enabled = true config.setRemoteConfig = conf => { - config.tracing = conf.tracing_enabled + config.tracing = conf.tracing } const remoteConfigProxy = new RemoteConfigProxy() @@ -448,12 +447,12 @@ describe('TracerProxy', () => { sinon.assert.calledOnceWithExactly(appsec.enable, config) sinon.assert.calledOnceWithExactly(iast.enable, config, tracer) - let conf = { tracing_enabled: false } + let conf = { tracing: false } handlers.get('APM_TRACING')(createApmTracingTransaction('test-config-2', conf)) sinon.assert.called(appsec.disable) sinon.assert.called(iast.disable) - conf = { tracing_enabled: true } + conf = { tracing: true } handlers.get('APM_TRACING')(createApmTracingTransaction('test-config-2', conf, 'modify')) sinon.assert.calledTwice(appsec.enable) sinon.assert.calledWithExactly(appsec.enable.secondCall, config) diff --git a/packages/dd-trace/test/ritm.spec.js 
b/packages/dd-trace/test/ritm.spec.js index 0528df24427..7cabc9e60c8 100644 --- a/packages/dd-trace/test/ritm.spec.js +++ b/packages/dd-trace/test/ritm.spec.js @@ -5,33 +5,22 @@ const Module = require('node:module') const sinon = require('sinon') const dc = require('dc-polyfill') -const { describe, it, before, beforeEach, afterEach } = require('mocha') +const { describe, it, before, beforeEach } = require('mocha') require('./setup/core') const Hook = require('../src/ritm') describe('Ritm', () => { - const monkeyPatchedModuleName = 'dd-trace-monkey-patched-module' - const missingModuleName = 'package-does-not-exist' - let moduleLoadStartChannel, moduleLoadEndChannel, startListener, endListener - let utilHook, aHook, bHook, httpHook, relativeHook + const mockedModuleName = '@azure/functions-core' before(() => { moduleLoadStartChannel = dc.channel('dd-trace:moduleLoadStart') moduleLoadEndChannel = dc.channel('dd-trace:moduleLoadEnd') - }) - - beforeEach(() => { - startListener = sinon.fake() - endListener = sinon.fake() - - moduleLoadStartChannel.subscribe(startListener) - moduleLoadEndChannel.subscribe(endListener) Module.prototype.require = new Proxy(Module.prototype.require, { apply (target, thisArg, argArray) { - if (argArray[0] === monkeyPatchedModuleName) { + if (argArray[0] === mockedModuleName) { return { version: '1.0.0', registerHook: () => { }, @@ -42,31 +31,30 @@ describe('Ritm', () => { }, }) - utilHook = Hook('util') - aHook = Hook('module-a') - bHook = Hook('module-b') - httpHook = new Hook(['http'], function onRequire (exports, name, basedir) { + function onRequire () { } + Hook(['util'], onRequire) + Hook(['module-a'], onRequire) + Hook(['module-b'], onRequire) + Hook(['http'], function onRequire (exports, name, basedir) { exports.foo = 1 return exports }) - relativeHook = new Hook(['./ritm-tests/relative/module-c'], function onRequire (exports) { + Hook(['./ritm-tests/relative/module-c'], function onRequire (exports) { exports.foo = 1 return exports 
}) }) - afterEach(() => { - utilHook.unhook() - aHook.unhook() - bHook.unhook() - httpHook.unhook() - relativeHook.unhook() + beforeEach(() => { + startListener = sinon.fake() + endListener = sinon.fake() + + moduleLoadStartChannel.subscribe(startListener) + moduleLoadEndChannel.subscribe(endListener) }) it('should shim util', () => { - assert.equal(startListener.callCount, 0) - assert.equal(endListener.callCount, 0) - require('util') + require('node:util') assert.equal(startListener.callCount, 1) assert.equal(endListener.callCount, 1) }) @@ -115,16 +103,17 @@ describe('Ritm', () => { }) it('should fall back to monkey patched module', () => { - const http = /** @type {{ foo?: number }} */ (require('http')) - assert.equal(http.foo, 1, 'normal hooking still works') + // @ts-expect-error - Patching module works as expected + assert.equal(require('node:http').foo, 1, 'normal hooking still works') - const monkeyPatchedModule = require(monkeyPatchedModuleName) - assert.ok(monkeyPatchedModule, 'requiring monkey patched module works') - assert.equal(monkeyPatchedModule.version, '1.0.0') - assert.equal(typeof monkeyPatchedModule.registerHook, 'function') + const fnCore = require(mockedModuleName) + assert.ok(fnCore, 'requiring monkey patched in module works') + assert.equal(fnCore.version, '1.0.0') + assert.equal(typeof fnCore.registerHook, 'function') assert.throws( - () => require(missingModuleName), + // @ts-expect-error - Package does not exist + () => require('package-does-not-exist'), /Cannot find module 'package-does-not-exist'/, 'a failing `require(...)` can still throw as expected' ) diff --git a/packages/dd-trace/test/telemetry/index.spec.js b/packages/dd-trace/test/telemetry/index.spec.js index 697a6b9583e..02636f90412 100644 --- a/packages/dd-trace/test/telemetry/index.spec.js +++ b/packages/dd-trace/test/telemetry/index.spec.js @@ -16,8 +16,10 @@ const tracerVersion = require('../../../../package.json').version const processTags = 
require('../../src/process-tags') const DEFAULT_HEARTBEAT_INTERVAL = 60000 +const DEFAULT_EXTENDED_HEARTBEAT_INTERVAL = 86400000 let traceAgent +let traceAgentSeqBase describe('telemetry (proxy)', () => { let telemetry @@ -30,7 +32,6 @@ describe('telemetry (proxy)', () => { beforeEach(() => { telemetry = sinon.spy({ start () {}, - stop () {}, updateIntegrations () {}, updateConfig () {}, appClosing () {}, @@ -46,15 +47,13 @@ describe('telemetry (proxy)', () => { proxy.start(config) proxy.updateIntegrations() - proxy.updateConfig() + proxy.updateConfig([], config) proxy.appClosing() - proxy.stop() sinon.assert.calledWith(telemetry.start, config) sinon.assert.called(telemetry.updateIntegrations) sinon.assert.called(telemetry.updateConfig) sinon.assert.called(telemetry.appClosing) - sinon.assert.called(telemetry.stop) }) it('should proxy when enabled from updateConfig', () => { @@ -63,12 +62,10 @@ describe('telemetry (proxy)', () => { proxy.updateConfig([], config) proxy.updateIntegrations() proxy.appClosing() - proxy.stop() sinon.assert.called(telemetry.updateIntegrations) sinon.assert.calledWith(telemetry.updateConfig, [], config) sinon.assert.called(telemetry.appClosing) - sinon.assert.called(telemetry.stop) }) }) @@ -96,6 +93,7 @@ describe('telemetry', () => { }) traceAgent.reqs = [] + traceAgentSeqBase = undefined telemetry = proxyquire('../../src/telemetry/telemetry', { '../exporters/common/docker': { @@ -123,7 +121,11 @@ describe('telemetry', () => { circularObject.child.parent = circularObject telemetry.start({ - telemetry: { enabled: true, heartbeatInterval: DEFAULT_HEARTBEAT_INTERVAL }, + telemetry: { + enabled: true, + heartbeatInterval: DEFAULT_HEARTBEAT_INTERVAL, + extendedHeartbeatInterval: DEFAULT_EXTENDED_HEARTBEAT_INTERVAL, + }, hostname: 'localhost', port: traceAgent.address().port, service: 'test service', @@ -150,7 +152,6 @@ describe('telemetry', () => { }) after(() => { - telemetry.stop() traceAgent.close() }) @@ -250,15 +251,18 @@ 
describe('telemetry', () => { }) it('should do nothing when not enabled', (done) => { - telemetry.stop() - const server = http.createServer(() => { assert.fail('server should not be called') }).listen(0, () => { telemetry.start({ - telemetry: { enabled: false, heartbeatInterval: 60000 }, + telemetry: { + enabled: false, + heartbeatInterval: 60000, + extendedHeartbeatInterval: DEFAULT_EXTENDED_HEARTBEAT_INTERVAL, + }, hostname: 'localhost', port: (/** @type {import('net').AddressInfo} */ (server.address())).port, + appsec: { sca: { enabled: false } }, }) setTimeout(() => { @@ -276,8 +280,12 @@ describe('telemetry', () => { }, }) notEnabledTelemetry.start({ - telemetry: { enabled: false, heartbeatInterval: DEFAULT_HEARTBEAT_INTERVAL }, - appsec: { enabled: false }, + telemetry: { + enabled: false, + heartbeatInterval: DEFAULT_HEARTBEAT_INTERVAL, + extendedHeartbeatInterval: DEFAULT_EXTENDED_HEARTBEAT_INTERVAL, + }, + appsec: { enabled: false, sca: { enabled: undefined } }, profiling: { enabled: false }, }, { _pluginsByName: pluginsByName, @@ -301,7 +309,6 @@ describe('telemetry app-heartbeat', () => { after(() => { clock.restore() - telemetry.stop() traceAgent.close() }) @@ -324,7 +331,11 @@ describe('telemetry app-heartbeat', () => { }) telemetry.start({ - telemetry: { enabled: true, heartbeatInterval: HEARTBEAT_INTERVAL }, + telemetry: { + enabled: true, + heartbeatInterval: HEARTBEAT_INTERVAL, + extendedHeartbeatInterval: DEFAULT_EXTENDED_HEARTBEAT_INTERVAL, + }, hostname: 'localhost', port: 0, service: 'test service', @@ -360,8 +371,7 @@ describe('Telemetry extended heartbeat', () => { afterEach(() => { clock.restore() - telemetry.stop() - traceAgent.close() + traceAgent?.close() }) it('should be sent every 24 hours', (done) => { @@ -391,7 +401,11 @@ describe('Telemetry extended heartbeat', () => { }) telemetry.start({ - telemetry: { enabled: true, heartbeatInterval: HEARTBEAT_INTERVAL }, + telemetry: { + enabled: true, + heartbeatInterval: HEARTBEAT_INTERVAL, + 
extendedHeartbeatInterval: DEFAULT_EXTENDED_HEARTBEAT_INTERVAL, + }, hostname: 'localhost', port: 0, service: 'test service', @@ -405,10 +419,10 @@ describe('Telemetry extended heartbeat', () => { }, { _pluginsByName: pluginsByName, }) - clock.tick(86400000) + clock.tick(DEFAULT_EXTENDED_HEARTBEAT_INTERVAL) assert.strictEqual(extendedHeartbeatRequest, 'app-extended-heartbeat') assert.strictEqual(beats, 1) - clock.tick(86400000) + clock.tick(DEFAULT_EXTENDED_HEARTBEAT_INTERVAL) assert.strictEqual(beats, 2) done() }) @@ -434,7 +448,11 @@ describe('Telemetry extended heartbeat', () => { }) const config = { - telemetry: { enabled: true, heartbeatInterval: HEARTBEAT_INTERVAL }, + telemetry: { + enabled: true, + heartbeatInterval: HEARTBEAT_INTERVAL, + extendedHeartbeatInterval: DEFAULT_EXTENDED_HEARTBEAT_INTERVAL, + }, hostname: 'localhost', port: 0, service: 'test service', @@ -449,38 +467,37 @@ describe('Telemetry extended heartbeat', () => { telemetry.start(config, { _pluginsByName: pluginsByName }) - clock.tick(86400000) + clock.tick(DEFAULT_EXTENDED_HEARTBEAT_INTERVAL) assert.deepStrictEqual(configuration, []) const changes = [ { name: 'test', value: true, origin: 'code', seq_id: 0 }, ] telemetry.updateConfig(changes, config) - clock.tick(86400000) + clock.tick(DEFAULT_EXTENDED_HEARTBEAT_INTERVAL) assert.deepStrictEqual(configuration, changes) - const updatedChanges = [ - { name: 'test', value: false, origin: 'code', seq_id: 1 }, - ] + const change = { name: 'test', value: false, origin: 'code', seq_id: 1 } + const updatedChanges = [change] telemetry.updateConfig(updatedChanges, config) - clock.tick(86400000) + clock.tick(DEFAULT_EXTENDED_HEARTBEAT_INTERVAL) assert.deepStrictEqual(configuration, updatedChanges) const changeNeedingNameRemapping = [ { name: 'sampleRate', value: 0, origin: 'code', seq_id: 2 }, ] + /** @type {{ name: string, value: unknown, origin: string, seq_id: number }[]} */ const expectedConfigList = [ - updatedChanges[0], - { 
...changeNeedingNameRemapping[0], name: 'DD_TRACE_SAMPLE_RATE' }, + ...changeNeedingNameRemapping, ] telemetry.updateConfig(changeNeedingNameRemapping, config) - clock.tick(86400000) + clock.tick(DEFAULT_EXTENDED_HEARTBEAT_INTERVAL) assert.deepStrictEqual(configuration, expectedConfigList) const samplingRule = [ { name: 'sampler.rules', - value: [ + value: JSON.stringify([ { service: '*', sampling_rate: 1 }, { service: 'svc*', @@ -489,40 +506,95 @@ describe('Telemetry extended heartbeat', () => { tags: { 'tag-a': 'ta-v*', 'tag-b': 'tb-v?', 'tag-c': 'tc-v' }, sample_rate: 0.5, }, - ], + ]), origin: 'code', seq_id: 3, }, ] - const expectedConfigListWithSamplingRules = expectedConfigList.concat([ - { - name: 'DD_TRACE_SAMPLING_RULES', - value: '[{"service":"*","sampling_rate":1},' + - '{"service":"svc*","resource":"*abc","name":"op-??",' + - '"tags":{"tag-a":"ta-v*","tag-b":"tb-v?","tag-c":"tc-v"},"sample_rate":0.5}]', - origin: 'code', - seq_id: 3, - }, - ]) + /** @type {{ name: string, value: unknown, origin: string, seq_id: number }[]} */ + const expectedConfigListWithSamplingRules = samplingRule telemetry.updateConfig(samplingRule, config) - clock.tick(86400000) + clock.tick(DEFAULT_EXTENDED_HEARTBEAT_INTERVAL) assert.deepStrictEqual(configuration, expectedConfigListWithSamplingRules) - const chainedChanges = expectedConfigListWithSamplingRules.concat([ + /** @type {{ name: string, value: unknown, origin: string, seq_id: number }[]} */ + const chainedChanges = [ { name: 'test', value: true, origin: 'env', seq_id: 4 }, { name: 'test', value: false, origin: 'remote_config', seq_id: 5 }, - ]) + ] const samplingRule2 = [ - { name: 'test', value: true, origin: 'env' }, - { name: 'test', value: false, origin: 'remote_config' }, + { name: 'test', value: true, origin: 'env', seq_id: 4 }, + { name: 'test', value: false, origin: 'remote_config', seq_id: 5 }, ] telemetry.updateConfig(samplingRule2, config) - clock.tick(86400000) + 
clock.tick(DEFAULT_EXTENDED_HEARTBEAT_INTERVAL) assert.deepStrictEqual(configuration, chainedChanges) done() }) + + it('should serialize URL, object, and function config values for extended heartbeat', (done) => { + let configuration + + const sendDataRequest = { + sendData: (config, application, host, reqType, payload, cb = () => {}) => { + if (reqType === 'app-extended-heartbeat') { + configuration = payload.configuration + } + }, + } + + telemetry = proxyquire('../../src/telemetry/telemetry', { + '../exporters/common/docker': { + id () { + return 'test docker id' + }, + }, + './send-data': sendDataRequest, + }) + + const config = { + telemetry: { + enabled: true, + heartbeatInterval: HEARTBEAT_INTERVAL, + extendedHeartbeatInterval: DEFAULT_EXTENDED_HEARTBEAT_INTERVAL, + }, + hostname: 'localhost', + port: 0, + service: 'test service', + version: '1.2.3-beta4', + appsec: { enabled: true }, + profiling: { enabled: true }, + env: 'preprod', + tags: { + 'runtime-id': '1a2b3c', + }, + } + + telemetry.start(config, { _pluginsByName: pluginsByName }) + + clock.tick(DEFAULT_EXTENDED_HEARTBEAT_INTERVAL) + assert.deepStrictEqual(configuration, []) + + const objectValue = { + foo: 'bar', + nested: { answer: 42 }, + } + + const changes = [ + { name: 'url', value: 'http://example.test:4318/v1/traces', origin: 'code', seq_id: 0 }, + { name: 'payload', value: JSON.stringify(objectValue), origin: 'code', seq_id: 1 }, + { name: 'callback', value: 'telemetryCallback', origin: 'code', seq_id: 2 }, + ] + + telemetry.updateConfig(changes, config) + + clock.tick(DEFAULT_EXTENDED_HEARTBEAT_INTERVAL) + assert.deepStrictEqual(configuration, changes) + + done() + }) }) // deleted this test for now since the global interval is now used for app-extended heartbeat @@ -579,7 +651,11 @@ describe('Telemetry retry', () => { }) telemetry.start({ - telemetry: { enabled: true, heartbeatInterval: HEARTBEAT_INTERVAL }, + telemetry: { + enabled: true, + heartbeatInterval: HEARTBEAT_INTERVAL, + 
extendedHeartbeatInterval: DEFAULT_EXTENDED_HEARTBEAT_INTERVAL, + }, hostname: 'localhost', port: 0, service: 'test service', @@ -668,7 +744,11 @@ describe('Telemetry retry', () => { }) telemetry.start({ - telemetry: { enabled: true, heartbeatInterval: HEARTBEAT_INTERVAL }, + telemetry: { + enabled: true, + heartbeatInterval: HEARTBEAT_INTERVAL, + extendedHeartbeatInterval: DEFAULT_EXTENDED_HEARTBEAT_INTERVAL, + }, hostname: 'localhost', port: 0, service: 'test service', @@ -739,7 +819,11 @@ describe('Telemetry retry', () => { }) telemetry.start({ - telemetry: { enabled: true, heartbeatInterval: HEARTBEAT_INTERVAL }, + telemetry: { + enabled: true, + heartbeatInterval: HEARTBEAT_INTERVAL, + extendedHeartbeatInterval: DEFAULT_EXTENDED_HEARTBEAT_INTERVAL, + }, hostname: 'localhost', port: 0, service: 'test service', @@ -801,7 +885,11 @@ describe('Telemetry retry', () => { // Start function sends 2 messages app-started & app-integrations-change telemetry.start({ - telemetry: { enabled: true, heartbeatInterval: HEARTBEAT_INTERVAL }, + telemetry: { + enabled: true, + heartbeatInterval: HEARTBEAT_INTERVAL, + extendedHeartbeatInterval: DEFAULT_EXTENDED_HEARTBEAT_INTERVAL, + }, hostname: 'localhost', port: 0, service: 'test service', @@ -885,7 +973,11 @@ describe('Telemetry retry', () => { // Start function sends 2 messages app-started & app-integrations-change telemetry.start({ - telemetry: { enabled: true, heartbeatInterval: HEARTBEAT_INTERVAL }, + telemetry: { + enabled: true, + heartbeatInterval: HEARTBEAT_INTERVAL, + extendedHeartbeatInterval: DEFAULT_EXTENDED_HEARTBEAT_INTERVAL, + }, hostname: 'localhost', port: 0, service: 'test service', @@ -903,7 +995,7 @@ describe('Telemetry retry', () => { pluginsByName.foo1 = { _enabled: true } telemetry.updateIntegrations() // This sends an batch message and fails // Skip forward a day - clock.tick(86400000) + clock.tick(DEFAULT_EXTENDED_HEARTBEAT_INTERVAL) assert.strictEqual(extendedHeartbeatRequest, 'app-extended-heartbeat') 
assertObjectContains(extendedHeartbeatPayload, { integrations: [{ @@ -973,13 +1065,18 @@ describe('AVM OSS', () => { }) traceAgent.reqs = [] + traceAgentSeqBase = undefined delete require.cache[require.resolve('../../src/telemetry/send-data')] delete require.cache[require.resolve('../../src/telemetry/telemetry')] telemetry = require('../../src/telemetry/telemetry') telemetryConfig = { - telemetry: { enabled: true, heartbeatInterval: HEARTBEAT_INTERVAL }, + telemetry: { + enabled: true, + heartbeatInterval: HEARTBEAT_INTERVAL, + extendedHeartbeatInterval: DEFAULT_EXTENDED_HEARTBEAT_INTERVAL, + }, hostname: 'localhost', port: traceAgent.address().port, service: 'test service', @@ -1003,26 +1100,25 @@ describe('AVM OSS', () => { after((done) => { clock.restore() - telemetry.stop() traceAgent.close(done) }) it('in app-started message', () => { return testSeq(1, 'app-started', payload => { assert.deepStrictEqual(payload.configuration, [ - { name: 'appsec.sca.enabled', value: scaValue, origin: scaValueOrigin, seq_id: 0 }, + { name: 'appsec.sca.enabled', value: scaValue, origin: scaValueOrigin }, ]) - }, true) + }) }) it('in app-extended-heartbeat message', () => { // Skip a full day - clock.tick(86400000) + clock.tick(DEFAULT_EXTENDED_HEARTBEAT_INTERVAL) return testSeq(2, 'app-extended-heartbeat', payload => { assert.deepStrictEqual(payload.configuration, [ - { name: 'appsec.sca.enabled', value: scaValue, origin: scaValueOrigin, seq_id: 0 }, + { name: 'appsec.sca.enabled', value: scaValue, origin: scaValueOrigin }, ]) - }, true) + }) }) }) }) @@ -1042,7 +1138,6 @@ describe('AVM OSS', () => { }) after(() => { - telemetry.stop() sinon.restore() }) @@ -1050,7 +1145,7 @@ describe('AVM OSS', () => { telemetry.start( { telemetry: { enabled: false }, - sca: { enabled: true }, + appsec: { sca: { enabled: true } }, } ) @@ -1064,6 +1159,8 @@ async function testSeq (seqId, reqType, validatePayload) { await once(traceAgent, 'handled-req') } const req = traceAgent.reqs[seqId - 1] + 
traceAgentSeqBase ??= req.body.seq_id - (seqId - 1) + assert.strictEqual(req.method, 'POST') assert.strictEqual(req.url, '/telemetry/proxy/api/v2/apmtelemetry') assertObjectContains(req.headers, { @@ -1098,7 +1195,7 @@ async function testSeq (seqId, reqType, validatePayload) { naming_schema_version: '', request_type: reqType, runtime_id: '1a2b3c', - seq_id: seqId, + seq_id: traceAgentSeqBase + seqId - 1, application: { service_name: 'test service', env: 'preprod', diff --git a/packages/dd-trace/test/telemetry/send-data.spec.js b/packages/dd-trace/test/telemetry/send-data.spec.js index c3e7e78f460..a9cc9a2c0c4 100644 --- a/packages/dd-trace/test/telemetry/send-data.spec.js +++ b/packages/dd-trace/test/telemetry/send-data.spec.js @@ -14,6 +14,7 @@ describe('sendData', () => { language_name: 'nodejs', tracer_version: 'version', } + const host = { hostname: 'test-host' } let sendDataModule let request @@ -25,17 +26,17 @@ describe('sendData', () => { }) }) - it('should call to request (TCP)', () => { + it('sends telemetry to the agent using hostname and port', () => { sendDataModule.sendData({ hostname: '', port: '12345', tags: { 'runtime-id': '123' }, - }, application, 'test', 'req-type') + }, application, host, 'req-type') sinon.assert.calledOnce(request) const options = request.getCall(0).args[1] - assert.deepStrictEqual(options, { + assertObjectContains(options, { method: 'POST', path: '/telemetry/proxy/api/v2/apmtelemetry', headers: { @@ -52,16 +53,16 @@ describe('sendData', () => { }) }) - it('should call to request (UDP)', () => { + it('sends telemetry to the configured socket url', () => { sendDataModule.sendData({ url: 'unix:/foo/bar/baz', tags: { 'runtime-id': '123' }, - }, application, 'test', 'req-type') + }, application, host, 'req-type') sinon.assert.calledOnce(request) const options = request.getCall(0).args[1] - assert.deepStrictEqual(options, { + assertObjectContains(options, { method: 'POST', path: '/telemetry/proxy/api/v2/apmtelemetry', headers: { @@ 
-78,40 +79,25 @@ describe('sendData', () => { }) }) - it('should add debug header if DD_TELEMETRY_DEBUG is present', () => { + it('adds the debug header when telemetry debug mode is enabled', () => { sendDataModule.sendData({ url: '/test', tags: { 'runtime-id': '123' }, telemetry: { debug: true }, - }, application, 'test', 'req-type') + }, application, host, 'req-type') sinon.assert.calledOnce(request) const options = request.getCall(0).args[1] - assert.deepStrictEqual(options, { - method: 'POST', - path: '/telemetry/proxy/api/v2/apmtelemetry', - headers: { - 'content-type': 'application/json', - 'dd-telemetry-api-version': 'v2', - 'dd-telemetry-request-type': 'req-type', - 'dd-telemetry-debug-enabled': 'true', - 'dd-client-library-language': application.language_name, - 'dd-client-library-version': application.tracer_version, - 'dd-session-id': '123', - }, - url: '/test', - hostname: undefined, - port: undefined, - }) + assert.strictEqual(options.headers['dd-telemetry-debug-enabled'], 'true') }) - it('should include dd-root-session-id header when rootSessionId differs from runtime-id', () => { + it('includes both child and root session ids when provided', () => { sendDataModule.sendData({ url: '/test', tags: { 'runtime-id': 'child-runtime-id' }, - rootSessionId: 'root-runtime-id', - }, application, 'test', 'req-type') + DD_ROOT_JS_SESSION_ID: 'root-runtime-id', + }, application, host, 'req-type') sinon.assert.calledOnce(request) const options = request.getCall(0).args[1] @@ -120,28 +106,14 @@ describe('sendData', () => { assert.strictEqual(options.headers['dd-root-session-id'], 'root-runtime-id') }) - it('should not include dd-root-session-id header when rootSessionId equals runtime-id', () => { - sendDataModule.sendData({ - url: '/test', - tags: { 'runtime-id': 'same-id' }, - rootSessionId: 'same-id', - }, application, 'test', 'req-type') - - sinon.assert.calledOnce(request) - const options = request.getCall(0).args[1] - - 
assert.strictEqual(options.headers['dd-session-id'], 'same-id') - assert.strictEqual(options.headers['dd-root-session-id'], undefined) - }) - - it('should remove not wanted properties from a payload with object type', () => { + it('removes internal-only fields from object payloads before sending them', () => { const payload = { message: 'test', logger: {}, tags: {}, serviceMapping: {}, } - sendDataModule.sendData({ tags: { 'runtime-id': '123' } }, 'test', 'test', 'req-type', payload) + sendDataModule.sendData({ tags: { 'runtime-id': '123' } }, application, host, 'req-type', payload) sinon.assert.calledOnce(request) const data = JSON.parse(request.getCall(0).args[0]) @@ -150,7 +122,7 @@ describe('sendData', () => { assert.deepStrictEqual(data.payload, trimmedPayload) }) - it('should send batch request with retryPayload', () => { + it('preserves batch payload items when sending message batches', () => { const retryObjData = { payload: { foo: 'bar' }, request_type: 'req-type-1' } const payload = [{ request_type: 'req-type-2', @@ -164,7 +136,7 @@ describe('sendData', () => { }, retryObjData] sendDataModule.sendData({ tags: { 'runtime-id': '123' } }, - { language: 'js' }, 'test', 'message-batch', payload) + application, host, 'message-batch', payload) sinon.assert.calledOnce(request) @@ -185,7 +157,7 @@ describe('sendData', () => { assert.deepStrictEqual(data.payload, expectedPayload) }) - it('should also work in CI Visibility agentless mode', () => { + it('uses the CI Visibility agentless intake when agentless mode is enabled', () => { process.env.DD_CIVISIBILITY_AGENTLESS_ENABLED = '1' sendDataModule.sendData( @@ -195,7 +167,8 @@ describe('sendData', () => { site: 'datadoghq.eu', }, application, - 'test', 'req-type' + host, + 'req-type' ) sinon.assert.calledOnce(request) diff --git a/packages/dd-trace/test/telemetry/session-propagation.spec.js b/packages/dd-trace/test/telemetry/session-propagation.spec.js index 7a40ca4cf68..00363dacbe3 100644 --- 
a/packages/dd-trace/test/telemetry/session-propagation.spec.js +++ b/packages/dd-trace/test/telemetry/session-propagation.spec.js @@ -2,214 +2,242 @@ const assert = require('node:assert/strict') -const { describe, it, beforeEach, afterEach } = require('mocha') -const sinon = require('sinon') -const dc = require('dc-polyfill') +const { describe, it, beforeEach } = require('mocha') +const proxyquire = require('proxyquire').noPreserveCache() require('../setup/core') +/** + * @typedef {{ + * callArgs?: unknown[], + * shell: boolean, + * command?: string, + * file?: string + * }} ChildProcessContext + */ +/** + * @typedef {{ + * telemetry?: { enabled?: boolean }, + * DD_ROOT_JS_SESSION_ID?: string, + * tags?: { 'runtime-id'?: string } + * }} SessionPropagationConfigOverrides + */ +/** + * @typedef {{ + * subscribe(subscribers: { start?: (context: ChildProcessContext) => void }): void, + * start: { publish(context: ChildProcessContext): void } + * }} FakeTracingChannel + */ + describe('session-propagation', () => { - const childProcessChannel = dc.tracingChannel('datadog:child_process:execution') + /** @type {FakeTracingChannel} */ + let childProcessChannel let sessionPropagation - beforeEach(() => { - // Fresh require to reset the subscribed flag - delete require.cache[require.resolve('../../src/telemetry/session-propagation')] - sessionPropagation = require('../../src/telemetry/session-propagation') - }) - - afterEach(() => { - sinon.restore() - }) - - it('should subscribe to child_process channel', () => { - sessionPropagation.start({ - telemetry: { enabled: true }, - rootSessionId: 'root-id', - tags: { 'runtime-id': 'current-id' }, - }) - - assert.ok(childProcessChannel.start.hasSubscribers) - }) - - it('should not subscribe when telemetry is disabled', () => { - const subscribeSpy = sinon.spy(childProcessChannel, 'subscribe') - - sessionPropagation.start({ - telemetry: { enabled: false }, - rootSessionId: 'root-id', - tags: { 'runtime-id': 'current-id' }, - }) - - 
assert.strictEqual(subscribeSpy.callCount, 0) - }) - - it('should only subscribe once', () => { - const config = { telemetry: { enabled: true }, rootSessionId: 'root-id', tags: { 'runtime-id': 'current-id' } } - sessionPropagation.start(config) - - const subscribeSpy = sinon.spy(childProcessChannel, 'subscribe') - sessionPropagation.start(config) - - assert.strictEqual(subscribeSpy.callCount, 0) - }) - - it('should unsubscribe and allow re-subscribe after stop()', () => { - sessionPropagation.start({ - telemetry: { enabled: true }, - rootSessionId: 'root-id', - tags: { 'runtime-id': 'current-id' }, - }) - - sessionPropagation.stop() + /** + * @param {SessionPropagationConfigOverrides} [overrides] + */ + function createConfig (overrides = {}) { + /** + * @type {{ + * telemetry: { enabled: boolean }, + * DD_ROOT_JS_SESSION_ID: string | undefined, + * tags: { 'runtime-id': string } + * }} + */ + const config = { + telemetry: { enabled: true, ...overrides.telemetry }, + DD_ROOT_JS_SESSION_ID: undefined, + tags: { 'runtime-id': 'current-id', ...overrides.tags }, + } + + if (overrides.DD_ROOT_JS_SESSION_ID) { + config.DD_ROOT_JS_SESSION_ID = overrides.DD_ROOT_JS_SESSION_ID + } + + return config + } + + /** + * @param {Record} additions + * @returns {NodeJS.ProcessEnv} + */ + function createExpectedEnv (additions) { + return { + ...process.env, + ...additions, + } + } + + /** + * @param {ChildProcessContext} context + * @returns {ChildProcessContext} + */ + function publishStart (context) { + childProcessChannel.start.publish(context) + return context + } + + /** + * @returns {FakeTracingChannel} + */ + function createTracingChannel () { + /** @type {((context: ChildProcessContext) => void)[]} */ + const startSubscribers = [] + + return { + subscribe (subscribers) { + if (typeof subscribers.start === 'function') { + startSubscribers.push(subscribers.start) + } + }, + start: { + publish (context) { + for (const subscriber of startSubscribers) { + subscriber(context) + } + 
}, + }, + } + } - // After stop(), start() should accept new config - sessionPropagation.start({ - telemetry: { enabled: true }, - rootSessionId: 'new-root', - tags: { 'runtime-id': 'new-id' }, + beforeEach(() => { + childProcessChannel = createTracingChannel() + sessionPropagation = proxyquire('../../src/telemetry/session-propagation', { + 'dc-polyfill': { + tracingChannel () { + return childProcessChannel + }, + }, }) - - const context = { callArgs: ['node', ['test.js'], {}], shell: false } - sessionPropagation._onChildProcessStart(context) - assert.strictEqual(context.callArgs[2].env.DD_ROOT_JS_SESSION_ID, 'new-root') - assert.strictEqual(context.callArgs[2].env.DD_PARENT_JS_SESSION_ID, 'new-id') }) - describe('env injection via callArgs', () => { - let onChildProcessStart - - beforeEach(() => { - sessionPropagation.start({ - telemetry: { enabled: true }, - rootSessionId: 'root-id', - tags: { 'runtime-id': 'current-id' }, - }) - onChildProcessStart = sessionPropagation._onChildProcessStart - }) + describe('child process execution contexts', () => { + it('seeds child process options with the current runtime id when there is no inherited root', () => { + sessionPropagation.start(createConfig()) - it('should inject env vars when callArgs has (file, args, options)', () => { const context = { callArgs: ['node', ['test.js'], { cwd: '/tmp', env: { FOO: 'bar' } }], shell: false, } - onChildProcessStart(context) - - assert.strictEqual(context.callArgs[0], 'node') - assert.deepStrictEqual(context.callArgs[1], ['test.js']) - assert.strictEqual(context.callArgs[2].cwd, '/tmp') - assert.strictEqual(context.callArgs[2].env.FOO, 'bar') - assert.strictEqual(context.callArgs[2].env.DD_ROOT_JS_SESSION_ID, 'root-id') - assert.strictEqual(context.callArgs[2].env.DD_PARENT_JS_SESSION_ID, 'current-id') + publishStart(context) + + assert.deepStrictEqual(context.callArgs, [ + 'node', + ['test.js'], + { + cwd: '/tmp', + env: { + FOO: 'bar', + DD_ROOT_JS_SESSION_ID: 'current-id', + }, + 
}, + ]) }) - it('should inject env vars when callArgs has (file, options)', () => { - const context = { - callArgs: ['node', { cwd: '/tmp' }], - shell: false, - } - - onChildProcessStart(context) + it('uses process.env as the base when the execution context provides options without env', () => { + sessionPropagation.start(createConfig()) - assert.strictEqual(context.callArgs[0], 'node') - assert.strictEqual(context.callArgs[1].cwd, '/tmp') - assert.strictEqual(context.callArgs[1].env.DD_ROOT_JS_SESSION_ID, 'root-id') - assert.strictEqual(context.callArgs[1].env.DD_PARENT_JS_SESSION_ID, 'current-id') - }) - - it('should inject env vars when callArgs has (file) only for non-shell', () => { const context = { - callArgs: ['node'], + callArgs: ['npm', ['run', 'test'], { cwd: '/tmp' }], shell: false, } - onChildProcessStart(context) + publishStart(context) - assert.strictEqual(context.callArgs[0], 'node') - assert.deepStrictEqual(context.callArgs[1], []) - assert.strictEqual(context.callArgs[2].env.DD_ROOT_JS_SESSION_ID, 'root-id') - assert.strictEqual(context.callArgs[2].env.DD_PARENT_JS_SESSION_ID, 'current-id') + assert.deepStrictEqual(context.callArgs, [ + 'npm', + ['run', 'test'], + { + cwd: '/tmp', + env: createExpectedEnv({ DD_ROOT_JS_SESSION_ID: 'current-id' }), + }, + ]) }) - it('should inject env vars as options for shell commands with no options', () => { + it('adds shell options when the execution context does not provide any', () => { + sessionPropagation.start(createConfig()) + const context = { callArgs: ['ls -la'], shell: true, } - onChildProcessStart(context) + publishStart(context) - assert.strictEqual(context.callArgs[0], 'ls -la') - assert.strictEqual(context.callArgs[1].env.DD_ROOT_JS_SESSION_ID, 'root-id') - assert.strictEqual(context.callArgs[1].env.DD_PARENT_JS_SESSION_ID, 'current-id') + assert.deepStrictEqual(context.callArgs, [ + 'ls -la', + { env: createExpectedEnv({ DD_ROOT_JS_SESSION_ID: 'current-id' }) }, + ]) }) - it('should use 
process.env as base when no env is specified', () => { + it('preserves callbacks when it needs to insert child process options', () => { + sessionPropagation.start(createConfig()) + + const cb = () => {} const context = { - callArgs: ['node', ['test.js'], {}], + callArgs: ['cmd', cb], shell: false, } - onChildProcessStart(context) + publishStart(context) - const env = context.callArgs[2].env - assert.strictEqual(env.DD_ROOT_JS_SESSION_ID, 'root-id') - assert.ok(Object.keys(env).length > 2, 'env should contain process.env keys') + assert.deepStrictEqual(context.callArgs, [ + 'cmd', + [], + { env: createExpectedEnv({ DD_ROOT_JS_SESSION_ID: 'current-id' }) }, + cb, + ]) }) - it('should preserve callback when callArgs has (file, args, cb)', () => { - const cb = () => {} + it('does not change child process execution when telemetry is disabled', () => { + sessionPropagation.start(createConfig({ telemetry: { enabled: false } })) + const context = { - callArgs: ['node', ['-v'], cb], + callArgs: ['node', ['test.js'], { cwd: '/tmp', env: { FOO: 'bar' } }], shell: false, } - onChildProcessStart(context) + publishStart(context) - assert.strictEqual(context.callArgs[0], 'node') - assert.deepStrictEqual(context.callArgs[1], ['-v']) - assert.strictEqual(context.callArgs[2].env.DD_ROOT_JS_SESSION_ID, 'root-id') - assert.strictEqual(context.callArgs[3], cb) + assert.deepStrictEqual(context.callArgs, ['node', ['test.js'], { cwd: '/tmp', env: { FOO: 'bar' } }]) }) - it('should preserve callback when callArgs has (file, cb)', () => { - const cb = () => {} - const context = { - callArgs: ['cmd', cb], - shell: false, - } + it('preserves an inherited root session id instead of replacing it with the current runtime id', () => { + sessionPropagation.start(createConfig({ DD_ROOT_JS_SESSION_ID: 'root-id' })) - onChildProcessStart(context) + const context = publishStart({ callArgs: ['node', ['test.js'], {}], shell: false }) - assert.strictEqual(context.callArgs[0], 'cmd') - 
assert.deepStrictEqual(context.callArgs[1], []) - assert.strictEqual(context.callArgs[2].env.DD_ROOT_JS_SESSION_ID, 'root-id') - assert.strictEqual(context.callArgs[3], cb) + assert.deepStrictEqual(context.callArgs, [ + 'node', + ['test.js'], + { env: createExpectedEnv({ DD_ROOT_JS_SESSION_ID: 'root-id' }) }, + ]) }) - it('should merge into existing options when args is skipped with undefined', () => { - const context = { - callArgs: ['node', undefined, { cwd: '/tmp', env: { FOO: 'bar' } }], - shell: false, - } + it('uses process.env as the base when it adds options for non-shell commands', () => { + sessionPropagation.start(createConfig()) - onChildProcessStart(context) + const context = publishStart({ callArgs: ['node'], shell: false }) - assert.strictEqual(context.callArgs[2].cwd, '/tmp') - assert.strictEqual(context.callArgs[2].env.FOO, 'bar') - assert.strictEqual(context.callArgs[2].env.DD_ROOT_JS_SESSION_ID, 'root-id') - assert.strictEqual(context.callArgs[2].env.DD_PARENT_JS_SESSION_ID, 'current-id') + assert.deepStrictEqual(context.callArgs, [ + 'node', + [], + { env: createExpectedEnv({ DD_ROOT_JS_SESSION_ID: 'current-id' }) }, + ]) }) - it('should not modify context without callArgs', () => { + it('ignores execution contexts without call arguments', () => { + sessionPropagation.start(createConfig()) + const context = { command: 'node test.js', file: 'node', shell: false, } - onChildProcessStart(context) + publishStart(context) assert.strictEqual(context.callArgs, undefined) }) diff --git a/scripts/generate-config-types.js b/scripts/generate-config-types.js index 66aecd34a78..f3654d605b4 100644 --- a/scripts/generate-config-types.js +++ b/scripts/generate-config-types.js @@ -38,7 +38,7 @@ const TRANSFORM_TYPE_OVERRIDES = { normalizeProfilingEnabled: "'true' | 'false' | 'auto'", parseOtelTags: 'Record', sampleRate: 'number', - setIntegerRangeSet: 'number[]', + setGRPCRange: 'number[]', splitJSONPathRules: 'string[]', } diff --git a/vendor/package-lock.json 
b/vendor/package-lock.json index 2b6a031cccf..7175fa6e811 100644 --- a/vendor/package-lock.json +++ b/vendor/package-lock.json @@ -39,7 +39,7 @@ "ttl-set": "^1.0.0" }, "devDependencies": { - "@rspack/core": "^1.7.8", + "@rspack/core": "^1.7.11", "license-webpack-plugin": "^4.0.2" } }, @@ -83,21 +83,21 @@ } }, "node_modules/@emnapi/core": { - "version": "1.9.0", - "resolved": "https://registry.npmjs.org/@emnapi/core/-/core-1.9.0.tgz", - "integrity": "sha512-0DQ98G9ZQZOxfUcQn1waV2yS8aWdZ6kJMbYCJB3oUBecjWYO1fqJ+a1DRfPF3O5JEkwqwP1A9QEN/9mYm2Yd0w==", + "version": "1.9.2", + "resolved": "https://registry.npmjs.org/@emnapi/core/-/core-1.9.2.tgz", + "integrity": "sha512-UC+ZhH3XtczQYfOlu3lNEkdW/p4dsJ1r/bP7H8+rhao3TTTMO1ATq/4DdIi23XuGoFY+Cz0JmCbdVl0hz9jZcA==", "dev": true, "license": "MIT", "optional": true, "dependencies": { - "@emnapi/wasi-threads": "1.2.0", + "@emnapi/wasi-threads": "1.2.1", "tslib": "^2.4.0" } }, "node_modules/@emnapi/runtime": { - "version": "1.9.0", - "resolved": "https://registry.npmjs.org/@emnapi/runtime/-/runtime-1.9.0.tgz", - "integrity": "sha512-QN75eB0IH2ywSpRpNddCRfQIhmJYBCJ1x5Lb3IscKAL8bMnVAKnRg8dCoXbHzVLLH7P38N2Z3mtulB7W0J0FKw==", + "version": "1.9.2", + "resolved": "https://registry.npmjs.org/@emnapi/runtime/-/runtime-1.9.2.tgz", + "integrity": "sha512-3U4+MIWHImeyu1wnmVygh5WlgfYDtyf0k8AbLhMFxOipihf6nrWC4syIm/SwEeec0mNSafiiNnMJwbza/Is6Lw==", "dev": true, "license": "MIT", "optional": true, @@ -106,9 +106,9 @@ } }, "node_modules/@emnapi/wasi-threads": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/@emnapi/wasi-threads/-/wasi-threads-1.2.0.tgz", - "integrity": "sha512-N10dEJNSsUx41Z6pZsXU8FjPjpBEplgH24sfkmITrBED1/U2Esum9F3lfLrMjKHHjmi557zQn7kR9R+XWXu5Rg==", + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/@emnapi/wasi-threads/-/wasi-threads-1.2.1.tgz", + "integrity": "sha512-uTII7OYF+/Mes/MrcIOYp5yOtSMLBWSIoLPpcgwipoiKbli6k322tcoFsxoIIxPDqW01SQGAgko4EzZi2BNv2w==", "dev": true, "license": "MIT", "optional": 
true, @@ -336,28 +336,28 @@ "license": "BSD-3-Clause" }, "node_modules/@rspack/binding": { - "version": "1.7.8", - "resolved": "https://registry.npmjs.org/@rspack/binding/-/binding-1.7.8.tgz", - "integrity": "sha512-P4fbrQx5hRhAiC8TBTEMCTnNawrIzJLjWwAgrTwRxjgenpjNvimEkQBtSGrXOY+c+MV5Q74P+9wPvVWLKzRkQQ==", + "version": "1.7.11", + "resolved": "https://registry.npmjs.org/@rspack/binding/-/binding-1.7.11.tgz", + "integrity": "sha512-2MGdy2s2HimsDT444Bp5XnALzNRxuBNc7y0JzyuqKbHBywd4x2NeXyhWXXoxufaCFu5PBc9Qq9jyfjW2Aeh06Q==", "dev": true, "license": "MIT", "optionalDependencies": { - "@rspack/binding-darwin-arm64": "1.7.8", - "@rspack/binding-darwin-x64": "1.7.8", - "@rspack/binding-linux-arm64-gnu": "1.7.8", - "@rspack/binding-linux-arm64-musl": "1.7.8", - "@rspack/binding-linux-x64-gnu": "1.7.8", - "@rspack/binding-linux-x64-musl": "1.7.8", - "@rspack/binding-wasm32-wasi": "1.7.8", - "@rspack/binding-win32-arm64-msvc": "1.7.8", - "@rspack/binding-win32-ia32-msvc": "1.7.8", - "@rspack/binding-win32-x64-msvc": "1.7.8" + "@rspack/binding-darwin-arm64": "1.7.11", + "@rspack/binding-darwin-x64": "1.7.11", + "@rspack/binding-linux-arm64-gnu": "1.7.11", + "@rspack/binding-linux-arm64-musl": "1.7.11", + "@rspack/binding-linux-x64-gnu": "1.7.11", + "@rspack/binding-linux-x64-musl": "1.7.11", + "@rspack/binding-wasm32-wasi": "1.7.11", + "@rspack/binding-win32-arm64-msvc": "1.7.11", + "@rspack/binding-win32-ia32-msvc": "1.7.11", + "@rspack/binding-win32-x64-msvc": "1.7.11" } }, "node_modules/@rspack/binding-darwin-arm64": { - "version": "1.7.8", - "resolved": "https://registry.npmjs.org/@rspack/binding-darwin-arm64/-/binding-darwin-arm64-1.7.8.tgz", - "integrity": "sha512-KS6SRc+4VYRdX1cKr1j1HEuMNyEzt7onBS0rkenaiCRRYF0z4WNZNyZqRiuxgM3qZ3TISF7gdmgJQyd4ZB43ig==", + "version": "1.7.11", + "resolved": "https://registry.npmjs.org/@rspack/binding-darwin-arm64/-/binding-darwin-arm64-1.7.11.tgz", + "integrity": 
"sha512-oduECiZVqbO5zlVw+q7Vy65sJFth99fWPTyucwvLJJtJkPL5n17Uiql2cYP6Ijn0pkqtf1SXgK8WjiKLG5bIig==", "cpu": [ "arm64" ], @@ -369,9 +369,9 @@ ] }, "node_modules/@rspack/binding-darwin-x64": { - "version": "1.7.8", - "resolved": "https://registry.npmjs.org/@rspack/binding-darwin-x64/-/binding-darwin-x64-1.7.8.tgz", - "integrity": "sha512-uyXSDKLg2CtqIJrsJDlCqQH80YIPsCUiTToJ59cXAG3v4eke0Qbiv6d/+pV0h/mc0u4inAaSkr5dD18zkMIghw==", + "version": "1.7.11", + "resolved": "https://registry.npmjs.org/@rspack/binding-darwin-x64/-/binding-darwin-x64-1.7.11.tgz", + "integrity": "sha512-a1+TtTE9ap6RalgFi7FGIgkJP6O4Vy6ctv+9WGJy53E4kuqHR0RygzaiVxCI/GMc/vBT9vY23hyrpWb3d1vtXA==", "cpu": [ "x64" ], @@ -383,9 +383,9 @@ ] }, "node_modules/@rspack/binding-linux-arm64-gnu": { - "version": "1.7.8", - "resolved": "https://registry.npmjs.org/@rspack/binding-linux-arm64-gnu/-/binding-linux-arm64-gnu-1.7.8.tgz", - "integrity": "sha512-dD6gSHA18Uj0eqc1FCwwQ5IO5mIckrpYN4H4kPk9Pjau+1mxWvC4y5Lryz1Z8P/Rh1lnQ/wwGE0XL9nd80+LqQ==", + "version": "1.7.11", + "resolved": "https://registry.npmjs.org/@rspack/binding-linux-arm64-gnu/-/binding-linux-arm64-gnu-1.7.11.tgz", + "integrity": "sha512-P0QrGRPbTWu6RKWfN0bDtbnEps3rXH0MWIMreZABoUrVmNQKtXR6e73J3ub6a+di5s2+K0M2LJ9Bh2/H4UsDUA==", "cpu": [ "arm64" ], @@ -397,9 +397,9 @@ ] }, "node_modules/@rspack/binding-linux-arm64-musl": { - "version": "1.7.8", - "resolved": "https://registry.npmjs.org/@rspack/binding-linux-arm64-musl/-/binding-linux-arm64-musl-1.7.8.tgz", - "integrity": "sha512-m+uBi9mEVGkZ02PPOAYN2BSmmvc00XGa6v9CjV8qLpolpUXQIMzDNG+i1fD5SHp8LO+XWsZJOHypMsT0MzGTGw==", + "version": "1.7.11", + "resolved": "https://registry.npmjs.org/@rspack/binding-linux-arm64-musl/-/binding-linux-arm64-musl-1.7.11.tgz", + "integrity": "sha512-6ky7R43VMjWwmx3Yx7Jl7faLBBMAgMDt+/bN35RgwjiPgsIByz65EwytUVuW9rikB43BGHvA/eqlnjLrUzNBqw==", "cpu": [ "arm64" ], @@ -411,9 +411,9 @@ ] }, "node_modules/@rspack/binding-linux-x64-gnu": { - "version": "1.7.8", - "resolved": 
"https://registry.npmjs.org/@rspack/binding-linux-x64-gnu/-/binding-linux-x64-gnu-1.7.8.tgz", - "integrity": "sha512-IAPp2L3yS33MAEkcGn/I1gO+a+WExJHXz2ZlRlL2oFCUGpYi2ZQHyAcJ3o2tJqkXmdqsTiN+OjEVMd/RcLa24g==", + "version": "1.7.11", + "resolved": "https://registry.npmjs.org/@rspack/binding-linux-x64-gnu/-/binding-linux-x64-gnu-1.7.11.tgz", + "integrity": "sha512-cuOJMfCOvb2Wgsry5enXJ3iT1FGUjdPqtGUBVupQlEG4ntSYsQ2PtF4wIDVasR3wdxC5nQbipOrDiN/u6fYsdQ==", "cpu": [ "x64" ], @@ -425,9 +425,9 @@ ] }, "node_modules/@rspack/binding-linux-x64-musl": { - "version": "1.7.8", - "resolved": "https://registry.npmjs.org/@rspack/binding-linux-x64-musl/-/binding-linux-x64-musl-1.7.8.tgz", - "integrity": "sha512-do/QNzb4GWdXCsipblDcroqRDR3BFcbyzpZpAw/3j9ajvEqsOKpdHZpILT2NZX/VahhjqfqB3k0kJVt3uK7UYQ==", + "version": "1.7.11", + "resolved": "https://registry.npmjs.org/@rspack/binding-linux-x64-musl/-/binding-linux-x64-musl-1.7.11.tgz", + "integrity": "sha512-CoK37hva4AmHGh3VCsQXmGr40L36m1/AdnN5LEjUX6kx5rEH7/1nEBN6Ii72pejqDVvk9anEROmPDiPw10tpFg==", "cpu": [ "x64" ], @@ -439,9 +439,9 @@ ] }, "node_modules/@rspack/binding-wasm32-wasi": { - "version": "1.7.8", - "resolved": "https://registry.npmjs.org/@rspack/binding-wasm32-wasi/-/binding-wasm32-wasi-1.7.8.tgz", - "integrity": "sha512-mHtgYTpdhx01i0XNKFYBZyCjtv9YUe/sDfpD1QK4FytPFB+1VpYnmZiaJIMM77VpNsjxGAqWhmUYxi2P6jWifw==", + "version": "1.7.11", + "resolved": "https://registry.npmjs.org/@rspack/binding-wasm32-wasi/-/binding-wasm32-wasi-1.7.11.tgz", + "integrity": "sha512-OtrmnPUVJMxjNa3eDMfHyPdtlLRmmp/aIm0fQHlAOATbZvlGm12q7rhPW5BXTu1yh+1rQ1/uqvz+SzKEZXuJaQ==", "cpu": [ "wasm32" ], @@ -453,9 +453,9 @@ } }, "node_modules/@rspack/binding-win32-arm64-msvc": { - "version": "1.7.8", - "resolved": "https://registry.npmjs.org/@rspack/binding-win32-arm64-msvc/-/binding-win32-arm64-msvc-1.7.8.tgz", - "integrity": "sha512-Mkxg86F7kIT4pM9XvE/1LAGjK5NOQi/GJxKyyiKbUAeKM8XBUizVeNuvKR0avf2V5IDAIRXiH1SX8SpujMJteA==", + "version": "1.7.11", + "resolved": 
"https://registry.npmjs.org/@rspack/binding-win32-arm64-msvc/-/binding-win32-arm64-msvc-1.7.11.tgz", + "integrity": "sha512-lObFW6e5lCWNgTBNwT//yiEDbsxm9QG4BYUojqeXxothuzJ/L6ibXz6+gLMvbOvLGV3nKgkXmx8GvT9WDKR0mA==", "cpu": [ "arm64" ], @@ -467,9 +467,9 @@ ] }, "node_modules/@rspack/binding-win32-ia32-msvc": { - "version": "1.7.8", - "resolved": "https://registry.npmjs.org/@rspack/binding-win32-ia32-msvc/-/binding-win32-ia32-msvc-1.7.8.tgz", - "integrity": "sha512-VmTOZ/X7M85lKFNwb2qJpCRzr4SgO42vucq/X7Uz1oSoTPAf8UUMNdi7BPnu+D4lgy6l8PwV804ZyHO3gGsvPA==", + "version": "1.7.11", + "resolved": "https://registry.npmjs.org/@rspack/binding-win32-ia32-msvc/-/binding-win32-ia32-msvc-1.7.11.tgz", + "integrity": "sha512-0pYGnZd8PPqNR68zQ8skamqNAXEA1sUfXuAdYcknIIRq2wsbiwFzIc0Pov1cIfHYab37G7sSIPBiOUdOWF5Ivw==", "cpu": [ "ia32" ], @@ -481,9 +481,9 @@ ] }, "node_modules/@rspack/binding-win32-x64-msvc": { - "version": "1.7.8", - "resolved": "https://registry.npmjs.org/@rspack/binding-win32-x64-msvc/-/binding-win32-x64-msvc-1.7.8.tgz", - "integrity": "sha512-BK0I4HAwp/yQLnmdJpUtGHcht3x11e9fZwyaiMzznznFc+Oypbf+FS5h+aBgpb53QnNkPpdG7MfAPoKItOcU8A==", + "version": "1.7.11", + "resolved": "https://registry.npmjs.org/@rspack/binding-win32-x64-msvc/-/binding-win32-x64-msvc-1.7.11.tgz", + "integrity": "sha512-EeQXayoQk/uBkI3pdoXfQBXNIUrADq56L3s/DFyM2pJeUDrWmhfIw2UFIGkYPTMSCo8F2JcdcGM32FGJrSnU0Q==", "cpu": [ "x64" ], @@ -495,14 +495,14 @@ ] }, "node_modules/@rspack/core": { - "version": "1.7.8", - "resolved": "https://registry.npmjs.org/@rspack/core/-/core-1.7.8.tgz", - "integrity": "sha512-kT6yYo8xjKoDfM7iB8N9AmN9DJIlrs7UmQDbpTu1N4zaZocN1/t2fIAWOKjr5+3eJlZQR2twKZhDVHNLbLPjOw==", + "version": "1.7.11", + "resolved": "https://registry.npmjs.org/@rspack/core/-/core-1.7.11.tgz", + "integrity": "sha512-rsD9b+Khmot5DwCMiB3cqTQo53ioPG3M/A7BySu8+0+RS7GCxKm+Z+mtsjtG/vsu4Tn2tcqCdZtA3pgLoJB+ew==", "dev": true, "license": "MIT", "dependencies": { "@module-federation/runtime-tools": "0.22.0", - 
"@rspack/binding": "1.7.8", + "@rspack/binding": "1.7.11", "@rspack/lite-tapable": "1.1.0" }, "engines": { diff --git a/vendor/package.json b/vendor/package.json index 8308f955620..19609b97772 100644 --- a/vendor/package.json +++ b/vendor/package.json @@ -36,7 +36,7 @@ "ttl-set": "^1.0.0" }, "devDependencies": { - "@rspack/core": "^1.7.8", + "@rspack/core": "^1.7.11", "license-webpack-plugin": "^4.0.2" } } diff --git a/yarn.lock b/yarn.lock index 1006eab9385..5988ec119f4 100644 --- a/yarn.lock +++ b/yarn.lock @@ -197,10 +197,10 @@ dependencies: spark-md5 "^3.0.2" -"@datadog/libdatadog@0.9.2": - version "0.9.2" - resolved "https://registry.yarnpkg.com/@datadog/libdatadog/-/libdatadog-0.9.2.tgz#d7a0193ab656bd9cc40649f300ef6c54d9bea52d" - integrity sha512-grOerTYuU3wHuFIOBGg3jB144A3KEthEdVEL3meeiXYo7E7fBXXGRgAOwVE42VXFXfl0r8kDKCL7KupBc511tg== +"@datadog/libdatadog@0.9.3": + version "0.9.3" + resolved "https://registry.yarnpkg.com/@datadog/libdatadog/-/libdatadog-0.9.3.tgz#c9a26946e1f4a750889594790b3434070997b8fa" + integrity sha512-L+scIlcRRRF0qjeSU3VQLQlqezfQHkDdnOdbmx/gLjPqewKSyqVGp7XRdKXYo2vZTzmG8dH6rPKXwgI68UQufw== "@datadog/native-appsec@11.0.1": version "11.0.1" @@ -272,23 +272,23 @@ dependencies: tslib "^2.4.0" -"@es-joy/jsdoccomment@~0.84.0": - version "0.84.0" - resolved "https://registry.yarnpkg.com/@es-joy/jsdoccomment/-/jsdoccomment-0.84.0.tgz#4d798d33207825dd1d85babbfbacc3a76c3ba634" - integrity sha512-0xew1CxOam0gV5OMjh2KjFQZsKL2bByX1+q4j3E73MpYIdyUxcZb/xQct9ccUb+ve5KGUYbCUxyPnYB7RbuP+w== +"@es-joy/jsdoccomment@~0.86.0": + version "0.86.0" + resolved "https://registry.yarnpkg.com/@es-joy/jsdoccomment/-/jsdoccomment-0.86.0.tgz#f7276904ed73bf2136993627033aeb5183b4392a" + integrity sha512-ukZmRQ81WiTpDWO6D/cTBM7XbrNtutHKvAVnZN/8pldAwLoJArGOvkNyxPTBGsPjsoaQBJxlH+tE2TNA/92Qgw== dependencies: "@types/estree" "^1.0.8" - "@typescript-eslint/types" "^8.54.0" - comment-parser "1.4.5" + "@typescript-eslint/types" "^8.58.0" + comment-parser "1.4.6" esquery "^1.7.0" 
- jsdoc-type-pratt-parser "~7.1.1" + jsdoc-type-pratt-parser "~7.2.0" "@es-joy/resolve.exports@1.2.0": version "1.2.0" resolved "https://registry.yarnpkg.com/@es-joy/resolve.exports/-/resolve.exports-1.2.0.tgz#fe541a68aa080255f798c8561714ac8fad72cdd5" integrity sha512-Q9hjxWI5xBM+qW2enxfe8wDKdFWMfd0Z29k5ZJnuBqD/CasY5Zryj09aCA6owbGATWz+39p5uIdaHXpopOcG8g== -"@eslint-community/eslint-utils@^4.1.2", "@eslint-community/eslint-utils@^4.4.0", "@eslint-community/eslint-utils@^4.4.1", "@eslint-community/eslint-utils@^4.5.0", "@eslint-community/eslint-utils@^4.8.0", "@eslint-community/eslint-utils@^4.9.0", "@eslint-community/eslint-utils@^4.9.1": +"@eslint-community/eslint-utils@^4.1.2", "@eslint-community/eslint-utils@^4.4.0", "@eslint-community/eslint-utils@^4.4.1", "@eslint-community/eslint-utils@^4.5.0", "@eslint-community/eslint-utils@^4.8.0", "@eslint-community/eslint-utils@^4.9.1": version "4.9.1" resolved "https://registry.yarnpkg.com/@eslint-community/eslint-utils/-/eslint-utils-4.9.1.tgz#4e90af67bc51ddee6cdef5284edf572ec376b595" integrity sha512-phrYmNiYppR7znFEdqgfWHXR6NCkZEK7hwWDHZUjit/2/U0r6XvkDl0SYnoM51Hq7FhCGdLDT6zxCCOY1hexsQ== @@ -961,10 +961,10 @@ resolved "https://registry.yarnpkg.com/@types/sinonjs__fake-timers/-/sinonjs__fake-timers-15.0.1.tgz#49f731d9453f52d64dd79f5a5626c1cf1b81bea4" integrity sha512-Ko2tjWJq8oozHzHV+reuvS5KYIRAokHnGbDwGh/J64LntgpbuylF74ipEL24HCyRjf9FOlBiBHWBR1RlVKsI1w== -"@typescript-eslint/types@^8.54.0", "@typescript-eslint/types@^8.56.0": - version "8.56.1" - resolved "https://registry.yarnpkg.com/@typescript-eslint/types/-/types-8.56.1.tgz#975e5942bf54895291337c91b9191f6eb0632ab9" - integrity sha512-dbMkdIUkIkchgGDIv7KLUpa0Mda4IYjo4IAMJUZ+3xNoUXxMsk9YtKpTHSChRS85o+H9ftm51gsK1dZReY9CVw== +"@typescript-eslint/types@^8.56.0", "@typescript-eslint/types@^8.58.0": + version "8.58.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/types/-/types-8.58.0.tgz#e94ae7abdc1c6530e71183c1007b61fa93112a5a" + integrity 
sha512-O9CjxypDT89fbHxRfETNoAnHj/i6IpRK0CvbVN3qibxlLdo5p5hcLmUuCCrHMpxiWSwKyI8mCP7qRNYuOJ0Uww== "@yarnpkg/lockfile@^1.1.0": version "1.1.0" @@ -1153,14 +1153,14 @@ available-typed-arrays@^1.0.7: dependencies: possible-typed-array-names "^1.0.0" -axios@^1.13.4: - version "1.13.6" - resolved "https://registry.yarnpkg.com/axios/-/axios-1.13.6.tgz#c3f92da917dc209a15dd29936d20d5089b6b6c98" - integrity sha512-ChTCHMouEe2kn713WHbQGcuYrr6fXTBiu460OTwWrWob16g1bXn4vtz07Ope7ewMozJAnEquLk5lWQWtBig9DQ== +axios@^1.15.0: + version "1.15.0" + resolved "https://registry.yarnpkg.com/axios/-/axios-1.15.0.tgz#0fcee91ef03d386514474904b27863b2c683bf4f" + integrity sha512-wWyJDlAatxk30ZJer+GeCWS209sA42X+N5jU2jy6oHTp7ufw8uzUTVFBX9+wTfAlhiJXGS0Bq7X6efruWjuK9Q== dependencies: follow-redirects "^1.15.11" form-data "^4.0.5" - proxy-from-env "^1.1.0" + proxy-from-env "^2.1.0" balanced-match@^1.0.0: version "1.0.2" @@ -1369,7 +1369,7 @@ chokidar@^4.0.1: dependencies: readdirp "^4.0.1" -ci-info@^4.3.1: +ci-info@^4.4.0: version "4.4.0" resolved "https://registry.yarnpkg.com/ci-info/-/ci-info-4.4.0.tgz#7d54eff9f54b45b62401c26032696eb59c8bd18c" integrity sha512-77PSwercCZU2Fc4sX94eF8k8Pxte6JAwL4/ICZLFjJLqegs7kCuAsqqj/70NQF6TvDpgFjkubQB2FW2ZZddvQg== @@ -1438,10 +1438,10 @@ commander@^10.0.1: resolved "https://registry.yarnpkg.com/commander/-/commander-10.0.1.tgz#881ee46b4f77d1c1dccc5823433aa39b022cbe06" integrity sha512-y4Mg2tXshplEbSGzx7amzPwKKOCGuoSRP/CjEdwwk0FOGlUbq6lKuoyDZTNZkmxHdJtp54hdfY/JUrdL7Xfdug== -comment-parser@1.4.5: - version "1.4.5" - resolved "https://registry.yarnpkg.com/comment-parser/-/comment-parser-1.4.5.tgz#6c595cd090737a1010fe5ff40d86e1d21b7bd6ce" - integrity sha512-aRDkn3uyIlCFfk5NUA+VdwMmMsh8JGhc4hapfV4yxymHGQ3BVskMQfoXGpCo5IoBuQ9tS5iiVKhCpTcB4pW4qw== +comment-parser@1.4.6: + version "1.4.6" + resolved "https://registry.yarnpkg.com/comment-parser/-/comment-parser-1.4.6.tgz#49a6b1d53fa563324f7577ab8c0b26db4e7d1f9a" + integrity 
sha512-ObxuY6vnbWTN6Od72xfwN9DbzC7Y2vv8u1Soi9ahRKL37gb6y1qk6/dgjs+3JWuXJHWvsg3BXIwzd/rkmAwavg== commondir@^1.0.1: version "1.0.1" @@ -1493,10 +1493,10 @@ cookie@^0.7.1: resolved "https://registry.yarnpkg.com/cookie/-/cookie-0.7.2.tgz#556369c472a2ba910f2979891b526b3436237ed7" integrity sha512-yki5XnKuf750l50uGTllt6kKILY4nQ1eNIQatoXEByZ5dWgnKqbnqmTrBE5B4N7lrMJKQ2ytWMiTO2o0v6Ew/w== -core-js-compat@^3.46.0: - version "3.48.0" - resolved "https://registry.yarnpkg.com/core-js-compat/-/core-js-compat-3.48.0.tgz#7efbe1fc1cbad44008190462217cc5558adaeaa6" - integrity sha512-OM4cAF3D6VtH/WkLtWvyNC56EZVXsZdU3iqaMG2B4WvYrlqU831pc4UtG5yp0sE9z8Y02wVN7PjW5Zf9Gt0f1Q== +core-js-compat@^3.49.0: + version "3.49.0" + resolved "https://registry.yarnpkg.com/core-js-compat/-/core-js-compat-3.49.0.tgz#06145447d92f4aaf258a0c44f24b47afaeaffef6" + integrity sha512-VQXt1jr9cBz03b331DFDCCP90b3fanciLkgiOoy8SBHy06gNf+vQ1A3WFLqG7I8TipYIKeYK9wxd0tUrvHcOZA== dependencies: browserslist "^4.28.1" @@ -1830,12 +1830,12 @@ eslint-module-utils@^2.12.1: dependencies: debug "^3.2.7" -eslint-plugin-cypress@^6.2.1: - version "6.2.1" - resolved "https://registry.yarnpkg.com/eslint-plugin-cypress/-/eslint-plugin-cypress-6.2.1.tgz#a607cffe9967b941c1368bc4d0ce79e35d15f40f" - integrity sha512-pbjvNE2GKG3hlerT3SIQcRnsMfzFnpAyg0V17D+Gm/a18cxBOVLUxWWANAs7LzykdTPn+SKgzczhF0+IODFs7w== +eslint-plugin-cypress@^6.2.2: + version "6.2.2" + resolved "https://registry.yarnpkg.com/eslint-plugin-cypress/-/eslint-plugin-cypress-6.2.2.tgz#26c7ab6c6dedc2ad4791ac939ef8b5386cd04f1d" + integrity sha512-lOhmMWb5/+zv28EaDOT7C3lgGgWL8DMZglTUNUaaGfleK89joDBRXX8LV01ygx3dK1RbvG54b8t025/5QEUKgg== dependencies: - globals "^17.3.0" + globals "^17.4.0" eslint-plugin-es-x@^7.8.0: version "7.8.0" @@ -1871,18 +1871,18 @@ eslint-plugin-import@^2.32.0: string.prototype.trimend "^1.0.9" tsconfig-paths "^3.15.0" -eslint-plugin-jsdoc@^62.8.1: - version "62.8.1" - resolved 
"https://registry.yarnpkg.com/eslint-plugin-jsdoc/-/eslint-plugin-jsdoc-62.8.1.tgz#83437f200a5f8beeba85af5244f88cacbf6cf5ba" - integrity sha512-e9358PdHgvcMF98foNd3L7hVCw70Lt+YcSL7JzlJebB8eT5oRJtW6bHMQKoAwJtw6q0q0w/fRIr2kwnHdFDI6A== +eslint-plugin-jsdoc@^62.9.0: + version "62.9.0" + resolved "https://registry.yarnpkg.com/eslint-plugin-jsdoc/-/eslint-plugin-jsdoc-62.9.0.tgz#a4902f6978b1e7cc5c5d1a528ecf7d8c7ce716d9" + integrity sha512-PY7/X4jrVgoIDncUmITlUqK546Ltmx/Pd4Hdsu4CvSjryQZJI2mEV4vrdMufyTetMiZ5taNSqvK//BTgVUlNkA== dependencies: - "@es-joy/jsdoccomment" "~0.84.0" + "@es-joy/jsdoccomment" "~0.86.0" "@es-joy/resolve.exports" "1.2.0" are-docs-informative "^0.0.2" - comment-parser "1.4.5" + comment-parser "1.4.6" debug "^4.4.3" escape-string-regexp "^4.0.0" - espree "^11.1.0" + espree "^11.2.0" esquery "^1.7.0" html-entities "^2.6.0" object-deep-merge "^2.0.0" @@ -1921,26 +1921,26 @@ eslint-plugin-promise@^7.2.1: dependencies: "@eslint-community/eslint-utils" "^4.4.0" -eslint-plugin-unicorn@^63.0.0: - version "63.0.0" - resolved "https://registry.yarnpkg.com/eslint-plugin-unicorn/-/eslint-plugin-unicorn-63.0.0.tgz#db210b87bb66f0f15ab675ba13d9f1fb61016b22" - integrity sha512-Iqecl9118uQEXYh7adylgEmGfkn5es3/mlQTLLkd4pXkIk9CTGrAbeUux+YljSa2ohXCBmQQ0+Ej1kZaFgcfkA== +eslint-plugin-unicorn@^64.0.0: + version "64.0.0" + resolved "https://registry.yarnpkg.com/eslint-plugin-unicorn/-/eslint-plugin-unicorn-64.0.0.tgz#e1cd29155d7da42cd42180211f053ed9b68d11f5" + integrity sha512-rNZwalHh8i0UfPlhNwg5BTUO1CMdKNmjqe+TgzOTZnpKoi8VBgsW7u9qCHIdpxEzZ1uwrJrPF0uRb7l//K38gA== dependencies: "@babel/helper-validator-identifier" "^7.28.5" - "@eslint-community/eslint-utils" "^4.9.0" + "@eslint-community/eslint-utils" "^4.9.1" change-case "^5.4.4" - ci-info "^4.3.1" + ci-info "^4.4.0" clean-regexp "^1.0.0" - core-js-compat "^3.46.0" + core-js-compat "^3.49.0" find-up-simple "^1.0.1" - globals "^16.4.0" + globals "^17.4.0" indent-string "^5.0.0" is-builtin-module "^5.0.0" jsesc "^3.1.0" 
pluralize "^8.0.0" regexp-tree "^0.1.27" regjsparser "^0.13.0" - semver "^7.7.3" + semver "^7.7.4" strip-indent "^4.1.1" eslint-scope@^8.4.0: @@ -2015,10 +2015,10 @@ espree@^10.0.1, espree@^10.4.0: acorn-jsx "^5.3.2" eslint-visitor-keys "^4.2.1" -espree@^11.1.0: - version "11.1.1" - resolved "https://registry.yarnpkg.com/espree/-/espree-11.1.1.tgz#866f6bc9ccccd6f28876b7a6463abb281b9cb847" - integrity sha512-AVHPqQoZYc+RUM4/3Ly5udlZY/U4LS8pIG05jEjWM2lQMU/oaZ7qshzAl2YP1tfNmXfftH3ohurfwNAug+MnsQ== +espree@^11.2.0: + version "11.2.0" + resolved "https://registry.yarnpkg.com/espree/-/espree-11.2.0.tgz#01d5e47dc332aaba3059008362454a8cc34ccaa5" + integrity sha512-7p3DrVEIopW1B1avAGLuCSh1jubc01H2JHc8B4qqGblmg5gI9yumBgACjWo4JlIc04ufug4xJ3SQI8HkS/Rgzw== dependencies: acorn "^8.16.0" acorn-jsx "^5.3.2" @@ -2361,12 +2361,7 @@ globals@^15.11.0, globals@^15.14.0: resolved "https://registry.yarnpkg.com/globals/-/globals-15.15.0.tgz#7c4761299d41c32b075715a4ce1ede7897ff72a8" integrity sha512-7ACyT3wmyp3I61S4fG682L0VA2RGD9otkqGJIwNUMF1SWUombIIk+af1unuDYgMm082aHYwD+mzJvv9Iu8dsgg== -globals@^16.4.0: - version "16.5.0" - resolved "https://registry.yarnpkg.com/globals/-/globals-16.5.0.tgz#ccf1594a437b97653b2be13ed4d8f5c9f850cac1" - integrity sha512-c/c15i26VrJ4IRt5Z89DnIzCGDn9EcebibhAOjw5ibqEHsE1wLUgkPn9RDmNcUKyU87GeaL633nyJ+pplFR2ZQ== - -globals@^17.2.0, globals@^17.3.0: +globals@^17.2.0, globals@^17.4.0: version "17.4.0" resolved "https://registry.yarnpkg.com/globals/-/globals-17.4.0.tgz#33d7d297ed1536b388a0e2f4bcd0ff19c8ff91b5" integrity sha512-hjrNztw/VajQwOLsMNT1cbJiH2muO3OROCHnbehc8eY5JyD2gqz4AcMHPqgaOR59DjgUjYAYLeH699g/eWi2jw== @@ -2890,10 +2885,10 @@ js-yaml@^4.1.0, js-yaml@^4.1.1: dependencies: argparse "^2.0.1" -jsdoc-type-pratt-parser@~7.1.1: - version "7.1.1" - resolved "https://registry.yarnpkg.com/jsdoc-type-pratt-parser/-/jsdoc-type-pratt-parser-7.1.1.tgz#c67be3c812aaf1405bef3e965e8c3db50a5cad1b" - integrity 
sha512-/2uqY7x6bsrpi3i9LVU6J89352C0rpMk0as8trXxCtvd4kPk1ke/Eyif6wqfSLvoNJqcDG9Vk4UsXgygzCt2xA== +jsdoc-type-pratt-parser@~7.2.0: + version "7.2.0" + resolved "https://registry.yarnpkg.com/jsdoc-type-pratt-parser/-/jsdoc-type-pratt-parser-7.2.0.tgz#0a29c27bd4e01e85e4617625e34e797be1486a9b" + integrity sha512-dh140MMgjyg3JhJZY/+iEzW+NO5xR2gpbDFKHqotCmexElVntw7GjWjt511+C/Ef02RU5TKYrJo/Xlzk+OLaTw== jsesc@^3.0.2, jsesc@^3.1.0, jsesc@~3.1.0: version "3.1.0" @@ -3621,10 +3616,10 @@ proxy-addr@^2.0.7: forwarded "0.2.0" ipaddr.js "1.9.1" -proxy-from-env@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/proxy-from-env/-/proxy-from-env-1.1.0.tgz#e102f16ca355424865755d2c9e8ea4f24d58c3e2" - integrity sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg== +proxy-from-env@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/proxy-from-env/-/proxy-from-env-2.1.0.tgz#a7487568adad577cfaaa7e88c49cab3ab3081aba" + integrity sha512-cJ+oHTW1VAEa8cJslgmUZrc+sjRKgAKl3Zyse6+PV38hZe/V6Z14TbCuXcan9F9ghlz4QrFr2c92TNF82UkYHA== proxyquire@^2.1.3: version "2.1.3" @@ -3857,7 +3852,7 @@ semver@^6.0.0, semver@^6.3.1: resolved "https://registry.yarnpkg.com/semver/-/semver-6.3.1.tgz#556d2ef8689146e46dcea4bfdd095f3434dffcb4" integrity sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA== -semver@^7.5.0, semver@^7.5.3, semver@^7.5.4, semver@^7.6.3, semver@^7.7.2, semver@^7.7.3, semver@^7.7.4: +semver@^7.5.0, semver@^7.5.3, semver@^7.5.4, semver@^7.6.3, semver@^7.7.2, semver@^7.7.4: version "7.7.4" resolved "https://registry.yarnpkg.com/semver/-/semver-7.7.4.tgz#28464e36060e991fa7a11d0279d2d3f3b57a7e8a" integrity sha512-vFKC2IEtQnVhpT78h1Yp8wzwrf8CM+MzKMHGJZfBtzhZNycRFnXsHk6E5TxIkkMsgNS7mdX3AGB7x2QM2di4lA== @@ -4377,10 +4372,10 @@ typedarray@^0.0.6: resolved "https://registry.yarnpkg.com/typedarray/-/typedarray-0.0.6.tgz#867ac74e3864187b1d3d47d996a78ec5c8830777" integrity 
sha512-/aCDEGatGvZ2BIk+HmLf4ifCJFwvKFNb9/JeZPMulfgFracn9QFcAf5GO8B/mweUjSoblS5In0cWhqpfs/5PQA== -typescript@^5.9.2: - version "5.9.3" - resolved "https://registry.yarnpkg.com/typescript/-/typescript-5.9.3.tgz#5b4f59e15310ab17a216f5d6cf53ee476ede670f" - integrity sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw== +typescript@^6.0.2: + version "6.0.2" + resolved "https://registry.yarnpkg.com/typescript/-/typescript-6.0.2.tgz#0b1bfb15f68c64b97032f3d78abbf98bdbba501f" + integrity sha512-bGdAIrZ0wiGDo5l8c++HWtbaNCWTS4UTv7RaTH/ThVIgjkveJt83m74bBHMJkuCbslY8ixgLBVZJIOiQlQTjfQ== unbox-primitive@^1.1.0: version "1.1.0"