diff --git a/.github/workflows/apm-integrations.yml b/.github/workflows/apm-integrations.yml index 6036d41166f..416200dd824 100644 --- a/.github/workflows/apm-integrations.yml +++ b/.github/workflows/apm-integrations.yml @@ -384,7 +384,17 @@ jobs: - uses: ./.github/actions/install - run: yarn test:plugins:ci - cookie-session: + express-mongo-sanitize: + runs-on: ubuntu-latest + env: + PLUGINS: express-mongo-sanitize + steps: + - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 + - uses: ./.github/actions/plugins/test + with: + dd_api_key: ${{ secrets.DD_API_KEY }} + + express-session: runs-on: ubuntu-latest env: PLUGINS: express-session diff --git a/.github/workflows/nightly-dispatch.yml b/.github/workflows/custom-node-version-dispatch.yml similarity index 97% rename from .github/workflows/nightly-dispatch.yml rename to .github/workflows/custom-node-version-dispatch.yml index a090e6b96d5..017cee6f49c 100644 --- a/.github/workflows/nightly-dispatch.yml +++ b/.github/workflows/custom-node-version-dispatch.yml @@ -1,4 +1,4 @@ -name: Nightly Testing +name: Nightly and custom Node.js builds on: workflow_dispatch: diff --git a/.github/workflows/serverless.yml b/.github/workflows/serverless.yml index 12fa2901ba2..c5b12893bcf 100644 --- a/.github/workflows/serverless.yml +++ b/.github/workflows/serverless.yml @@ -78,10 +78,11 @@ jobs: EXTRA_CORS_EXPOSE_HEADERS: x-amz-request-id,x-amzn-requestid,x-amz-id-2 AWS_DEFAULT_REGION: us-east-1 FORCE_NONINTERACTIVE: 'true' - LAMBDA_EXECUTOR: local START_WEB: '0' ports: - 4566:4566 + volumes: + - /var/run/docker.sock:/var/run/docker.sock:rw # we have two localstacks since upgrading localstack was causing lambda & S3 tests to fail # To-Do: Debug localstack / lambda and localstack / S3 localstack-legacy: diff --git a/docs/test.ts b/docs/test.ts index c5cde0aaa52..7cb8b4ba9d0 100644 --- a/docs/test.ts +++ b/docs/test.ts @@ -262,7 +262,6 @@ const elasticsearchOptions: plugins.elasticsearch = { const awsSdkOptions: plugins.aws_sdk = { service: 'test', - splitByAwsService: false, batchPropagationEnabled: false, hooks: { request: (span?: Span, response?) => {}, diff --git a/index.d.ts b/index.d.ts index a4f7325a315..95fb0e51e89 100644 --- a/index.d.ts +++ b/index.d.ts @@ -1698,12 +1698,6 @@ declare namespace tracer { * [aws-sdk](https://github.com/aws/aws-sdk-js) module. */ interface aws_sdk extends Instrumentation { - /** - * Whether to add a suffix to the service name so that each AWS service has its own service name. - * @default true - */ - splitByAwsService?: boolean; - /** * Whether to inject all messages during batch AWS SQS, Kinesis, and SNS send operations. Normal * behavior is to inject the first message in batch send operations. 
diff --git a/integration-tests/ci-visibility/subproject/cypress-config.json b/integration-tests/ci-visibility/subproject/cypress-config.json index 3ad19f9f90a..20c25ae5c6c 100644 --- a/integration-tests/ci-visibility/subproject/cypress-config.json +++ b/integration-tests/ci-visibility/subproject/cypress-config.json @@ -4,6 +4,6 @@ "pluginsFile": "cypress/plugins-old/index.js", "supportFile": "cypress/support/e2e.js", "integrationFolder": "cypress/e2e", - "defaultCommandTimeout": 100, + "defaultCommandTimeout": 1000, "nodeVersion": "system" } diff --git a/integration-tests/ci-visibility/subproject/cypress.config.js b/integration-tests/ci-visibility/subproject/cypress.config.js index 7b9383ea40d..2708de10f00 100644 --- a/integration-tests/ci-visibility/subproject/cypress.config.js +++ b/integration-tests/ci-visibility/subproject/cypress.config.js @@ -1,7 +1,7 @@ 'use strict' module.exports = { - defaultCommandTimeout: 100, + defaultCommandTimeout: 1000, e2e: { setupNodeEvents (on, config) { return require('dd-trace/ci/cypress/plugin')(on, config) diff --git a/integration-tests/cypress-config.json b/integration-tests/cypress-config.json index 3ad19f9f90a..20c25ae5c6c 100644 --- a/integration-tests/cypress-config.json +++ b/integration-tests/cypress-config.json @@ -4,6 +4,6 @@ "pluginsFile": "cypress/plugins-old/index.js", "supportFile": "cypress/support/e2e.js", "integrationFolder": "cypress/e2e", - "defaultCommandTimeout": 100, + "defaultCommandTimeout": 1000, "nodeVersion": "system" } diff --git a/integration-tests/cypress/cypress.spec.js b/integration-tests/cypress/cypress.spec.js index 1efa4048edf..3d0391814de 100644 --- a/integration-tests/cypress/cypress.spec.js +++ b/integration-tests/cypress/cypress.spec.js @@ -493,7 +493,7 @@ moduleTypes.forEach(({ .map(file => file.filename) assert.includeMembers(fileNames, Object.keys(coverageFixture)) - }, 20000) + }, 25000) childProcess = exec( testCommand, @@ -988,7 +988,7 @@ moduleTypes.forEach(({ 'ci-visibility/subproject/src/index.tsx', 'ci-visibility/subproject/cypress/e2e/spec.cy.js' ]) - }, 10000) + }, 25000) childProcess = exec( command, @@ -1028,7 +1028,7 @@ moduleTypes.forEach(({ assert.exists(testEvent.content.test_session_id) assert.notEqual(testEvent.content.test_suite_id, testModuleEvent.content.test_module_id) }) - }) + }, 25000) childProcess = exec( testCommand, @@ -1173,7 +1173,7 @@ moduleTypes.forEach(({ const testSession = events.find(event => event.type === 'test_session_end').content assert.propertyVal(testSession.meta, TEST_EARLY_FLAKE_ENABLED, 'true') - }) + }, 25000) const { NODE_OPTIONS, // NODE_OPTIONS dd-trace config does not work with cypress @@ -1241,7 +1241,7 @@ moduleTypes.forEach(({ const testSession = events.find(event => event.type === 'test_session_end').content assert.notProperty(testSession.meta, TEST_EARLY_FLAKE_ENABLED) - }) + }, 25000) const specToRun = 'cypress/e2e/spec.cy.js' childProcess = exec( @@ -1298,7 +1298,7 @@ moduleTypes.forEach(({ const testSession = events.find(event => event.type === 'test_session_end').content assert.propertyVal(testSession.meta, TEST_EARLY_FLAKE_ENABLED, 'true') - }) + }, 25000) const specToRun = 'cypress/e2e/skipped-test.js' @@ -1353,7 +1353,7 @@ moduleTypes.forEach(({ const newTests = tests.filter(test => test.meta[TEST_IS_NEW] === 'true') assert.equal(newTests.length, 0) - }) + }, 25000) const specToRun = 'cypress/e2e/spec.cy.js' @@ -1624,7 +1624,7 @@ moduleTypes.forEach(({ 'cypress/e2e/flaky-test-retries.js.flaky test retry always passes' ]) 
assert.equal(tests.filter(test => test.meta[TEST_RETRY_REASON] === TEST_RETRY_REASON_TYPES.atr).length, 0) - }) + }, 25000) const { NODE_OPTIONS, // NODE_OPTIONS dd-trace config does not work with cypress @@ -1683,7 +1683,7 @@ moduleTypes.forEach(({ ]) assert.equal(tests.filter(test => test.meta[TEST_RETRY_REASON] === TEST_RETRY_REASON_TYPES.atr).length, 2) - }) + }, 25000) const { NODE_OPTIONS, // NODE_OPTIONS dd-trace config does not work with cypress @@ -1795,7 +1795,7 @@ moduleTypes.forEach(({ const testSession = events.find(event => event.type === 'test_session_end').content assert.notProperty(testSession.meta, TEST_EARLY_FLAKE_ENABLED) - }) + }, 25000) const specToRun = 'cypress/e2e/spec.cy.js' childProcess = exec( @@ -1835,7 +1835,7 @@ moduleTypes.forEach(({ const test = events.find(event => event.type === 'test').content assert.equal(test.resource, 'cypress/e2e/multi-origin.js.tests multiple origins') assert.equal(test.meta[TEST_STATUS], 'pass') - }) + }, 25000) secondWebAppServer = http.createServer((req, res) => { res.setHeader('Content-Type', 'text/html') @@ -2011,7 +2011,7 @@ moduleTypes.forEach(({ } } } - }) + }, 25000) const runAttemptToFixTest = async ({ isAttemptToFix, @@ -2195,7 +2195,7 @@ moduleTypes.forEach(({ assert.propertyVal(failedTest.meta, TEST_STATUS, 'fail') assert.notProperty(failedTest.meta, TEST_MANAGEMENT_IS_DISABLED) } - }) + }, 25000) const runDisableTest = async (isDisabling, extraEnvVars = {}) => { const testAssertionsPromise = getTestAssertions(isDisabling) @@ -2294,7 +2294,7 @@ moduleTypes.forEach(({ assert.propertyVal(failedTest.meta, TEST_STATUS, 'fail') assert.notProperty(failedTest.meta, TEST_MANAGEMENT_IS_QUARANTINED) } - }) + }, 25000) const runQuarantineTest = async (isQuarantining, extraEnvVars = {}) => { const testAssertionsPromise = getTestAssertions(isQuarantining) @@ -2366,7 +2366,7 @@ moduleTypes.forEach(({ const tests = events.filter(event => event.type === 'test').map(event => event.content) // it is not retried assert.equal(tests.length, 1) - }) + }, 25000) const { NODE_OPTIONS, diff --git a/integration-tests/esbuild/build.esm-hono-output-cjs.mjs b/integration-tests/esbuild/build.esm-hono-output-cjs.mjs index 188418e7af7..a3b60d3ca03 100644 --- a/integration-tests/esbuild/build.esm-hono-output-cjs.mjs +++ b/integration-tests/esbuild/build.esm-hono-output-cjs.mjs @@ -18,10 +18,6 @@ const external = [ // required if you use profiling '@datadog/pprof', - // @openfeature/core is a peer dependency of @openfeature/server-sdk - // which is used by @datadog/openfeature-node-server - '@openfeature/core', - // required if you use Datadog security features '@datadog/native-appsec', '@datadog/native-iast-taint-tracking', diff --git a/integration-tests/esbuild/build.esm-hono-output-esm.mjs b/integration-tests/esbuild/build.esm-hono-output-esm.mjs index b0029b87122..b30153a22aa 100644 --- a/integration-tests/esbuild/build.esm-hono-output-esm.mjs +++ b/integration-tests/esbuild/build.esm-hono-output-esm.mjs @@ -18,10 +18,6 @@ const external = [ // required if you use profiling '@datadog/pprof', - // @openfeature/core is a peer dependency of @openfeature/server-sdk - // which is used by @datadog/openfeature-node-server - '@openfeature/core', - // required if you use Datadog security features '@datadog/native-appsec', '@datadog/native-iast-taint-tracking', diff --git a/integration-tests/esbuild/build.esm-http-output-cjs.mjs b/integration-tests/esbuild/build.esm-http-output-cjs.mjs index 7af0e78341c..ef9155b3aa7 100644 --- 
a/integration-tests/esbuild/build.esm-http-output-cjs.mjs +++ b/integration-tests/esbuild/build.esm-http-output-cjs.mjs @@ -18,10 +18,6 @@ const external = [ // required if you use profiling '@datadog/pprof', - // @openfeature/core is a peer dependency of @openfeature/server-sdk - // which is used by @datadog/openfeature-node-server - '@openfeature/core', - // required if you use Datadog security features '@datadog/native-appsec', '@datadog/native-iast-taint-tracking', diff --git a/integration-tests/esbuild/build.esm-http-output-esm.mjs b/integration-tests/esbuild/build.esm-http-output-esm.mjs index 45467dedd8e..427dabb023c 100644 --- a/integration-tests/esbuild/build.esm-http-output-esm.mjs +++ b/integration-tests/esbuild/build.esm-http-output-esm.mjs @@ -18,10 +18,6 @@ const external = [ // required if you use profiling '@datadog/pprof', - // @openfeature/core is a peer dependency of @openfeature/server-sdk - // which is used by @datadog/openfeature-node-server - '@openfeature/core', - // required if you use Datadog security features '@datadog/native-appsec', '@datadog/native-iast-taint-tracking', diff --git a/integration-tests/esbuild/openfeature.spec.js b/integration-tests/esbuild/openfeature.spec.js new file mode 100644 index 00000000000..f151fa227ce --- /dev/null +++ b/integration-tests/esbuild/openfeature.spec.js @@ -0,0 +1,43 @@ +'use strict' + +const { execSync } = require('node:child_process') +const path = require('node:path') + +const { FakeAgent, createSandbox } = require('../helpers') + +// This should switch to our withVersion helper. The order here currently matters. +const esbuildVersions = ['latest', '0.16.12'] + +esbuildVersions.forEach((version) => { + describe('OpenFeature', () => { + let sandbox, agent, cwd + + before(async () => { + sandbox = await createSandbox([`esbuild@${version}`, 'hono', '@hono/node-server'], false, [__dirname]) + cwd = sandbox.folder + // remove all node_modules and the bun.lock file, then install with yarn + // TODO: add this in createSandbox if it's needed in more places + execSync(`rm -rf ${path.join(cwd, 'node_modules')}`, { cwd }) + execSync(`rm -rf ${path.join(cwd, 'bun.lock')}`, { cwd }) + execSync('npm install -g yarn', { cwd }) + + execSync('yarn', { cwd }) + }) + + beforeEach(async () => { + agent = await new FakeAgent().start() + }) + + after(() => { + sandbox.remove() + }) + + afterEach(() => { + agent.stop() + }) + + it('should not crash build after installing with yarn', () => { + execSync('node esbuild/build.esm-hono-output-esm.mjs', { cwd }) + }) + }) +}) diff --git a/integration-tests/esbuild/package.json b/integration-tests/esbuild/package.json index 437d2276cb4..573daca3301 100644 --- a/integration-tests/esbuild/package.json +++ b/integration-tests/esbuild/package.json @@ -20,13 +20,13 @@ "author": "Thomas Hunter II ", "license": "ISC", "dependencies": { - "@apollo/server": "5.0.0", + "@apollo/server": "5.1.0", "@koa/router": "14.0.0", "aws-sdk": "2.1692.0", - "axios": "1.12.2", + "axios": "1.13.1", "express": "4.21.2", "knex": "3.1.0", - "koa": "3.1.0", + "koa": "3.1.1", "openai": "6.7.0" } } diff --git a/integration-tests/helpers/index.js b/integration-tests/helpers/index.js index 3aff18bcc0f..02b6a60f96e 100644 --- a/integration-tests/helpers/index.js +++ b/integration-tests/helpers/index.js @@ -221,7 +221,7 @@ function execHelper (command, options) { console.log('Exec SUCCESS: ', command) } catch (error) { console.error('Exec ERROR: ', command, error) - if (command.startsWith('bun')) { + if (command.startsWith(BUN)) { try {
console.log('Exec RETRY START: ', command) execSync(command, options) diff --git a/integration-tests/vitest/vitest.spec.js b/integration-tests/vitest/vitest.spec.js index 07b4482f96b..ec3883f9bbd 100644 --- a/integration-tests/vitest/vitest.spec.js +++ b/integration-tests/vitest/vitest.spec.js @@ -53,6 +53,7 @@ const { DD_CAPABILITIES_IMPACTED_TESTS } = require('../../packages/dd-trace/src/plugins/util/test') const { DD_HOST_CPU_COUNT } = require('../../packages/dd-trace/src/plugins/util/env') +const { NODE_MAJOR } = require('../../version') const NUM_RETRIES_EFD = 3 @@ -399,7 +400,10 @@ versions.forEach((version) => { }) // total code coverage only works for >=2.0.0 - if (version === 'latest') { + // v4 dropped support for Node 18. Every test but this one passes, so we'll leave them + // for now. The breaking change is in https://github.com/vitest-dev/vitest/commit/9a0bf2254 + // shipped in https://github.com/vitest-dev/vitest/releases/tag/v4.0.0-beta.12 + if (version === 'latest' && NODE_MAJOR >= 20) { const coverageProviders = ['v8', 'istanbul'] coverageProviders.forEach((coverageProvider) => { diff --git a/package.json b/package.json index 63507ef40ef..3d143355e79 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "dd-trace", - "version": "5.74.0", + "version": "5.74.1", "description": "Datadog APM tracing client for JavaScript", "main": "index.js", "typings": "index.d.ts", @@ -158,25 +158,13 @@ "tlhunter-sorted-set": "^0.1.0", "ttl-set": "^1.0.0" }, - "peerDependencies": { - "@openfeature/core": "^1.9.0", - "@openfeature/server-sdk": "~1.20.0" - }, - "peerDependenciesMeta": { - "@openfeature/core": { - "optional": true - }, - "@openfeature/server-sdk": { - "optional": true - } - }, "devDependencies": { "@babel/helpers": "^7.27.6", "@eslint/eslintrc": "^3.3.1", "@eslint/js": "^9.29.0", "@msgpack/msgpack": "^3.1.2", - "@openfeature/core": "^1.8.1", - "@openfeature/server-sdk": "~1.20.0", + "@openfeature/core": "^1.9.0", + "@openfeature/server-sdk": "^1.20.0", "@stylistic/eslint-plugin": "^5.0.0", "@types/chai": "^4.3.16", "@types/mocha": "^10.0.10", @@ -214,7 +202,7 @@ "tap": "^16.3.10", "tiktoken": "^1.0.21", "typescript": "^5.9.2", - "workerpool": "^9.2.0", + "workerpool": "^10.0.0", "yaml": "^2.8.0", "yarn-deduplicate": "^6.0.2" } diff --git a/packages/datadog-esbuild/index.js b/packages/datadog-esbuild/index.js index 5d5060ecb09..97a318ac3ba 100644 --- a/packages/datadog-esbuild/index.js +++ b/packages/datadog-esbuild/index.js @@ -114,6 +114,7 @@ ${build.initialOptions.banner.js}` } try { + // eslint-disable-next-line n/no-unpublished-require require.resolve('@openfeature/core') } catch (error) { build.initialOptions.external ??= [] diff --git a/packages/datadog-instrumentations/src/cookie-parser.js b/packages/datadog-instrumentations/src/cookie-parser.js index ba9b6ae33d4..dadf649ae05 100644 --- a/packages/datadog-instrumentations/src/cookie-parser.js +++ b/packages/datadog-instrumentations/src/cookie-parser.js @@ -25,8 +25,6 @@ addHook({ name: 'cookie-parser', versions: ['>=1.0.0'] }, cookieParser => { - // This prevents the non default export from entering the wrapping process - if (cookieParser.default) return cookieParser return shimmer.wrapFunction(cookieParser, cookieParser => function () { const cookieMiddleware = cookieParser.apply(this, arguments) diff --git a/packages/datadog-instrumentations/src/express-session.js b/packages/datadog-instrumentations/src/express-session.js index 0ef8d8189da..2332934fd3c 100644 ---
a/packages/datadog-instrumentations/src/express-session.js +++ b/packages/datadog-instrumentations/src/express-session.js @@ -37,6 +37,5 @@ addHook({ name: 'express-session', versions: ['>=1.5.0'] }, session => { - if (session.default) return session return shimmer.wrapFunction(session, wrapSession) }) diff --git a/packages/datadog-instrumentations/src/vitest.js b/packages/datadog-instrumentations/src/vitest.js index db6b4ce225e..a75d65d2195 100644 --- a/packages/datadog-instrumentations/src/vitest.js +++ b/packages/datadog-instrumentations/src/vitest.js @@ -153,6 +153,10 @@ function isCliApiPackage (vitestPackage) { return vitestPackage.s?.name === 'startVitest' } +function isTestPackage (testPackage) { + return testPackage.V?.name === 'VitestTestRunner' +} + function getSessionStatus (state) { if (state.getCountOfFailedTests() > 0) { return 'fail' @@ -240,7 +244,9 @@ function getSortWrapper (sort, frameworkVersion) { if (isFlakyTestRetriesEnabled && !this.ctx.config.retry && flakyTestRetriesCount > 0) { this.ctx.config.retry = flakyTestRetriesCount try { - const workspaceProject = this.ctx.getCoreWorkspaceProject() + const workspaceProject = this.ctx.getCoreWorkspaceProject + ? this.ctx.getCoreWorkspaceProject() + : this.ctx.getRootProject() workspaceProject._provided._ddIsFlakyTestRetriesEnabled = isFlakyTestRetriesEnabled } catch { log.warn('Could not send library configuration to workers.') @@ -272,7 +278,9 @@ function getSortWrapper (sort, frameworkVersion) { // TODO: use this to pass session and module IDs to the worker, instead of polluting process.env // Note: setting this.ctx.config.provide directly does not work because it's cached try { - const workspaceProject = this.ctx.getCoreWorkspaceProject() + const workspaceProject = this.ctx.getCoreWorkspaceProject + ? this.ctx.getCoreWorkspaceProject() + : this.ctx.getRootProject() workspaceProject._provided._ddIsKnownTestsEnabled = isKnownTestsEnabled workspaceProject._provided._ddKnownTests = knownTests workspaceProject._provided._ddIsEarlyFlakeDetectionEnabled = isEarlyFlakeDetectionEnabled @@ -290,7 +298,9 @@ function getSortWrapper (sort, frameworkVersion) { if (isDiEnabled) { try { - const workspaceProject = this.ctx.getCoreWorkspaceProject() + const workspaceProject = this.ctx.getCoreWorkspaceProject + ? this.ctx.getCoreWorkspaceProject() + : this.ctx.getRootProject() workspaceProject._provided._ddIsDiEnabled = isDiEnabled } catch { log.warn('Could not send Dynamic Instrumentation configuration to workers.') @@ -305,7 +315,9 @@ function getSortWrapper (sort, frameworkVersion) { } else { const testManagementTests = receivedTestManagementTests try { - const workspaceProject = this.ctx.getCoreWorkspaceProject() + const workspaceProject = this.ctx.getCoreWorkspaceProject + ? this.ctx.getCoreWorkspaceProject() + : this.ctx.getRootProject() workspaceProject._provided._ddIsTestManagementTestsEnabled = isTestManagementTestsEnabled workspaceProject._provided._ddTestManagementAttemptToFixRetries = testManagementAttemptToFixRetries workspaceProject._provided._ddTestManagementTests = testManagementTests @@ -321,7 +333,9 @@ function getSortWrapper (sort, frameworkVersion) { log.error('Could not get modified tests.') } else { try { - const workspaceProject = this.ctx.getCoreWorkspaceProject() + const workspaceProject = this.ctx.getCoreWorkspaceProject + ? 
this.ctx.getCoreWorkspaceProject() + : this.ctx.getRootProject() workspaceProject._provided._ddIsImpactedTestsEnabled = isImpactedTestsEnabled workspaceProject._provided._ddModifiedFiles = modifiedFiles } catch { @@ -443,13 +457,7 @@ function getStartVitestWrapper (cliApiPackage, frameworkVersion) { return cliApiPackage } -addHook({ - name: 'vitest', - versions: ['>=1.6.0'], - file: 'dist/runners.js' -}, (vitestPackage) => { - const { VitestTestRunner } = vitestPackage - +function wrapVitestTestRunner (VitestTestRunner) { // `onBeforeRunTask` is run before any repetition or attempt is run // `onBeforeRunTask` is an async function shimmer.wrap(VitestTestRunner.prototype, 'onBeforeRunTask', onBeforeRunTask => function (task) { @@ -744,6 +752,30 @@ addHook({ return result }) +} + +addHook({ + name: 'vitest', + versions: ['>=4.0.0'], + filePattern: 'dist/chunks/test.*' +}, (testPackage) => { + if (!isTestPackage(testPackage)) { + return testPackage + } + + wrapVitestTestRunner(testPackage.V) + + return testPackage +}) + +addHook({ + name: 'vitest', + versions: ['>=1.6.0 <4.0.0'], + file: 'dist/runners.js' +}, (vitestPackage) => { + const { VitestTestRunner } = vitestPackage + + wrapVitestTestRunner(VitestTestRunner) return vitestPackage }) diff --git a/packages/datadog-plugin-aws-sdk/src/base.js b/packages/datadog-plugin-aws-sdk/src/base.js index f9954754df7..6ff6d6f4bec 100644 --- a/packages/datadog-plugin-aws-sdk/src/base.js +++ b/packages/datadog-plugin-aws-sdk/src/base.js @@ -282,7 +282,6 @@ function normalizeConfig (config, serviceIdentifier) { return { ...config, ...specificConfig, - splitByAwsService: config.splitByAwsService !== false, batchPropagationEnabled, hooks } diff --git a/packages/datadog-plugin-aws-sdk/test/aws-sdk.spec.js b/packages/datadog-plugin-aws-sdk/test/aws-sdk.spec.js index c3d9fc1baba..f91647c8467 100644 --- a/packages/datadog-plugin-aws-sdk/test/aws-sdk.spec.js +++ b/packages/datadog-plugin-aws-sdk/test/aws-sdk.spec.js @@ -254,7 +254,6 @@ describe('Plugin', () => { before(() => { return agent.load(['aws-sdk', 'http'], [{ service: 'test', - splitByAwsService: false, hooks: { request (span, response) { span.setTag('hook.operation', response.request.operation) diff --git a/packages/datadog-plugin-express-mongo-sanitize/test/integration-test/client.spec.js b/packages/datadog-plugin-express-mongo-sanitize/test/integration-test/client.spec.js new file mode 100644 index 00000000000..e7cdd6479ba --- /dev/null +++ b/packages/datadog-plugin-express-mongo-sanitize/test/integration-test/client.spec.js @@ -0,0 +1,43 @@ +'use strict' + +const { + createSandbox, varySandbox, + FakeAgent, spawnPluginIntegrationTestProc +} = require('../../../../integration-tests/helpers') +const { assert } = require('chai') +const { withVersions } = require('../../../dd-trace/test/setup/mocha') +const axios = require('axios') +withVersions('express-mongo-sanitize', 'express-mongo-sanitize', version => { + describe('ESM', () => { + let sandbox, variants, proc, agent + + before(async function () { + this.timeout(50000) + sandbox = await createSandbox([`'express-mongo-sanitize@${version}'`, 'express@<=4.0.0'], false, + ['./packages/datadog-plugin-express-mongo-sanitize/test/integration-test/*']) + variants = varySandbox(sandbox, 'server.mjs', 'expressMongoSanitize', undefined, 'express-mongo-sanitize') + }) + + after(async function () { + this.timeout(50000) + await sandbox.remove() + }) + + beforeEach(async () => { + agent = await new FakeAgent().start() + }) + + afterEach(async () => { + 
proc?.kill() + await agent.stop() + }) + + for (const variant of varySandbox.VARIANTS) { + it(`is instrumented loaded with ${variant}`, async () => { + proc = await spawnPluginIntegrationTestProc(sandbox.folder, variants[variant], agent.port) + const response = await axios.get(`${proc.url}/?param=paramvalue`) + assert.equal(response.headers['x-counter'], '1') + }) + } + }) +}) diff --git a/packages/datadog-plugin-express-mongo-sanitize/test/integration-test/server.mjs b/packages/datadog-plugin-express-mongo-sanitize/test/integration-test/server.mjs new file mode 100644 index 00000000000..a904155a01a --- /dev/null +++ b/packages/datadog-plugin-express-mongo-sanitize/test/integration-test/server.mjs @@ -0,0 +1,24 @@ +import 'dd-trace/init.js' +import express from 'express' +import expressMongoSanitize from 'express-mongo-sanitize' +import dc from 'dc-polyfill' +const app = express() + +const sanitizeMiddlewareFinished = dc.channel('datadog:express-mongo-sanitize:filter:finish') + +let counter = 0 + +sanitizeMiddlewareFinished.subscribe(() => { + counter += 1 +}) + +app.use(expressMongoSanitize()) +app.all('/', (req, res) => { + res.setHeader('X-Counter', counter) + res.end() +}) + +const server = app.listen(0, () => { + const port = server.address().port + process.send({ port }) +}) diff --git a/packages/datadog-shimmer/src/shimmer.js b/packages/datadog-shimmer/src/shimmer.js index d23d91dad92..39b25a20343 100644 --- a/packages/datadog-shimmer/src/shimmer.js +++ b/packages/datadog-shimmer/src/shimmer.js @@ -73,6 +73,8 @@ function copyObjectProperties (original, wrapped, skipKey) { * @returns {Function} The wrapped function. */ function wrapFunction (original, wrapper) { + if (typeof original === 'object' && original !== null) return original + const wrapped = wrapper(original) if (typeof original === 'function') { diff --git a/packages/datadog-shimmer/test/shimmer.spec.js b/packages/datadog-shimmer/test/shimmer.spec.js index 1075537d717..dafe485938f 100644 --- a/packages/datadog-shimmer/test/shimmer.spec.js +++ b/packages/datadog-shimmer/test/shimmer.spec.js @@ -372,12 +372,18 @@ describe('shimmer', () => { expect(() => shimmer.wrap(() => {}, () => {})).to.throw() }) - it('should work without a function', () => { - const a = { b: 1 } + it('should work with null instead of function', () => { + const a = null + const wrapped = shimmer.wrapFunction(a, x => () => x) expect(wrapped()).to.equal(a) }) + it('should not work with an object', () => { + const a = { b: 1 } + const wrapped = shimmer.wrapFunction(a, x => () => x) + expect(typeof wrapped).to.not.equal('function') + }) + it('should wrap the function', () => { const count = inc => inc diff --git a/packages/dd-trace/src/config-helper.js b/packages/dd-trace/src/config-helper.js index 184ec96c44d..eb457fcc959 100644 --- a/packages/dd-trace/src/config-helper.js +++ b/packages/dd-trace/src/config-helper.js @@ -80,6 +80,7 @@ module.exports = { * @returns {string|undefined} * @throws {Error} if the configuration is not supported */ + // This method, and callers of this method, need to be updated to check for declarative config sources as well. 
getEnvironmentVariable (name) { if ((name.startsWith('DD_') || name.startsWith('OTEL_') || aliasToCanonical[name]) && !supportedConfigurations[name]) { diff --git a/packages/dd-trace/src/config.js b/packages/dd-trace/src/config.js index e150576b74a..5008d64c5e8 100644 --- a/packages/dd-trace/src/config.js +++ b/packages/dd-trace/src/config.js @@ -83,20 +83,6 @@ function getFromOtelSamplerMap (otelTracesSampler, otelTracesSamplerArg) { return OTEL_TRACES_SAMPLER_MAPPING[otelTracesSampler] } -function validateOtelPropagators (propagators) { - if (!getEnv('PROPAGATION_STYLE_EXTRACT') && - !getEnv('PROPAGATION_STYLE_INJECT') && - !getEnv('DD_TRACE_PROPAGATION_STYLE') && - getEnv('OTEL_PROPAGATORS')) { - for (const style in propagators) { - if (!VALID_PROPAGATION_STYLES.has(style)) { - log.warn('unexpected value for OTEL_PROPAGATORS environment variable') - getCounter('otel.env.invalid', 'DD_TRACE_PROPAGATION_STYLE', 'OTEL_PROPAGATORS').inc() - } - } - } -} - /** * Validate the type of an environment variable * @param {string} envVar - The name of the environment variable @@ -210,30 +196,33 @@ function remapify (input, mappings) { return output } -function propagationStyle (key, option) { - // Extract by key if in object-form value - if (option !== null && typeof option === 'object' && !Array.isArray(option)) { - option = option[key] +/** + * Normalizes propagation style values to a lowercase array. + * Handles both string (comma-separated) and array inputs. + */ +function normalizePropagationStyle (value) { + if (Array.isArray(value)) { + return value.map(v => v.toLowerCase()) } - - // Should be an array at this point - if (Array.isArray(option)) return option.map(v => v.toLowerCase()) - - // If it's not an array but not undefined there's something wrong with the input - if (option !== undefined) { + if (typeof value === 'string') { + return value.split(',') + .filter(v => v !== '') + .map(v => v.trim().toLowerCase()) + } + if (value !== undefined) { log.warn('Unexpected input for config.tracePropagationStyle') } +} - // Otherwise, fallback to env var parsing - const envKey = `DD_TRACE_PROPAGATION_STYLE_${key.toUpperCase()}` - - const envVar = getEnv(envKey) ?? - getEnv('DD_TRACE_PROPAGATION_STYLE') ?? - getEnv('OTEL_PROPAGATORS') - if (envVar !== undefined) { - return envVar.split(',') - .filter(v => v !== '') - .map(v => v.trim().toLowerCase()) +/** + * Warns if both DD_TRACE_PROPAGATION_STYLE and specific inject/extract vars are set. 
+ */ +function warnIfPropagationStyleConflict (general, inject, extract) { + if (general && (inject || extract)) { + log.warn( + // eslint-disable-next-line @stylistic/max-len + 'Use either the DD_TRACE_PROPAGATION_STYLE environment variable or separate DD_TRACE_PROPAGATION_STYLE_INJECT and DD_TRACE_PROPAGATION_STYLE_EXTRACT environment variables' + ) } } @@ -315,36 +304,6 @@ class Config { checkIfBothOtelAndDdEnvVarSet() - const { - DD_API_KEY, - DD_APP_KEY, - DD_INSTRUMENTATION_INSTALL_ID = null, - DD_INSTRUMENTATION_INSTALL_TIME = null, - DD_INSTRUMENTATION_INSTALL_TYPE = null, - DD_TRACE_CLOUD_PAYLOAD_TAGGING_MAX_DEPTH, - DD_TRACE_CLOUD_REQUEST_PAYLOAD_TAGGING, - DD_TRACE_CLOUD_RESPONSE_PAYLOAD_TAGGING, - DD_TRACE_PROPAGATION_STYLE_EXTRACT, - DD_TRACE_PROPAGATION_STYLE_INJECT, - DD_TRACE_PROPAGATION_STYLE, - } = envs - - if (DD_TRACE_PROPAGATION_STYLE && ( - DD_TRACE_PROPAGATION_STYLE_INJECT || - DD_TRACE_PROPAGATION_STYLE_EXTRACT - )) { - log.warn( - // eslint-disable-next-line @stylistic/max-len - 'Use either the DD_TRACE_PROPAGATION_STYLE environment variable or separate DD_TRACE_PROPAGATION_STYLE_INJECT and DD_TRACE_PROPAGATION_STYLE_EXTRACT environment variables' - ) - } - const PROPAGATION_STYLE_INJECT = propagationStyle( - 'inject', - options.tracePropagationStyle - ) - - validateOtelPropagators(PROPAGATION_STYLE_INJECT) - if (typeof options.appsec === 'boolean') { options.appsec = { enabled: options.appsec @@ -357,42 +316,8 @@ class Config { } } - const cloudPayloadTaggingRequestRules = splitJSONPathRules( - DD_TRACE_CLOUD_REQUEST_PAYLOAD_TAGGING ?? - options.cloudPayloadTagging?.request ?? - '' - ) - - const cloudPayloadTaggingResponseRules = splitJSONPathRules( - DD_TRACE_CLOUD_RESPONSE_PAYLOAD_TAGGING ?? - options.cloudPayloadTagging?.response ?? - '' - ) - - const cloudPayloadTaggingMaxDepth = maybeInt( - DD_TRACE_CLOUD_PAYLOAD_TAGGING_MAX_DEPTH ?? - options.cloudPayloadTagging?.maxDepth - ) ?? 10 - - // TODO: refactor - this.apiKey = DD_API_KEY - this.appKey = DD_APP_KEY - - // sent in telemetry event app-started - this.installSignature = { - id: DD_INSTRUMENTATION_INSTALL_ID, - time: DD_INSTRUMENTATION_INSTALL_TIME, - type: DD_INSTRUMENTATION_INSTALL_TYPE - } - - this.cloudPayloadTagging = { - requestsEnabled: !!cloudPayloadTaggingRequestRules, - responsesEnabled: !!cloudPayloadTaggingResponseRules, - maxDepth: cloudPayloadTaggingMaxDepth, - rules: appendRules(cloudPayloadTaggingRequestRules, cloudPayloadTaggingResponseRules) - } - this.#defaults = defaults + this.#applyDefaults() this.#applyStableConfig(this.stableConfig?.localEntries ?? {}, this.#localStableConfig) this.#applyEnvironment(envs) this.#applyStableConfig(this.stableConfig?.fleetEntries ?? 
{}, this.#fleetStableConfig) @@ -452,35 +377,32 @@ class Config { } #applyStableConfig (config, obj) { - const { - DD_APPSEC_ENABLED, - DD_APPSEC_SCA_ENABLED, - DD_DATA_STREAMS_ENABLED, - DD_DYNAMIC_INSTRUMENTATION_ENABLED, - DD_ENV, - DD_IAST_ENABLED, - DD_LOGS_INJECTION, - DD_PROFILING_ENABLED, - DD_RUNTIME_METRICS_ENABLED, - DD_SERVICE, - DD_VERSION - } = config - - this.#setBoolean(obj, 'appsec.enabled', DD_APPSEC_ENABLED) - this.#setBoolean(obj, 'appsec.sca.enabled', DD_APPSEC_SCA_ENABLED) - this.#setBoolean(obj, 'dsmEnabled', DD_DATA_STREAMS_ENABLED) - this.#setBoolean(obj, 'dynamicInstrumentation.enabled', DD_DYNAMIC_INSTRUMENTATION_ENABLED) - this.#setString(obj, 'env', DD_ENV) - this.#setBoolean(obj, 'iast.enabled', DD_IAST_ENABLED) - this.#setBoolean(obj, 'logInjection', DD_LOGS_INJECTION) - const profilingEnabled = normalizeProfilingEnabledValue(DD_PROFILING_ENABLED) - this.#setString(obj, 'profiling.enabled', profilingEnabled) - this.#setBoolean(obj, 'runtimeMetrics.enabled', DD_RUNTIME_METRICS_ENABLED) - this.#setString(obj, 'service', DD_SERVICE) - this.#setString(obj, 'version', DD_VERSION) + this.#applyConfigValues(config, obj, {}) + } + + // Set environment-dependent defaults that can be overridden by users + #applyDefaults () { + const defaults = this.#defaults + + if (isInServerlessEnvironment()) { + this.#setBoolean(defaults, 'crashtracking.enabled', false) + this.#setString(defaults, 'profiling.enabled', 'false') + this.#setBoolean(defaults, 'telemetry.enabled', false) + this.#setBoolean(defaults, 'remoteConfig.enabled', false) + } else { + this.#setBoolean(defaults, 'crashtracking.enabled', true) + } + + if (getEnv('JEST_WORKER_ID')) { + this.#setBoolean(defaults, 'telemetry.enabled', false) + } } - #applyEnvironment (envs) { + #applyEnvironment () { + this.#applyConfigValues(getEnvironmentVariables(), this.#env, this.#envUnprocessed) + } + + #applyConfigValues (source, target, unprocessedTarget) { const { AWS_LAMBDA_FUNCTION_NAME, DD_AGENT_HOST, @@ -489,11 +411,13 @@ class Config { DD_AI_GUARD_MAX_CONTENT_SIZE, DD_AI_GUARD_MAX_MESSAGES_LENGTH, DD_AI_GUARD_TIMEOUT, + DD_API_KEY, DD_API_SECURITY_ENABLED, DD_API_SECURITY_SAMPLE_DELAY, DD_API_SECURITY_ENDPOINT_COLLECTION_ENABLED, DD_API_SECURITY_ENDPOINT_COLLECTION_MESSAGE_LIMIT, DD_APM_TRACING_ENABLED, + DD_APP_KEY, DD_APPSEC_AUTO_USER_INSTRUMENTATION_MODE, DD_APPSEC_COLLECT_ALL_HEADERS, DD_APPSEC_ENABLED, @@ -530,7 +454,6 @@ class Config { DD_PROFILING_ENABLED, DD_GRPC_CLIENT_ERROR_STATUSES, DD_GRPC_SERVER_ERROR_STATUSES, - JEST_WORKER_ID, DD_HEAP_SNAPSHOT_COUNT, DD_HEAP_SNAPSHOT_DESTINATION, DD_HEAP_SNAPSHOT_INTERVAL, @@ -562,6 +485,9 @@ class Config { DD_PROFILING_EXPORTERS, DD_PROFILING_SOURCE_MAP, DD_INTERNAL_PROFILING_LONG_LIVED_THRESHOLD, + DD_INSTRUMENTATION_INSTALL_ID, + DD_INSTRUMENTATION_INSTALL_TIME, + DD_INSTRUMENTATION_INSTALL_TYPE, DD_REMOTE_CONFIGURATION_ENABLED, DD_REMOTE_CONFIG_POLL_INTERVAL_SECONDS, DD_RUNTIME_METRICS_ENABLED, @@ -588,6 +514,9 @@ class Config { DD_TRACE_BAGGAGE_TAG_KEYS, DD_TRACE_CLIENT_IP_ENABLED, DD_TRACE_CLIENT_IP_HEADER, + DD_TRACE_CLOUD_REQUEST_PAYLOAD_TAGGING, + DD_TRACE_CLOUD_RESPONSE_PAYLOAD_TAGGING, + DD_TRACE_CLOUD_PAYLOAD_TAGGING_MAX_DEPTH, DD_TRACE_DYNAMODB_TABLE_PRIMARY_KEYS, DD_TRACE_ENABLED, DD_TRACE_EXPERIMENTAL_EXPORTER, @@ -646,262 +575,327 @@ class Config { OTEL_EXPORTER_OTLP_TIMEOUT, OTEL_BSP_SCHEDULE_DELAY, OTEL_BSP_MAX_EXPORT_BATCH_SIZE - } = envs + } = source const tags = {} - const env = this.#env - tagger.add(this.#parsedDdTags, 
parseSpaceSeparatedTags(DD_TAGS)) + const parsedDdTags = parseSpaceSeparatedTags(DD_TAGS) + tagger.add(this.#parsedDdTags, parsedDdTags) tagger.add(tags, parseSpaceSeparatedTags(handleOtel(OTEL_RESOURCE_ATTRIBUTES))) - tagger.add(tags, this.#parsedDdTags) + tagger.add(tags, parsedDdTags) tagger.add(tags, DD_TRACE_TAGS) tagger.add(tags, DD_TRACE_GLOBAL_TAGS) - this.#setBoolean(env, 'otelLogsEnabled', isTrue(DD_LOGS_OTEL_ENABLED)) + this.#setString(target, 'apiKey', DD_API_KEY) + this.#setBoolean(target, 'otelLogsEnabled', DD_LOGS_OTEL_ENABLED) // Set OpenTelemetry logs configuration with specific _LOGS_ vars taking precedence over generic _EXPORTERS_ vars if (OTEL_EXPORTER_OTLP_ENDPOINT) { // Only set if there's a custom URL, otherwise let calc phase handle the default - this.#setString(env, 'otelUrl', OTEL_EXPORTER_OTLP_ENDPOINT) + this.#setString(target, 'otelUrl', OTEL_EXPORTER_OTLP_ENDPOINT) } if (OTEL_EXPORTER_OTLP_ENDPOINT || OTEL_EXPORTER_OTLP_LOGS_ENDPOINT) { - this.#setString(env, 'otelLogsUrl', OTEL_EXPORTER_OTLP_LOGS_ENDPOINT || env.otelUrl) + this.#setString(target, 'otelLogsUrl', OTEL_EXPORTER_OTLP_LOGS_ENDPOINT || target.otelUrl) } - this.#setString(env, 'otelHeaders', OTEL_EXPORTER_OTLP_HEADERS) - this.#setString(env, 'otelLogsHeaders', OTEL_EXPORTER_OTLP_LOGS_HEADERS || env.otelHeaders) - this.#setString(env, 'otelProtocol', OTEL_EXPORTER_OTLP_PROTOCOL) - this.#setString(env, 'otelLogsProtocol', OTEL_EXPORTER_OTLP_LOGS_PROTOCOL || env.otelProtocol) - env.otelTimeout = maybeInt(OTEL_EXPORTER_OTLP_TIMEOUT) - env.otelLogsTimeout = maybeInt(OTEL_EXPORTER_OTLP_LOGS_TIMEOUT) || env.otelTimeout - env.otelLogsBatchTimeout = maybeInt(OTEL_BSP_SCHEDULE_DELAY) - env.otelLogsMaxExportBatchSize = maybeInt(OTEL_BSP_MAX_EXPORT_BATCH_SIZE) + this.#setString(target, 'otelHeaders', OTEL_EXPORTER_OTLP_HEADERS) + this.#setString(target, 'otelLogsHeaders', OTEL_EXPORTER_OTLP_LOGS_HEADERS || target.otelHeaders) + this.#setString(target, 'otelProtocol', OTEL_EXPORTER_OTLP_PROTOCOL) + this.#setString(target, 'otelLogsProtocol', OTEL_EXPORTER_OTLP_LOGS_PROTOCOL || target.otelProtocol) + target.otelTimeout = maybeInt(OTEL_EXPORTER_OTLP_TIMEOUT) + target.otelLogsTimeout = maybeInt(OTEL_EXPORTER_OTLP_LOGS_TIMEOUT) || target.otelTimeout + target.otelLogsBatchTimeout = maybeInt(OTEL_BSP_SCHEDULE_DELAY) + target.otelLogsMaxExportBatchSize = maybeInt(OTEL_BSP_MAX_EXPORT_BATCH_SIZE) this.#setBoolean( - env, + target, 'apmTracingEnabled', DD_APM_TRACING_ENABLED ?? 
(DD_EXPERIMENTAL_APPSEC_STANDALONE_ENABLED && isFalse(DD_EXPERIMENTAL_APPSEC_STANDALONE_ENABLED)) ) - this.#setBoolean(env, 'appsec.apiSecurity.enabled', DD_API_SECURITY_ENABLED && isTrue(DD_API_SECURITY_ENABLED)) - env['appsec.apiSecurity.sampleDelay'] = maybeFloat(DD_API_SECURITY_SAMPLE_DELAY) - this.#setBoolean(env, 'appsec.apiSecurity.endpointCollectionEnabled', + this.#setString(target, 'appKey', DD_APP_KEY) + this.#setBoolean(target, 'appsec.apiSecurity.enabled', DD_API_SECURITY_ENABLED && isTrue(DD_API_SECURITY_ENABLED)) + target['appsec.apiSecurity.sampleDelay'] = maybeFloat(DD_API_SECURITY_SAMPLE_DELAY) + this.#setBoolean(target, 'appsec.apiSecurity.endpointCollectionEnabled', DD_API_SECURITY_ENDPOINT_COLLECTION_ENABLED) - env['appsec.apiSecurity.endpointCollectionMessageLimit'] = + target['appsec.apiSecurity.endpointCollectionMessageLimit'] = maybeInt(DD_API_SECURITY_ENDPOINT_COLLECTION_MESSAGE_LIMIT) - env['appsec.blockedTemplateGraphql'] = maybeFile(DD_APPSEC_GRAPHQL_BLOCKED_TEMPLATE_JSON) - env['appsec.blockedTemplateHtml'] = maybeFile(DD_APPSEC_HTTP_BLOCKED_TEMPLATE_HTML) - this.#envUnprocessed['appsec.blockedTemplateHtml'] = DD_APPSEC_HTTP_BLOCKED_TEMPLATE_HTML - env['appsec.blockedTemplateJson'] = maybeFile(DD_APPSEC_HTTP_BLOCKED_TEMPLATE_JSON) - this.#envUnprocessed['appsec.blockedTemplateJson'] = DD_APPSEC_HTTP_BLOCKED_TEMPLATE_JSON - this.#setBoolean(env, 'appsec.enabled', DD_APPSEC_ENABLED) - this.#setString(env, 'appsec.eventTracking.mode', DD_APPSEC_AUTO_USER_INSTRUMENTATION_MODE) + target['appsec.blockedTemplateGraphql'] = maybeFile(DD_APPSEC_GRAPHQL_BLOCKED_TEMPLATE_JSON) + target['appsec.blockedTemplateHtml'] = maybeFile(DD_APPSEC_HTTP_BLOCKED_TEMPLATE_HTML) + unprocessedTarget['appsec.blockedTemplateHtml'] = DD_APPSEC_HTTP_BLOCKED_TEMPLATE_HTML + target['appsec.blockedTemplateJson'] = maybeFile(DD_APPSEC_HTTP_BLOCKED_TEMPLATE_JSON) + unprocessedTarget['appsec.blockedTemplateJson'] = DD_APPSEC_HTTP_BLOCKED_TEMPLATE_JSON + this.#setBoolean(target, 'appsec.enabled', DD_APPSEC_ENABLED) + this.#setString(target, 'appsec.eventTracking.mode', DD_APPSEC_AUTO_USER_INSTRUMENTATION_MODE) // TODO appsec.extendedHeadersCollection are deprecated, to delete in a major - this.#setBoolean(env, 'appsec.extendedHeadersCollection.enabled', DD_APPSEC_COLLECT_ALL_HEADERS) + this.#setBoolean(target, 'appsec.extendedHeadersCollection.enabled', DD_APPSEC_COLLECT_ALL_HEADERS) this.#setBoolean( - env, + target, 'appsec.extendedHeadersCollection.redaction', DD_APPSEC_HEADER_COLLECTION_REDACTION_ENABLED ) - env['appsec.extendedHeadersCollection.maxHeaders'] = maybeInt(DD_APPSEC_MAX_COLLECTED_HEADERS) - this.#envUnprocessed['appsec.extendedHeadersCollection.maxHeaders'] = DD_APPSEC_MAX_COLLECTED_HEADERS - this.#setString(env, 'appsec.obfuscatorKeyRegex', DD_APPSEC_OBFUSCATION_PARAMETER_KEY_REGEXP) - this.#setString(env, 'appsec.obfuscatorValueRegex', DD_APPSEC_OBFUSCATION_PARAMETER_VALUE_REGEXP) - this.#setBoolean(env, 'appsec.rasp.enabled', DD_APPSEC_RASP_ENABLED) + target['appsec.extendedHeadersCollection.maxHeaders'] = maybeInt(DD_APPSEC_MAX_COLLECTED_HEADERS) + unprocessedTarget['appsec.extendedHeadersCollection.maxHeaders'] = DD_APPSEC_MAX_COLLECTED_HEADERS + this.#setString(target, 'appsec.obfuscatorKeyRegex', DD_APPSEC_OBFUSCATION_PARAMETER_KEY_REGEXP) + this.#setString(target, 'appsec.obfuscatorValueRegex', DD_APPSEC_OBFUSCATION_PARAMETER_VALUE_REGEXP) + this.#setBoolean(target, 'appsec.rasp.enabled', DD_APPSEC_RASP_ENABLED) // TODO Deprecated, to delete in a major - 
this.#setBoolean(env, 'appsec.rasp.bodyCollection', DD_APPSEC_RASP_COLLECT_REQUEST_BODY) - env['appsec.rateLimit'] = maybeInt(DD_APPSEC_TRACE_RATE_LIMIT) - this.#envUnprocessed['appsec.rateLimit'] = DD_APPSEC_TRACE_RATE_LIMIT - this.#setString(env, 'appsec.rules', DD_APPSEC_RULES) + this.#setBoolean(target, 'appsec.rasp.bodyCollection', DD_APPSEC_RASP_COLLECT_REQUEST_BODY) + target['appsec.rateLimit'] = maybeInt(DD_APPSEC_TRACE_RATE_LIMIT) + unprocessedTarget['appsec.rateLimit'] = DD_APPSEC_TRACE_RATE_LIMIT + this.#setString(target, 'appsec.rules', DD_APPSEC_RULES) // DD_APPSEC_SCA_ENABLED is never used locally, but only sent to the backend - this.#setBoolean(env, 'appsec.sca.enabled', DD_APPSEC_SCA_ENABLED) - this.#setBoolean(env, 'appsec.stackTrace.enabled', DD_APPSEC_STACK_TRACE_ENABLED) - env['appsec.stackTrace.maxDepth'] = maybeInt(DD_APPSEC_MAX_STACK_TRACE_DEPTH) - this.#envUnprocessed['appsec.stackTrace.maxDepth'] = DD_APPSEC_MAX_STACK_TRACE_DEPTH - env['appsec.stackTrace.maxStackTraces'] = maybeInt(DD_APPSEC_MAX_STACK_TRACES) - this.#envUnprocessed['appsec.stackTrace.maxStackTraces'] = DD_APPSEC_MAX_STACK_TRACES - env['appsec.wafTimeout'] = maybeInt(DD_APPSEC_WAF_TIMEOUT) - this.#envUnprocessed['appsec.wafTimeout'] = DD_APPSEC_WAF_TIMEOUT - env.baggageMaxBytes = DD_TRACE_BAGGAGE_MAX_BYTES - env.baggageMaxItems = DD_TRACE_BAGGAGE_MAX_ITEMS - env.baggageTagKeys = DD_TRACE_BAGGAGE_TAG_KEYS - this.#setBoolean(env, 'clientIpEnabled', DD_TRACE_CLIENT_IP_ENABLED) - this.#setString(env, 'clientIpHeader', DD_TRACE_CLIENT_IP_HEADER?.toLowerCase()) - this.#setBoolean(env, 'crashtracking.enabled', DD_CRASHTRACKING_ENABLED ?? !this._isInServerlessEnvironment()) - this.#setBoolean(env, 'codeOriginForSpans.enabled', DD_CODE_ORIGIN_FOR_SPANS_ENABLED) + this.#setBoolean(target, 'appsec.sca.enabled', DD_APPSEC_SCA_ENABLED) + this.#setBoolean(target, 'appsec.stackTrace.enabled', DD_APPSEC_STACK_TRACE_ENABLED) + target['appsec.stackTrace.maxDepth'] = maybeInt(DD_APPSEC_MAX_STACK_TRACE_DEPTH) + unprocessedTarget['appsec.stackTrace.maxDepth'] = DD_APPSEC_MAX_STACK_TRACE_DEPTH + target['appsec.stackTrace.maxStackTraces'] = maybeInt(DD_APPSEC_MAX_STACK_TRACES) + unprocessedTarget['appsec.stackTrace.maxStackTraces'] = DD_APPSEC_MAX_STACK_TRACES + target['appsec.wafTimeout'] = maybeInt(DD_APPSEC_WAF_TIMEOUT) + unprocessedTarget['appsec.wafTimeout'] = DD_APPSEC_WAF_TIMEOUT + target.baggageMaxBytes = DD_TRACE_BAGGAGE_MAX_BYTES + target.baggageMaxItems = DD_TRACE_BAGGAGE_MAX_ITEMS + target.baggageTagKeys = DD_TRACE_BAGGAGE_TAG_KEYS + this.#setBoolean(target, 'clientIpEnabled', DD_TRACE_CLIENT_IP_ENABLED) + this.#setString(target, 'clientIpHeader', DD_TRACE_CLIENT_IP_HEADER?.toLowerCase()) + if (DD_TRACE_CLOUD_REQUEST_PAYLOAD_TAGGING || DD_TRACE_CLOUD_RESPONSE_PAYLOAD_TAGGING) { + if (DD_TRACE_CLOUD_REQUEST_PAYLOAD_TAGGING) { + this.#setBoolean(target, 'cloudPayloadTagging.requestsEnabled', true) + } + if (DD_TRACE_CLOUD_RESPONSE_PAYLOAD_TAGGING) { + this.#setBoolean(target, 'cloudPayloadTagging.responsesEnabled', true) + } + target['cloudPayloadTagging.rules'] = appendRules( + splitJSONPathRules(DD_TRACE_CLOUD_REQUEST_PAYLOAD_TAGGING), + splitJSONPathRules(DD_TRACE_CLOUD_RESPONSE_PAYLOAD_TAGGING) + ) + } + if (DD_TRACE_CLOUD_PAYLOAD_TAGGING_MAX_DEPTH) { + target['cloudPayloadTagging.maxDepth'] = maybeInt(DD_TRACE_CLOUD_PAYLOAD_TAGGING_MAX_DEPTH) + } + this.#setBoolean(target, 'crashtracking.enabled', DD_CRASHTRACKING_ENABLED) + this.#setBoolean(target, 'codeOriginForSpans.enabled', 
DD_CODE_ORIGIN_FOR_SPANS_ENABLED) this.#setBoolean( - env, + target, 'codeOriginForSpans.experimental.exit_spans.enabled', DD_CODE_ORIGIN_FOR_SPANS_EXPERIMENTAL_EXIT_SPANS_ENABLED ) - this.#setString(env, 'dbmPropagationMode', DD_DBM_PROPAGATION_MODE) - this.#setString(env, 'dogstatsd.hostname', DD_DOGSTATSD_HOST) - this.#setString(env, 'dogstatsd.port', DD_DOGSTATSD_PORT) - this.#setBoolean(env, 'dsmEnabled', DD_DATA_STREAMS_ENABLED) - this.#setBoolean(env, 'dynamicInstrumentation.enabled', DD_DYNAMIC_INSTRUMENTATION_ENABLED) - this.#setString(env, 'dynamicInstrumentation.probeFile', DD_DYNAMIC_INSTRUMENTATION_PROBE_FILE) - this.#setArray(env, 'dynamicInstrumentation.redactedIdentifiers', DD_DYNAMIC_INSTRUMENTATION_REDACTED_IDENTIFIERS) + this.#setString(target, 'dbmPropagationMode', DD_DBM_PROPAGATION_MODE) + this.#setString(target, 'dogstatsd.hostname', DD_DOGSTATSD_HOST) + this.#setString(target, 'dogstatsd.port', DD_DOGSTATSD_PORT) + this.#setBoolean(target, 'dsmEnabled', DD_DATA_STREAMS_ENABLED) + this.#setBoolean(target, 'dynamicInstrumentation.enabled', DD_DYNAMIC_INSTRUMENTATION_ENABLED) + this.#setString(target, 'dynamicInstrumentation.probeFile', DD_DYNAMIC_INSTRUMENTATION_PROBE_FILE) + this.#setArray(target, 'dynamicInstrumentation.redactedIdentifiers', + DD_DYNAMIC_INSTRUMENTATION_REDACTED_IDENTIFIERS) this.#setArray( - env, + target, 'dynamicInstrumentation.redactionExcludedIdentifiers', DD_DYNAMIC_INSTRUMENTATION_REDACTION_EXCLUDED_IDENTIFIERS ) - env['dynamicInstrumentation.uploadIntervalSeconds'] = maybeFloat(DD_DYNAMIC_INSTRUMENTATION_UPLOAD_INTERVAL_SECONDS) - this.#envUnprocessed['dynamicInstrumentation.uploadInterval'] = DD_DYNAMIC_INSTRUMENTATION_UPLOAD_INTERVAL_SECONDS - this.#setString(env, 'env', DD_ENV || tags.env) - this.#setBoolean(env, 'experimental.flaggingProvider.enabled', DD_EXPERIMENTAL_FLAGGING_PROVIDER_ENABLED) - this.#setBoolean(env, 'traceEnabled', DD_TRACE_ENABLED) - this.#setBoolean(env, 'experimental.aiguard.enabled', DD_AI_GUARD_ENABLED) - this.#setString(env, 'experimental.aiguard.endpoint', DD_AI_GUARD_ENDPOINT) - env['experimental.aiguard.maxContentSize'] = maybeInt(DD_AI_GUARD_MAX_CONTENT_SIZE) - this.#envUnprocessed['experimental.aiguard.maxContentSize'] = DD_AI_GUARD_MAX_CONTENT_SIZE - env['experimental.aiguard.maxMessagesLength'] = maybeInt(DD_AI_GUARD_MAX_MESSAGES_LENGTH) - this.#envUnprocessed['experimental.aiguard.maxMessagesLength'] = DD_AI_GUARD_MAX_MESSAGES_LENGTH - env['experimental.aiguard.timeout'] = maybeInt(DD_AI_GUARD_TIMEOUT) - this.#envUnprocessed['experimental.aiguard.timeout'] = DD_AI_GUARD_TIMEOUT - this.#setBoolean(env, 'experimental.enableGetRumData', DD_TRACE_EXPERIMENTAL_GET_RUM_DATA_ENABLED) - this.#setString(env, 'experimental.exporter', DD_TRACE_EXPERIMENTAL_EXPORTER) - env.flushInterval = AWS_LAMBDA_FUNCTION_NAME ? 
0 : maybeInt(DD_TRACE_FLUSH_INTERVAL) - env.flushMinSpans = maybeInt(DD_TRACE_PARTIAL_FLUSH_MIN_SPANS) - this.#envUnprocessed.flushMinSpans = DD_TRACE_PARTIAL_FLUSH_MIN_SPANS - this.#setBoolean(env, 'gitMetadataEnabled', DD_TRACE_GIT_METADATA_ENABLED) - this.#setIntegerRangeSet(env, 'grpc.client.error.statuses', DD_GRPC_CLIENT_ERROR_STATUSES) - this.#setIntegerRangeSet(env, 'grpc.server.error.statuses', DD_GRPC_SERVER_ERROR_STATUSES) - this.#setArray(env, 'headerTags', DD_TRACE_HEADER_TAGS) - env['heapSnapshot.count'] = maybeInt(DD_HEAP_SNAPSHOT_COUNT) - this.#setString(env, 'heapSnapshot.destination', DD_HEAP_SNAPSHOT_DESTINATION) - env['heapSnapshot.interval'] = maybeInt(DD_HEAP_SNAPSHOT_INTERVAL) - this.#setString(env, 'hostname', DD_AGENT_HOST) - env['iast.dbRowsToTaint'] = maybeInt(DD_IAST_DB_ROWS_TO_TAINT) - this.#setBoolean(env, 'iast.deduplicationEnabled', DD_IAST_DEDUPLICATION_ENABLED) - this.#setBoolean(env, 'iast.enabled', DD_IAST_ENABLED) - env['iast.maxConcurrentRequests'] = maybeInt(DD_IAST_MAX_CONCURRENT_REQUESTS) - this.#envUnprocessed['iast.maxConcurrentRequests'] = DD_IAST_MAX_CONCURRENT_REQUESTS - env['iast.maxContextOperations'] = maybeInt(DD_IAST_MAX_CONTEXT_OPERATIONS) - this.#envUnprocessed['iast.maxContextOperations'] = DD_IAST_MAX_CONTEXT_OPERATIONS - this.#setBoolean(env, 'iast.redactionEnabled', DD_IAST_REDACTION_ENABLED && !isFalse(DD_IAST_REDACTION_ENABLED)) - this.#setString(env, 'iast.redactionNamePattern', DD_IAST_REDACTION_NAME_PATTERN) - this.#setString(env, 'iast.redactionValuePattern', DD_IAST_REDACTION_VALUE_PATTERN) + target['dynamicInstrumentation.uploadIntervalSeconds'] = + maybeFloat(DD_DYNAMIC_INSTRUMENTATION_UPLOAD_INTERVAL_SECONDS) + unprocessedTarget['dynamicInstrumentation.uploadInterval'] = DD_DYNAMIC_INSTRUMENTATION_UPLOAD_INTERVAL_SECONDS + this.#setString(target, 'env', DD_ENV || tags.env) + this.#setBoolean(target, 'experimental.flaggingProvider.enabled', DD_EXPERIMENTAL_FLAGGING_PROVIDER_ENABLED) + this.#setBoolean(target, 'traceEnabled', DD_TRACE_ENABLED) + this.#setBoolean(target, 'experimental.aiguard.enabled', DD_AI_GUARD_ENABLED) + this.#setString(target, 'experimental.aiguard.endpoint', DD_AI_GUARD_ENDPOINT) + target['experimental.aiguard.maxContentSize'] = maybeInt(DD_AI_GUARD_MAX_CONTENT_SIZE) + unprocessedTarget['experimental.aiguard.maxContentSize'] = DD_AI_GUARD_MAX_CONTENT_SIZE + target['experimental.aiguard.maxMessagesLength'] = maybeInt(DD_AI_GUARD_MAX_MESSAGES_LENGTH) + unprocessedTarget['experimental.aiguard.maxMessagesLength'] = DD_AI_GUARD_MAX_MESSAGES_LENGTH + target['experimental.aiguard.timeout'] = maybeInt(DD_AI_GUARD_TIMEOUT) + unprocessedTarget['experimental.aiguard.timeout'] = DD_AI_GUARD_TIMEOUT + this.#setBoolean(target, 'experimental.enableGetRumData', DD_TRACE_EXPERIMENTAL_GET_RUM_DATA_ENABLED) + this.#setString(target, 'experimental.exporter', DD_TRACE_EXPERIMENTAL_EXPORTER) + if (AWS_LAMBDA_FUNCTION_NAME) { + target.flushInterval = 0 + } else if (DD_TRACE_FLUSH_INTERVAL) { + target.flushInterval = maybeInt(DD_TRACE_FLUSH_INTERVAL) + } + target.flushMinSpans = maybeInt(DD_TRACE_PARTIAL_FLUSH_MIN_SPANS) + unprocessedTarget.flushMinSpans = DD_TRACE_PARTIAL_FLUSH_MIN_SPANS + this.#setBoolean(target, 'gitMetadataEnabled', DD_TRACE_GIT_METADATA_ENABLED) + this.#setIntegerRangeSet(target, 'grpc.client.error.statuses', DD_GRPC_CLIENT_ERROR_STATUSES) + this.#setIntegerRangeSet(target, 'grpc.server.error.statuses', DD_GRPC_SERVER_ERROR_STATUSES) + this.#setArray(target, 'headerTags', DD_TRACE_HEADER_TAGS) + 
target['heapSnapshot.count'] = maybeInt(DD_HEAP_SNAPSHOT_COUNT) + this.#setString(target, 'heapSnapshot.destination', DD_HEAP_SNAPSHOT_DESTINATION) + target['heapSnapshot.interval'] = maybeInt(DD_HEAP_SNAPSHOT_INTERVAL) + this.#setString(target, 'hostname', DD_AGENT_HOST) + target['iast.dbRowsToTaint'] = maybeInt(DD_IAST_DB_ROWS_TO_TAINT) + this.#setBoolean(target, 'iast.deduplicationEnabled', DD_IAST_DEDUPLICATION_ENABLED) + this.#setBoolean(target, 'iast.enabled', DD_IAST_ENABLED) + target['iast.maxConcurrentRequests'] = maybeInt(DD_IAST_MAX_CONCURRENT_REQUESTS) + unprocessedTarget['iast.maxConcurrentRequests'] = DD_IAST_MAX_CONCURRENT_REQUESTS + target['iast.maxContextOperations'] = maybeInt(DD_IAST_MAX_CONTEXT_OPERATIONS) + unprocessedTarget['iast.maxContextOperations'] = DD_IAST_MAX_CONTEXT_OPERATIONS + this.#setBoolean(target, 'iast.redactionEnabled', DD_IAST_REDACTION_ENABLED && !isFalse(DD_IAST_REDACTION_ENABLED)) + this.#setString(target, 'iast.redactionNamePattern', DD_IAST_REDACTION_NAME_PATTERN) + this.#setString(target, 'iast.redactionValuePattern', DD_IAST_REDACTION_VALUE_PATTERN) const iastRequestSampling = maybeInt(DD_IAST_REQUEST_SAMPLING) if (iastRequestSampling !== undefined && iastRequestSampling > -1 && iastRequestSampling < 101) { - env['iast.requestSampling'] = iastRequestSampling + target['iast.requestSampling'] = iastRequestSampling + } + unprocessedTarget['iast.requestSampling'] = DD_IAST_REQUEST_SAMPLING + this.#setString(target, 'iast.securityControlsConfiguration', DD_IAST_SECURITY_CONTROLS_CONFIGURATION) + this.#setString(target, 'iast.telemetryVerbosity', DD_IAST_TELEMETRY_VERBOSITY) + this.#setBoolean(target, 'iast.stackTrace.enabled', DD_IAST_STACK_TRACE_ENABLED) + this.#setString(target, 'installSignature.id', DD_INSTRUMENTATION_INSTALL_ID) + this.#setString(target, 'installSignature.time', DD_INSTRUMENTATION_INSTALL_TIME) + this.#setString(target, 'installSignature.type', DD_INSTRUMENTATION_INSTALL_TYPE) + this.#setArray(target, 'injectionEnabled', DD_INJECTION_ENABLED) + if (DD_INJECTION_ENABLED !== undefined) { + this.#setString(target, 'instrumentationSource', DD_INJECTION_ENABLED ? 'ssi' : 'manual') } - this.#envUnprocessed['iast.requestSampling'] = DD_IAST_REQUEST_SAMPLING - this.#setString(env, 'iast.securityControlsConfiguration', DD_IAST_SECURITY_CONTROLS_CONFIGURATION) - this.#setString(env, 'iast.telemetryVerbosity', DD_IAST_TELEMETRY_VERBOSITY) - this.#setBoolean(env, 'iast.stackTrace.enabled', DD_IAST_STACK_TRACE_ENABLED) - this.#setArray(env, 'injectionEnabled', DD_INJECTION_ENABLED) - this.#setString(env, 'instrumentationSource', DD_INJECTION_ENABLED ? 
'ssi' : 'manual') - this.#setBoolean(env, 'injectForce', DD_INJECT_FORCE) - this.#setBoolean(env, 'isAzureFunction', getIsAzureFunction()) - this.#setBoolean(env, 'isGCPFunction', getIsGCPFunction()) - env['langchain.spanCharLimit'] = maybeInt(DD_LANGCHAIN_SPAN_CHAR_LIMIT) - env['langchain.spanPromptCompletionSampleRate'] = maybeFloat(DD_LANGCHAIN_SPAN_PROMPT_COMPLETION_SAMPLE_RATE) - this.#setBoolean(env, 'legacyBaggageEnabled', DD_TRACE_LEGACY_BAGGAGE_ENABLED) - this.#setBoolean(env, 'llmobs.agentlessEnabled', DD_LLMOBS_AGENTLESS_ENABLED) - this.#setBoolean(env, 'llmobs.enabled', DD_LLMOBS_ENABLED) - this.#setString(env, 'llmobs.mlApp', DD_LLMOBS_ML_APP) - this.#setBoolean(env, 'logInjection', DD_LOGS_INJECTION) + this.#setBoolean(target, 'injectForce', DD_INJECT_FORCE) + this.#setBoolean(target, 'isAzureFunction', getIsAzureFunction()) + this.#setBoolean(target, 'isGCPFunction', getIsGCPFunction()) + target['langchain.spanCharLimit'] = maybeInt(DD_LANGCHAIN_SPAN_CHAR_LIMIT) + target['langchain.spanPromptCompletionSampleRate'] = maybeFloat(DD_LANGCHAIN_SPAN_PROMPT_COMPLETION_SAMPLE_RATE) + this.#setBoolean(target, 'legacyBaggageEnabled', DD_TRACE_LEGACY_BAGGAGE_ENABLED) + this.#setBoolean(target, 'llmobs.agentlessEnabled', DD_LLMOBS_AGENTLESS_ENABLED) + this.#setBoolean(target, 'llmobs.enabled', DD_LLMOBS_ENABLED) + this.#setString(target, 'llmobs.mlApp', DD_LLMOBS_ML_APP) + this.#setBoolean(target, 'logInjection', DD_LOGS_INJECTION) // Requires an accompanying DD_APM_OBFUSCATION_MEMCACHED_KEEP_COMMAND=true in the agent - this.#setBoolean(env, 'memcachedCommandEnabled', DD_TRACE_MEMCACHED_COMMAND_ENABLED) - this.#setBoolean(env, 'middlewareTracingEnabled', DD_TRACE_MIDDLEWARE_TRACING_ENABLED) - this.#setBoolean(env, 'openAiLogsEnabled', DD_OPENAI_LOGS_ENABLED) - env['openai.spanCharLimit'] = maybeInt(DD_OPENAI_SPAN_CHAR_LIMIT) - this.#envUnprocessed.openaiSpanCharLimit = DD_OPENAI_SPAN_CHAR_LIMIT + this.#setBoolean(target, 'memcachedCommandEnabled', DD_TRACE_MEMCACHED_COMMAND_ENABLED) + this.#setBoolean(target, 'middlewareTracingEnabled', DD_TRACE_MIDDLEWARE_TRACING_ENABLED) + this.#setBoolean(target, 'openAiLogsEnabled', DD_OPENAI_LOGS_ENABLED) + target['openai.spanCharLimit'] = maybeInt(DD_OPENAI_SPAN_CHAR_LIMIT) + unprocessedTarget.openaiSpanCharLimit = DD_OPENAI_SPAN_CHAR_LIMIT if (DD_TRACE_PEER_SERVICE_MAPPING) { - env.peerServiceMapping = Object.fromEntries( + target.peerServiceMapping = Object.fromEntries( DD_TRACE_PEER_SERVICE_MAPPING.split(',').map(x => x.trim().split(':')) ) - this.#envUnprocessed.peerServiceMapping = DD_TRACE_PEER_SERVICE_MAPPING + unprocessedTarget.peerServiceMapping = DD_TRACE_PEER_SERVICE_MAPPING } - this.#setString(env, 'port', DD_TRACE_AGENT_PORT) - const profilingEnabled = normalizeProfilingEnabledValue( - DD_PROFILING_ENABLED ?? - (this._isInServerlessEnvironment() ? 
'false' : undefined) - ) - this.#setString(env, 'profiling.enabled', profilingEnabled) - this.#setString(env, 'profiling.exporters', DD_PROFILING_EXPORTERS) - this.#setBoolean(env, 'profiling.sourceMap', DD_PROFILING_SOURCE_MAP && !isFalse(DD_PROFILING_SOURCE_MAP)) + this.#setString(target, 'port', DD_TRACE_AGENT_PORT) + const profilingEnabled = normalizeProfilingEnabledValue(DD_PROFILING_ENABLED) + this.#setString(target, 'profiling.enabled', profilingEnabled) + this.#setString(target, 'profiling.exporters', DD_PROFILING_EXPORTERS) + this.#setBoolean(target, 'profiling.sourceMap', DD_PROFILING_SOURCE_MAP && !isFalse(DD_PROFILING_SOURCE_MAP)) if (DD_INTERNAL_PROFILING_LONG_LIVED_THRESHOLD) { // This is only used in testing to not have to wait 30s - env['profiling.longLivedThreshold'] = Number(DD_INTERNAL_PROFILING_LONG_LIVED_THRESHOLD) + target['profiling.longLivedThreshold'] = Number(DD_INTERNAL_PROFILING_LONG_LIVED_THRESHOLD) } - this.#setString(env, 'protocolVersion', DD_TRACE_AGENT_PROTOCOL_VERSION) - this.#setString(env, 'queryStringObfuscation', DD_TRACE_OBFUSCATION_QUERY_STRING_REGEXP) - this.#setBoolean(env, 'remoteConfig.enabled', DD_REMOTE_CONFIGURATION_ENABLED ?? !this._isInServerlessEnvironment()) - env['remoteConfig.pollInterval'] = maybeFloat(DD_REMOTE_CONFIG_POLL_INTERVAL_SECONDS) - this.#envUnprocessed['remoteConfig.pollInterval'] = DD_REMOTE_CONFIG_POLL_INTERVAL_SECONDS - this.#setBoolean(env, 'reportHostname', DD_TRACE_REPORT_HOSTNAME) + this.#setString(target, 'protocolVersion', DD_TRACE_AGENT_PROTOCOL_VERSION) + this.#setString(target, 'queryStringObfuscation', DD_TRACE_OBFUSCATION_QUERY_STRING_REGEXP) + this.#setBoolean(target, 'remoteConfig.enabled', DD_REMOTE_CONFIGURATION_ENABLED) + target['remoteConfig.pollInterval'] = maybeFloat(DD_REMOTE_CONFIG_POLL_INTERVAL_SECONDS) + unprocessedTarget['remoteConfig.pollInterval'] = DD_REMOTE_CONFIG_POLL_INTERVAL_SECONDS + this.#setBoolean(target, 'reportHostname', DD_TRACE_REPORT_HOSTNAME) // only used to explicitly set runtimeMetrics to false const otelSetRuntimeMetrics = String(OTEL_METRICS_EXPORTER).toLowerCase() === 'none' ? false : undefined - this.#setBoolean(env, 'runtimeMetrics.enabled', DD_RUNTIME_METRICS_ENABLED || + this.#setBoolean(target, 'runtimeMetrics.enabled', DD_RUNTIME_METRICS_ENABLED || otelSetRuntimeMetrics) - this.#setBoolean(env, 'runtimeMetrics.eventLoop', DD_RUNTIME_METRICS_EVENT_LOOP_ENABLED) - this.#setBoolean(env, 'runtimeMetrics.gc', DD_RUNTIME_METRICS_GC_ENABLED) - this.#setBoolean(env, 'runtimeMetricsRuntimeId', DD_RUNTIME_METRICS_RUNTIME_ID_ENABLED) - this.#setArray(env, 'sampler.spanSamplingRules', reformatSpanSamplingRules( + this.#setBoolean(target, 'runtimeMetrics.eventLoop', DD_RUNTIME_METRICS_EVENT_LOOP_ENABLED) + this.#setBoolean(target, 'runtimeMetrics.gc', DD_RUNTIME_METRICS_GC_ENABLED) + this.#setBoolean(target, 'runtimeMetricsRuntimeId', DD_RUNTIME_METRICS_RUNTIME_ID_ENABLED) + this.#setArray(target, 'sampler.spanSamplingRules', reformatSpanSamplingRules( maybeJsonFile(DD_SPAN_SAMPLING_RULES_FILE) ?? 
safeJsonParse(DD_SPAN_SAMPLING_RULES) )) - this.#setUnit(env, 'sampleRate', DD_TRACE_SAMPLE_RATE || + this.#setUnit(target, 'sampleRate', DD_TRACE_SAMPLE_RATE || getFromOtelSamplerMap(OTEL_TRACES_SAMPLER, OTEL_TRACES_SAMPLER_ARG)) - env['sampler.rateLimit'] = DD_TRACE_RATE_LIMIT - this.#setSamplingRule(env, 'sampler.rules', safeJsonParse(DD_TRACE_SAMPLING_RULES)) - this.#envUnprocessed['sampler.rules'] = DD_TRACE_SAMPLING_RULES - this.#setString(env, 'scope', DD_TRACE_SCOPE) - this.#setString(env, 'service', DD_SERVICE || tags.service || OTEL_SERVICE_NAME) + target['sampler.rateLimit'] = DD_TRACE_RATE_LIMIT + this.#setSamplingRule(target, 'sampler.rules', safeJsonParse(DD_TRACE_SAMPLING_RULES)) + unprocessedTarget['sampler.rules'] = DD_TRACE_SAMPLING_RULES + this.#setString(target, 'scope', DD_TRACE_SCOPE) + this.#setString(target, 'service', DD_SERVICE || tags.service || OTEL_SERVICE_NAME) if (DD_SERVICE_MAPPING) { - env.serviceMapping = Object.fromEntries( + target.serviceMapping = Object.fromEntries( DD_SERVICE_MAPPING.split(',').map(x => x.trim().split(':')) ) } - this.#setString(env, 'site', DD_SITE) + this.#setString(target, 'site', DD_SITE) if (DD_TRACE_SPAN_ATTRIBUTE_SCHEMA) { - this.#setString(env, 'spanAttributeSchema', validateNamingVersion(DD_TRACE_SPAN_ATTRIBUTE_SCHEMA)) - this.#envUnprocessed.spanAttributeSchema = DD_TRACE_SPAN_ATTRIBUTE_SCHEMA + this.#setString(target, 'spanAttributeSchema', validateNamingVersion(DD_TRACE_SPAN_ATTRIBUTE_SCHEMA)) + unprocessedTarget.spanAttributeSchema = DD_TRACE_SPAN_ATTRIBUTE_SCHEMA } // 0: disabled, 1: logging, 2: garbage collection + logging - env.spanLeakDebug = maybeInt(DD_TRACE_SPAN_LEAK_DEBUG) - this.#setBoolean(env, 'spanRemoveIntegrationFromService', DD_TRACE_REMOVE_INTEGRATION_SERVICE_NAMES_ENABLED) - this.#setBoolean(env, 'startupLogs', DD_TRACE_STARTUP_LOGS) - this.#setTags(env, 'tags', tags) - env.tagsHeaderMaxLength = DD_TRACE_X_DATADOG_TAGS_MAX_LENGTH - this.#setBoolean(env, 'telemetry.enabled', DD_INSTRUMENTATION_TELEMETRY_ENABLED ?? - !(this._isInServerlessEnvironment() || JEST_WORKER_ID)) - this.#setString(env, 'instrumentation_config_id', DD_INSTRUMENTATION_CONFIG_ID) - this.#setBoolean(env, 'telemetry.debug', DD_TELEMETRY_DEBUG) - this.#setBoolean(env, 'telemetry.dependencyCollection', DD_TELEMETRY_DEPENDENCY_COLLECTION_ENABLED) - env['telemetry.heartbeatInterval'] = maybeInt(Math.floor(DD_TELEMETRY_HEARTBEAT_INTERVAL * 1000)) - this.#envUnprocessed['telemetry.heartbeatInterval'] = DD_TELEMETRY_HEARTBEAT_INTERVAL * 1000 - this.#setBoolean(env, 'telemetry.logCollection', DD_TELEMETRY_LOG_COLLECTION_ENABLED) - this.#setBoolean(env, 'telemetry.metrics', DD_TELEMETRY_METRICS_ENABLED) - this.#setBoolean(env, 'traceId128BitGenerationEnabled', DD_TRACE_128_BIT_TRACEID_GENERATION_ENABLED) - this.#setBoolean(env, 'traceId128BitLoggingEnabled', DD_TRACE_128_BIT_TRACEID_LOGGING_ENABLED) - this.#setBoolean(env, 'tracePropagationExtractFirst', DD_TRACE_PROPAGATION_EXTRACT_FIRST) - const stringPropagationBehaviorExtract = String(DD_TRACE_PROPAGATION_BEHAVIOR_EXTRACT) - env.tracePropagationBehaviorExtract = - VALID_PROPAGATION_BEHAVIOR_EXTRACT.has(stringPropagationBehaviorExtract) - ? 
stringPropagationBehaviorExtract
-        : 'continue'
-    this.#setBoolean(env, 'tracePropagationStyle.otelPropagators',
-      DD_TRACE_PROPAGATION_STYLE ||
-      DD_TRACE_PROPAGATION_STYLE_INJECT ||
+    target.spanLeakDebug = maybeInt(DD_TRACE_SPAN_LEAK_DEBUG)
+    this.#setBoolean(target, 'spanRemoveIntegrationFromService', DD_TRACE_REMOVE_INTEGRATION_SERVICE_NAMES_ENABLED)
+    this.#setBoolean(target, 'startupLogs', DD_TRACE_STARTUP_LOGS)
+    this.#setTags(target, 'tags', tags)
+    target.tagsHeaderMaxLength = DD_TRACE_X_DATADOG_TAGS_MAX_LENGTH
+    this.#setBoolean(target, 'telemetry.enabled', DD_INSTRUMENTATION_TELEMETRY_ENABLED)
+    this.#setString(target, 'instrumentation_config_id', DD_INSTRUMENTATION_CONFIG_ID)
+    this.#setBoolean(target, 'telemetry.debug', DD_TELEMETRY_DEBUG)
+    this.#setBoolean(target, 'telemetry.dependencyCollection', DD_TELEMETRY_DEPENDENCY_COLLECTION_ENABLED)
+    target['telemetry.heartbeatInterval'] = maybeInt(Math.floor(DD_TELEMETRY_HEARTBEAT_INTERVAL * 1000))
+    unprocessedTarget['telemetry.heartbeatInterval'] = DD_TELEMETRY_HEARTBEAT_INTERVAL * 1000
+    this.#setBoolean(target, 'telemetry.logCollection', DD_TELEMETRY_LOG_COLLECTION_ENABLED)
+    this.#setBoolean(target, 'telemetry.metrics', DD_TELEMETRY_METRICS_ENABLED)
+    this.#setBoolean(target, 'traceId128BitGenerationEnabled', DD_TRACE_128_BIT_TRACEID_GENERATION_ENABLED)
+    this.#setBoolean(target, 'traceId128BitLoggingEnabled', DD_TRACE_128_BIT_TRACEID_LOGGING_ENABLED)
+    warnIfPropagationStyleConflict(
+      DD_TRACE_PROPAGATION_STYLE,
+      DD_TRACE_PROPAGATION_STYLE_INJECT,
       DD_TRACE_PROPAGATION_STYLE_EXTRACT
-        ? false
-        : !!OTEL_PROPAGATORS)
-    this.#setBoolean(env, 'traceWebsocketMessagesEnabled', DD_TRACE_WEBSOCKET_MESSAGES_ENABLED)
-    this.#setBoolean(env, 'traceWebsocketMessagesInheritSampling', DD_TRACE_WEBSOCKET_MESSAGES_INHERIT_SAMPLING)
-    this.#setBoolean(env, 'traceWebsocketMessagesSeparateTraces', DD_TRACE_WEBSOCKET_MESSAGES_SEPARATE_TRACES)
-    this.#setBoolean(env, 'tracing', DD_TRACING_ENABLED)
-    this.#setString(env, 'version', DD_VERSION || tags.version)
-    this.#setBoolean(env, 'inferredProxyServicesEnabled', DD_TRACE_INFERRED_PROXY_SERVICES_ENABLED)
-    this.#setBoolean(env, 'trace.aws.addSpanPointers', DD_TRACE_AWS_ADD_SPAN_POINTERS)
-    this.#setString(env, 'trace.dynamoDb.tablePrimaryKeys', DD_TRACE_DYNAMODB_TABLE_PRIMARY_KEYS)
-    this.#setArray(env, 'graphqlErrorExtensions', DD_TRACE_GRAPHQL_ERROR_EXTENSIONS)
-    this.#setBoolean(env, 'trace.nativeSpanEvents', DD_TRACE_NATIVE_SPAN_EVENTS)
-    env['vertexai.spanPromptCompletionSampleRate'] = maybeFloat(DD_VERTEXAI_SPAN_PROMPT_COMPLETION_SAMPLE_RATE)
-    env['vertexai.spanCharLimit'] = maybeInt(DD_VERTEXAI_SPAN_CHAR_LIMIT)
+    )
+    if (DD_TRACE_PROPAGATION_STYLE !== undefined) {
+      this.#setArray(target, 'tracePropagationStyle.inject', normalizePropagationStyle(DD_TRACE_PROPAGATION_STYLE))
+      this.#setArray(target, 'tracePropagationStyle.extract', normalizePropagationStyle(DD_TRACE_PROPAGATION_STYLE))
+    }
+    if (DD_TRACE_PROPAGATION_STYLE_INJECT !== undefined) {
+      this.#setArray(target, 'tracePropagationStyle.inject',
+        normalizePropagationStyle(DD_TRACE_PROPAGATION_STYLE_INJECT))
+    }
+    if (DD_TRACE_PROPAGATION_STYLE_EXTRACT !== undefined) {
+      this.#setArray(target, 'tracePropagationStyle.extract',
+        normalizePropagationStyle(DD_TRACE_PROPAGATION_STYLE_EXTRACT))
+    }
+    this.#setBoolean(target, 'tracePropagationExtractFirst', DD_TRACE_PROPAGATION_EXTRACT_FIRST)
+    if (DD_TRACE_PROPAGATION_BEHAVIOR_EXTRACT !== undefined) {
+      const stringPropagationBehaviorExtract = String(DD_TRACE_PROPAGATION_BEHAVIOR_EXTRACT)
+      target.tracePropagationBehaviorExtract =
+        VALID_PROPAGATION_BEHAVIOR_EXTRACT.has(stringPropagationBehaviorExtract)
+          ? stringPropagationBehaviorExtract
+          : 'continue'
+    }
+    if (DD_TRACE_PROPAGATION_STYLE !== undefined ||
+      DD_TRACE_PROPAGATION_STYLE_INJECT !== undefined ||
+      DD_TRACE_PROPAGATION_STYLE_EXTRACT !== undefined ||
+      OTEL_PROPAGATORS !== undefined) {
+      // At least one var is defined, calculate value using truthy logic
+      const useDdStyle = DD_TRACE_PROPAGATION_STYLE ||
+        DD_TRACE_PROPAGATION_STYLE_INJECT ||
+        DD_TRACE_PROPAGATION_STYLE_EXTRACT
+      this.#setBoolean(target, 'tracePropagationStyle.otelPropagators',
+        useDdStyle ? false : !!OTEL_PROPAGATORS)
+
+      // Use OTEL_PROPAGATORS if no DD-specific vars are set
+      if (!useDdStyle && OTEL_PROPAGATORS) {
+        const otelStyles = normalizePropagationStyle(OTEL_PROPAGATORS)
+        // Validate OTEL propagators
+        for (const style of otelStyles || []) {
+          if (!VALID_PROPAGATION_STYLES.has(style)) {
+            log.warn('unexpected value %s for OTEL_PROPAGATORS environment variable', style)
+            getCounter('otel.env.invalid', 'DD_TRACE_PROPAGATION_STYLE', 'OTEL_PROPAGATORS').inc()
+          }
+        }
+        // Set inject/extract from OTEL_PROPAGATORS
+        if (otelStyles) {
+          this.#setArray(target, 'tracePropagationStyle.inject', otelStyles)
+          this.#setArray(target, 'tracePropagationStyle.extract', otelStyles)
+        }
+      }
+    }
+    this.#setBoolean(target, 'traceWebsocketMessagesEnabled', DD_TRACE_WEBSOCKET_MESSAGES_ENABLED)
+    this.#setBoolean(target, 'traceWebsocketMessagesInheritSampling', DD_TRACE_WEBSOCKET_MESSAGES_INHERIT_SAMPLING)
+    this.#setBoolean(target, 'traceWebsocketMessagesSeparateTraces', DD_TRACE_WEBSOCKET_MESSAGES_SEPARATE_TRACES)
+    this.#setBoolean(target, 'tracing', DD_TRACING_ENABLED)
+    this.#setString(target, 'version', DD_VERSION || tags.version)
+    this.#setBoolean(target, 'inferredProxyServicesEnabled', DD_TRACE_INFERRED_PROXY_SERVICES_ENABLED)
+    this.#setBoolean(target, 'trace.aws.addSpanPointers', DD_TRACE_AWS_ADD_SPAN_POINTERS)
+    this.#setString(target, 'trace.dynamoDb.tablePrimaryKeys', DD_TRACE_DYNAMODB_TABLE_PRIMARY_KEYS)
+    this.#setArray(target, 'graphqlErrorExtensions', DD_TRACE_GRAPHQL_ERROR_EXTENSIONS)
+    this.#setBoolean(target, 'trace.nativeSpanEvents', DD_TRACE_NATIVE_SPAN_EVENTS)
+    target['vertexai.spanPromptCompletionSampleRate'] = maybeFloat(DD_VERTEXAI_SPAN_PROMPT_COMPLETION_SAMPLE_RATE)
+    target['vertexai.spanCharLimit'] = maybeInt(DD_VERTEXAI_SPAN_CHAR_LIMIT)
   }
 
   #applyOptions (options) {
@@ -954,6 +948,25 @@ class Config {
     this.#optsUnprocessed['appsec.wafTimeout'] = options.appsec?.wafTimeout
     this.#setBoolean(opts, 'clientIpEnabled', options.clientIpEnabled)
     this.#setString(opts, 'clientIpHeader', options.clientIpHeader?.toLowerCase())
+    if (options.cloudPayloadTagging?.request || options.cloudPayloadTagging?.response) {
+      if (options.cloudPayloadTagging.request) {
+        this.#setBoolean(opts, 'cloudPayloadTagging.requestsEnabled', true)
+      }
+      if (options.cloudPayloadTagging.response) {
+        this.#setBoolean(opts, 'cloudPayloadTagging.responsesEnabled', true)
+      }
+      opts['cloudPayloadTagging.rules'] = appendRules(
+        splitJSONPathRules(options.cloudPayloadTagging.request),
+        splitJSONPathRules(options.cloudPayloadTagging.response)
+      )
+    }
+    if (options.cloudPayloadTagging?.requestsEnabled !== undefined) {
+      this.#setBoolean(opts, 'cloudPayloadTagging.requestsEnabled', options.cloudPayloadTagging.requestsEnabled)
+    }
+    if (options.cloudPayloadTagging?.responsesEnabled !== undefined) {
+      this.#setBoolean(opts, 'cloudPayloadTagging.responsesEnabled', options.cloudPayloadTagging.responsesEnabled)
+    }
+    opts['cloudPayloadTagging.maxDepth'] = maybeInt(options.cloudPayloadTagging?.maxDepth)
     opts.baggageMaxBytes = options.baggageMaxBytes
     opts.baggageMaxItems = options.baggageMaxItems
     opts.baggageTagKeys = options.baggageTagKeys
@@ -1070,6 +1083,12 @@ class Config {
     this.#setBoolean(opts, 'inferredProxyServicesEnabled', options.inferredProxyServicesEnabled)
     this.#setBoolean(opts, 'graphqlErrorExtensions', options.graphqlErrorExtensions)
     this.#setBoolean(opts, 'trace.nativeSpanEvents', options.trace?.nativeSpanEvents)
+    if (options.tracePropagationStyle) {
+      this.#setArray(opts, 'tracePropagationStyle.inject',
+        normalizePropagationStyle(options.tracePropagationStyle.inject ?? options.tracePropagationStyle))
+      this.#setArray(opts, 'tracePropagationStyle.extract',
+        normalizePropagationStyle(options.tracePropagationStyle.extract ?? options.tracePropagationStyle))
+    }
 
     // For LLMObs, we want the environment variable to take precedence over the options.
     // This is reliant on environment config being set before options.
@@ -1189,17 +1208,11 @@ class Config {
     this.#setBoolean(calc, 'spanComputePeerService', this.#getSpanComputePeerService())
     this.#setBoolean(calc, 'stats.enabled', this.#isTraceStatsComputationEnabled())
     const defaultPropagationStyle = this.#getDefaultPropagationStyle(this.#optionsArg)
-    calc['tracePropagationStyle.inject'] = propagationStyle(
-      'inject',
-      this.#optionsArg.tracePropagationStyle
-    )
-    calc['tracePropagationStyle.extract'] = propagationStyle(
-      'extract',
-      this.#optionsArg.tracePropagationStyle
-    )
     if (defaultPropagationStyle.length > 2) {
-      calc['tracePropagationStyle.inject'] = calc['tracePropagationStyle.inject'] || defaultPropagationStyle
-      calc['tracePropagationStyle.extract'] = calc['tracePropagationStyle.extract'] || defaultPropagationStyle
+      // b3 was added, so update defaults to include it
+      // This will only be used if no other source (options, env, stable config) set the value
+      calc['tracePropagationStyle.inject'] = defaultPropagationStyle
+      calc['tracePropagationStyle.extract'] = defaultPropagationStyle
     }
   }
diff --git a/packages/dd-trace/src/config_defaults.js b/packages/dd-trace/src/config_defaults.js
index 996aa27ec4a..a7492209c46 100644
--- a/packages/dd-trace/src/config_defaults.js
+++ b/packages/dd-trace/src/config_defaults.js
@@ -19,6 +19,8 @@ const service = getEnv('AWS_LAMBDA_FUNCTION_NAME') || 'node'
 
 module.exports = {
+  apiKey: undefined,
+  appKey: undefined,
   apmTracingEnabled: true,
   'appsec.apiSecurity.enabled': true,
   'appsec.apiSecurity.sampleDelay': 30,
@@ -50,6 +52,10 @@ module.exports = {
   baggageTagKeys: 'user.id,session.id,account.id',
   clientIpEnabled: false,
   clientIpHeader: null,
+  'cloudPayloadTagging.requestsEnabled': false,
+  'cloudPayloadTagging.responsesEnabled': false,
+  'cloudPayloadTagging.maxDepth': 10,
+  'cloudPayloadTagging.rules': [],
   'crashtracking.enabled': true,
   'codeOriginForSpans.enabled': true,
   'codeOriginForSpans.experimental.exit_spans.enabled': false,
@@ -95,6 +101,9 @@ module.exports = {
   'iast.telemetryVerbosity': 'INFORMATION',
   'iast.stackTrace.enabled': true,
   injectionEnabled: [],
+  'installSignature.id': null,
+  'installSignature.time': null,
+  'installSignature.type': null,
   instrumentationSource: 'manual',
   injectForce: null,
   isAzureFunction: false,
diff --git a/packages/dd-trace/src/proxy.js b/packages/dd-trace/src/proxy.js
index 13651fa5697..3c569c47ce1 100644
--- a/packages/dd-trace/src/proxy.js
+++
b/packages/dd-trace/src/proxy.js @@ -164,7 +164,7 @@ class Tracer extends NoopProxy { rc.setProductHandler('FFE_FLAGS', (action, conf) => { // Feed UFC config directly to OpenFeature provider if (action === 'apply' || action === 'modify') { - this.openfeature._setConfiguration(conf.flag_configuration) + this.openfeature._setConfiguration(conf) } }) } diff --git a/packages/dd-trace/src/telemetry/logs/log-collector.js b/packages/dd-trace/src/telemetry/logs/log-collector.js index 35b9383c181..1d218d056ca 100644 --- a/packages/dd-trace/src/telemetry/logs/log-collector.js +++ b/packages/dd-trace/src/telemetry/logs/log-collector.js @@ -41,12 +41,14 @@ function sanitize (logEntry) { const firstIndex = stackLines.findIndex(l => l.match(STACK_FRAME_LINE_REGEX)) - const isDDCode = firstIndex !== -1 && stackLines[firstIndex].includes(ddBasePath) + // Filter to keep only DD frames stackLines = stackLines - .filter((line, index) => (isDDCode && index < firstIndex) || line.includes(ddBasePath)) + .filter((line, index) => index >= firstIndex && line.includes(ddBasePath)) .map(line => line.replace(ddBasePath, '')) - if (!isDDCode && logEntry.errorType && stackLines.length) { + // ALWAYS redact error messages (RFC requirement: exception type only, no message) + // This handles single-line and multi-line error messages + if (logEntry.errorType && stackLines.length) { stackLines = [`${logEntry.errorType}: redacted`, ...stackLines] } diff --git a/packages/dd-trace/test/config.spec.js b/packages/dd-trace/test/config.spec.js index 43bf05b8977..dad09fa8f9a 100644 --- a/packages/dd-trace/test/config.spec.js +++ b/packages/dd-trace/test/config.spec.js @@ -480,7 +480,7 @@ describe('Config', () => { value: '(?:p(?:ass)?w(?:or)?d|pass(?:_?phrase)?|secret|(?:api_?|private_?|public_?|access_?|secret_?)key(?:_?id)?|token|consumer_?(?:id|key|secret)|sign(?:ed|ature)?|auth(?:entication|orization)?)(?:(?:\\s|%20)*(?:=|%3D)[^&]+|(?:"|%22)(?:\\s|%20)*(?::|%3A)(?:\\s|%20)*(?:"|%22)(?:%2[^2]|%[^2]|[^"%])+(?:"|%22))|bearer(?:\\s|%20)+[a-z0-9\\._\\-]+|token(?::|%3A)[a-z0-9]{13}|gh[opsu]_[0-9a-zA-Z]{36}|ey[I-L](?:[\\w=-]|%3D)+\\.ey[I-L](?:[\\w=-]|%3D)+(?:\\.(?:[\\w.+\\/=-]|%3D|%2F|%2B)+)?|[\\-]{5}BEGIN(?:[a-z\\s]|%20)+PRIVATE(?:\\s|%20)KEY[\\-]{5}[^\\-]+[\\-]{5}END(?:[a-z\\s]|%20)+PRIVATE(?:\\s|%20)KEY|ssh-rsa(?:\\s|%20)*(?:[a-z0-9\\/\\.+]|%2F|%5C|%2B){100,}', origin: 'default' }, - { name: 'remoteConfig.enabled', value: true, origin: 'env_var' }, + { name: 'remoteConfig.enabled', value: true, origin: 'default' }, { name: 'remoteConfig.pollInterval', value: 5, origin: 'default' }, { name: 'reportHostname', value: false, origin: 'default' }, { name: 'reportHostname', value: false, origin: 'default' }, @@ -500,7 +500,7 @@ describe('Config', () => { { name: 'tagsHeaderMaxLength', value: 512, origin: 'default' }, { name: 'telemetry.debug', value: false, origin: 'default' }, { name: 'telemetry.dependencyCollection', value: true, origin: 'default' }, - { name: 'telemetry.enabled', value: true, origin: 'env_var' }, + { name: 'telemetry.enabled', value: true, origin: 'default' }, { name: 'telemetry.heartbeatInterval', value: 60000, origin: 'default' }, { name: 'telemetry.logCollection', value: true, origin: 'default' }, { name: 'telemetry.metrics', value: true, origin: 'default' }, @@ -882,7 +882,6 @@ describe('Config', () => { { name: 'service', value: 'service', origin: 'env_var' }, { name: 'spanAttributeSchema', value: 'v1', origin: 'env_var' }, { name: 'spanRemoveIntegrationFromService', value: true, origin: 'env_var' }, - { name: 
'telemetry.enabled', value: true, origin: 'env_var' },
         { name: 'traceId128BitGenerationEnabled', value: true, origin: 'env_var' },
         { name: 'traceId128BitLoggingEnabled', value: true, origin: 'env_var' },
         { name: 'tracing', value: false, origin: 'env_var' },
@@ -2958,6 +2957,127 @@ apm_configuration_default:
       const stableConfig = new Config()
       expect(stableConfig).to.not.have.property('stableConfig')
     })
+
+    it('should support all extended configs across product areas', () => {
+      fs.writeFileSync(
+        process.env.DD_TEST_LOCAL_CONFIG_PATH,
+        `
+apm_configuration_default:
+  DD_TRACE_PROPAGATION_STYLE: "tracecontext"
+  DD_TRACE_128_BIT_TRACEID_GENERATION_ENABLED: true
+
+  DD_APPSEC_TRACE_RATE_LIMIT: 100
+  DD_APPSEC_MAX_STACK_TRACES: 2
+  DD_APPSEC_OBFUSCATION_PARAMETER_KEY_REGEXP: "password|token"
+
+  DD_IAST_REQUEST_SAMPLING: 50
+  DD_IAST_MAX_CONCURRENT_REQUESTS: 10
+
+  DD_TELEMETRY_HEARTBEAT_INTERVAL: 42
+  DD_TELEMETRY_METRICS_ENABLED: false
+
+  DD_LLMOBS_ML_APP: "my-llm-app"
+
+  DD_PROFILING_EXPORTERS: "agent"
+
+  DD_DYNAMIC_INSTRUMENTATION_PROBE_FILE: "/tmp/probes"
+`)
+      const config = new Config()
+
+      // Tracing
+      expect(config).to.have.nested.property('traceId128BitGenerationEnabled', true)
+      expect(config).to.have.nested.deep.property('tracePropagationStyle.inject', ['tracecontext'])
+      expect(config).to.have.nested.deep.property('tracePropagationStyle.extract', ['tracecontext'])
+
+      // Appsec
+      expect(config).to.have.nested.property('appsec.rateLimit', 100)
+      expect(config).to.have.nested.property('appsec.stackTrace.maxStackTraces', 2)
+      expect(config).to.have.nested.property('appsec.obfuscatorKeyRegex', 'password|token')
+
+      // IAST
+      expect(config).to.have.nested.property('iast.requestSampling', 50)
+      expect(config).to.have.nested.property('iast.maxConcurrentRequests', 10)
+
+      // Telemetry
+      expect(config).to.have.nested.property('telemetry.heartbeatInterval', 42000)
+      expect(config).to.have.nested.property('telemetry.metrics', false)
+
+      // LLMObs
+      expect(config).to.have.nested.property('llmobs.mlApp', 'my-llm-app')
+
+      // Profiling
+      expect(config).to.have.nested.property('profiling.exporters', 'agent')
+
+      // Dynamic Instrumentation
+      expect(config).to.have.nested.property('dynamicInstrumentation.probeFile', '/tmp/probes')
+    })
+
+    // Regression test for fields that were previously set directly from environment variables
+    // before they were supported by stable config as well.
+    it('should support legacy direct-set fields through all stable config and env var sources', () => {
+      // Test 1: Local stable config should work
+      fs.writeFileSync(
+        process.env.DD_TEST_LOCAL_CONFIG_PATH,
+        `
+apm_configuration_default:
+  DD_API_KEY: "local-api-key"
+  DD_APP_KEY: "local-app-key"
+  DD_INSTRUMENTATION_INSTALL_ID: "local-install-id"
+  DD_INSTRUMENTATION_INSTALL_TIME: "1234567890"
+  DD_INSTRUMENTATION_INSTALL_TYPE: "local_install"
+  DD_TRACE_CLOUD_REQUEST_PAYLOAD_TAGGING: "all"
+  DD_TRACE_CLOUD_PAYLOAD_TAGGING_MAX_DEPTH: 5
+`)
+      let config = new Config()
+      expect(config).to.have.property('apiKey', 'local-api-key')
+      expect(config).to.have.property('appKey', 'local-app-key')
+      expect(config).to.have.nested.property('installSignature.id', 'local-install-id')
+      expect(config).to.have.nested.property('installSignature.time', '1234567890')
+      expect(config).to.have.nested.property('installSignature.type', 'local_install')
+      expect(config).to.have.nested.property('cloudPayloadTagging.requestsEnabled', true)
+      expect(config).to.have.nested.property('cloudPayloadTagging.maxDepth', 5)
+
+      // Test 2: Env vars should take precedence over local stable config
+      process.env.DD_API_KEY = 'env-api-key'
+      process.env.DD_APP_KEY = 'env-app-key'
+      process.env.DD_INSTRUMENTATION_INSTALL_ID = 'env-install-id'
+      process.env.DD_TRACE_CLOUD_PAYLOAD_TAGGING_MAX_DEPTH = '7'
+      config = new Config()
+      expect(config).to.have.property('apiKey', 'env-api-key')
+      expect(config).to.have.property('appKey', 'env-app-key')
+      expect(config).to.have.nested.property('installSignature.id', 'env-install-id')
+      expect(config).to.have.nested.property('cloudPayloadTagging.maxDepth', 7)
+
+      // Test 3: Fleet stable config should take precedence over env vars
+      fs.writeFileSync(
+        process.env.DD_TEST_FLEET_CONFIG_PATH,
+        `
+rules:
+  - selectors:
+    - origin: language
+      matches:
+        - nodejs
+      operator: equals
+    configuration:
+      DD_API_KEY: "fleet-api-key"
+      DD_APP_KEY: "fleet-app-key"
+      DD_INSTRUMENTATION_INSTALL_ID: "fleet-install-id"
+      DD_INSTRUMENTATION_INSTALL_TIME: "9999999999"
+      DD_INSTRUMENTATION_INSTALL_TYPE: "fleet_install"
+      DD_TRACE_CLOUD_REQUEST_PAYLOAD_TAGGING: ""
+      DD_TRACE_CLOUD_RESPONSE_PAYLOAD_TAGGING: "all"
+      DD_TRACE_CLOUD_PAYLOAD_TAGGING_MAX_DEPTH: 15
+`)
+      config = new Config()
+      expect(config).to.have.property('apiKey', 'fleet-api-key')
+      expect(config).to.have.property('appKey', 'fleet-app-key')
+      expect(config).to.have.nested.property('installSignature.id', 'fleet-install-id')
+      expect(config).to.have.nested.property('installSignature.time', '9999999999')
+      expect(config).to.have.nested.property('installSignature.type', 'fleet_install')
+      expect(config).to.have.nested.property('cloudPayloadTagging.requestsEnabled', false)
+      expect(config).to.have.nested.property('cloudPayloadTagging.responsesEnabled', true)
+      expect(config).to.have.nested.property('cloudPayloadTagging.maxDepth', 15)
+    })
   })
 
   context('getOrigin', () => {
diff --git a/packages/dd-trace/test/plugins/plugin-structure.spec.js b/packages/dd-trace/test/plugins/plugin-structure.spec.js
index 7f91912fad5..5b5554a4f28 100644
--- a/packages/dd-trace/test/plugins/plugin-structure.spec.js
+++ b/packages/dd-trace/test/plugins/plugin-structure.spec.js
@@ -20,7 +20,8 @@ const missingPlugins = [
   'datadog-plugin-limitd-client', // limitd-client instrumentation handles trace context propagation, no tracing is done
   'datadog-plugin-mongoose', // mongoose tracing is done through mongodb-core instrumentation
   'datadog-plugin-cookie-parser', // cookie-parser
does not produce spans - 'datadog-plugin-express-session' // express-session does not produce spans + 'datadog-plugin-express-session', // express-session does not produce spans + 'datadog-plugin-express-mongo-sanitize' // express-mongo-sanitize does not produce spans ] // instrumentations that do not have a hook, but are still instrumented diff --git a/packages/dd-trace/test/proxy.spec.js b/packages/dd-trace/test/proxy.spec.js index b5c8450f59c..56e635887f2 100644 --- a/packages/dd-trace/test/proxy.spec.js +++ b/packages/dd-trace/test/proxy.spec.js @@ -372,7 +372,7 @@ describe('TracerProxy', () => { proxy.openfeature // Trigger lazy loading const flagConfig = { flags: { 'test-flag': {} } } - handlers.get('FFE_FLAGS')('apply', { flag_configuration: flagConfig }) + handlers.get('FFE_FLAGS')('apply', flagConfig) expect(openfeatureProvider._setConfiguration).to.have.been.calledWith(flagConfig) }) @@ -384,7 +384,7 @@ describe('TracerProxy', () => { proxy.openfeature // Trigger lazy loading const flagConfig = { flags: { 'modified-flag': {} } } - handlers.get('FFE_FLAGS')('modify', { flag_configuration: flagConfig }) + handlers.get('FFE_FLAGS')('modify', flagConfig) expect(openfeatureProvider._setConfiguration).to.have.been.calledWith(flagConfig) }) diff --git a/packages/dd-trace/test/telemetry/logs/log-collector.spec.js b/packages/dd-trace/test/telemetry/logs/log-collector.spec.js index e32e0890103..8f9dca89746 100644 --- a/packages/dd-trace/test/telemetry/logs/log-collector.spec.js +++ b/packages/dd-trace/test/telemetry/logs/log-collector.spec.js @@ -30,17 +30,49 @@ describe('telemetry log collector', () => { }) it('should store logs with same message but different stack', () => { - const ddFrame = `at T (${ddBasePath}path/to/dd/file.js:1:2)` - expect(logCollector.add({ message: 'Error 1', level: 'ERROR', stack_trace: `stack 1\n${ddFrame}` })).to.be.true - expect(logCollector.add({ message: 'Error 1', level: 'ERROR', stack_trace: `stack 2\n${ddFrame}` })).to.be.true - expect(logCollector.add({ message: 'Error 1', level: 'ERROR', stack_trace: `stack 3\n${ddFrame}` })).to.be.true + const ddFrame1 = `at T (${ddBasePath}path/to/dd/file1.js:1:2)` + const ddFrame2 = `at T (${ddBasePath}path/to/dd/file2.js:3:4)` + const ddFrame3 = `at T (${ddBasePath}path/to/dd/file3.js:5:6)` + expect(logCollector.add({ + message: 'Error 1', + level: 'ERROR', + stack_trace: `Error: msg\n${ddFrame1}`, + errorType: 'Error' + })).to.be.true + expect(logCollector.add({ + message: 'Error 1', + level: 'ERROR', + stack_trace: `Error: msg\n${ddFrame2}`, + errorType: 'Error' + })).to.be.true + expect(logCollector.add({ + message: 'Error 1', + level: 'ERROR', + stack_trace: `Error: msg\n${ddFrame3}`, + errorType: 'Error' + })).to.be.true }) it('should store logs with same message, same stack but different level', () => { const ddFrame = `at T (${ddBasePath}path/to/dd/file.js:1:2)` - expect(logCollector.add({ message: 'Error 1', level: 'ERROR', stack_trace: `stack 1\n${ddFrame}` })).to.be.true - expect(logCollector.add({ message: 'Error 1', level: 'WARN', stack_trace: `stack 1\n${ddFrame}` })).to.be.true - expect(logCollector.add({ message: 'Error 1', level: 'DEBUG', stack_trace: `stack 1\n${ddFrame}` })).to.be.true + expect(logCollector.add({ + message: 'Error 1', + level: 'ERROR', + stack_trace: `Error: msg\n${ddFrame}`, + errorType: 'Error' + })).to.be.true + expect(logCollector.add({ + message: 'Error 1', + level: 'WARN', + stack_trace: `Error: msg\n${ddFrame}`, + errorType: 'Error' + })).to.be.true + 
expect(logCollector.add({ + message: 'Error 1', + level: 'DEBUG', + stack_trace: `Error: msg\n${ddFrame}`, + errorType: 'Error' + })).to.be.true }) it('should not store logs with empty stack and \'Generic Error\' message', () => { @@ -52,7 +84,7 @@ describe('telemetry log collector', () => { ).to.be.false }) - it('should include original message and dd frames', () => { + it('should redact error message and include only dd frames', () => { const ddFrame = `at T (${ddBasePath}path/to/dd/file.js:1:2)` const stack = new TypeError('Error 1') .stack.replace(`Error 1${EOL}`, `Error 1${EOL}${ddFrame}${EOL}`) @@ -73,11 +105,11 @@ describe('telemetry log collector', () => { expect(logCollector.hasEntry({ message: 'Error 1', level: 'ERROR', - stack_trace: `TypeError: Error 1${EOL}${ddFrames}` + stack_trace: `TypeError: redacted${EOL}${ddFrames}` })).to.be.true }) - it('should redact stack message if first frame is not a dd frame', () => { + it('should redact error message regardless of whether first frame is DD code', () => { const thirdPartyFrame = `at callFn (/this/is/not/a/dd/frame/runnable.js:366:21) at T (${ddBasePath}path/to/dd/file.js:1:2)` const stack = new TypeError('Error 1') @@ -104,6 +136,31 @@ describe('telemetry log collector', () => { stack_trace: ddFrames })).to.be.true }) + + it('should redact multi-line error messages', () => { + const ddFrame = `at cachedExec (${ddBasePath}plugins/util/git-cache.js:96:17)` + const multiLineError = 'Error: Command failed: git rev-parse --abbrev-ref ' + + `--symbolic-full-name @{upstream}${EOL}fatal: HEAD does not point to a branch${EOL}${EOL}${ddFrame}` + + const ddFrames = multiLineError + .split(EOL) + .filter(line => line.includes(ddBasePath)) + .map(line => line.replace(ddBasePath, '')) + .join(EOL) + + expect(logCollector.add({ + message: 'Git plugin error', + level: 'ERROR', + stack_trace: multiLineError, + errorType: 'Error' + })).to.be.true + + expect(logCollector.hasEntry({ + message: 'Git plugin error', + level: 'ERROR', + stack_trace: `Error: redacted${EOL}${ddFrames}` + })).to.be.true + }) }) describe('drain', () => { diff --git a/yarn.lock b/yarn.lock index de3f8f1ab41..95bb4392b8f 100644 --- a/yarn.lock +++ b/yarn.lock @@ -691,21 +691,21 @@ "@octokit/request-error" "^7.0.0" "@octokit/webhooks-methods" "^6.0.0" -"@openfeature/core@^1.8.1": +"@openfeature/core@^1.9.0": version "1.9.1" resolved "https://registry.yarnpkg.com/@openfeature/core/-/core-1.9.1.tgz#9925a04ed0745e92dd7b3793b35cff1ed89d54c1" integrity sha512-YySPtH4s/rKKnHRU0xyFGrqMU8XA+OIPNWDrlEFxE6DCVWCIrxE5YpiB94YD2jMFn6SSdA0cwQ8vLkCkl8lm8A== +"@openfeature/server-sdk@^1.20.0": + version "1.20.0" + resolved "https://registry.yarnpkg.com/@openfeature/server-sdk/-/server-sdk-1.20.0.tgz#43a5d3cffd915742ff64eeef53b7132279df4823" + integrity sha512-95L9CCaGVKC6+7rNCmDxjthFvUyPLOUkoYyjg9Y/Cmy0G9D63xrJBsmH6fqHtLifzAu4+E7fWAZ3TIBnnCtr7A== + "@openfeature/server-sdk@~1.18.0": version "1.18.0" resolved "https://registry.yarnpkg.com/@openfeature/server-sdk/-/server-sdk-1.18.0.tgz#5fed5f1d0900b2535878db18a78295bbaebb997b" integrity sha512-uP8nqEGBS58s3iWXx6d8vnJ6ZVt3DACJL4PWADOAuwIS4xXpID91783e9f6zQ0i1ijQpj3yx+3ZuCB2LfQMUMA== -"@openfeature/server-sdk@~1.20.0": - version "1.20.0" - resolved "https://registry.yarnpkg.com/@openfeature/server-sdk/-/server-sdk-1.20.0.tgz#43a5d3cffd915742ff64eeef53b7132279df4823" - integrity sha512-95L9CCaGVKC6+7rNCmDxjthFvUyPLOUkoYyjg9Y/Cmy0G9D63xrJBsmH6fqHtLifzAu4+E7fWAZ3TIBnnCtr7A== - "@opentelemetry/api-logs@<1.0.0": version "0.207.0" resolved 
"https://registry.yarnpkg.com/@opentelemetry/api-logs/-/api-logs-0.207.0.tgz#ae991c51eedda55af037a3e6fc1ebdb12b289f49" @@ -5112,6 +5112,11 @@ word-wrap@^1.2.5: resolved "https://registry.yarnpkg.com/word-wrap/-/word-wrap-1.2.5.tgz#d2c45c6dd4fbce621a66f136cbe328afd0410b34" integrity sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA== +workerpool@^10.0.0: + version "10.0.0" + resolved "https://registry.yarnpkg.com/workerpool/-/workerpool-10.0.0.tgz#73f972a4e74b05dc4e45a5c54ec8eb67b67365ba" + integrity sha512-q++kYpKoYSfe7myTA5bH8nov9XKpEj7ji3f/W2h4E8yrX42Fhdt6x3E5pgsmnL1+swBZXryUwudNQJmcuodqmw== + workerpool@^9.2.0: version "9.3.4" resolved "https://registry.yarnpkg.com/workerpool/-/workerpool-9.3.4.tgz#f6c92395b2141afd78e2a889e80cb338fe9fca41"