From 2d9f72b25eb6a30f2447e00ed75e17024a7c7e9e Mon Sep 17 00:00:00 2001 From: Ayan Khan Date: Thu, 11 Jan 2024 13:07:39 -0500 Subject: [PATCH 01/21] update MIGRATING.md and README.md in preparation for v5 release (#3918) * update MIGRATING.md and README.md in preparation for v5 release --- MIGRATING.md | 15 +++++++++++++++ README.md | 20 +++++++++++--------- 2 files changed, 26 insertions(+), 9 deletions(-) diff --git a/MIGRATING.md b/MIGRATING.md index 649037d39fa..39160fce6c0 100644 --- a/MIGRATING.md +++ b/MIGRATING.md @@ -4,6 +4,21 @@ This guide describes the steps to upgrade dd-trace from a major version to the next. If you are having any issues related to migrating, please feel free to open an issue or contact our [support](https://www.datadoghq.com/support/) team. +## 4.0 to 5.0 + +### Node 16 is no longer supported + +Node.js 16 has reached EOL in September 2023 and is no longer supported. Generally +speaking, we highly recommend always keeping Node.js up to date regardless of +our support policy. + +### Update `trace` TypeScript declaration + +The TypeScript declaration for `trace` has been updated to enforce +that calls to `tracer.trace(name, fn)` must receive a function which takes at least +the span object. Previously the span was technically optional when it should not have +been as the span must be handled. 
+ ## 3.0 to 4.0 ### Node 14 is no longer supported diff --git a/README.md b/README.md index 341283a71ca..854107f8860 100644 --- a/README.md +++ b/README.md @@ -1,7 +1,8 @@ # `dd-trace`: Node.js APM Tracer Library -[![npm v4](https://img.shields.io/npm/v/dd-trace/latest?color=blue&label=dd-trace%40v4&logo=npm)](https://www.npmjs.com/package/dd-trace) -[![npm v3](https://img.shields.io/npm/v/dd-trace/latest-node14?color=blue&label=dd-trace%40v3&logo=npm)](https://www.npmjs.com/package/dd-trace/v/latest-node12) +[![npm v5](https://img.shields.io/npm/v/dd-trace/latest?color=blue&label=dd-trace%40v4&logo=npm)](https://www.npmjs.com/package/dd-trace) +[![npm v4](https://img.shields.io/npm/v/dd-trace/latest-node16?color=blue&label=dd-trace%40v4&logo=npm)](https://www.npmjs.com/package/dd-trace/v/latest-node16) +[![npm v3](https://img.shields.io/npm/v/dd-trace/latest-node14?color=blue&label=dd-trace%40v3&logo=npm)](https://www.npmjs.com/package/dd-trace/v/latest-node14) [![codecov](https://codecov.io/gh/DataDog/dd-trace-js/branch/master/graph/badge.svg)](https://codecov.io/gh/DataDog/dd-trace-js) Bits the dog  JavaScript @@ -28,24 +29,25 @@ Most of the documentation for `dd-trace` is available on these webpages: | [`v1`](https://github.com/DataDog/dd-trace-js/tree/v1.x) | ![npm v1](https://img.shields.io/npm/v/dd-trace/legacy-v1?color=white&label=%20&style=flat-square) | `>= v12` | **End of Life** | 2021-07-13 | 2022-02-25 | | [`v2`](https://github.com/DataDog/dd-trace-js/tree/v2.x) | ![npm v2](https://img.shields.io/npm/v/dd-trace/latest-node12?color=white&label=%20&style=flat-square) | `>= v12` | **End of Life** | 2022-01-28 | 2023-08-15 | | [`v3`](https://github.com/DataDog/dd-trace-js/tree/v3.x) | ![npm v3](https://img.shields.io/npm/v/dd-trace/latest-node14?color=white&label=%20&style=flat-square) | `>= v14` | **Maintenance** | 2022-08-15 | 2024-05-15 | -| [`v4`](https://github.com/DataDog/dd-trace-js/tree/v4.x) | ![npm 
v4](https://img.shields.io/npm/v/dd-trace/latest?color=white&label=%20&style=flat-square) | `>= v16` | **Current** | 2023-05-12 | Unknown | +| [`v4`](https://github.com/DataDog/dd-trace-js/tree/v4.x) | ![npm v4](https://img.shields.io/npm/v/dd-trace/latest-node16?color=white&label=%20&style=flat-square) | `>= v16` | **Maintenance** | 2023-05-12 | 2025-01-11 | +| [`v5`](https://github.com/DataDog/dd-trace-js/tree/v5.x) | ![npm v5](https://img.shields.io/npm/v/dd-trace/latest?color=white&label=%20&style=flat-square) | `>= v18` | **Current** | 2024-01-11 | Unknown | -We currently maintain two release lines, namely `v3` and `v4`. -Features and bug fixes that are merged are released to the `v4` line and, if appropriate, also the `v3` line. +We currently maintain three release lines, namely `v5`, `v4` and `v3`. +Features and bug fixes that are merged are released to the `v5` line and, if appropriate, also the `v4` & `v3` line. -For any new projects it is recommended to use the `v4` release line: +For any new projects it is recommended to use the `v5` release line: ```sh $ npm install dd-trace $ yarn add dd-trace ``` -However, existing projects that already use the `v3` release line, or projects that need to support EOL versions of Node.js, may continue to use these release lines. +However, existing projects that already use the `v4` & `v3` release line, or projects that need to support EOL versions of Node.js, may continue to use these release lines. This is done by specifying the version when installing the package. ```sh -$ npm install dd-trace@3 -$ yarn add dd-trace@3 +$ npm install dd-trace@4 +$ yarn add dd-trace@4 ``` Any backwards-breaking functionality that is introduced into the library will result in an increase of the major version of the library and therefore a new release line. 
From 597e5677762c95e909a8425ef010185d12404f26 Mon Sep 17 00:00:00 2001 From: Roch Devost Date: Thu, 11 Jan 2024 14:29:57 -0500 Subject: [PATCH 02/21] fix empty strings being kept as valid values in config (#3952) * fix empty strings being kept as valid values in config * set defaults as values --- packages/dd-trace/src/config.js | 16 ++++++++++------ packages/dd-trace/test/config.spec.js | 10 ++++++++++ 2 files changed, 20 insertions(+), 6 deletions(-) diff --git a/packages/dd-trace/src/config.js b/packages/dd-trace/src/config.js index 293e4819bcd..7b992e31194 100644 --- a/packages/dd-trace/src/config.js +++ b/packages/dd-trace/src/config.js @@ -795,9 +795,9 @@ ken|consumer_?(?:id|key|secret)|sign(?:ed|ature)?|auth(?:entication|orization)?) tagger.add(tags, DD_TRACE_TAGS) tagger.add(tags, DD_TRACE_GLOBAL_TAGS) - this._setValue(env, 'service', DD_SERVICE || DD_SERVICE_NAME || tags.service) - this._setValue(env, 'env', DD_ENV || tags.env) - this._setValue(env, 'version', DD_VERSION || tags.version) + this._setString(env, 'service', DD_SERVICE || DD_SERVICE_NAME || tags.service) + this._setString(env, 'env', DD_ENV || tags.env) + this._setString(env, 'version', DD_VERSION || tags.version) this._setUnit(env, 'sampleRate', DD_TRACE_SAMPLE_RATE) this._setBoolean(env, 'logInjection', DD_LOGS_INJECTION) this._setArray(env, 'headerTags', DD_TRACE_HEADER_TAGS) @@ -812,9 +812,9 @@ ken|consumer_?(?:id|key|secret)|sign(?:ed|ature)?|auth(?:entication|orization)?) 
tagger.add(tags, options.tags) - this._setValue(opts, 'service', options.service || tags.service) - this._setValue(opts, 'env', options.env || tags.env) - this._setValue(opts, 'version', options.version || tags.version) + this._setString(opts, 'service', options.service || tags.service) + this._setString(opts, 'env', options.env || tags.env) + this._setString(opts, 'version', options.version || tags.version) this._setUnit(opts, 'sampleRate', coalesce(options.sampleRate, options.ingestion.sampleRate)) this._setBoolean(opts, 'logInjection', options.logInjection) this._setArray(opts, 'headerTags', options.headerTags) @@ -875,6 +875,10 @@ ken|consumer_?(?:id|key|secret)|sign(?:ed|ature)?|auth(?:entication|orization)?) } } + _setString (obj, name, value) { + obj[name] = value || undefined // unset for empty strings + } + _setTags (obj, name, value) { if (!value || Object.keys(value).length === 0) { return this._setValue(obj, name, null) diff --git a/packages/dd-trace/test/config.spec.js b/packages/dd-trace/test/config.spec.js index 3e00180bfaf..55335b78d37 100644 --- a/packages/dd-trace/test/config.spec.js +++ b/packages/dd-trace/test/config.spec.js @@ -321,6 +321,16 @@ describe('Config', () => { }) }) + it('should ignore empty strings', () => { + process.env.DD_TAGS = 'service:,env:,version:' + + const config = new Config() + + expect(config).to.have.property('service', 'node') + expect(config).to.have.property('env', undefined) + expect(config).to.have.property('version', '') + }) + it('should read case-insensitive booleans from environment variables', () => { process.env.DD_TRACING_ENABLED = 'False' process.env.DD_TRACE_DEBUG = 'TRUE' From b87996e8ce4b25b343e3ee72d65dc8b54b599f2e Mon Sep 17 00:00:00 2001 From: Ayan Khan Date: Thu, 11 Jan 2024 14:30:12 -0500 Subject: [PATCH 03/21] fix readme for v5 preview (#3958) --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 854107f8860..2998531fe4b 100644 --- a/README.md 
+++ b/README.md @@ -1,6 +1,6 @@ # `dd-trace`: Node.js APM Tracer Library -[![npm v5](https://img.shields.io/npm/v/dd-trace/latest?color=blue&label=dd-trace%40v4&logo=npm)](https://www.npmjs.com/package/dd-trace) +[![npm v5](https://img.shields.io/npm/v/dd-trace/latest?color=blue&label=dd-trace%40v5&logo=npm)](https://www.npmjs.com/package/dd-trace) [![npm v4](https://img.shields.io/npm/v/dd-trace/latest-node16?color=blue&label=dd-trace%40v4&logo=npm)](https://www.npmjs.com/package/dd-trace/v/latest-node16) [![npm v3](https://img.shields.io/npm/v/dd-trace/latest-node14?color=blue&label=dd-trace%40v3&logo=npm)](https://www.npmjs.com/package/dd-trace/v/latest-node14) [![codecov](https://codecov.io/gh/DataDog/dd-trace-js/branch/master/graph/badge.svg)](https://codecov.io/gh/DataDog/dd-trace-js) From bcc2f19eb7fc4902222618364689ed6d2af29123 Mon Sep 17 00:00:00 2001 From: simon-id Date: Fri, 12 Jan 2024 10:19:19 +0100 Subject: [PATCH 04/21] fix: Do not reuse `requestOptions` object (#3959) * fix: Do not reuse `requestOptions` object * rearrange RC http options * update tests --------- Co-authored-by: Tom Moor --- .../src/appsec/remote_config/manager.js | 17 +++---- .../test/appsec/remote_config/manager.spec.js | 48 ++++++++++++++----- 2 files changed, 45 insertions(+), 20 deletions(-) diff --git a/packages/dd-trace/src/appsec/remote_config/manager.js b/packages/dd-trace/src/appsec/remote_config/manager.js index 2b9074dfcce..9cc636cd302 100644 --- a/packages/dd-trace/src/appsec/remote_config/manager.js +++ b/packages/dd-trace/src/appsec/remote_config/manager.js @@ -25,7 +25,8 @@ class RemoteConfigManager extends EventEmitter { super() const pollInterval = Math.floor(config.remoteConfig.pollInterval * 1000) - const url = config.url || new URL(format({ + + this.url = config.url || new URL(format({ protocol: 'http:', hostname: config.hostname || 'localhost', port: config.port @@ -33,12 +34,6 @@ class RemoteConfigManager extends EventEmitter { this.scheduler = new 
Scheduler((cb) => this.poll(cb), pollInterval) - this.requestOptions = { - url, - method: 'POST', - path: '/v0.7/config' - } - this.state = { client: { state: { // updated by `parseConfig()` @@ -122,7 +117,13 @@ class RemoteConfigManager extends EventEmitter { } poll (cb) { - request(this.getPayload(), this.requestOptions, (err, data, statusCode) => { + const options = { + url: this.url, + method: 'POST', + path: '/v0.7/config' + } + + request(this.getPayload(), options, (err, data, statusCode) => { // 404 means RC is disabled, ignore it if (statusCode === 404) return cb() diff --git a/packages/dd-trace/test/appsec/remote_config/manager.spec.js b/packages/dd-trace/test/appsec/remote_config/manager.spec.js index 744eb7dfd12..4c4d2ad1ccd 100644 --- a/packages/dd-trace/test/appsec/remote_config/manager.spec.js +++ b/packages/dd-trace/test/appsec/remote_config/manager.spec.js @@ -76,11 +76,7 @@ describe('RemoteConfigManager', () => { expect(rc.scheduler).to.equal(scheduler) - expect(rc.requestOptions).to.deep.equal({ - method: 'POST', - url: config.url, - path: '/v0.7/config' - }) + expect(rc.url).to.deep.equal(config.url) expect(rc.state).to.deep.equal({ client: { @@ -192,7 +188,11 @@ describe('RemoteConfigManager', () => { const payload = JSON.stringify(rc.state) rc.poll(() => { - expect(request).to.have.been.calledOnceWith(payload, rc.requestOptions) + expect(request).to.have.been.calledOnceWith(payload, { + url: rc.url, + method: 'POST', + path: '/v0.7/config' + }) expect(log.error).to.not.have.been.called expect(rc.parseConfig).to.not.have.been.called cb() @@ -206,7 +206,11 @@ describe('RemoteConfigManager', () => { const payload = JSON.stringify(rc.state) rc.poll(() => { - expect(request).to.have.been.calledOnceWith(payload, rc.requestOptions) + expect(request).to.have.been.calledOnceWith(payload, { + url: rc.url, + method: 'POST', + path: '/v0.7/config' + }) expect(log.error).to.have.been.calledOnceWithExactly(err) expect(rc.parseConfig).to.not.have.been.called 
cb() @@ -219,7 +223,11 @@ describe('RemoteConfigManager', () => { const payload = JSON.stringify(rc.state) rc.poll(() => { - expect(request).to.have.been.calledOnceWith(payload, rc.requestOptions) + expect(request).to.have.been.calledOnceWith(payload, { + url: rc.url, + method: 'POST', + path: '/v0.7/config' + }) expect(log.error).to.not.have.been.called expect(rc.parseConfig).to.have.been.calledOnceWithExactly({ a: 'b' }) cb() @@ -235,7 +243,11 @@ describe('RemoteConfigManager', () => { const payload = JSON.stringify(rc.state) rc.poll(() => { - expect(request).to.have.been.calledOnceWith(payload, rc.requestOptions) + expect(request).to.have.been.calledOnceWith(payload, { + url: rc.url, + method: 'POST', + path: '/v0.7/config' + }) expect(rc.parseConfig).to.have.been.calledOnceWithExactly({ a: 'b' }) expect(log.error).to.have.been .calledOnceWithExactly('Could not parse remote config response: Error: Unable to parse config') @@ -246,7 +258,11 @@ describe('RemoteConfigManager', () => { rc.poll(() => { expect(request).to.have.been.calledTwice - expect(request.secondCall).to.have.been.calledWith(payload2, rc.requestOptions) + expect(request.secondCall).to.have.been.calledWith(payload2, { + url: rc.url, + method: 'POST', + path: '/v0.7/config' + }) expect(rc.parseConfig).to.have.been.calledOnce expect(log.error).to.have.been.calledOnce expect(rc.state.client.state.has_error).to.be.false @@ -262,7 +278,11 @@ describe('RemoteConfigManager', () => { const payload = JSON.stringify(rc.state) rc.poll(() => { - expect(request).to.have.been.calledOnceWith(payload, rc.requestOptions) + expect(request).to.have.been.calledOnceWith(payload, { + url: rc.url, + method: 'POST', + path: '/v0.7/config' + }) expect(log.error).to.not.have.been.called expect(rc.parseConfig).to.not.have.been.called cb() @@ -279,7 +299,11 @@ describe('RemoteConfigManager', () => { expect(JSON.parse(payload).client.client_tracer.extra_services).to.deep.equal(extraServices) rc.poll(() => { - 
expect(request).to.have.been.calledOnceWith(payload, rc.requestOptions) + expect(request).to.have.been.calledOnceWith(payload, { + url: rc.url, + method: 'POST', + path: '/v0.7/config' + }) cb() }) }) From f56cb57509eb2bf33ce9b15e0eef51ba577c796c Mon Sep 17 00:00:00 2001 From: Attila Szegedi Date: Fri, 12 Jan 2024 11:15:36 +0100 Subject: [PATCH 05/21] PROF-8850: Fix flaky code hotspots test (#3955) * Run the busy loop for longer, so there's better chance of capturing at least one sample in it. * Filter out samples taken in the root spans --- integration-tests/profiler.spec.js | 7 +++++++ integration-tests/profiler/codehotspots.js | 4 ++-- 2 files changed, 9 insertions(+), 2 deletions(-) diff --git a/integration-tests/profiler.spec.js b/integration-tests/profiler.spec.js index 1d9cc091f52..e00ecf17586 100644 --- a/integration-tests/profiler.spec.js +++ b/integration-tests/profiler.spec.js @@ -230,6 +230,13 @@ describe('profiler', () => { assert.isDefined(rootSpanId, encoded) rootSpans.add(rootSpanId) + if (spanId === rootSpanId) { + // It is possible to catch a sample executing in the root span before + // it entered the nested span; we ignore these too, although we'll + // still record the root span ID as we want to assert there'll only be + // 3 of them. 
+ continue + } const spanData = { rootSpanId, endpoint } const existingSpanData = spans.get(spanId) if (existingSpanData) { diff --git a/integration-tests/profiler/codehotspots.js b/integration-tests/profiler/codehotspots.js index 9cffc768185..749f15d8790 100644 --- a/integration-tests/profiler/codehotspots.js +++ b/integration-tests/profiler/codehotspots.js @@ -8,8 +8,8 @@ function busyLoop () { const start = process.hrtime.bigint() for (;;) { const now = process.hrtime.bigint() - // Busy cycle for 20ms - if (now - start > 20000000n) { + // Busy cycle for 100ms + if (now - start > 100000000n) { break } } From c85c704454a048a3783c71a71c3dcc50c7e21d88 Mon Sep 17 00:00:00 2001 From: simon-id Date: Mon, 15 Jan 2024 09:49:27 +0100 Subject: [PATCH 06/21] add convenience script to run the system-tests (#3961) * add convenience script to run the system-tests * fix child process stdio * catch subprocess error --- scripts/st.js | 105 ++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 105 insertions(+) create mode 100644 scripts/st.js diff --git a/scripts/st.js b/scripts/st.js new file mode 100644 index 00000000000..a44eb617e6f --- /dev/null +++ b/scripts/st.js @@ -0,0 +1,105 @@ +#!/usr/bin/env node +/* eslint-disable no-console, no-fallthrough */ +'use strict' + +const path = require('path') +const { writeFileSync } = require('fs') +const { execSync } = require('child_process') + +const ddtracePath = path.join(__dirname, '..') +const defaultTestPath = process.env.DD_ST_PATH || path.join(ddtracePath, '..', 'system-tests') + +const { buildAll, npm, testDir, testArgs } = parseArgs() + +const binariesPath = path.join(testDir, 'binaries') + +if (npm) { + console.log('Using NPM package:', npm) + + writeFileSync(path.join(binariesPath, 'nodejs-load-from-npm'), npm) +} else { + console.log('Using local repo') + + const packName = execSync(`npm pack ${ddtracePath}`, { + cwd: binariesPath, + stdio: [null, null, 'inherit'], + encoding: 'utf8' + }).slice(0, -1) // 
remove trailing newline + + writeFileSync(path.join(binariesPath, 'nodejs-load-from-npm'), `/binaries/${packName}`) +} + +try { + execSync(`./build.sh ${buildAll ? '' : '-i weblog'} && ./run.sh ${testArgs}`, { + cwd: testDir, + stdio: [null, 'inherit', 'inherit'] + }) +} catch (err) { + process.exit(err.status || 1) +} + +function parseArgs () { + const args = { + buildAll: false, + npm: null, + testDir: defaultTestPath, + testArgs: '' + } + + for (let i = 2; i < process.argv.length; i++) { + switch (process.argv[i]) { + case '-b': + case '--build-all': + args.buildAll = true + break + + case '-h': + case '--help': + helpAndExit() + break + + case '-n': + case '--npm': { + const arg = process.argv[i + 1] + if (!arg || arg[0] === '-') { + args.npm = 'dd-trace' + } else { + args.npm = arg + i++ + } + break + } + + case '-t': + case '--test-dir': { + const arg = process.argv[++i] + if (!arg || arg[0] === '-') helpAndExit() + args.testDir = arg + break + } + + case '--': + args.testArgs = process.argv.slice(i + 1).join(' ') + return args + + default: + console.log('Unknown option:', process.argv[i], '\n') + helpAndExit() + } + } + + return args +} + +function helpAndExit () { + console.log('Usage: node st.js [options...] [-- test-args]') + console.log('Options:') + console.log(' -b, --build-all Rebuild all images (default: only build weblog)') + console.log(' -h, --help Print this message') + console.log(' -n, --npm [package] Build a remote package instead of the local repo (default: "dd-trace")') + console.log(' Can be a package name (e.g. "dd-trace@4.2.0") or a git URL (e.g.') + console.log(' "git+https://github.com/DataDog/dd-trace-js.git#mybranch")') + console.log(' -t, --test-dir Specify the system-tests directory (default: "dd-trace/../system-tests/")') + console.log(' -- Passed to system-tests run.sh (e.g. 
"-- SCENARIO_NAME tests/path_to_test.py")') + process.exit() +} From 6bf08b908cedcaf7f9a255890a8ad00dae438c85 Mon Sep 17 00:00:00 2001 From: Attila Szegedi Date: Mon, 15 Jan 2024 11:03:48 +0100 Subject: [PATCH 07/21] PROF-8872: Better handling of multiple errors thrown in FakeAgent (#3963) * Mocha doesn't like an array of errors being thrown, stash it into cause instead * Cause ain't reported by Mocha, put stacks into the timeout message --- integration-tests/helpers.js | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/integration-tests/helpers.js b/integration-tests/helpers.js index 4be08654fb3..b80a3c7e301 100644 --- a/integration-tests/helpers.js +++ b/integration-tests/helpers.js @@ -87,7 +87,8 @@ class FakeAgent extends EventEmitter { const errors = [] const timeoutObj = setTimeout(() => { - resultReject([...errors, new Error('timeout')]) + const errorsMsg = errors.length === 0 ? '' : `, additionally:\n${errors.map(e => e.stack).join('\n')}\n===\n` + resultReject(new Error(`timeout${errorsMsg}`, { cause: { errors } })) }, timeout) const resultPromise = new Promise((resolve, reject) => { @@ -126,7 +127,8 @@ class FakeAgent extends EventEmitter { const errors = [] const timeoutObj = setTimeout(() => { - resultReject([...errors, new Error('timeout')]) + const errorsMsg = errors.length === 0 ? 
'' : `, additionally:\n${errors.map(e => e.stack).join('\n')}\n===\n` + resultReject(new Error(`timeout${errorsMsg}`, { cause: { errors } })) }, timeout) const resultPromise = new Promise((resolve, reject) => { From 6d57fcf5d93d25d552bab5f8adfc78aa199761a2 Mon Sep 17 00:00:00 2001 From: Ugaitz Urien Date: Mon, 15 Jan 2024 16:54:30 +0100 Subject: [PATCH 08/21] Ignore header vulnerabilities in cors headers (#3962) --------- Co-authored-by: Igor Unanua --- .../analyzers/header-injection-analyzer.js | 6 ++-- .../header-injection.express.plugin.spec.js | 31 +++++++++++++++++++ 2 files changed, 34 insertions(+), 3 deletions(-) diff --git a/packages/dd-trace/src/appsec/iast/analyzers/header-injection-analyzer.js b/packages/dd-trace/src/appsec/iast/analyzers/header-injection-analyzer.js index 3e622016858..62330e87a07 100644 --- a/packages/dd-trace/src/appsec/iast/analyzers/header-injection-analyzer.js +++ b/packages/dd-trace/src/appsec/iast/analyzers/header-injection-analyzer.js @@ -48,7 +48,7 @@ class HeaderInjectionAnalyzer extends InjectionAnalyzer { if (ranges?.length > 0) { return !(this.isCookieExclusion(lowerCasedHeaderName, ranges) || this.isSameHeaderExclusion(lowerCasedHeaderName, ranges) || - this.isAccessControlAllowOriginExclusion(lowerCasedHeaderName, ranges)) + this.isAccessControlAllowExclusion(lowerCasedHeaderName, ranges)) } return false @@ -84,8 +84,8 @@ class HeaderInjectionAnalyzer extends InjectionAnalyzer { return false } - isAccessControlAllowOriginExclusion (name, ranges) { - if (name === 'access-control-allow-origin') { + isAccessControlAllowExclusion (name, ranges) { + if (name?.startsWith('access-control-allow-')) { return ranges .every(range => range.iinfo.type === HTTP_REQUEST_HEADER_VALUE) } diff --git a/packages/dd-trace/test/appsec/iast/analyzers/header-injection.express.plugin.spec.js b/packages/dd-trace/test/appsec/iast/analyzers/header-injection.express.plugin.spec.js index dcddd88f869..655d6921e95 100644 --- 
a/packages/dd-trace/test/appsec/iast/analyzers/header-injection.express.plugin.spec.js +++ b/packages/dd-trace/test/appsec/iast/analyzers/header-injection.express.plugin.spec.js @@ -239,6 +239,37 @@ describe('Header injection vulnerability', () => { }).catch(done) } }) + + testThatRequestHasNoVulnerability({ + fn: (req, res) => { + setHeaderFunction('Access-Control-Allow-Origin', req.headers['origin'], res) + setHeaderFunction('Access-Control-Allow-Headers', req.headers['access-control-request-headers'], res) + setHeaderFunction('Access-Control-Allow-Methods', req.headers['access-control-request-methods'], res) + }, + testDescription: 'Should not have vulnerability with CORS headers', + vulnerability: 'HEADER_INJECTION', + occurrencesAndLocation: { + occurrences: 1, + location: { + path: setHeaderFunctionFilename, + line: 4 + } + }, + cb: (headerInjectionVulnerabilities) => { + const evidenceString = headerInjectionVulnerabilities[0].evidence.valueParts + .map(part => part.value).join('') + expect(evidenceString).to.be.equal('custom: value') + }, + makeRequest: (done, config) => { + return axios.options(`http://localhost:${config.port}/`, { + headers: { + 'origin': 'http://custom-origin', + 'Access-Control-Request-Headers': 'TestHeader', + 'Access-Control-Request-Methods': 'GET' + } + }).catch(done) + } + }) }) }) }) From aad1f4b0125e09ff6dab49219affb3176b724e64 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Juan=20Antonio=20Fern=C3=A1ndez=20de=20Alba?= Date: Tue, 16 Jan 2024 08:54:10 +0100 Subject: [PATCH 09/21] [ci-visibility] Fix telemetry in ci visibility (#3964) --- packages/datadog-plugin-cucumber/src/index.js | 10 +++++----- packages/datadog-plugin-cypress/src/plugin.js | 3 +++ packages/datadog-plugin-playwright/src/index.js | 2 ++ packages/dd-trace/src/telemetry/index.js | 11 ++++++++--- packages/dd-trace/src/telemetry/send-data.js | 4 ++-- packages/dd-trace/test/telemetry/send-data.spec.js | 3 ++- 6 files changed, 22 insertions(+), 11 deletions(-) diff --git 
a/packages/datadog-plugin-cucumber/src/index.js b/packages/datadog-plugin-cucumber/src/index.js index 4f2d29603b7..aef4f37eca0 100644 --- a/packages/datadog-plugin-cucumber/src/index.js +++ b/packages/datadog-plugin-cucumber/src/index.js @@ -169,12 +169,12 @@ class CucumberPlugin extends CiPlugin { } span.finish() - this.telemetry.ciVisEvent( - TELEMETRY_EVENT_FINISHED, - 'test', - { hasCodeOwners: !!span.context()._tags[TEST_CODE_OWNERS] } - ) if (!isStep) { + this.telemetry.ciVisEvent( + TELEMETRY_EVENT_FINISHED, + 'test', + { hasCodeOwners: !!span.context()._tags[TEST_CODE_OWNERS] } + ) finishAllTraceSpans(span) } }) diff --git a/packages/datadog-plugin-cypress/src/plugin.js b/packages/datadog-plugin-cypress/src/plugin.js index abd6abec33b..1b97e469c79 100644 --- a/packages/datadog-plugin-cypress/src/plugin.js +++ b/packages/datadog-plugin-cypress/src/plugin.js @@ -39,6 +39,7 @@ const { incrementCountMetric, distributionMetric } = require('../../dd-trace/src/ci-visibility/telemetry') +const { appClosing: appClosingTelemetry } = require('../../dd-trace/src/telemetry') const { GIT_REPOSITORY_URL, GIT_COMMIT_SHA, @@ -429,10 +430,12 @@ module.exports = (on, config) => { } if (exporter.flush) { exporter.flush(() => { + appClosingTelemetry() resolve(null) }) } else if (exporter._writer) { exporter._writer.flush(() => { + appClosingTelemetry() resolve(null) }) } diff --git a/packages/datadog-plugin-playwright/src/index.js b/packages/datadog-plugin-playwright/src/index.js index eb8810967c3..9fa6196240e 100644 --- a/packages/datadog-plugin-playwright/src/index.js +++ b/packages/datadog-plugin-playwright/src/index.js @@ -17,6 +17,7 @@ const { TELEMETRY_EVENT_CREATED, TELEMETRY_EVENT_FINISHED } = require('../../dd-trace/src/ci-visibility/telemetry') +const { appClosing: appClosingTelemetry } = require('../../dd-trace/src/telemetry') class PlaywrightPlugin extends CiPlugin { static get id () { @@ -37,6 +38,7 @@ class PlaywrightPlugin extends CiPlugin { 
this.testSessionSpan.finish() this.telemetry.ciVisEvent(TELEMETRY_EVENT_FINISHED, 'session') finishAllTraceSpans(this.testSessionSpan) + appClosingTelemetry() this.tracer._exporter.flush(onDone) }) diff --git a/packages/dd-trace/src/telemetry/index.js b/packages/dd-trace/src/telemetry/index.js index a9a741b8539..20502cd28da 100644 --- a/packages/dd-trace/src/telemetry/index.js +++ b/packages/dd-trace/src/telemetry/index.js @@ -140,8 +140,7 @@ function appStarted (config) { return app } -function onBeforeExit () { - process.removeListener('beforeExit', onBeforeExit) +function appClosing () { const { reqType, payload } = createPayload('app-closing') sendData(config, application, host, reqType, payload) // we flush before shutting down. Only in CI Visibility @@ -150,6 +149,11 @@ function onBeforeExit () { } } +function onBeforeExit () { + process.removeListener('beforeExit', onBeforeExit) + appClosing() +} + function createAppObject (config) { return { service_name: config.service, @@ -339,5 +343,6 @@ module.exports = { start, stop, updateIntegrations, - updateConfig + updateConfig, + appClosing } diff --git a/packages/dd-trace/src/telemetry/send-data.js b/packages/dd-trace/src/telemetry/send-data.js index e14de9a36ca..b53145218a1 100644 --- a/packages/dd-trace/src/telemetry/send-data.js +++ b/packages/dd-trace/src/telemetry/send-data.js @@ -1,6 +1,7 @@ const request = require('../exporters/common/request') const log = require('../log') +const { isTrue } = require('../util') let agentTelemetry = true @@ -49,13 +50,12 @@ function sendData (config, application, host, reqType, payload = {}, cb = () => const { hostname, port, - experimental, isCiVisibility } = config let url = config.url - const isCiVisibilityAgentlessMode = isCiVisibility && experimental?.exporter === 'datadog' + const isCiVisibilityAgentlessMode = isCiVisibility && isTrue(process.env.DD_CIVISIBILITY_AGENTLESS_ENABLED) if (isCiVisibilityAgentlessMode) { try { diff --git 
a/packages/dd-trace/test/telemetry/send-data.spec.js b/packages/dd-trace/test/telemetry/send-data.spec.js index a9a360e0141..408fb3bc0a5 100644 --- a/packages/dd-trace/test/telemetry/send-data.spec.js +++ b/packages/dd-trace/test/telemetry/send-data.spec.js @@ -149,9 +149,9 @@ describe('sendData', () => { }) it('should also work in CI Visibility agentless mode', () => { + process.env.DD_CIVISIBILITY_AGENTLESS_ENABLED = 1 sendDataModule.sendData( { - experimental: { exporter: 'datadog' }, isCiVisibility: true, tags: { 'runtime-id': '123' }, site: 'datadoghq.eu' @@ -168,5 +168,6 @@ describe('sendData', () => { }) const { url } = options expect(url).to.eql(new URL('https://instrumentation-telemetry-intake.eu1.datadoghq.com')) + delete process.env.DD_CIVISIBILITY_AGENTLESS_ENABLED }) }) From 1802f4361826cfcdb12607e697f11a82af82f0b2 Mon Sep 17 00:00:00 2001 From: Attila Szegedi Date: Tue, 16 Jan 2024 13:43:41 +0100 Subject: [PATCH 10/21] PROF-8872: Run profiler integration tests on macOS and Windows (#3945) * Run profiler integration tests on both macOS and Windows * Exclude from Windows testing those tests aren't supposed to work on it. * Make createSandbox Windows-friendly * Increase profiler tests timeout as `yarn add` in sandbox creation takes a _lot_ of time. 
* Disable failing OOM integration tests on Windows * Only sync once * Move profiler integration test out of general integration test suite so it will not run twice on Linux --- .github/workflows/profiling.yml | 5 + integration-tests/helpers.js | 18 +- integration-tests/profiler.spec.js | 410 ------------------- integration-tests/profiler/profiler.spec.js | 420 ++++++++++++++++++++ package.json | 1 + 5 files changed, 439 insertions(+), 415 deletions(-) delete mode 100644 integration-tests/profiler.spec.js create mode 100644 integration-tests/profiler/profiler.spec.js diff --git a/.github/workflows/profiling.yml b/.github/workflows/profiling.yml index aca3a332399..1f54c5a4c51 100644 --- a/.github/workflows/profiling.yml +++ b/.github/workflows/profiling.yml @@ -19,6 +19,7 @@ jobs: - uses: ./.github/actions/node/setup - run: yarn install - run: yarn test:profiler:ci + - run: yarn test:integration:profiler - uses: codecov/codecov-action@v3 ubuntu: @@ -29,10 +30,13 @@ jobs: - run: yarn install - uses: ./.github/actions/node/18 - run: yarn test:profiler:ci + - run: yarn test:integration:profiler - uses: ./.github/actions/node/20 - run: yarn test:profiler:ci + - run: yarn test:integration:profiler - uses: ./.github/actions/node/latest - run: yarn test:profiler:ci + - run: yarn test:integration:profiler - uses: codecov/codecov-action@v3 windows: @@ -42,4 +46,5 @@ jobs: - uses: ./.github/actions/node/setup - run: yarn install - run: yarn test:profiler:ci + - run: yarn test:integration:profiler - uses: codecov/codecov-action@v3 diff --git a/integration-tests/helpers.js b/integration-tests/helpers.js index b80a3c7e301..dcf13b40cee 100644 --- a/integration-tests/helpers.js +++ b/integration-tests/helpers.js @@ -10,8 +10,7 @@ const childProcess = require('child_process') const { fork } = childProcess const exec = promisify(childProcess.exec) const http = require('http') -const fs = require('fs') -const mkdir = promisify(fs.mkdir) +const fs = require('fs/promises') const os = 
require('os') const path = require('path') const rimraf = promisify(require('rimraf')) @@ -206,12 +205,21 @@ async function createSandbox (dependencies = [], isGitRepo = false, // We might use NODE_OPTIONS to init the tracer. We don't want this to affect this operations const { NODE_OPTIONS, ...restOfEnv } = process.env - await mkdir(folder) + await fs.mkdir(folder) await exec(`yarn pack --filename ${out}`) // TODO: cache this await exec(`yarn add ${allDependencies.join(' ')}`, { cwd: folder, env: restOfEnv }) for (const path of integrationTestsPaths) { - await exec(`cp -R ${path} ${folder}`) + if (process.platform === 'win32') { + await exec(`Copy-Item -Recurse -Path "${path}" -Destination "${folder}"`, { shell: 'powershell.exe' }) + } else { + await exec(`cp -R ${path} ${folder}`) + } + } + if (process.platform === 'win32') { + // On Windows, we can only sync entire filesystem volume caches. + await exec(`Write-VolumeCache ${folder[0]}`, { shell: 'powershell.exe' }) + } else { await exec(`sync ${folder}`) } @@ -221,7 +229,7 @@ async function createSandbox (dependencies = [], isGitRepo = false, if (isGitRepo) { await exec('git init', { cwd: folder }) - await exec('echo "node_modules/" > .gitignore', { cwd: folder }) + await fs.writeFile(path.join(folder, '.gitignore'), 'node_modules/', { flush: true }) await exec('git config user.email "john@doe.com"', { cwd: folder }) await exec('git config user.name "John Doe"', { cwd: folder }) await exec('git config commit.gpgsign false', { cwd: folder }) diff --git a/integration-tests/profiler.spec.js b/integration-tests/profiler.spec.js deleted file mode 100644 index e00ecf17586..00000000000 --- a/integration-tests/profiler.spec.js +++ /dev/null @@ -1,410 +0,0 @@ -'use strict' - -const { - FakeAgent, - createSandbox -} = require('./helpers') -const childProcess = require('child_process') -const { fork } = childProcess -const path = require('path') -const { assert } = require('chai') -const fs = require('fs/promises') -const 
fsync = require('fs') -const net = require('net') -const zlib = require('zlib') -const { Profile } = require('pprof-format') -const semver = require('semver') - -async function checkProfiles (agent, proc, timeout, - expectedProfileTypes = ['wall', 'space'], expectBadExit = false, multiplicity = 1) { - const resultPromise = agent.assertMessageReceived(({ headers, payload, files }) => { - assert.propertyVal(headers, 'host', `127.0.0.1:${agent.port}`) - assert.propertyVal(payload, 'format', 'pprof') - assert.deepPropertyVal(payload, 'types', expectedProfileTypes) - for (const [index, profileType] of expectedProfileTypes.entries()) { - assert.propertyVal(files[index], 'originalname', `${profileType}.pb.gz`) - } - }, timeout, multiplicity) - - await processExitPromise(proc, timeout, expectBadExit) - return resultPromise -} - -function processExitPromise (proc, timeout, expectBadExit = false) { - return new Promise((resolve, reject) => { - const timeoutObj = setTimeout(() => { - reject(new Error('Process timed out')) - }, timeout) - - function checkExitCode (code) { - clearTimeout(timeoutObj) - - if ((code !== 0) !== expectBadExit) { - reject(new Error(`Process exited with unexpected status code ${code}.`)) - } else { - resolve() - } - } - - proc - .on('error', reject) - .on('exit', checkExitCode) - }) -} - -async function getLatestProfile (cwd, pattern) { - const dirEntries = await fs.readdir(cwd) - // Get the latest file matching the pattern - const pprofEntries = dirEntries.filter(name => pattern.test(name)) - assert.isTrue(pprofEntries.length > 0, `No file matching pattern ${pattern} found in ${cwd}`) - const pprofEntry = pprofEntries - .map(name => ({ name, modified: fsync.statSync(path.join(cwd, name), { bigint: true }).mtimeNs })) - .reduce((a, b) => a.modified > b.modified ? 
a : b) - .name - const pprofGzipped = await fs.readFile(path.join(cwd, pprofEntry)) - const pprofUnzipped = zlib.gunzipSync(pprofGzipped) - return { profile: Profile.decode(pprofUnzipped), encoded: pprofGzipped.toString('base64') } -} - -async function gatherNetworkTimelineEvents (cwd, scriptFilePath, eventType, args) { - const procStart = BigInt(Date.now() * 1000000) - const proc = fork(path.join(cwd, scriptFilePath), args, { - cwd, - env: { - DD_PROFILING_PROFILERS: 'wall', - DD_PROFILING_EXPORTERS: 'file', - DD_PROFILING_ENABLED: 1, - DD_PROFILING_EXPERIMENTAL_TIMELINE_ENABLED: 1 - } - }) - - await processExitPromise(proc, 5000) - const procEnd = BigInt(Date.now() * 1000000) - - const { profile, encoded } = await getLatestProfile(cwd, /^events_.+\.pprof$/) - - const strings = profile.stringTable - const tsKey = strings.dedup('end_timestamp_ns') - const eventKey = strings.dedup('event') - const hostKey = strings.dedup('host') - const addressKey = strings.dedup('address') - const portKey = strings.dedup('port') - const nameKey = strings.dedup('operation') - const eventValue = strings.dedup(eventType) - const events = [] - for (const sample of profile.sample) { - let ts, event, host, address, port, name - for (const label of sample.label) { - switch (label.key) { - case tsKey: ts = label.num; break - case nameKey: name = label.str; break - case eventKey: event = label.str; break - case hostKey: host = label.str; break - case addressKey: address = label.str; break - case portKey: port = label.num; break - default: assert.fail(`Unexpected label key ${label.key} ${strings.strings[label.key]} ${encoded}`) - } - } - // Timestamp must be defined and be between process start and end time - assert.isDefined(ts, encoded) - assert.isTrue(ts <= procEnd, encoded) - assert.isTrue(ts >= procStart, encoded) - // Gather only DNS events; ignore sporadic GC events - if (event === eventValue) { - assert.isDefined(name, encoded) - // Exactly one of these is defined - 
assert.isTrue(!!address !== !!host, encoded) - const ev = { name: strings.strings[name] } - if (address) { - ev.address = strings.strings[address] - } else { - ev.host = strings.strings[host] - } - if (port) { - ev.port = port - } - events.push(ev) - } - } - return events -} - -describe('profiler', () => { - let agent - let proc - let sandbox - let cwd - let profilerTestFile - let oomTestFile - let oomEnv - let oomExecArgv - const timeout = 5000 - - before(async () => { - sandbox = await createSandbox() - cwd = sandbox.folder - profilerTestFile = path.join(cwd, 'profiler/index.js') - oomTestFile = path.join(cwd, 'profiler/oom.js') - oomExecArgv = ['--max-old-space-size=50'] - }) - - after(async () => { - await sandbox.remove() - }) - - it('code hotspots and endpoint tracing works', async () => { - const procStart = BigInt(Date.now() * 1000000) - const proc = fork(path.join(cwd, 'profiler/codehotspots.js'), { - cwd, - env: { - DD_PROFILING_PROFILERS: 'wall', - DD_PROFILING_EXPORTERS: 'file', - DD_PROFILING_ENABLED: 1, - DD_PROFILING_CODEHOTSPOTS_ENABLED: 1, - DD_PROFILING_ENDPOINT_COLLECTION_ENABLED: 1, - DD_PROFILING_EXPERIMENTAL_TIMELINE_ENABLED: 1 - } - }) - - await processExitPromise(proc, 5000) - const procEnd = BigInt(Date.now() * 1000000) - - const { profile, encoded } = await getLatestProfile(cwd, /^wall_.+\.pprof$/) - - // We check the profile for following invariants: - // - every sample needs to have an 'end_timestamp_ns' label that has values (nanos since UNIX - // epoch) between process start and end. 
- // - it needs to have samples with 9 total different 'span id's, and 3 different - // 'local root span id's - // - samples with spans also must have a 'trace endpoint' label with values 'endpoint-0', - // 'endpoint-1', or 'endpoint-2' - // - every occurrence of a span must have the same root span and endpoint - const rootSpans = new Set() - const endpoints = new Set() - const spans = new Map() - const strings = profile.stringTable - const tsKey = strings.dedup('end_timestamp_ns') - const spanKey = strings.dedup('span id') - const rootSpanKey = strings.dedup('local root span id') - const endpointKey = strings.dedup('trace endpoint') - const threadNameKey = strings.dedup('thread name') - const threadIdKey = strings.dedup('thread id') - const osThreadIdKey = strings.dedup('os thread id') - const threadNameValue = strings.dedup('Main Event Loop') - const nonJSThreadNameValue = strings.dedup('Non-JS threads') - - for (const sample of profile.sample) { - let ts, spanId, rootSpanId, endpoint, threadName, threadId, osThreadId - for (const label of sample.label) { - switch (label.key) { - case tsKey: ts = label.num; break - case spanKey: spanId = label.str; break - case rootSpanKey: rootSpanId = label.str; break - case endpointKey: endpoint = label.str; break - case threadNameKey: threadName = label.str; break - case threadIdKey: threadId = label.str; break - case osThreadIdKey: osThreadId = label.str; break - default: assert.fail(`Unexpected label key ${strings.dedup(label.key)} ${encoded}`) - } - } - if (threadName !== nonJSThreadNameValue) { - // Timestamp must be defined and be between process start and end time - assert.isDefined(ts, encoded) - assert.isNumber(osThreadId, encoded) - assert.equal(threadId, strings.dedup('0'), encoded) - assert.isTrue(ts <= procEnd, encoded) - assert.isTrue(ts >= procStart, encoded) - // Thread name must be defined and exactly equal "Main Event Loop" - assert.equal(threadName, threadNameValue, encoded) - } else { - 
assert.equal(threadId, strings.dedup('NA'), encoded) - } - // Either all or none of span-related labels are defined - if (endpoint === undefined) { - // It is possible to catch a sample executing in tracer's startSpan so - // that endpoint is not yet set. We'll ignore those samples. - continue - } - if (spanId || rootSpanId) { - assert.isDefined(spanId, encoded) - assert.isDefined(rootSpanId, encoded) - - rootSpans.add(rootSpanId) - if (spanId === rootSpanId) { - // It is possible to catch a sample executing in the root span before - // it entered the nested span; we ignore these too, although we'll - // still record the root span ID as we want to assert there'll only be - // 3 of them. - continue - } - const spanData = { rootSpanId, endpoint } - const existingSpanData = spans.get(spanId) - if (existingSpanData) { - // Span's root span and endpoint must be consistent across samples - assert.deepEqual(spanData, existingSpanData, encoded) - } else { - // New span id, store span data - spans.set(spanId, spanData) - // Verify endpoint value - const endpointVal = strings.strings[endpoint] - switch (endpointVal) { - case 'endpoint-0': - case 'endpoint-1': - case 'endpoint-2': - endpoints.add(endpoint) - break - default: - assert.fail(`Unexpected endpoint value ${endpointVal} ${encoded}`) - } - } - } - } - // Need to have a total of 9 different spans, with 3 different root spans - // and 3 different endpoints. 
- assert.equal(spans.size, 9, encoded) - assert.equal(rootSpans.size, 3, encoded) - assert.equal(endpoints.size, 3, encoded) - }) - - if (semver.gte(process.version, '16.0.0')) { - it('dns timeline events work', async () => { - const dnsEvents = await gatherNetworkTimelineEvents(cwd, 'profiler/dnstest.js', 'dns') - assert.sameDeepMembers(dnsEvents, [ - { name: 'lookup', host: 'example.org' }, - { name: 'lookup', host: 'example.com' }, - { name: 'lookup', host: 'datadoghq.com' }, - { name: 'queryA', host: 'datadoghq.com' }, - { name: 'lookupService', address: '13.224.103.60', port: 80 } - ]) - }) - - it('net timeline events work', async () => { - // Simple server that writes a constant message to the socket. - const msg = 'cya later!\n' - function createServer () { - const server = net.createServer((socket) => { - socket.end(msg, 'utf8') - }).on('error', (err) => { - throw err - }) - return server - } - // Create two instances of the server - const server1 = createServer() - try { - const server2 = createServer() - try { - // Have the servers listen on ephemeral ports - const p = new Promise(resolve => { - server1.listen(0, () => { - server2.listen(0, async () => { - resolve([server1.address().port, server2.address().port]) - }) - }) - }) - const [ port1, port2 ] = await p - const args = [String(port1), String(port2), msg] - // Invoke the profiled program, passing it the ports of the servers and - // the expected message. - const events = await gatherNetworkTimelineEvents(cwd, 'profiler/nettest.js', 'net', args) - // The profiled program should have two TCP connection events to the two - // servers. 
- assert.sameDeepMembers(events, [ - { name: 'connect', host: '127.0.0.1', port: port1 }, - { name: 'connect', host: '127.0.0.1', port: port2 } - ]) - } finally { - server2.close() - } - } finally { - server1.close() - } - }) - } - - context('shutdown', () => { - beforeEach(async () => { - agent = await new FakeAgent().start() - oomEnv = { - DD_TRACE_AGENT_PORT: agent.port, - DD_PROFILING_ENABLED: 1, - DD_TRACE_DEBUG: 1, - DD_TRACE_LOG_LEVEL: 'warn' - } - }) - - afterEach(async () => { - proc.kill() - await agent.stop() - }) - - it('records profile on process exit', async () => { - proc = fork(profilerTestFile, { - cwd, - env: { - DD_TRACE_AGENT_PORT: agent.port, - DD_PROFILING_ENABLED: 1 - } - }) - return checkProfiles(agent, proc, timeout) - }) - - it('sends a heap profile on OOM with external process', async () => { - proc = fork(oomTestFile, { - cwd, - execArgv: oomExecArgv, - env: oomEnv - }) - return checkProfiles(agent, proc, timeout, ['space'], true) - }) - - it('sends a heap profile on OOM with external process and exits successfully', async () => { - proc = fork(oomTestFile, { - cwd, - execArgv: oomExecArgv, - env: { - ...oomEnv, - DD_PROFILING_EXPERIMENTAL_OOM_HEAP_LIMIT_EXTENSION_SIZE: 15000000, - DD_PROFILING_EXPERIMENTAL_OOM_MAX_HEAP_EXTENSION_COUNT: 3 - } - }) - return checkProfiles(agent, proc, timeout, ['space'], false, 2) - }) - - it('sends a heap profile on OOM with async callback', async () => { - proc = fork(oomTestFile, { - cwd, - execArgv: oomExecArgv, - env: { - ...oomEnv, - DD_PROFILING_EXPERIMENTAL_OOM_HEAP_LIMIT_EXTENSION_SIZE: 10000000, - DD_PROFILING_EXPERIMENTAL_OOM_MAX_HEAP_EXTENSION_COUNT: 1, - DD_PROFILING_EXPERIMENTAL_OOM_EXPORT_STRATEGIES: 'async' - } - }) - return checkProfiles(agent, proc, timeout, ['space'], true) - }) - - it('sends heap profiles on OOM with multiple strategies', async () => { - proc = fork(oomTestFile, { - cwd, - execArgv: oomExecArgv, - env: { - ...oomEnv, - 
DD_PROFILING_EXPERIMENTAL_OOM_HEAP_LIMIT_EXTENSION_SIZE: 10000000, - DD_PROFILING_EXPERIMENTAL_OOM_MAX_HEAP_EXTENSION_COUNT: 1, - DD_PROFILING_EXPERIMENTAL_OOM_EXPORT_STRATEGIES: 'async,process' - } - }) - return checkProfiles(agent, proc, timeout, ['space'], true, 2) - }) - - it('sends a heap profile on OOM in worker thread and exits successfully', async () => { - proc = fork(oomTestFile, [1, 50], { - cwd, - env: { ...oomEnv, DD_PROFILING_WALLTIME_ENABLED: 0 } - }) - return checkProfiles(agent, proc, timeout, ['space'], false, 2) - }) - }) -}) diff --git a/integration-tests/profiler/profiler.spec.js b/integration-tests/profiler/profiler.spec.js new file mode 100644 index 00000000000..40efb12fbc1 --- /dev/null +++ b/integration-tests/profiler/profiler.spec.js @@ -0,0 +1,420 @@ +'use strict' + +const { + FakeAgent, + createSandbox +} = require('../helpers') +const childProcess = require('child_process') +const { fork } = childProcess +const path = require('path') +const { assert } = require('chai') +const fs = require('fs/promises') +const fsync = require('fs') +const net = require('net') +const zlib = require('zlib') +const { Profile } = require('pprof-format') +const semver = require('semver') + +async function checkProfiles (agent, proc, timeout, + expectedProfileTypes = ['wall', 'space'], expectBadExit = false, multiplicity = 1) { + const resultPromise = agent.assertMessageReceived(({ headers, payload, files }) => { + assert.propertyVal(headers, 'host', `127.0.0.1:${agent.port}`) + assert.propertyVal(payload, 'format', 'pprof') + assert.deepPropertyVal(payload, 'types', expectedProfileTypes) + for (const [index, profileType] of expectedProfileTypes.entries()) { + assert.propertyVal(files[index], 'originalname', `${profileType}.pb.gz`) + } + }, timeout, multiplicity) + + await processExitPromise(proc, timeout, expectBadExit) + return resultPromise +} + +function processExitPromise (proc, timeout, expectBadExit = false) { + return new Promise((resolve, reject) => 
{ + const timeoutObj = setTimeout(() => { + reject(new Error('Process timed out')) + }, timeout) + + function checkExitCode (code) { + clearTimeout(timeoutObj) + + if ((code !== 0) !== expectBadExit) { + reject(new Error(`Process exited with unexpected status code ${code}.`)) + } else { + resolve() + } + } + + proc + .on('error', reject) + .on('exit', checkExitCode) + }) +} + +async function getLatestProfile (cwd, pattern) { + const dirEntries = await fs.readdir(cwd) + // Get the latest file matching the pattern + const pprofEntries = dirEntries.filter(name => pattern.test(name)) + assert.isTrue(pprofEntries.length > 0, `No file matching pattern ${pattern} found in ${cwd}`) + const pprofEntry = pprofEntries + .map(name => ({ name, modified: fsync.statSync(path.join(cwd, name), { bigint: true }).mtimeNs })) + .reduce((a, b) => a.modified > b.modified ? a : b) + .name + const pprofGzipped = await fs.readFile(path.join(cwd, pprofEntry)) + const pprofUnzipped = zlib.gunzipSync(pprofGzipped) + return { profile: Profile.decode(pprofUnzipped), encoded: pprofGzipped.toString('base64') } +} + +async function gatherNetworkTimelineEvents (cwd, scriptFilePath, eventType, args) { + const procStart = BigInt(Date.now() * 1000000) + const proc = fork(path.join(cwd, scriptFilePath), args, { + cwd, + env: { + DD_PROFILING_PROFILERS: 'wall', + DD_PROFILING_EXPORTERS: 'file', + DD_PROFILING_ENABLED: 1, + DD_PROFILING_EXPERIMENTAL_TIMELINE_ENABLED: 1 + } + }) + + await processExitPromise(proc, 5000) + const procEnd = BigInt(Date.now() * 1000000) + + const { profile, encoded } = await getLatestProfile(cwd, /^events_.+\.pprof$/) + + const strings = profile.stringTable + const tsKey = strings.dedup('end_timestamp_ns') + const eventKey = strings.dedup('event') + const hostKey = strings.dedup('host') + const addressKey = strings.dedup('address') + const portKey = strings.dedup('port') + const nameKey = strings.dedup('operation') + const eventValue = strings.dedup(eventType) + const events = 
[] + for (const sample of profile.sample) { + let ts, event, host, address, port, name + for (const label of sample.label) { + switch (label.key) { + case tsKey: ts = label.num; break + case nameKey: name = label.str; break + case eventKey: event = label.str; break + case hostKey: host = label.str; break + case addressKey: address = label.str; break + case portKey: port = label.num; break + default: assert.fail(`Unexpected label key ${label.key} ${strings.strings[label.key]} ${encoded}`) + } + } + // Timestamp must be defined and be between process start and end time + assert.isDefined(ts, encoded) + assert.isTrue(ts <= procEnd, encoded) + assert.isTrue(ts >= procStart, encoded) + // Gather only DNS events; ignore sporadic GC events + if (event === eventValue) { + assert.isDefined(name, encoded) + // Exactly one of these is defined + assert.isTrue(!!address !== !!host, encoded) + const ev = { name: strings.strings[name] } + if (address) { + ev.address = strings.strings[address] + } else { + ev.host = strings.strings[host] + } + if (port) { + ev.port = port + } + events.push(ev) + } + } + return events +} + +describe('profiler', () => { + let agent + let proc + let sandbox + let cwd + let profilerTestFile + let oomTestFile + let oomEnv + let oomExecArgv + const timeout = 5000 + + before(async () => { + sandbox = await createSandbox() + cwd = sandbox.folder + profilerTestFile = path.join(cwd, 'profiler/index.js') + oomTestFile = path.join(cwd, 'profiler/oom.js') + oomExecArgv = ['--max-old-space-size=50'] + }) + + after(async () => { + await sandbox.remove() + }) + + if (process.platform !== 'win32') { + it('code hotspots and endpoint tracing works', async () => { + const procStart = BigInt(Date.now() * 1000000) + const proc = fork(path.join(cwd, 'profiler/codehotspots.js'), { + cwd, + env: { + DD_PROFILING_PROFILERS: 'wall', + DD_PROFILING_EXPORTERS: 'file', + DD_PROFILING_ENABLED: 1, + DD_PROFILING_CODEHOTSPOTS_ENABLED: 1, + 
DD_PROFILING_ENDPOINT_COLLECTION_ENABLED: 1, + DD_PROFILING_EXPERIMENTAL_TIMELINE_ENABLED: 1 + } + }) + + await processExitPromise(proc, 5000) + const procEnd = BigInt(Date.now() * 1000000) + + const { profile, encoded } = await getLatestProfile(cwd, /^wall_.+\.pprof$/) + + // We check the profile for following invariants: + // - every sample needs to have an 'end_timestamp_ns' label that has values (nanos since UNIX + // epoch) between process start and end. + // - it needs to have samples with 9 total different 'span id's, and 3 different + // 'local root span id's + // - samples with spans also must have a 'trace endpoint' label with values 'endpoint-0', + // 'endpoint-1', or 'endpoint-2' + // - every occurrence of a span must have the same root span and endpoint + const rootSpans = new Set() + const endpoints = new Set() + const spans = new Map() + const strings = profile.stringTable + const tsKey = strings.dedup('end_timestamp_ns') + const spanKey = strings.dedup('span id') + const rootSpanKey = strings.dedup('local root span id') + const endpointKey = strings.dedup('trace endpoint') + const threadNameKey = strings.dedup('thread name') + const threadIdKey = strings.dedup('thread id') + const osThreadIdKey = strings.dedup('os thread id') + const threadNameValue = strings.dedup('Main Event Loop') + const nonJSThreadNameValue = strings.dedup('Non-JS threads') + + for (const sample of profile.sample) { + let ts, spanId, rootSpanId, endpoint, threadName, threadId, osThreadId + for (const label of sample.label) { + switch (label.key) { + case tsKey: ts = label.num; break + case spanKey: spanId = label.str; break + case rootSpanKey: rootSpanId = label.str; break + case endpointKey: endpoint = label.str; break + case threadNameKey: threadName = label.str; break + case threadIdKey: threadId = label.str; break + case osThreadIdKey: osThreadId = label.str; break + default: assert.fail(`Unexpected label key ${strings.dedup(label.key)} ${encoded}`) + } + } + if (threadName 
!== nonJSThreadNameValue) { + // Timestamp must be defined and be between process start and end time + assert.isDefined(ts, encoded) + assert.isNumber(osThreadId, encoded) + assert.equal(threadId, strings.dedup('0'), encoded) + assert.isTrue(ts <= procEnd, encoded) + assert.isTrue(ts >= procStart, encoded) + // Thread name must be defined and exactly equal "Main Event Loop" + assert.equal(threadName, threadNameValue, encoded) + } else { + assert.equal(threadId, strings.dedup('NA'), encoded) + } + // Either all or none of span-related labels are defined + if (endpoint === undefined) { + // It is possible to catch a sample executing in tracer's startSpan so + // that endpoint is not yet set. We'll ignore those samples. + continue + } + if (spanId || rootSpanId) { + assert.isDefined(spanId, encoded) + assert.isDefined(rootSpanId, encoded) + + rootSpans.add(rootSpanId) + if (spanId === rootSpanId) { + // It is possible to catch a sample executing in the root span before + // it entered the nested span; we ignore these too, although we'll + // still record the root span ID as we want to assert there'll only be + // 3 of them. + continue + } + const spanData = { rootSpanId, endpoint } + const existingSpanData = spans.get(spanId) + if (existingSpanData) { + // Span's root span and endpoint must be consistent across samples + assert.deepEqual(spanData, existingSpanData, encoded) + } else { + // New span id, store span data + spans.set(spanId, spanData) + // Verify endpoint value + const endpointVal = strings.strings[endpoint] + switch (endpointVal) { + case 'endpoint-0': + case 'endpoint-1': + case 'endpoint-2': + endpoints.add(endpoint) + break + default: + assert.fail(`Unexpected endpoint value ${endpointVal} ${encoded}`) + } + } + } + } + // Need to have a total of 9 different spans, with 3 different root spans + // and 3 different endpoints. 
+ assert.equal(spans.size, 9, encoded) + assert.equal(rootSpans.size, 3, encoded) + assert.equal(endpoints.size, 3, encoded) + }) + + if (semver.gte(process.version, '16.0.0')) { + it('dns timeline events work', async () => { + const dnsEvents = await gatherNetworkTimelineEvents(cwd, 'profiler/dnstest.js', 'dns') + assert.sameDeepMembers(dnsEvents, [ + { name: 'lookup', host: 'example.org' }, + { name: 'lookup', host: 'example.com' }, + { name: 'lookup', host: 'datadoghq.com' }, + { name: 'queryA', host: 'datadoghq.com' }, + { name: 'lookupService', address: '13.224.103.60', port: 80 } + ]) + }) + + it('net timeline events work', async () => { + // Simple server that writes a constant message to the socket. + const msg = 'cya later!\n' + function createServer () { + const server = net.createServer((socket) => { + socket.end(msg, 'utf8') + }).on('error', (err) => { + throw err + }) + return server + } + // Create two instances of the server + const server1 = createServer() + try { + const server2 = createServer() + try { + // Have the servers listen on ephemeral ports + const p = new Promise(resolve => { + server1.listen(0, () => { + server2.listen(0, async () => { + resolve([server1.address().port, server2.address().port]) + }) + }) + }) + const [ port1, port2 ] = await p + const args = [String(port1), String(port2), msg] + // Invoke the profiled program, passing it the ports of the servers and + // the expected message. + const events = await gatherNetworkTimelineEvents(cwd, 'profiler/nettest.js', 'net', args) + // The profiled program should have two TCP connection events to the two + // servers. 
+ assert.sameDeepMembers(events, [ + { name: 'connect', host: '127.0.0.1', port: port1 }, + { name: 'connect', host: '127.0.0.1', port: port2 } + ]) + } finally { + server2.close() + } + } finally { + server1.close() + } + }) + } + } + + context('shutdown', () => { + beforeEach(async () => { + agent = await new FakeAgent().start() + oomEnv = { + DD_TRACE_AGENT_PORT: agent.port, + DD_PROFILING_ENABLED: 1, + DD_TRACE_DEBUG: 1, + DD_TRACE_LOG_LEVEL: 'warn' + } + }) + + afterEach(async () => { + proc.kill() + await agent.stop() + }) + + it('records profile on process exit', async () => { + proc = fork(profilerTestFile, { + cwd, + env: { + DD_TRACE_AGENT_PORT: agent.port, + DD_PROFILING_ENABLED: 1 + } + }) + return checkProfiles(agent, proc, timeout) + }) + + if (process.platform !== 'win32') { // PROF-8905 + it('sends a heap profile on OOM with external process', async () => { + proc = fork(oomTestFile, { + cwd, + execArgv: oomExecArgv, + env: oomEnv + }) + return checkProfiles(agent, proc, timeout, ['space'], true) + }) + } + + if (process.platform !== 'win32') { // PROF-8905 + it('sends a heap profile on OOM with external process and exits successfully', async () => { + proc = fork(oomTestFile, { + cwd, + execArgv: oomExecArgv, + env: { + ...oomEnv, + DD_PROFILING_EXPERIMENTAL_OOM_HEAP_LIMIT_EXTENSION_SIZE: 15000000, + DD_PROFILING_EXPERIMENTAL_OOM_MAX_HEAP_EXTENSION_COUNT: 3 + } + }) + return checkProfiles(agent, proc, timeout, ['space'], false, 2) + }) + } + + it('sends a heap profile on OOM with async callback', async () => { + proc = fork(oomTestFile, { + cwd, + execArgv: oomExecArgv, + env: { + ...oomEnv, + DD_PROFILING_EXPERIMENTAL_OOM_HEAP_LIMIT_EXTENSION_SIZE: 10000000, + DD_PROFILING_EXPERIMENTAL_OOM_MAX_HEAP_EXTENSION_COUNT: 1, + DD_PROFILING_EXPERIMENTAL_OOM_EXPORT_STRATEGIES: 'async' + } + }) + return checkProfiles(agent, proc, timeout, ['space'], true) + }) + + if (process.platform !== 'win32') { // PROF-8905 + it('sends heap profiles on OOM with 
multiple strategies', async () => { + proc = fork(oomTestFile, { + cwd, + execArgv: oomExecArgv, + env: { + ...oomEnv, + DD_PROFILING_EXPERIMENTAL_OOM_HEAP_LIMIT_EXTENSION_SIZE: 10000000, + DD_PROFILING_EXPERIMENTAL_OOM_MAX_HEAP_EXTENSION_COUNT: 1, + DD_PROFILING_EXPERIMENTAL_OOM_EXPORT_STRATEGIES: 'async,process' + } + }) + return checkProfiles(agent, proc, timeout, ['space'], true, 2) + }) + } + + if (process.platform !== 'win32') { // PROF-8905 + it('sends a heap profile on OOM in worker thread and exits successfully', async () => { + proc = fork(oomTestFile, [1, 50], { + cwd, + env: { ...oomEnv, DD_PROFILING_WALLTIME_ENABLED: 0 } + }) + return checkProfiles(agent, proc, timeout, ['space'], false, 2) + }) + } + }) +}) diff --git a/package.json b/package.json index bf66bbbdb24..017306e351e 100644 --- a/package.json +++ b/package.json @@ -36,6 +36,7 @@ "test:integration:cucumber": "mocha --colors --timeout 30000 \"integration-tests/cucumber/*.spec.js\"", "test:integration:cypress": "mocha --colors --timeout 30000 \"integration-tests/cypress/*.spec.js\"", "test:integration:playwright": "mocha --colors --timeout 30000 \"integration-tests/playwright/*.spec.js\"", + "test:integration:profiler": "mocha --colors --timeout 90000 \"integration-tests/profiler/*.spec.js\"", "test:integration:serverless": "mocha --colors --timeout 30000 \"integration-tests/serverless/*.spec.js\"", "test:integration:plugins": "mocha --colors --exit -r \"packages/dd-trace/test/setup/mocha.js\" \"packages/datadog-plugin-@($(echo $PLUGINS))/test/integration-test/**/*.spec.js\"", "test:unit:plugins": "mocha --colors --exit -r \"packages/dd-trace/test/setup/mocha.js\" \"packages/datadog-instrumentations/test/@($(echo $PLUGINS)).spec.js\" \"packages/datadog-plugin-@($(echo $PLUGINS))/test/**/*.spec.js\" --exclude \"packages/datadog-plugin-@($(echo $PLUGINS))/test/integration-test/**/*.spec.js\"", From 4d77fda0437cc8591bc5952b036bade7c2c5ca94 Mon Sep 17 00:00:00 2001 From: Ayan Khan Date: Tue, 16 
Jan 2024 12:48:50 -0500 Subject: [PATCH 11/21] fix failing aerospike tests, and update CI job names to be more specific (#3971) --- .github/workflows/plugins.yml | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/.github/workflows/plugins.yml b/.github/workflows/plugins.yml index 5a5bd2a1547..30ba850ee49 100644 --- a/.github/workflows/plugins.yml +++ b/.github/workflows/plugins.yml @@ -20,7 +20,7 @@ env: jobs: - aerospike-3: + aerospike-node-14: runs-on: ubuntu-latest container: image: ubuntu:18.04 @@ -74,7 +74,7 @@ jobs: - if: always() uses: codecov/codecov-action@v3 - aerospike-4: + aerospike-node-16: runs-on: ubuntu-latest services: aerospike: @@ -84,7 +84,7 @@ jobs: env: PLUGINS: aerospike SERVICES: aerospike - PACKAGE_VERSION_RANGE: '4.0.0 - 5.4.0' + PACKAGE_VERSION_RANGE: '>=4.0.0 <5.2.0' steps: - uses: actions/checkout@v4 - uses: ./.github/actions/testagent/start @@ -101,7 +101,7 @@ jobs: echo "MAJOR_VERSION=$majorVersion" >> $GITHUB_ENV - uses: ./.github/actions/node/oldest - name: Install dependencies and run tests - if: env.MAJOR_VERSION != '5' + if: env.MAJOR_VERSION == '4' run: | yarn install --ignore-engines yarn test:plugins:ci @@ -109,7 +109,7 @@ jobs: uses: ./.github/actions/testagent/logs - uses: codecov/codecov-action@v3 - aerospike-5: + aerospike-node-18-20: strategy: matrix: node-version: [18] @@ -145,7 +145,7 @@ jobs: with: node-version: ${{ matrix.node-version }} - name: Install dependencies and run tests - if: env.MAJOR_VERSION == '5' + if: env.MAJOR_VERSION >= '5' run: | yarn install --ignore-engines yarn test:plugins:ci From f2f7c2e7b9f019d289d899bd6235bd2d2fbb7d20 Mon Sep 17 00:00:00 2001 From: Ayan Khan Date: Wed, 17 Jan 2024 05:09:16 -0500 Subject: [PATCH 12/21] fix failing Nextjs tests on Node versions < 18 (#3970) * fix failing Nextjs tests on Node versions < 18 * address feedback --- packages/datadog-plugin-next/test/index.spec.js | 11 ++++++++--- .../test/integration-test/client.spec.js | 14 +++++++++++--- 
.../dd-trace/test/appsec/index.next.plugin.spec.js | 11 ++++++++--- 3 files changed, 27 insertions(+), 9 deletions(-) diff --git a/packages/datadog-plugin-next/test/index.spec.js b/packages/datadog-plugin-next/test/index.spec.js index b8ba8156055..9b5728eeb60 100644 --- a/packages/datadog-plugin-next/test/index.spec.js +++ b/packages/datadog-plugin-next/test/index.spec.js @@ -8,9 +8,14 @@ const { execSync, spawn } = require('child_process') const agent = require('../../dd-trace/test/plugins/agent') const { writeFileSync, readdirSync } = require('fs') const { satisfies } = require('semver') -const { DD_MAJOR } = require('../../../version') +const { DD_MAJOR, NODE_MAJOR } = require('../../../version') const { rawExpectedSchema } = require('./naming') +const BUILD_COMMAND = NODE_MAJOR < 18 + ? 'yarn exec next build' : 'NODE_OPTIONS=--openssl-legacy-provider yarn exec next build' +let VERSIONS_TO_TEST = NODE_MAJOR < 18 ? '>=11.1 <13.2' : '>=11.1' +VERSIONS_TO_TEST = DD_MAJOR >= 4 ? VERSIONS_TO_TEST : '>=9.5 <11.1' + describe('Plugin', function () { let server let port @@ -19,7 +24,7 @@ describe('Plugin', function () { const satisfiesStandalone = version => satisfies(version, '>=12.0.0') // TODO: Figure out why 10.x tests are failing. 
- withVersions('next', 'next', DD_MAJOR >= 4 && '>=11', version => { + withVersions('next', 'next', VERSIONS_TO_TEST, version => { const pkg = require(`${__dirname}/../../../versions/next@${version}/node_modules/next/package.json`) const startServer = ({ withConfig, standalone }, schemaVersion = 'v0', defaultToGlobalService = false) => { @@ -97,7 +102,7 @@ describe('Plugin', function () { execSync('yarn install', { cwd }) // building in-process makes tests fail for an unknown reason - execSync('NODE_OPTIONS=--openssl-legacy-provider yarn exec next build', { + execSync(BUILD_COMMAND, { cwd, env: { ...process.env, diff --git a/packages/datadog-plugin-next/test/integration-test/client.spec.js b/packages/datadog-plugin-next/test/integration-test/client.spec.js index dff8c9b658d..5138d619046 100644 --- a/packages/datadog-plugin-next/test/integration-test/client.spec.js +++ b/packages/datadog-plugin-next/test/integration-test/client.spec.js @@ -8,21 +8,29 @@ const { spawnPluginIntegrationTestProc } = require('../../../../integration-tests/helpers') const { assert } = require('chai') +const { NODE_MAJOR } = require('../../../../version') const hookFile = 'dd-trace/loader-hook.mjs' +const BUILD_COMMAND = NODE_MAJOR < 18 + ? 'yarn exec next build' : 'NODE_OPTIONS=--openssl-legacy-provider yarn exec next build' +const NODE_OPTIONS = NODE_MAJOR < 18 ? `--loader=${hookFile} --require dd-trace/init` + : `--loader=${hookFile} --require dd-trace/init --openssl-legacy-provider` + +const VERSIONS_TO_TEST = NODE_MAJOR < 18 ? 
'>=11.1 <13.2' : '>=11.1' + describe('esm', () => { let agent let proc let sandbox // match versions tested with unit tests - withVersions('next', 'next', '>=11', version => { + withVersions('next', 'next', VERSIONS_TO_TEST, version => { before(async function () { // next builds slower in the CI, match timeout with unit tests this.timeout(120 * 1000) sandbox = await createSandbox([`'next@${version}'`, 'react', 'react-dom'], false, ['./packages/datadog-plugin-next/test/integration-test/*'], - 'NODE_OPTIONS=--openssl-legacy-provider yarn exec next build') + BUILD_COMMAND) }) after(async () => { @@ -40,7 +48,7 @@ describe('esm', () => { it('is instrumented', async () => { proc = await spawnPluginIntegrationTestProc(sandbox.folder, 'server.mjs', agent.port, undefined, { - NODE_OPTIONS: `--loader=${hookFile} --require dd-trace/init --openssl-legacy-provider` + NODE_OPTIONS }) return curlAndAssertMessage(agent, proc, ({ headers, payload }) => { assert.propertyVal(headers, 'host', `127.0.0.1:${agent.port}`) diff --git a/packages/dd-trace/test/appsec/index.next.plugin.spec.js b/packages/dd-trace/test/appsec/index.next.plugin.spec.js index 405a9da4621..b67ca79eecb 100644 --- a/packages/dd-trace/test/appsec/index.next.plugin.spec.js +++ b/packages/dd-trace/test/appsec/index.next.plugin.spec.js @@ -8,16 +8,21 @@ const { writeFileSync } = require('fs') const { satisfies } = require('semver') const path = require('path') -const { DD_MAJOR } = require('../../../../version') +const { DD_MAJOR, NODE_MAJOR } = require('../../../../version') const agent = require('../plugins/agent') +const BUILD_COMMAND = NODE_MAJOR < 18 + ? 'yarn exec next build' : 'NODE_OPTIONS=--openssl-legacy-provider yarn exec next build' +let VERSIONS_TO_TEST = NODE_MAJOR < 18 ? '>=11.1 <13.2' : '>=11.1' +VERSIONS_TO_TEST = DD_MAJOR >= 4 ? 
VERSIONS_TO_TEST : '>=9.5 <11.1' + describe('test suite', () => { let server let port const satisfiesStandalone = version => satisfies(version, '>=12.0.0') - withVersions('next', 'next', DD_MAJOR >= 4 && '>=11', version => { + withVersions('next', 'next', VERSIONS_TO_TEST, version => { const realVersion = require(`${__dirname}/../../../../versions/next@${version}`).version() function initApp (appName) { @@ -48,7 +53,7 @@ describe('test suite', () => { execSync('yarn install', { cwd }) // building in-process makes tests fail for an unknown reason - execSync('NODE_OPTIONS=--openssl-legacy-provider yarn exec next build', { + execSync(BUILD_COMMAND, { cwd, env: { ...process.env, From f0723741ff06199f6475a40bea56d20a88dab1e6 Mon Sep 17 00:00:00 2001 From: Attila Szegedi Date: Wed, 17 Jan 2024 16:46:45 +0100 Subject: [PATCH 13/21] PROF-8935: Remove timelines experimental flag, disable timelines and CPU profiler on Windows (#3969) * Make timeline setting non-experimental * Disallow CPU profiling on Windows * Disallow timeline view on Windows * Deduplicate test code, and also test for success on supported platforms * Fix a test --- packages/dd-trace/src/profiling/config.js | 5 ++ .../dd-trace/test/profiling/config.spec.js | 48 +++++++++++-------- 2 files changed, 34 insertions(+), 19 deletions(-) diff --git a/packages/dd-trace/src/profiling/config.js b/packages/dd-trace/src/profiling/config.js index 0d780f4efe9..1ea1ec191ee 100644 --- a/packages/dd-trace/src/profiling/config.js +++ b/packages/dd-trace/src/profiling/config.js @@ -40,6 +40,7 @@ class Config { DD_PROFILING_EXPERIMENTAL_OOM_HEAP_LIMIT_EXTENSION_SIZE, DD_PROFILING_EXPERIMENTAL_OOM_MAX_HEAP_EXTENSION_COUNT, DD_PROFILING_EXPERIMENTAL_OOM_EXPORT_STRATEGIES, + DD_PROFILING_TIMELINE_ENABLED, DD_PROFILING_EXPERIMENTAL_TIMELINE_ENABLED, DD_PROFILING_CODEHOTSPOTS_ENABLED, DD_PROFILING_ENDPOINT_COLLECTION_ENABLED, @@ -154,7 +155,10 @@ class Config { }) this.timelineEnabled = isTrue(coalesce(options.timelineEnabled, + 
DD_PROFILING_TIMELINE_ENABLED, DD_PROFILING_EXPERIMENTAL_TIMELINE_ENABLED, false)) + logExperimentalVarDeprecation('TIMELINE_ENABLED') + checkOptionAllowed(this.timelineEnabled, 'Timeline view') this.codeHotspotsEnabled = isTrue(coalesce(options.codeHotspotsEnabled, DD_PROFILING_CODEHOTSPOTS_ENABLED, @@ -164,6 +168,7 @@ class Config { this.cpuProfilingEnabled = isTrue(coalesce(options.cpuProfilingEnabled, DD_PROFILING_EXPERIMENTAL_CPU_ENABLED, false)) + checkOptionAllowed(this.cpuProfilingEnabled, 'CPU profiling') this.profilers = ensureProfilers(profilers, this) } diff --git a/packages/dd-trace/test/profiling/config.spec.js b/packages/dd-trace/test/profiling/config.spec.js index f283369ce12..4f2943db0c8 100644 --- a/packages/dd-trace/test/profiling/config.spec.js +++ b/packages/dd-trace/test/profiling/config.spec.js @@ -132,8 +132,10 @@ describe('config', () => { it('should support profiler config with DD_PROFILING_PROFILERS', () => { process.env = { DD_PROFILING_PROFILERS: 'wall', - DD_PROFILING_V8_PROFILER_BUG_WORKAROUND: '0', - DD_PROFILING_EXPERIMENTAL_CPU_ENABLED: '1' + DD_PROFILING_V8_PROFILER_BUG_WORKAROUND: '0' + } + if (samplingContextsAvailable) { + process.env.DD_PROFILING_EXPERIMENTAL_CPU_ENABLED = '1' } const options = { logger: nullLogger @@ -146,7 +148,7 @@ describe('config', () => { expect(config.profilers[0]).to.be.an.instanceOf(WallProfiler) expect(config.profilers[0].codeHotspotsEnabled()).to.equal(samplingContextsAvailable) expect(config.v8ProfilerBugWorkaroundEnabled).false - expect(config.cpuProfilingEnabled).true + expect(config.cpuProfilingEnabled).to.equal(samplingContextsAvailable) }) it('should support profiler config with DD_PROFILING_XXX_ENABLED', () => { @@ -248,28 +250,36 @@ describe('config', () => { expect(config.profilers[0].endpointCollectionEnabled()).false }) - it('should prevent accidentally enabling code hotspots', () => { - if (samplingContextsAvailable) { - return - } - + function optionOnlyWorksWithSamplingContexts 
(property, name) { const options = { - codeHotspotsEnabled: true + [property]: true } - // eslint-disable-next-line no-new - expect(() => { new Config(options) }).to.throw('Code hotspots not supported on ') - }) - it('should prevent accidentally enabling endpoint collection', () => { if (samplingContextsAvailable) { - return + // should silently succeed + // eslint-disable-next-line no-new + new Config(options) + } else { + // should throw + // eslint-disable-next-line no-new + expect(() => { new Config(options) }).to.throw(`${name} not supported on `) } + } - const options = { - endpointCollection: true - } - // eslint-disable-next-line no-new - expect(() => { new Config(options) }).to.throw('Endpoint collection not supported on ') + it('should only allow code hotspots on supported platforms', () => { + optionOnlyWorksWithSamplingContexts('codeHotspotsEnabled', 'Code hotspots') + }) + + it('should only allow endpoint collection on supported platforms', () => { + optionOnlyWorksWithSamplingContexts('endpointCollection', 'Endpoint collection') + }) + + it('should only allow CPU profiling on supported platforms', () => { + optionOnlyWorksWithSamplingContexts('cpuProfilingEnabled', 'CPU profiling') + }) + + it('should only allow timeline view on supported platforms', () => { + optionOnlyWorksWithSamplingContexts('timelineEnabled', 'Timeline view') }) it('should support tags', () => { From 3c164723747fcf3dbf4db8b9e8bebda25b1fe4e6 Mon Sep 17 00:00:00 2001 From: Ugaitz Urien Date: Wed, 17 Jan 2024 16:56:00 +0100 Subject: [PATCH 14/21] Update @datadog/native-appsec to v7.0.0 (#3973) --- package.json | 2 +- yarn.lock | 8 ++++---- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/package.json b/package.json index 017306e351e..bb364080b96 100644 --- a/package.json +++ b/package.json @@ -69,7 +69,7 @@ "node": ">=18" }, "dependencies": { - "@datadog/native-appsec": "6.0.0", + "@datadog/native-appsec": "7.0.0", "@datadog/native-iast-rewriter": "2.2.2", 
"@datadog/native-iast-taint-tracking": "1.6.4", "@datadog/native-metrics": "^2.0.0", diff --git a/yarn.lock b/yarn.lock index 9346d979198..f175707edf4 100644 --- a/yarn.lock +++ b/yarn.lock @@ -412,10 +412,10 @@ resolved "https://registry.npmjs.org/@colors/colors/-/colors-1.5.0.tgz" integrity "sha1-u1BFecHK6SPmV2pPXaQ9Jfl729k= sha512-ooWCrlZP11i8GImSjTHYHLkvFDP48nS4+204nGb1RiX/WXYHmJA2III9/e2DWVabCESdW7hBAEzHRqUn9OUVvQ==" -"@datadog/native-appsec@6.0.0": - version "6.0.0" - resolved "https://registry.yarnpkg.com/@datadog/native-appsec/-/native-appsec-6.0.0.tgz#da753f8566ec5180ad9e83014cb44984b4bc878e" - integrity sha512-e7vH5usFoqov7FraPcA99fe80t2/qm4Cmno1T3iBhYlhyO6HD01ArDsCZ/sUvNIUR1ujxtbr8Z9WRGJ0qQ/FDA== +"@datadog/native-appsec@7.0.0": + version "7.0.0" + resolved "https://registry.yarnpkg.com/@datadog/native-appsec/-/native-appsec-7.0.0.tgz#a380174dd49aef2d9bb613a0ec8ead6dc7822095" + integrity sha512-bywstWFW2hWxzPuS0+mFMVHHL0geulx5yQFtsjfszaH2LTAgk2D+Rt40MKbAoZ8q3tRw2dy6aYQ7svO3ca8jpA== dependencies: node-gyp-build "^3.9.0" From b450c50071c60a1cd9143a3261212a074d05d80f Mon Sep 17 00:00:00 2001 From: Nicolas Savoire Date: Thu, 18 Jan 2024 09:53:52 +0100 Subject: [PATCH 15/21] Disable OOM monitoring by default on Windows (#3974) OOM monitoring currently does not work on Windows (process aborts before being able to send a last profile). This commit disables it by default on Windows. 
--- integration-tests/profiler/profiler.spec.js | 30 +++---- packages/dd-trace/src/profiling/config.js | 23 +++-- .../dd-trace/test/profiling/config.spec.js | 89 +++++++++++-------- 3 files changed, 80 insertions(+), 62 deletions(-) diff --git a/integration-tests/profiler/profiler.spec.js b/integration-tests/profiler/profiler.spec.js index 40efb12fbc1..05ae00bbd86 100644 --- a/integration-tests/profiler/profiler.spec.js +++ b/integration-tests/profiler/profiler.spec.js @@ -360,9 +360,7 @@ describe('profiler', () => { }) return checkProfiles(agent, proc, timeout, ['space'], true) }) - } - if (process.platform !== 'win32') { // PROF-8905 it('sends a heap profile on OOM with external process and exits successfully', async () => { proc = fork(oomTestFile, { cwd, @@ -375,23 +373,21 @@ describe('profiler', () => { }) return checkProfiles(agent, proc, timeout, ['space'], false, 2) }) - } - it('sends a heap profile on OOM with async callback', async () => { - proc = fork(oomTestFile, { - cwd, - execArgv: oomExecArgv, - env: { - ...oomEnv, - DD_PROFILING_EXPERIMENTAL_OOM_HEAP_LIMIT_EXTENSION_SIZE: 10000000, - DD_PROFILING_EXPERIMENTAL_OOM_MAX_HEAP_EXTENSION_COUNT: 1, - DD_PROFILING_EXPERIMENTAL_OOM_EXPORT_STRATEGIES: 'async' - } + it('sends a heap profile on OOM with async callback', async () => { + proc = fork(oomTestFile, { + cwd, + execArgv: oomExecArgv, + env: { + ...oomEnv, + DD_PROFILING_EXPERIMENTAL_OOM_HEAP_LIMIT_EXTENSION_SIZE: 10000000, + DD_PROFILING_EXPERIMENTAL_OOM_MAX_HEAP_EXTENSION_COUNT: 1, + DD_PROFILING_EXPERIMENTAL_OOM_EXPORT_STRATEGIES: 'async' + } + }) + return checkProfiles(agent, proc, timeout, ['space'], true) }) - return checkProfiles(agent, proc, timeout, ['space'], true) - }) - if (process.platform !== 'win32') { // PROF-8905 it('sends heap profiles on OOM with multiple strategies', async () => { proc = fork(oomTestFile, { cwd, @@ -405,9 +401,7 @@ describe('profiler', () => { }) return checkProfiles(agent, proc, timeout, ['space'], true, 2) }) - } 
- if (process.platform !== 'win32') { // PROF-8905 it('sends a heap profile on OOM in worker thread and exits successfully', async () => { proc = fork(oomTestFile, [1, 50], { cwd, diff --git a/packages/dd-trace/src/profiling/config.js b/packages/dd-trace/src/profiling/config.js index 1ea1ec191ee..f78ab77b994 100644 --- a/packages/dd-trace/src/profiling/config.js +++ b/packages/dd-trace/src/profiling/config.js @@ -97,11 +97,15 @@ class Config { // depending on those (code hotspots and endpoint collection) need to default // to false on Windows. const samplingContextsAvailable = process.platform !== 'win32' - function checkOptionAllowed (option, description) { - if (option && !samplingContextsAvailable) { + function checkOptionAllowed (option, description, condition) { + if (option && !condition) { throw new Error(`${description} not supported on ${process.platform}.`) } } + function checkOptionWithSamplingContextAllowed (option, description) { + checkOptionAllowed(option, description, samplingContextsAvailable) + } + this.flushInterval = flushInterval this.uploadTimeout = uploadTimeout this.sourceMap = sourceMap @@ -110,7 +114,7 @@ class Config { DD_PROFILING_ENDPOINT_COLLECTION_ENABLED, DD_PROFILING_EXPERIMENTAL_ENDPOINT_COLLECTION_ENABLED, samplingContextsAvailable)) logExperimentalVarDeprecation('ENDPOINT_COLLECTION_ENABLED') - checkOptionAllowed(this.endpointCollectionEnabled, 'Endpoint collection') + checkOptionWithSamplingContextAllowed(this.endpointCollectionEnabled, 'Endpoint collection') this.pprofPrefix = pprofPrefix this.v8ProfilerBugWorkaroundEnabled = isTrue(coalesce(options.v8ProfilerBugWorkaround, @@ -127,8 +131,13 @@ class Config { new AgentExporter(this) ], this) + // OOM monitoring does not work well on Windows, so it is disabled by default. 
+ const oomMonitoringSupported = process.platform !== 'win32' + const oomMonitoringEnabled = isTrue(coalesce(options.oomMonitoring, - DD_PROFILING_EXPERIMENTAL_OOM_MONITORING_ENABLED, true)) + DD_PROFILING_EXPERIMENTAL_OOM_MONITORING_ENABLED, oomMonitoringSupported)) + checkOptionWithSamplingContextAllowed(oomMonitoringEnabled, 'OOM monitoring', oomMonitoringSupported) + const heapLimitExtensionSize = coalesce(options.oomHeapLimitExtensionSize, Number(DD_PROFILING_EXPERIMENTAL_OOM_HEAP_LIMIT_EXTENSION_SIZE), 0) const maxHeapExtensionCount = coalesce(options.oomMaxHeapExtensionCount, @@ -158,17 +167,17 @@ class Config { DD_PROFILING_TIMELINE_ENABLED, DD_PROFILING_EXPERIMENTAL_TIMELINE_ENABLED, false)) logExperimentalVarDeprecation('TIMELINE_ENABLED') - checkOptionAllowed(this.timelineEnabled, 'Timeline view') + checkOptionWithSamplingContextAllowed(this.timelineEnabled, 'Timeline view') this.codeHotspotsEnabled = isTrue(coalesce(options.codeHotspotsEnabled, DD_PROFILING_CODEHOTSPOTS_ENABLED, DD_PROFILING_EXPERIMENTAL_CODEHOTSPOTS_ENABLED, samplingContextsAvailable)) logExperimentalVarDeprecation('CODEHOTSPOTS_ENABLED') - checkOptionAllowed(this.codeHotspotsEnabled, 'Code hotspots') + checkOptionWithSamplingContextAllowed(this.codeHotspotsEnabled, 'Code hotspots') this.cpuProfilingEnabled = isTrue(coalesce(options.cpuProfilingEnabled, DD_PROFILING_EXPERIMENTAL_CPU_ENABLED, false)) - checkOptionAllowed(this.cpuProfilingEnabled, 'CPU profiling') + checkOptionWithSamplingContextAllowed(this.cpuProfilingEnabled, 'CPU profiling') this.profilers = ensureProfilers(profilers, this) } diff --git a/packages/dd-trace/test/profiling/config.spec.js b/packages/dd-trace/test/profiling/config.spec.js index 4f2943db0c8..6c741557d96 100644 --- a/packages/dd-trace/test/profiling/config.spec.js +++ b/packages/dd-trace/test/profiling/config.spec.js @@ -12,6 +12,7 @@ const SpaceProfiler = require('../../src/profiling/profilers/space') const { ConsoleLogger } = 
require('../../src/profiling/loggers/console') const samplingContextsAvailable = process.platform !== 'win32' +const oomMonitoringSupported = process.platform !== 'win32' describe('config', () => { let Config @@ -250,12 +251,12 @@ describe('config', () => { expect(config.profilers[0].endpointCollectionEnabled()).false }) - function optionOnlyWorksWithSamplingContexts (property, name) { + function optionOnlyWorksWithGivenCondition (property, name, condition) { const options = { [property]: true } - if (samplingContextsAvailable) { + if (condition) { // should silently succeed // eslint-disable-next-line no-new new Config(options) @@ -266,6 +267,10 @@ describe('config', () => { } } + function optionOnlyWorksWithSamplingContexts (property, name) { + optionOnlyWorksWithGivenCondition(property, name, samplingContextsAvailable) + } + it('should only allow code hotspots on supported platforms', () => { optionOnlyWorksWithSamplingContexts('codeHotspotsEnabled', 'Code hotspots') }) @@ -282,6 +287,10 @@ describe('config', () => { optionOnlyWorksWithSamplingContexts('timelineEnabled', 'Timeline view') }) + it('should only allow OOM monitoring on supported platforms', () => { + optionOnlyWorksWithGivenCondition('oomMonitoring', 'OOM monitoring', oomMonitoringSupported) + }) + it('should support tags', () => { const tags = { env: 'dev' @@ -349,43 +358,49 @@ describe('config', () => { it('should enable OOM heap profiler by default and use process as default strategy', () => { const config = new Config() - expect(config.oomMonitoring).to.deep.equal({ - enabled: true, - heapLimitExtensionSize: 0, - maxHeapExtensionCount: 0, - exportStrategies: ['process'], - exportCommand: [ - process.execPath, - path.normalize(path.join(__dirname, '../../src/profiling', 'exporter_cli.js')), - 'http://localhost:8126/', - `host:${config.host},service:node,snapshot:on_oom`, - 'space' - ] - }) - }) - - it('should support OOM heap profiler configuration', () => { - process.env = { - 
DD_PROFILING_EXPERIMENTAL_OOM_MONITORING_ENABLED: '1', - DD_PROFILING_EXPERIMENTAL_OOM_HEAP_LIMIT_EXTENSION_SIZE: '1000000', - DD_PROFILING_EXPERIMENTAL_OOM_MAX_HEAP_EXTENSION_COUNT: '2', - DD_PROFILING_EXPERIMENTAL_OOM_EXPORT_STRATEGIES: 'process,async,process' + if (oomMonitoringSupported) { + expect(config.oomMonitoring).to.deep.equal({ + enabled: oomMonitoringSupported, + heapLimitExtensionSize: 0, + maxHeapExtensionCount: 0, + exportStrategies: ['process'], + exportCommand: [ + process.execPath, + path.normalize(path.join(__dirname, '../../src/profiling', 'exporter_cli.js')), + 'http://localhost:8126/', + `host:${config.host},service:node,snapshot:on_oom`, + 'space' + ] + }) + } else { + expect(config.oomMonitoring.enabled).to.be.false } + }) - const config = new Config({}) + if (oomMonitoringSupported) { + it('should support OOM heap profiler configuration', function () { + process.env = { + DD_PROFILING_EXPERIMENTAL_OOM_MONITORING_ENABLED: '1', + DD_PROFILING_EXPERIMENTAL_OOM_HEAP_LIMIT_EXTENSION_SIZE: '1000000', + DD_PROFILING_EXPERIMENTAL_OOM_MAX_HEAP_EXTENSION_COUNT: '2', + DD_PROFILING_EXPERIMENTAL_OOM_EXPORT_STRATEGIES: 'process,async,process' + } - expect(config.oomMonitoring).to.deep.equal({ - enabled: true, - heapLimitExtensionSize: 1000000, - maxHeapExtensionCount: 2, - exportStrategies: ['process', 'async'], - exportCommand: [ - process.execPath, - path.normalize(path.join(__dirname, '../../src/profiling', 'exporter_cli.js')), - 'http://localhost:8126/', - `host:${config.host},service:node,snapshot:on_oom`, - 'space' - ] + const config = new Config({}) + + expect(config.oomMonitoring).to.deep.equal({ + enabled: true, + heapLimitExtensionSize: 1000000, + maxHeapExtensionCount: 2, + exportStrategies: ['process', 'async'], + exportCommand: [ + process.execPath, + path.normalize(path.join(__dirname, '../../src/profiling', 'exporter_cli.js')), + 'http://localhost:8126/', + `host:${config.host},service:node,snapshot:on_oom`, + 'space' + ] + }) }) - }) + 
} }) From 0d02c07fdbe2668242d8b01eb48fbfd5ffdba96c Mon Sep 17 00:00:00 2001 From: Carles Capell <107924659+CarlesDD@users.noreply.github.com> Date: Thu, 18 Jan 2024 10:39:48 +0100 Subject: [PATCH 16/21] Add support for weak randomness vulnerability (#3960) * Weak randomness vulnerability detection * Checking Math.random fn instead of target object * Include location check in full feature test --- .../src/appsec/iast/analyzers/analyzers.js | 1 + .../analyzers/weak-randomness-analyzer.js | 19 +++ .../appsec/iast/taint-tracking/csi-methods.js | 1 + .../taint-tracking/taint-tracking-impl.js | 13 +- .../src/appsec/iast/vulnerabilities.js | 1 + .../analyzers/resources/random-functions.js | 24 ++++ .../weak-randomness-analyzer.spec.js | 115 ++++++++++++++++++ 7 files changed, 173 insertions(+), 1 deletion(-) create mode 100644 packages/dd-trace/src/appsec/iast/analyzers/weak-randomness-analyzer.js create mode 100644 packages/dd-trace/test/appsec/iast/analyzers/resources/random-functions.js create mode 100644 packages/dd-trace/test/appsec/iast/analyzers/weak-randomness-analyzer.spec.js diff --git a/packages/dd-trace/src/appsec/iast/analyzers/analyzers.js b/packages/dd-trace/src/appsec/iast/analyzers/analyzers.js index 7152d07458f..31dd4baf611 100644 --- a/packages/dd-trace/src/appsec/iast/analyzers/analyzers.js +++ b/packages/dd-trace/src/appsec/iast/analyzers/analyzers.js @@ -16,5 +16,6 @@ module.exports = { 'UNVALIDATED_REDIRECT_ANALYZER': require('./unvalidated-redirect-analyzer'), 'WEAK_CIPHER_ANALYZER': require('./weak-cipher-analyzer'), 'WEAK_HASH_ANALYZER': require('./weak-hash-analyzer'), + 'WEAK_RANDOMNESS_ANALYZER': require('./weak-randomness-analyzer'), 'XCONTENTTYPE_HEADER_MISSING_ANALYZER': require('./xcontenttype-header-missing-analyzer') } diff --git a/packages/dd-trace/src/appsec/iast/analyzers/weak-randomness-analyzer.js b/packages/dd-trace/src/appsec/iast/analyzers/weak-randomness-analyzer.js new file mode 100644 index 00000000000..b150d172325 --- 
/dev/null +++ b/packages/dd-trace/src/appsec/iast/analyzers/weak-randomness-analyzer.js @@ -0,0 +1,19 @@ +'use strict' +const Analyzer = require('./vulnerability-analyzer') +const { WEAK_RANDOMNESS } = require('../vulnerabilities') + +class WeakRandomnessAnalyzer extends Analyzer { + constructor () { + super(WEAK_RANDOMNESS) + } + + onConfigure () { + this.addSub('datadog:random:call', ({ fn }) => this.analyze(fn)) + } + + _isVulnerable (fn) { + return fn === Math.random + } +} + +module.exports = new WeakRandomnessAnalyzer() diff --git a/packages/dd-trace/src/appsec/iast/taint-tracking/csi-methods.js b/packages/dd-trace/src/appsec/iast/taint-tracking/csi-methods.js index 39dd4378590..3d0f8d74e7d 100644 --- a/packages/dd-trace/src/appsec/iast/taint-tracking/csi-methods.js +++ b/packages/dd-trace/src/appsec/iast/taint-tracking/csi-methods.js @@ -3,6 +3,7 @@ const csiMethods = [ { src: 'concat' }, { src: 'plusOperator', operator: true }, + { src: 'random' }, { src: 'replace' }, { src: 'slice' }, { src: 'substr' }, diff --git a/packages/dd-trace/src/appsec/iast/taint-tracking/taint-tracking-impl.js b/packages/dd-trace/src/appsec/iast/taint-tracking/taint-tracking-impl.js index 0ea3f41af78..1a3cdc105e4 100644 --- a/packages/dd-trace/src/appsec/iast/taint-tracking/taint-tracking-impl.js +++ b/packages/dd-trace/src/appsec/iast/taint-tracking/taint-tracking-impl.js @@ -1,5 +1,6 @@ 'use strict' +const dc = require('dc-polyfill') const TaintedUtils = require('@datadog/native-iast-taint-tracking') const { storage } = require('../../../../../datadog-core') const iastContextFunctions = require('../iast-context') @@ -7,12 +8,15 @@ const iastLog = require('../iast-log') const { EXECUTED_PROPAGATION } = require('../telemetry/iast-metric') const { isDebugAllowed } = require('../telemetry/verbosity') +const mathRandomCallCh = dc.channel('datadog:random:call') + function noop (res) { return res } // NOTE: methods of this object must be synchronized with csi-methods.js file 
definitions! // Otherwise you may end up rewriting a method and not providing its rewritten implementation const TaintTrackingNoop = { plusOperator: noop, concat: noop, + random: noop, replace: noop, slice: noop, substr: noop, @@ -110,7 +114,14 @@ function csiMethodsOverrides (getContext) { getContext, String.prototype.trim, String.prototype.trimStart - ) + ), + + random: function (res, fn) { + if (mathRandomCallCh.hasSubscribers) { + mathRandomCallCh.publish({ fn }) + } + return res + } } } diff --git a/packages/dd-trace/src/appsec/iast/vulnerabilities.js b/packages/dd-trace/src/appsec/iast/vulnerabilities.js index a248b50c632..60f5841e839 100644 --- a/packages/dd-trace/src/appsec/iast/vulnerabilities.js +++ b/packages/dd-trace/src/appsec/iast/vulnerabilities.js @@ -14,5 +14,6 @@ module.exports = { UNVALIDATED_REDIRECT: 'UNVALIDATED_REDIRECT', WEAK_CIPHER: 'WEAK_CIPHER', WEAK_HASH: 'WEAK_HASH', + WEAK_RANDOMNESS: 'WEAK_RANDOMNESS', XCONTENTTYPE_HEADER_MISSING: 'XCONTENTTYPE_HEADER_MISSING' } diff --git a/packages/dd-trace/test/appsec/iast/analyzers/resources/random-functions.js b/packages/dd-trace/test/appsec/iast/analyzers/resources/random-functions.js new file mode 100644 index 00000000000..f608645242d --- /dev/null +++ b/packages/dd-trace/test/appsec/iast/analyzers/resources/random-functions.js @@ -0,0 +1,24 @@ +function weakRandom () { + return Math.random() +} + +function safeRandom () { + const { randomBytes } = require('node:crypto') + return randomBytes(256) +} + +function customRandom () { + const Math = { + random: function () { + return 4 // chosen by fair dice roll - guaranteed to be random + } + } + + return Math.random() +} + +module.exports = { + weakRandom, + safeRandom, + customRandom +} diff --git a/packages/dd-trace/test/appsec/iast/analyzers/weak-randomness-analyzer.spec.js b/packages/dd-trace/test/appsec/iast/analyzers/weak-randomness-analyzer.spec.js new file mode 100644 index 00000000000..a80c257760a --- /dev/null +++ 
b/packages/dd-trace/test/appsec/iast/analyzers/weak-randomness-analyzer.spec.js @@ -0,0 +1,115 @@ +'use strict' + +const fs = require('fs') +const os = require('os') +const path = require('path') +const proxyquire = require('proxyquire') + +const { prepareTestServerForIast } = require('../utils') +const { clearCache } = require('../../../../src/appsec/iast/vulnerability-reporter') +const weakRandomnessAnalyzer = require('../../../../src/appsec/iast/analyzers/weak-randomness-analyzer') + +describe('weak-randomness-analyzer', () => { + weakRandomnessAnalyzer.configure(true) + + it('should subscribe to Math random call channel', () => { + expect(weakRandomnessAnalyzer._subscriptions).to.have.lengthOf(1) + expect(weakRandomnessAnalyzer._subscriptions[0]._channel.name).to.equals('datadog:random:call') + }) + + it('should detect Math.random as vulnerable', () => { + const isVulnerable = weakRandomnessAnalyzer._isVulnerable(Math.random) + expect(isVulnerable).to.be.true + }) + + it('should not detect custom random as vulnerable', () => { + function random () { + return 4 // chosen by fair dice roll - guaranteed to be random + } + const isVulnerable = weakRandomnessAnalyzer._isVulnerable(random) + expect(isVulnerable).to.be.false + }) + + it('should not detect vulnerability when checking empty object', () => { + const isVulnerable = weakRandomnessAnalyzer._isVulnerable({}) + expect(isVulnerable).to.be.false + }) + + it('should not detect vulnerability when no target', () => { + const isVulnerable = weakRandomnessAnalyzer._isVulnerable() + expect(isVulnerable).to.be.false + }) + + it('should report "WEAK_RANDOMNESS" vulnerability', () => { + const addVulnerability = sinon.stub() + const iastContext = { + rootSpan: { + context () { + return { + toSpanId () { + return '123' + } + } + } + } + } + const ProxyAnalyzer = proxyquire('../../../../src/appsec/iast/analyzers/vulnerability-analyzer', { + '../iast-context': { + getIastContext: () => iastContext + }, + 
'../overhead-controller': { hasQuota: () => true }, + '../vulnerability-reporter': { addVulnerability } + }) + const proxiedWeakRandomnessAnalyzer = proxyquire('../../../../src/appsec/iast/analyzers/weak-randomness-analyzer', + { + './vulnerability-analyzer': ProxyAnalyzer + }) + proxiedWeakRandomnessAnalyzer.analyze(Math.random) + expect(addVulnerability).to.have.been.calledOnce + expect(addVulnerability).to.have.been.calledWithMatch({}, { type: 'WEAK_RANDOMNESS' }) + }) + + describe('Math.random instrumentation', () => { + const randomFunctionsPath = path.join(os.tmpdir(), 'random-functions.js') + + beforeEach(() => { + fs.copyFileSync( + path.join(__dirname, 'resources', 'random-functions.js'), + randomFunctionsPath + ) + }) + + afterEach(() => { + fs.unlinkSync(randomFunctionsPath) + clearCache() + }) + + prepareTestServerForIast('full feature', (testThatRequestHasVulnerability, testThatRequestHasNoVulnerability) => { + describe('should detect weak randomness when calling Math.random', () => { + testThatRequestHasVulnerability(() => { + require(randomFunctionsPath).weakRandom() + }, + 'WEAK_RANDOMNESS', + { + occurrences: 1, + location: { + path: randomFunctionsPath, + line: 2 + } + }) + }) + + describe('should not detect weak randomness when calling safe random function', () => { + testThatRequestHasNoVulnerability(() => { + require(randomFunctionsPath).safeRandom() + }, 'WEAK_RANDOMNESS') + }) + + describe('should not detect weak randomness when calling custom random function', () => { + testThatRequestHasNoVulnerability(() => { + require(randomFunctionsPath).customRandom() + }, 'WEAK_RANDOMNESS') + }) + }) + }) +}) From 21b3ddce12f39c4ab664dfff75aede7afd8cebe6 Mon Sep 17 00:00:00 2001 From: Roch Devost Date: Thu, 18 Jan 2024 12:23:44 -0500 Subject: [PATCH 17/21] update import-in-the-middle to 1.7.3 (#3975) --- package.json | 2 +- yarn.lock | 8 ++++---- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/package.json b/package.json index 
bb364080b96..ad079fbfe3a 100644 --- a/package.json +++ b/package.json @@ -80,7 +80,7 @@ "crypto-randomuuid": "^1.0.0", "dc-polyfill": "^0.1.2", "ignore": "^5.2.4", - "import-in-the-middle": "^1.7.1", + "import-in-the-middle": "^1.7.3", "int64-buffer": "^0.1.9", "ipaddr.js": "^2.1.0", "istanbul-lib-coverage": "3.2.0", diff --git a/yarn.lock b/yarn.lock index f175707edf4..280fe06f182 100644 --- a/yarn.lock +++ b/yarn.lock @@ -2851,10 +2851,10 @@ import-fresh@^3.0.0, import-fresh@^3.2.1: parent-module "^1.0.0" resolve-from "^4.0.0" -import-in-the-middle@^1.7.1: - version "1.7.2" - resolved "https://registry.yarnpkg.com/import-in-the-middle/-/import-in-the-middle-1.7.2.tgz#31c44088271b50ecb9cacbdfb1e5732c802e0658" - integrity sha512-coz7AjRnPyKW36J6JX5Bjz1mcX7MX1H2XsEGseVcnXMdzsAbbAu0HBZhiAem+3SAmuZdi+p8OwoB2qUpTRgjOQ== +import-in-the-middle@^1.7.3: + version "1.7.3" + resolved "https://registry.yarnpkg.com/import-in-the-middle/-/import-in-the-middle-1.7.3.tgz#ffa784cdd57a47d2b68d2e7dd33070ff06baee43" + integrity sha512-R2I11NRi0lI3jD2+qjqyVlVEahsejw7LDnYEbGb47QEFjczE3bZYsmWheCTQA+LFs2DzOQxR7Pms7naHW1V4bQ== dependencies: acorn "^8.8.2" acorn-import-assertions "^1.9.0" From f6de4bbe9d32c93f77098f00c513db827d36a9cb Mon Sep 17 00:00:00 2001 From: Nicolas Savoire Date: Mon, 22 Jan 2024 09:28:18 +0100 Subject: [PATCH 18/21] Fix bad copy/paste (#3979) OOM monitoring option should be checked with checkOptionAllowed / oomMonitoringSupported. 
--- packages/dd-trace/src/profiling/config.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/dd-trace/src/profiling/config.js b/packages/dd-trace/src/profiling/config.js index f78ab77b994..513c25b9329 100644 --- a/packages/dd-trace/src/profiling/config.js +++ b/packages/dd-trace/src/profiling/config.js @@ -136,7 +136,7 @@ class Config { const oomMonitoringEnabled = isTrue(coalesce(options.oomMonitoring, DD_PROFILING_EXPERIMENTAL_OOM_MONITORING_ENABLED, oomMonitoringSupported)) - checkOptionWithSamplingContextAllowed(oomMonitoringEnabled, 'OOM monitoring', oomMonitoringSupported) + checkOptionAllowed(oomMonitoringEnabled, 'OOM monitoring', oomMonitoringSupported) const heapLimitExtensionSize = coalesce(options.oomHeapLimitExtensionSize, Number(DD_PROFILING_EXPERIMENTAL_OOM_HEAP_LIMIT_EXTENSION_SIZE), 0) From 9f8121e29d1e2ef764ff1500fd73de114e0baa9d Mon Sep 17 00:00:00 2001 From: Igor Unanua Date: Mon, 22 Jan 2024 16:45:02 +0100 Subject: [PATCH 19/21] Include x-amzn-trace-id header and a test (#3984) --- packages/dd-trace/src/appsec/reporter.js | 3 ++- packages/dd-trace/test/appsec/reporter.spec.js | 11 +++++++++++ 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/packages/dd-trace/src/appsec/reporter.js b/packages/dd-trace/src/appsec/reporter.js index 409b96fa85b..4d5421e2823 100644 --- a/packages/dd-trace/src/appsec/reporter.js +++ b/packages/dd-trace/src/appsec/reporter.js @@ -29,9 +29,10 @@ const REQUEST_HEADERS_MAP = mapHeaderAndTags([ 'accept-encoding', 'accept-language', 'host', - 'user-agent', 'forwarded', + 'user-agent', 'via', + 'x-amzn-trace-id', ...ipHeaderList, ...contentHeaderList diff --git a/packages/dd-trace/test/appsec/reporter.spec.js b/packages/dd-trace/test/appsec/reporter.spec.js index 6cb435651c6..beaec71e3a6 100644 --- a/packages/dd-trace/test/appsec/reporter.spec.js +++ b/packages/dd-trace/test/appsec/reporter.spec.js @@ -271,6 +271,17 @@ describe('reporter', () => { 'network.client.ip': '8.8.8.8' }) }) 
+ + it('should include x-amzn-trace-id header', () => { + req.headers['x-amzn-trace-id'] = 'aws-id' + + const result = Reporter.reportAttack('[{"rule":{},"rule_matches":[{}]}]') + + expect(result).to.not.be.false + expect(span.addTags).to.have.been.calledOnce + expect(span.addTags.firstCall.args[0]).to.not.undefined + expect(span.addTags.firstCall.args[0]['http.request.headers.x-amzn-trace-id']).to.be.eq('aws-id') + }) }) describe('reportWafUpdate', () => { From cd25e89fc788a0830d3e17b2c71ad1a58cc18a65 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Juan=20Antonio=20Fern=C3=A1ndez=20de=20Alba?= Date: Tue, 23 Jan 2024 15:18:58 +0100 Subject: [PATCH 20/21] [ci-visibility] Add correlation ID to ITR requests (#3980) --- integration-tests/ci-visibility-intake.js | 11 +- integration-tests/ci-visibility.spec.js | 50 +++++++++ integration-tests/cucumber/cucumber.spec.js | 26 +++++ integration-tests/cypress/cypress.spec.js | 34 ++++++ .../datadog-instrumentations/src/cucumber.js | 4 +- packages/datadog-instrumentations/src/jest.js | 1 + .../datadog-instrumentations/src/mocha.js | 11 +- packages/datadog-plugin-cucumber/src/index.js | 8 +- packages/datadog-plugin-cypress/src/plugin.js | 103 ++++++++++-------- packages/datadog-plugin-jest/src/index.js | 8 +- packages/datadog-plugin-mocha/src/index.js | 13 ++- .../get-skippable-suites.js | 6 +- .../src/encode/agentless-ci-visibility.js | 27 ++++- packages/dd-trace/src/plugins/ci_plugin.js | 12 +- packages/dd-trace/src/plugins/util/test.js | 2 + .../exporters/ci-visibility-exporter.spec.js | 3 + 16 files changed, 257 insertions(+), 62 deletions(-) diff --git a/integration-tests/ci-visibility-intake.js b/integration-tests/ci-visibility-intake.js index 2efbba2de03..e7c4157846b 100644 --- a/integration-tests/ci-visibility-intake.js +++ b/integration-tests/ci-visibility-intake.js @@ -19,11 +19,13 @@ const DEFAULT_GIT_UPLOAD_STATUS = 200 const DEFAULT_INFO_RESPONSE = { endpoints: ['/evp_proxy/v2'] } +const DEFAULT_CORRELATION_ID = '1234' 
let settings = DEFAULT_SETTINGS let suitesToSkip = DEFAULT_SUITES_TO_SKIP let gitUploadStatus = DEFAULT_GIT_UPLOAD_STATUS let infoResponse = DEFAULT_INFO_RESPONSE +let correlationId = DEFAULT_CORRELATION_ID class FakeCiVisIntake extends FakeAgent { setInfoResponse (newInfoResponse) { @@ -38,6 +40,10 @@ class FakeCiVisIntake extends FakeAgent { suitesToSkip = newSuitesToSkip } + setItrCorrelationId (newCorrelationId) { + correlationId = newCorrelationId + } + setSettings (newSettings) { settings = newSettings } @@ -140,7 +146,10 @@ class FakeCiVisIntake extends FakeAgent { '/evp_proxy/v2/api/v2/ci/tests/skippable' ], (req, res) => { res.status(200).send(JSON.stringify({ - data: suitesToSkip + data: suitesToSkip, + meta: { + correlation_id: correlationId + } })) this.emit('message', { headers: req.headers, diff --git a/integration-tests/ci-visibility.spec.js b/integration-tests/ci-visibility.spec.js index 1d62c4adf86..8efc274a2cc 100644 --- a/integration-tests/ci-visibility.spec.js +++ b/integration-tests/ci-visibility.spec.js @@ -1151,6 +1151,31 @@ testFrameworks.forEach(({ }).catch(done) }) }) + it('reports itr_correlation_id in test suites', (done) => { + const itrCorrelationId = '4321' + receiver.setItrCorrelationId(itrCorrelationId) + const eventsPromise = receiver + .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), (payloads) => { + const events = payloads.flatMap(({ payload }) => payload.events) + const testSuites = events.filter(event => event.type === 'test_suite_end').map(event => event.content) + testSuites.forEach(testSuite => { + assert.equal(testSuite.itr_correlation_id, itrCorrelationId) + }) + }, 25000) + childProcess = exec( + runTestsWithCoverageCommand, + { + cwd, + env: getCiVisAgentlessConfig(receiver.port), + stdio: 'inherit' + } + ) + childProcess.on('exit', () => { + eventsPromise.then(() => { + done() + }).catch(done) + }) + }) }) describe('evp proxy', () => { @@ -1602,6 +1627,31 @@ testFrameworks.forEach(({ 
}).catch(done) }) }) + it('reports itr_correlation_id in test suites', (done) => { + const itrCorrelationId = '4321' + receiver.setItrCorrelationId(itrCorrelationId) + const eventsPromise = receiver + .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), (payloads) => { + const events = payloads.flatMap(({ payload }) => payload.events) + const testSuites = events.filter(event => event.type === 'test_suite_end').map(event => event.content) + testSuites.forEach(testSuite => { + assert.equal(testSuite.itr_correlation_id, itrCorrelationId) + }) + }, 25000) + childProcess = exec( + runTestsWithCoverageCommand, + { + cwd, + env: getCiVisEvpProxyConfig(receiver.port), + stdio: 'inherit' + } + ) + childProcess.on('exit', () => { + eventsPromise.then(() => { + done() + }).catch(done) + }) + }) }) }) }) diff --git a/integration-tests/cucumber/cucumber.spec.js b/integration-tests/cucumber/cucumber.spec.js index dfdbf13ebb6..d27b6c377a2 100644 --- a/integration-tests/cucumber/cucumber.spec.js +++ b/integration-tests/cucumber/cucumber.spec.js @@ -766,6 +766,32 @@ versions.forEach(version => { }) }) } + it('reports itr_correlation_id in test suites', (done) => { + const itrCorrelationId = '4321' + receiver.setItrCorrelationId(itrCorrelationId) + const eventsPromise = receiver + .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), (payloads) => { + const events = payloads.flatMap(({ payload }) => payload.events) + const testSuites = events.filter(event => event.type === 'test_suite_end').map(event => event.content) + testSuites.forEach(testSuite => { + assert.equal(testSuite.itr_correlation_id, itrCorrelationId) + }) + }, 25000) + + childProcess = exec( + runTestsWithCoverageCommand, + { + cwd, + env: envVars, + stdio: 'inherit' + } + ) + childProcess.on('exit', () => { + eventsPromise.then(() => { + done() + }).catch(done) + }) + }) }) }) }) diff --git a/integration-tests/cypress/cypress.spec.js 
b/integration-tests/cypress/cypress.spec.js index b781875bb98..ad297913ef8 100644 --- a/integration-tests/cypress/cypress.spec.js +++ b/integration-tests/cypress/cypress.spec.js @@ -738,6 +738,40 @@ moduleType.forEach(({ }).catch(done) }) }) + it('reports itr_correlation_id in tests', (done) => { + const itrCorrelationId = '4321' + receiver.setItrCorrelationId(itrCorrelationId) + const eventsPromise = receiver + .gatherPayloadsMaxTimeout(({ url }) => url.endsWith('/api/v2/citestcycle'), (payloads) => { + const events = payloads.flatMap(({ payload }) => payload.events) + const tests = events.filter(event => event.type === 'test').map(event => event.content) + tests.forEach(test => { + assert.equal(test.itr_correlation_id, itrCorrelationId) + }) + }, 25000) + + const { + NODE_OPTIONS, + ...restEnvVars + } = getCiVisAgentlessConfig(receiver.port) + + childProcess = exec( + testCommand, + { + cwd, + env: { + ...restEnvVars, + CYPRESS_BASE_URL: `http://localhost:${webAppPort}` + }, + stdio: 'pipe' + } + ) + childProcess.on('exit', () => { + eventsPromise.then(() => { + done() + }).catch(done) + }) + }) }) }) }) diff --git a/packages/datadog-instrumentations/src/cucumber.js b/packages/datadog-instrumentations/src/cucumber.js index b8285cfcda6..d9791d67485 100644 --- a/packages/datadog-instrumentations/src/cucumber.js +++ b/packages/datadog-instrumentations/src/cucumber.js @@ -44,6 +44,7 @@ const patched = new WeakSet() let pickleByFile = {} const pickleResultByFile = {} let skippableSuites = [] +let itrCorrelationId = '' let isForcedToRun = false let isUnskippable = false @@ -102,7 +103,7 @@ function wrapRun (pl, isLatestVersion) { const testSuitePath = getTestSuitePath(testSuiteFullPath, process.cwd()) isForcedToRun = isUnskippable && skippableSuites.includes(testSuitePath) - testSuiteStartCh.publish({ testSuitePath, isUnskippable, isForcedToRun }) + testSuiteStartCh.publish({ testSuitePath, isUnskippable, isForcedToRun, itrCorrelationId }) } const testSourceLine = 
this.gherkinDocument && @@ -304,6 +305,7 @@ addHook({ this.pickleIds = picklesToRun skippedSuites = Array.from(filteredPickles.skippedSuites) + itrCorrelationId = skippableResponse.itrCorrelationId } pickleByFile = getPickleByFile(this) diff --git a/packages/datadog-instrumentations/src/jest.js b/packages/datadog-instrumentations/src/jest.js index 2dd7468ca5d..400b2aaf03e 100644 --- a/packages/datadog-instrumentations/src/jest.js +++ b/packages/datadog-instrumentations/src/jest.js @@ -497,6 +497,7 @@ addHook({ _ddTestCommand, _ddForcedToRun, _ddUnskippable, + _ddItrCorrelationId, ...restOfTestEnvironmentOptions } = testEnvironmentOptions diff --git a/packages/datadog-instrumentations/src/mocha.js b/packages/datadog-instrumentations/src/mocha.js index 462fb42d20f..9438902f797 100644 --- a/packages/datadog-instrumentations/src/mocha.js +++ b/packages/datadog-instrumentations/src/mocha.js @@ -53,6 +53,7 @@ let isSuitesSkipped = false let skippedSuites = [] const unskippableSuites = [] let isForcedToRun = false +let itrCorrelationId = '' function getSuitesByTestFile (root) { const suitesByTestFile = {} @@ -191,7 +192,12 @@ function mochaHook (Runner) { const isUnskippable = unskippableSuites.includes(suite.file) isForcedToRun = isUnskippable && suitesToSkip.includes(getTestSuitePath(suite.file, process.cwd())) asyncResource.runInAsyncScope(() => { - testSuiteStartCh.publish({ testSuite: suite.file, isUnskippable, isForcedToRun }) + testSuiteStartCh.publish({ + testSuite: suite.file, + isUnskippable, + isForcedToRun, + itrCorrelationId + }) }) } }) @@ -395,11 +401,12 @@ addHook({ } }) - const onReceivedSkippableSuites = ({ err, skippableSuites }) => { + const onReceivedSkippableSuites = ({ err, skippableSuites, itrCorrelationId: responseItrCorrelationId }) => { if (err) { suitesToSkip = [] } else { suitesToSkip = skippableSuites + itrCorrelationId = responseItrCorrelationId } // We remove the suites that we skip through ITR const filteredSuites = 
getFilteredSuites(runner.suite.suites) diff --git a/packages/datadog-plugin-cucumber/src/index.js b/packages/datadog-plugin-cucumber/src/index.js index aef4f37eca0..f754981fe29 100644 --- a/packages/datadog-plugin-cucumber/src/index.js +++ b/packages/datadog-plugin-cucumber/src/index.js @@ -13,7 +13,8 @@ const { addIntelligentTestRunnerSpanTags, TEST_ITR_UNSKIPPABLE, TEST_ITR_FORCED_RUN, - TEST_CODE_OWNERS + TEST_CODE_OWNERS, + ITR_CORRELATION_ID } = require('../../dd-trace/src/plugins/util/test') const { RESOURCE_NAME } = require('../../../ext/tags') const { COMPONENT, ERROR_MESSAGE } = require('../../dd-trace/src/constants') @@ -74,7 +75,7 @@ class CucumberPlugin extends CiPlugin { this.tracer._exporter.flush() }) - this.addSub('ci:cucumber:test-suite:start', ({ testSuitePath, isUnskippable, isForcedToRun }) => { + this.addSub('ci:cucumber:test-suite:start', ({ testSuitePath, isUnskippable, isForcedToRun, itrCorrelationId }) => { const testSuiteMetadata = getTestSuiteCommonTags( this.command, this.frameworkVersion, @@ -89,6 +90,9 @@ class CucumberPlugin extends CiPlugin { this.telemetry.count(TELEMETRY_ITR_FORCED_TO_RUN, { testLevel: 'suite' }) testSuiteMetadata[TEST_ITR_FORCED_RUN] = 'true' } + if (itrCorrelationId) { + testSuiteMetadata[ITR_CORRELATION_ID] = itrCorrelationId + } this.testSuiteSpan = this.tracer.startSpan('cucumber.test_suite', { childOf: this.testModuleSpan, tags: { diff --git a/packages/datadog-plugin-cypress/src/plugin.js b/packages/datadog-plugin-cypress/src/plugin.js index 1b97e469c79..e088df48eed 100644 --- a/packages/datadog-plugin-cypress/src/plugin.js +++ b/packages/datadog-plugin-cypress/src/plugin.js @@ -23,7 +23,8 @@ const { addIntelligentTestRunnerSpanTags, TEST_SKIPPED_BY_ITR, TEST_ITR_UNSKIPPABLE, - TEST_ITR_FORCED_RUN + TEST_ITR_FORCED_RUN, + ITR_CORRELATION_ID } = require('../../dd-trace/src/plugins/util/test') const { ORIGIN_KEY, COMPONENT } = require('../../dd-trace/src/constants') const log = require('../../dd-trace/src/log') 
@@ -138,10 +139,11 @@ function getSkippableTests (isSuitesSkippingEnabled, tracer, testConfiguration) if (!tracer._tracer._exporter || !tracer._tracer._exporter.getItrConfiguration) { return resolve({ err: new Error('CI Visibility was not initialized correctly') }) } - tracer._tracer._exporter.getSkippableSuites(testConfiguration, (err, skippableTests) => { + tracer._tracer._exporter.getSkippableSuites(testConfiguration, (err, skippableTests, correlationId) => { resolve({ err, - skippableTests + skippableTests, + correlationId }) }) }) @@ -215,6 +217,7 @@ module.exports = (on, config) => { let isSuitesSkippingEnabled = false let isCodeCoverageEnabled = false let testsToSkip = [] + let itrCorrelationId = '' const unskippableSuites = [] let hasForcedToRunSuites = false let hasUnskippableSuites = false @@ -289,52 +292,54 @@ module.exports = (on, config) => { isCodeCoverageEnabled = itrConfig.isCodeCoverageEnabled } - return getSkippableTests(isSuitesSkippingEnabled, tracer, testConfiguration).then(({ err, skippableTests }) => { - if (err) { - log.error(err) - } else { - testsToSkip = skippableTests || [] - } - - // `details.specs` are test files - details.specs.forEach(({ absolute, relative }) => { - const isUnskippableSuite = isMarkedAsUnskippable({ path: absolute }) - if (isUnskippableSuite) { - unskippableSuites.push(relative) + return getSkippableTests(isSuitesSkippingEnabled, tracer, testConfiguration) + .then(({ err, skippableTests, correlationId }) => { + if (err) { + log.error(err) + } else { + testsToSkip = skippableTests || [] + itrCorrelationId = correlationId } - }) - - const childOf = getTestParentSpan(tracer) - rootDir = getRootDir(details) - - command = getCypressCommand(details) - frameworkVersion = getCypressVersion(details) - - const testSessionSpanMetadata = getTestSessionCommonTags(command, frameworkVersion, TEST_FRAMEWORK_NAME) - const testModuleSpanMetadata = getTestModuleCommonTags(command, frameworkVersion, TEST_FRAMEWORK_NAME) - 
testSessionSpan = tracer.startSpan(`${TEST_FRAMEWORK_NAME}.test_session`, { - childOf, - tags: { - [COMPONENT]: TEST_FRAMEWORK_NAME, - ...testEnvironmentMetadata, - ...testSessionSpanMetadata - } - }) - ciVisEvent(TELEMETRY_EVENT_CREATED, 'session') - - testModuleSpan = tracer.startSpan(`${TEST_FRAMEWORK_NAME}.test_module`, { - childOf: testSessionSpan, - tags: { - [COMPONENT]: TEST_FRAMEWORK_NAME, - ...testEnvironmentMetadata, - ...testModuleSpanMetadata - } + // `details.specs` are test files + details.specs.forEach(({ absolute, relative }) => { + const isUnskippableSuite = isMarkedAsUnskippable({ path: absolute }) + if (isUnskippableSuite) { + unskippableSuites.push(relative) + } + }) + + const childOf = getTestParentSpan(tracer) + rootDir = getRootDir(details) + + command = getCypressCommand(details) + frameworkVersion = getCypressVersion(details) + + const testSessionSpanMetadata = getTestSessionCommonTags(command, frameworkVersion, TEST_FRAMEWORK_NAME) + const testModuleSpanMetadata = getTestModuleCommonTags(command, frameworkVersion, TEST_FRAMEWORK_NAME) + + testSessionSpan = tracer.startSpan(`${TEST_FRAMEWORK_NAME}.test_session`, { + childOf, + tags: { + [COMPONENT]: TEST_FRAMEWORK_NAME, + ...testEnvironmentMetadata, + ...testSessionSpanMetadata + } + }) + ciVisEvent(TELEMETRY_EVENT_CREATED, 'session') + + testModuleSpan = tracer.startSpan(`${TEST_FRAMEWORK_NAME}.test_module`, { + childOf: testSessionSpan, + tags: { + [COMPONENT]: TEST_FRAMEWORK_NAME, + ...testEnvironmentMetadata, + ...testModuleSpanMetadata + } + }) + ciVisEvent(TELEMETRY_EVENT_CREATED, 'module') + + return details }) - ciVisEvent(TELEMETRY_EVENT_CREATED, 'module') - - return details - }) }) }) on('after:spec', (spec, { tests, stats }) => { @@ -358,6 +363,9 @@ module.exports = (on, config) => { if (isSkippedByItr) { skippedTestSpan.setTag(TEST_SKIPPED_BY_ITR, 'true') } + if (itrCorrelationId) { + skippedTestSpan.setTag(ITR_CORRELATION_ID, itrCorrelationId) + } skippedTestSpan.finish() }) 
@@ -379,6 +387,9 @@ module.exports = (on, config) => { finishedTest.testSpan.setTag(TEST_STATUS, cypressTestStatus) finishedTest.testSpan.setTag('error', latestError) } + if (itrCorrelationId) { + finishedTest.testSpan.setTag(ITR_CORRELATION_ID, itrCorrelationId) + } finishedTest.testSpan.finish(finishedTest.finishTime) }) diff --git a/packages/datadog-plugin-jest/src/index.js b/packages/datadog-plugin-jest/src/index.js index d5629ed1b62..3ee4773ac27 100644 --- a/packages/datadog-plugin-jest/src/index.js +++ b/packages/datadog-plugin-jest/src/index.js @@ -13,7 +13,8 @@ const { TEST_SOURCE_START, TEST_ITR_UNSKIPPABLE, TEST_ITR_FORCED_RUN, - TEST_CODE_OWNERS + TEST_CODE_OWNERS, + ITR_CORRELATION_ID } = require('../../dd-trace/src/plugins/util/test') const { COMPONENT } = require('../../dd-trace/src/constants') const id = require('../../dd-trace/src/id') @@ -121,6 +122,7 @@ class JestPlugin extends CiPlugin { config._ddTestSessionId = this.testSessionSpan.context().toTraceId() config._ddTestModuleId = this.testModuleSpan.context().toSpanId() config._ddTestCommand = this.testSessionSpan.context()._tags[TEST_COMMAND] + config._ddItrCorrelationId = this.itrCorrelationId }) }) @@ -129,6 +131,7 @@ class JestPlugin extends CiPlugin { _ddTestSessionId: testSessionId, _ddTestCommand: testCommand, _ddTestModuleId: testModuleId, + _ddItrCorrelationId: itrCorrelationId, _ddForcedToRun, _ddUnskippable, _ddTestCodeCoverageEnabled @@ -155,6 +158,9 @@ class JestPlugin extends CiPlugin { } } } + if (itrCorrelationId) { + testSuiteMetadata[ITR_CORRELATION_ID] = itrCorrelationId + } this.testSuiteSpan = this.tracer.startSpan('jest.test_suite', { childOf: testSessionSpanContext, diff --git a/packages/datadog-plugin-mocha/src/index.js b/packages/datadog-plugin-mocha/src/index.js index bef92ffe18e..b691b858c0a 100644 --- a/packages/datadog-plugin-mocha/src/index.js +++ b/packages/datadog-plugin-mocha/src/index.js @@ -14,7 +14,8 @@ const { TEST_SOURCE_START, TEST_ITR_UNSKIPPABLE, 
TEST_ITR_FORCED_RUN, - TEST_CODE_OWNERS + TEST_CODE_OWNERS, + ITR_CORRELATION_ID } = require('../../dd-trace/src/plugins/util/test') const { COMPONENT } = require('../../dd-trace/src/constants') const { @@ -66,7 +67,12 @@ class MochaPlugin extends CiPlugin { this.telemetry.distribution(TELEMETRY_CODE_COVERAGE_NUM_FILES, {}, relativeCoverageFiles.length) }) - this.addSub('ci:mocha:test-suite:start', ({ testSuite, isUnskippable, isForcedToRun }) => { + this.addSub('ci:mocha:test-suite:start', ({ + testSuite, + isUnskippable, + isForcedToRun, + itrCorrelationId + }) => { const store = storage.getStore() const testSuiteMetadata = getTestSuiteCommonTags( this.command, @@ -95,6 +101,9 @@ class MochaPlugin extends CiPlugin { if (this.itrConfig?.isCodeCoverageEnabled) { this.telemetry.ciVisEvent(TELEMETRY_CODE_COVERAGE_STARTED, 'suite', { library: 'istanbul' }) } + if (itrCorrelationId) { + testSuiteSpan.setTag(ITR_CORRELATION_ID, itrCorrelationId) + } this.enter(testSuiteSpan, store) this._testSuites.set(testSuite, testSuiteSpan) }) diff --git a/packages/dd-trace/src/ci-visibility/intelligent-test-runner/get-skippable-suites.js b/packages/dd-trace/src/ci-visibility/intelligent-test-runner/get-skippable-suites.js index 7ee0091a7cb..5e206473319 100644 --- a/packages/dd-trace/src/ci-visibility/intelligent-test-runner/get-skippable-suites.js +++ b/packages/dd-trace/src/ci-visibility/intelligent-test-runner/get-skippable-suites.js @@ -83,7 +83,8 @@ function getSkippableSuites ({ } else { let skippableSuites = [] try { - skippableSuites = JSON.parse(res) + const parsedResponse = JSON.parse(res) + skippableSuites = parsedResponse .data .filter(({ type }) => type === testLevel) .map(({ attributes: { suite, name } }) => { @@ -92,6 +93,7 @@ function getSkippableSuites ({ } return { suite, name } }) + const { meta: { correlation_id: correlationId } } = parsedResponse incrementCountMetric( testLevel === 'test' ? 
TELEMETRY_ITR_SKIPPABLE_TESTS_RESPONSE_TESTS : TELEMETRY_ITR_SKIPPABLE_TESTS_RESPONSE_SUITES, @@ -100,7 +102,7 @@ function getSkippableSuites ({ ) distributionMetric(TELEMETRY_ITR_SKIPPABLE_TESTS_RESPONSE_BYTES, {}, res.length) log.debug(() => `Number of received skippable ${testLevel}s: ${skippableSuites.length}`) - done(null, skippableSuites) + done(null, skippableSuites, correlationId) } catch (err) { done(err) } diff --git a/packages/dd-trace/src/encode/agentless-ci-visibility.js b/packages/dd-trace/src/encode/agentless-ci-visibility.js index cf9c180ce3a..998fc991f51 100644 --- a/packages/dd-trace/src/encode/agentless-ci-visibility.js +++ b/packages/dd-trace/src/encode/agentless-ci-visibility.js @@ -2,7 +2,8 @@ const { truncateSpan, normalizeSpan } = require('./tags-processors') const { AgentEncoder } = require('./0.4') const { version: ddTraceVersion } = require('../../../../package.json') -const id = require('../../../dd-trace/src/id') +const { ITR_CORRELATION_ID } = require('../../src/plugins/util/test') +const id = require('../../src/id') const { distributionMetric, TELEMETRY_ENDPOINT_PAYLOAD_SERIALIZATION_MS, @@ -45,7 +46,13 @@ class AgentlessCiVisibilityEncoder extends AgentEncoder { } _encodeTestSuite (bytes, content) { - this._encodeMapPrefix(bytes, TEST_SUITE_KEYS_LENGTH) + let keysLength = TEST_SUITE_KEYS_LENGTH + const itrCorrelationId = content.meta[ITR_CORRELATION_ID] + if (itrCorrelationId) { + keysLength++ + } + + this._encodeMapPrefix(bytes, keysLength) this._encodeString(bytes, 'type') this._encodeString(bytes, content.type) @@ -58,6 +65,12 @@ class AgentlessCiVisibilityEncoder extends AgentEncoder { this._encodeString(bytes, 'test_suite_id') this._encodeId(bytes, content.span_id) + if (itrCorrelationId) { + this._encodeString(bytes, ITR_CORRELATION_ID) + this._encodeString(bytes, itrCorrelationId) + delete content.meta[ITR_CORRELATION_ID] + } + this._encodeString(bytes, 'error') this._encodeNumber(bytes, content.error) 
this._encodeString(bytes, 'name') @@ -144,6 +157,10 @@ class AgentlessCiVisibilityEncoder extends AgentEncoder { if (content.meta.test_suite_id) { totalKeysLength = totalKeysLength + 1 } + const itrCorrelationId = content.meta[ITR_CORRELATION_ID] + if (itrCorrelationId) { + totalKeysLength = totalKeysLength + 1 + } this._encodeMapPrefix(bytes, totalKeysLength) if (content.type) { this._encodeString(bytes, 'type') @@ -194,6 +211,12 @@ class AgentlessCiVisibilityEncoder extends AgentEncoder { delete content.meta.test_suite_id } + if (itrCorrelationId) { + this._encodeString(bytes, ITR_CORRELATION_ID) + this._encodeString(bytes, itrCorrelationId) + delete content.meta[ITR_CORRELATION_ID] + } + this._encodeString(bytes, 'meta') this._encodeMap(bytes, content.meta) this._encodeString(bytes, 'metrics') diff --git a/packages/dd-trace/src/plugins/ci_plugin.js b/packages/dd-trace/src/plugins/ci_plugin.js index 5d9ff3af5cf..349358ca9fc 100644 --- a/packages/dd-trace/src/plugins/ci_plugin.js +++ b/packages/dd-trace/src/plugins/ci_plugin.js @@ -15,7 +15,8 @@ const { TEST_MODULE, getTestSuiteCommonTags, TEST_STATUS, - TEST_SKIPPED_BY_ITR + TEST_SKIPPED_BY_ITR, + ITR_CORRELATION_ID } = require('./util/test') const Plugin = require('./plugin') const { COMPONENT } = require('../constants') @@ -53,11 +54,13 @@ module.exports = class CiPlugin extends Plugin { if (!this.tracer._exporter || !this.tracer._exporter.getSkippableSuites) { return onDone({ err: new Error('CI Visibility was not initialized correctly') }) } - this.tracer._exporter.getSkippableSuites(this.testConfiguration, (err, skippableSuites) => { + this.tracer._exporter.getSkippableSuites(this.testConfiguration, (err, skippableSuites, itrCorrelationId) => { if (err) { log.error(`Skippable suites could not be fetched. 
${err.message}`) + } else { + this.itrCorrelationId = itrCorrelationId } - onDone({ err, skippableSuites }) + onDone({ err, skippableSuites, itrCorrelationId }) }) }) @@ -95,6 +98,9 @@ module.exports = class CiPlugin extends Plugin { const testCommand = this.testSessionSpan.context()._tags[TEST_COMMAND] skippedSuites.forEach((testSuite) => { const testSuiteMetadata = getTestSuiteCommonTags(testCommand, frameworkVersion, testSuite, this.constructor.id) + if (this.itrCorrelationId) { + testSuiteMetadata[ITR_CORRELATION_ID] = this.itrCorrelationId + } this.tracer.startSpan(`${this.constructor.id}.test_suite`, { childOf: this.testModuleSpan, diff --git a/packages/dd-trace/src/plugins/util/test.js b/packages/dd-trace/src/plugins/util/test.js index 20e35ae416e..7fa6d1b1c0d 100644 --- a/packages/dd-trace/src/plugins/util/test.js +++ b/packages/dd-trace/src/plugins/util/test.js @@ -60,6 +60,7 @@ const TEST_ITR_SKIPPING_COUNT = 'test.itr.tests_skipping.count' const TEST_CODE_COVERAGE_ENABLED = 'test.code_coverage.enabled' const TEST_ITR_UNSKIPPABLE = 'test.itr.unskippable' const TEST_ITR_FORCED_RUN = 'test.itr.forced_run' +const ITR_CORRELATION_ID = 'itr_correlation_id' const TEST_CODE_COVERAGE_LINES_PCT = 'test.code_coverage.lines_pct' @@ -111,6 +112,7 @@ module.exports = { TEST_CODE_COVERAGE_LINES_PCT, TEST_ITR_UNSKIPPABLE, TEST_ITR_FORCED_RUN, + ITR_CORRELATION_ID, addIntelligentTestRunnerSpanTags, getCoveredFilenamesFromCoverage, resetCoverage, diff --git a/packages/dd-trace/test/ci-visibility/exporters/ci-visibility-exporter.spec.js b/packages/dd-trace/test/ci-visibility/exporters/ci-visibility-exporter.spec.js index 54c182dd7c1..7a326d61293 100644 --- a/packages/dd-trace/test/ci-visibility/exporters/ci-visibility-exporter.spec.js +++ b/packages/dd-trace/test/ci-visibility/exporters/ci-visibility-exporter.spec.js @@ -376,6 +376,9 @@ describe('CI Visibility Exporter', () => { const scope = nock(`http://localhost:${port}`) .post('/api/v2/ci/tests/skippable') .reply(200, 
JSON.stringify({ + meta: { + correlation_id: '1234' + }, data: [{ type: 'suite', attributes: { From 0ec0cce86a8991c8b2f383ed7c335a47d7b0be7c Mon Sep 17 00:00:00 2001 From: Attila Szegedi Date: Wed, 24 Jan 2024 12:42:00 +0100 Subject: [PATCH 21/21] v5.1.0 --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index ad079fbfe3a..b784bcf8697 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "dd-trace", - "version": "5.0.0", + "version": "5.1.0", "description": "Datadog APM tracing client for JavaScript", "main": "index.js", "typings": "index.d.ts",